diff options
author | hclam@chromium.org <hclam@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-04-15 22:08:31 +0000 |
---|---|---|
committer | hclam@chromium.org <hclam@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-04-15 22:08:31 +0000 |
commit | 18f393efffc657f86738c26593522dbe3540f50a (patch) | |
tree | 4542ba45b11ee069c45db9577335fd28f8bb69dc /media | |
parent | 8cf181c31aaa93d60f22d7e7e3fed3233f8ab8a5 (diff) | |
download | chromium_src-18f393efffc657f86738c26593522dbe3540f50a.zip chromium_src-18f393efffc657f86738c26593522dbe3540f50a.tar.gz chromium_src-18f393efffc657f86738c26593522dbe3540f50a.tar.bz2 |
Revert of Cast: cast_sender_app transcode a video file using FFmpeg (https://codereview.chromium.org/236563002/)
Reason for revert:
FAILED: C:\b\depot_tools\python276_bin\python.exe gyp-win-tool link-with-manifests environment.x64 True cast_sender_app.exe "C:\b\depot_tools\python276_bin\python.exe gyp-win-tool link-wrapper environment.x64 False link.exe /nologo /OUT:cast_sender_app.exe @cast_sender_app.exe.rsp" 1 mt.exe rc.exe "obj\media\cast\cast_sender_app.cast_sender_app.exe.intermediate.manifest" obj\media\cast\cast_sender_app.cast_sender_app.exe.generated.manifest ..\..\build\win\compatibility.manifest
cast_sender_app.sender.obj : error LNK2019: unresolved external symbol "enum media::VideoFrame::Format __cdecl media::PixelFormatToVideoFormat(enum AVPixelFormat)" (?PixelFormatToVideoFormat@media@@YA?AW4Format@VideoFrame@1@W4AVPixelFormat@@@Z) referenced in function "public: __cdecl media::cast::SendProcess::SendProcess(class scoped_refptr<class base::SingleThreadTaskRunner>,class base::TickClock *,struct media::cast::VideoSenderConfig const &,class scoped_refptr<class media::cast::AudioFrameInput>,class scoped_refptr<class media::cast::VideoFrameInput>)" (??0SendProcess@cast@media@@QEAA@V?$scoped_refptr@VSingleThreadTaskRunner@base@@@@PEAVTickClock@base@@AEBUVideoSenderConfig@12@V?$scoped_refptr@VAudioFrameInput@cast@media@@@@V?$scoped_refptr@VVideoFrameInput@cast@media@@@@@Z)
cast_sender_app.exe : fatal error LNK1120: 1 unresolved externals
Original issue's description:
> Cast: cast_sender_app transcode a video file using FFmpeg
>
> FFmpeg video decoding is now implemented in cast_sender_app. It is used
> to decode video only. We can now give it a file and it performs
> transcoding and sends to a cast receiver.
>
> Fixed the following issues in this change (two bugs and one cleanup):
> 1. Vp8Encoder was using an incorrect timestamp. This affects bitrate
> control.
> 2. VP8 codec was set to drop frames. This caused visual stuttering.
> It is now configured to never drop frames to match cast mirroring
> using WebRTC.
> 3. Minor clean up in sender.cc to use switches for input.
>
> Committed: https://src.chromium.org/viewvc/chrome?view=rev&revision=264008
TBR=hubbe@chromium.org,miu@chromium.org
NOTREECHECKS=true
NOTRY=true
Review URL: https://codereview.chromium.org/239593003
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@264012 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'media')
-rw-r--r-- | media/cast/cast.gyp | 1 | ||||
-rw-r--r-- | media/cast/test/receiver.cc | 2 | ||||
-rw-r--r-- | media/cast/test/sender.cc | 603 | ||||
-rw-r--r-- | media/cast/video_sender/codecs/vp8/vp8_encoder.cc | 28 | ||||
-rw-r--r-- | media/cast/video_sender/codecs/vp8/vp8_encoder.h | 4 |
5 files changed, 271 insertions, 367 deletions
diff --git a/media/cast/cast.gyp b/media/cast/cast.gyp index f0a77f2..401dbea 100644 --- a/media/cast/cast.gyp +++ b/media/cast/cast.gyp @@ -102,7 +102,6 @@ '<(DEPTH)/media/cast/cast_sender.gyp:*', '<(DEPTH)/media/media.gyp:media', '<(DEPTH)/testing/gtest.gyp:gtest', - '<(DEPTH)/third_party/ffmpeg/ffmpeg.gyp:ffmpeg', '<(DEPTH)/third_party/opus/opus.gyp:opus', '<(DEPTH)/media/cast/transport/cast_transport.gyp:cast_transport', '<(DEPTH)/media/cast/test/utility/utility.gyp:cast_test_utility', diff --git a/media/cast/test/receiver.cc b/media/cast/test/receiver.cc index 3f5f4de..5aaf643 100644 --- a/media/cast/test/receiver.cc +++ b/media/cast/test/receiver.cc @@ -128,7 +128,6 @@ AudioReceiverConfig GetAudioReceiverConfig() { AudioReceiverConfig audio_config = GetDefaultAudioReceiverConfig(); GetSsrcs(&audio_config); GetPayloadtype(&audio_config); - audio_config.rtp_max_delay_ms = 300; return audio_config; } @@ -144,7 +143,6 @@ VideoReceiverConfig GetVideoReceiverConfig() { VideoReceiverConfig video_config = GetDefaultVideoReceiverConfig(); GetSsrcs(&video_config); GetPayloadtype(&video_config); - video_config.rtp_max_delay_ms = 300; return video_config; } diff --git a/media/cast/test/sender.cc b/media/cast/test/sender.cc index 6793437..fa2120e 100644 --- a/media/cast/test/sender.cc +++ b/media/cast/test/sender.cc @@ -6,21 +6,14 @@ // or read from a file. 
#include "base/at_exit.h" -#include "base/base_paths.h" #include "base/command_line.h" #include "base/file_util.h" -#include "base/files/file_path.h" -#include "base/files/memory_mapped_file.h" #include "base/files/scoped_file.h" #include "base/logging.h" #include "base/memory/scoped_ptr.h" -#include "base/path_service.h" -#include "base/strings/string_number_conversions.h" #include "base/threading/thread.h" #include "base/time/default_tick_clock.h" -#include "media/base/media.h" #include "media/base/video_frame.h" -#include "media/base/video_util.h" #include "media/cast/cast_config.h" #include "media/cast/cast_environment.h" #include "media/cast/cast_sender.h" @@ -35,34 +28,135 @@ #include "media/cast/transport/cast_transport_defines.h" #include "media/cast/transport/cast_transport_sender.h" #include "media/cast/transport/transport/udp_transport.h" -#include "media/ffmpeg/ffmpeg_common.h" -#include "media/filters/ffmpeg_demuxer.h" -#include "media/filters/ffmpeg_glue.h" -#include "media/filters/in_memory_url_protocol.h" #include "ui/gfx/size.h" +namespace media { +namespace cast { +// Settings chosen to match default receiver settings. +#define DEFAULT_RECEIVER_PORT "2344" +#define DEFAULT_RECEIVER_IP "127.0.0.1" +#define DEFAULT_READ_FROM_FILE "0" +#define DEFAULT_AUDIO_SENDER_SSRC "1" +#define DEFAULT_AUDIO_RECEIVER_SSRC "2" +#define DEFAULT_AUDIO_PAYLOAD_TYPE "127" +#define DEFAULT_VIDEO_SENDER_SSRC "11" +#define DEFAULT_VIDEO_RECEIVER_SSRC "12" +#define DEFAULT_VIDEO_PAYLOAD_TYPE "96" +#define DEFAULT_VIDEO_CODEC_WIDTH "1280" +#define DEFAULT_VIDEO_CODEC_HEIGHT "720" +#define DEFAULT_VIDEO_CODEC_BITRATE "2000" +#define DEFAULT_VIDEO_CODEC_MAX_BITRATE "4000" +#define DEFAULT_VIDEO_CODEC_MIN_BITRATE "1000" + +#define DEFAULT_LOGGING_DURATION "10" +#define DEFAULT_COMPRESS_LOGS "1" + namespace { static const int kAudioChannels = 2; static const int kAudioSamplingFrequency = 48000; static const int kSoundFrequency = 1234; // Frequency of sinusoid wave. 
+// The tests are commonly implemented with |kFrameTimerMs| RunTask function; +// a normal video is 30 fps hence the 33 ms between frames. static const float kSoundVolume = 0.5f; -static const int kAudioFrameMs = 10; // Each audio frame is exactly 10ms. +static const int kFrameTimerMs = 33; // The max allowed size of serialized log. const int kMaxSerializedLogBytes = 10 * 1000 * 1000; +} // namespace -const char kSwitchAddress[] = "address"; -const char kSwitchPort[] = "port"; -const char kSwitchSourceFile[] = "source-file"; +void GetPort(int* port) { + test::InputBuilder input( + "Enter receiver port.", DEFAULT_RECEIVER_PORT, 1, INT_MAX); + *port = input.GetIntInput(); +} -} // namespace +std::string GetIpAddress(const std::string display_text) { + test::InputBuilder input(display_text, DEFAULT_RECEIVER_IP, INT_MIN, INT_MAX); + std::string ip_address = input.GetStringInput(); + // Verify correct form: + while (std::count(ip_address.begin(), ip_address.end(), '.') != 3) { + ip_address = input.GetStringInput(); + } + return ip_address; +} -namespace media { -namespace cast { +int GetLoggingDuration() { + test::InputBuilder input( + "Choose logging duration (seconds), 0 for no logging.", + DEFAULT_LOGGING_DURATION, + 0, + INT_MAX); + return input.GetIntInput(); +} + +std::string GetVideoLogFileDestination(bool compress) { + test::InputBuilder input( + "Enter video events log file destination.", + compress ? "./video_events.log.gz" : "./video_events.log", + INT_MIN, + INT_MAX); + return input.GetStringInput(); +} + +std::string GetAudioLogFileDestination(bool compress) { + test::InputBuilder input( + "Enter audio events log file destination.", + compress ? 
"./audio_events.log.gz" : "./audio_events.log", + INT_MIN, + INT_MAX); + return input.GetStringInput(); +} + +bool CompressLogs() { + test::InputBuilder input( + "Enter 1 to enable compression on logs.", DEFAULT_COMPRESS_LOGS, 0, 1); + return (1 == input.GetIntInput()); +} + +bool ReadFromFile() { + test::InputBuilder input( + "Enter 1 to read from file.", DEFAULT_READ_FROM_FILE, 0, 1); + return (1 == input.GetIntInput()); +} + +std::string GetVideoFile() { + test::InputBuilder input( + "Enter file and path to raw video file.", "", INT_MIN, INT_MAX); + return input.GetStringInput(); +} + +void GetSsrcs(AudioSenderConfig* audio_config) { + test::InputBuilder input_tx( + "Choose audio sender SSRC.", DEFAULT_AUDIO_SENDER_SSRC, 1, INT_MAX); + audio_config->sender_ssrc = input_tx.GetIntInput(); + + test::InputBuilder input_rx( + "Choose audio receiver SSRC.", DEFAULT_AUDIO_RECEIVER_SSRC, 1, INT_MAX); + audio_config->incoming_feedback_ssrc = input_rx.GetIntInput(); +} + +void GetSsrcs(VideoSenderConfig* video_config) { + test::InputBuilder input_tx( + "Choose video sender SSRC.", DEFAULT_VIDEO_SENDER_SSRC, 1, INT_MAX); + video_config->sender_ssrc = input_tx.GetIntInput(); + + test::InputBuilder input_rx( + "Choose video receiver SSRC.", DEFAULT_VIDEO_RECEIVER_SSRC, 1, INT_MAX); + video_config->incoming_feedback_ssrc = input_rx.GetIntInput(); +} + +void GetPayloadtype(AudioSenderConfig* audio_config) { + test::InputBuilder input( + "Choose audio sender payload type.", DEFAULT_AUDIO_PAYLOAD_TYPE, 96, 127); + audio_config->rtp_config.payload_type = input.GetIntInput(); +} AudioSenderConfig GetAudioSenderConfig() { AudioSenderConfig audio_config; + GetSsrcs(&audio_config); + GetPayloadtype(&audio_config); + audio_config.rtcp_c_name = "audio_sender@a.b.c.d"; VLOG(0) << "Using OPUS 48Khz stereo at 64kbit/s"; @@ -71,50 +165,60 @@ AudioSenderConfig GetAudioSenderConfig() { audio_config.channels = kAudioChannels; audio_config.bitrate = 64000; audio_config.codec = 
transport::kOpus; - audio_config.sender_ssrc = 1; - audio_config.incoming_feedback_ssrc = 2; - audio_config.rtp_config.payload_type = 127; - audio_config.rtp_config.max_delay_ms = 300; return audio_config; } +void GetPayloadtype(VideoSenderConfig* video_config) { + test::InputBuilder input( + "Choose video sender payload type.", DEFAULT_VIDEO_PAYLOAD_TYPE, 96, 127); + video_config->rtp_config.payload_type = input.GetIntInput(); +} + +void GetVideoCodecSize(VideoSenderConfig* video_config) { + test::InputBuilder input_width( + "Choose video width.", DEFAULT_VIDEO_CODEC_WIDTH, 144, 1920); + video_config->width = input_width.GetIntInput(); + + test::InputBuilder input_height( + "Choose video height.", DEFAULT_VIDEO_CODEC_HEIGHT, 176, 1080); + video_config->height = input_height.GetIntInput(); +} + +void GetVideoBitrates(VideoSenderConfig* video_config) { + test::InputBuilder input_start_br( + "Choose start bitrate[kbps].", DEFAULT_VIDEO_CODEC_BITRATE, 0, INT_MAX); + video_config->start_bitrate = input_start_br.GetIntInput() * 1000; + + test::InputBuilder input_max_br( + "Choose max bitrate[kbps].", DEFAULT_VIDEO_CODEC_MAX_BITRATE, 0, INT_MAX); + video_config->max_bitrate = input_max_br.GetIntInput() * 1000; + + test::InputBuilder input_min_br( + "Choose min bitrate[kbps].", DEFAULT_VIDEO_CODEC_MIN_BITRATE, 0, INT_MAX); + video_config->min_bitrate = input_min_br.GetIntInput() * 1000; +} + VideoSenderConfig GetVideoSenderConfig() { VideoSenderConfig video_config; + GetSsrcs(&video_config); + GetPayloadtype(&video_config); + GetVideoCodecSize(&video_config); + GetVideoBitrates(&video_config); + video_config.rtcp_c_name = "video_sender@a.b.c.d"; + video_config.use_external_encoder = false; VLOG(0) << "Using VP8 at 30 fps"; - - // Resolution. - video_config.width = 1280; - video_config.height = 720; + video_config.min_qp = 4; + video_config.max_qp = 40; video_config.max_frame_rate = 30; - - // Bitrates. 
- video_config.start_bitrate = 50000; - video_config.max_bitrate = 2500000; - video_config.min_bitrate = 100000; - - // Codec. video_config.codec = transport::kVp8; video_config.max_number_of_video_buffers_used = 1; - video_config.number_of_encode_threads = 2; - - // Quality options. - video_config.min_qp = 4; - video_config.max_qp = 40; - - // SSRCs and payload type. Don't change them. - video_config.sender_ssrc = 11; - video_config.incoming_feedback_ssrc = 12; - video_config.rtp_config.payload_type = 96; - video_config.rtp_config.max_delay_ms = 300; return video_config; } -void AVFreeFrame(AVFrame* frame) { avcodec_free_frame(&frame); } - class SendProcess { public: SendProcess(scoped_refptr<base::SingleThreadTaskRunner> thread_proxy, @@ -124,287 +228,108 @@ class SendProcess { scoped_refptr<VideoFrameInput> video_frame_input) : test_app_thread_proxy_(thread_proxy), video_config_(video_config), + audio_diff_(kFrameTimerMs), audio_frame_input_(audio_frame_input), video_frame_input_(video_frame_input), synthetic_count_(0), clock_(clock), - audio_frame_count_(0), - video_frame_count_(0), - weak_factory_(this), - av_format_context_(NULL), - audio_stream_index_(-1), - video_stream_index_(-1) { + start_time_(), + send_time_(), + weak_factory_(this) { audio_bus_factory_.reset(new TestAudioBusFactory(kAudioChannels, kAudioSamplingFrequency, kSoundFrequency, kSoundVolume)); - - // Load source file and prepare FFmpeg demuxer. 
- base::FilePath source_path = - CommandLine::ForCurrentProcess()->GetSwitchValuePath(kSwitchSourceFile); - if (source_path.empty()) - return; - - LOG(INFO) << "Source: " << source_path.value(); - if (!file_data_.Initialize(source_path)) { - LOG(ERROR) << "Cannot load file."; - return; - } - protocol_.reset( - new InMemoryUrlProtocol(file_data_.data(), file_data_.length(), false)); - glue_.reset(new FFmpegGlue(protocol_.get())); - - if (!glue_->OpenContext()) { - LOG(ERROR) << "Cannot open file."; - return; - } - - // AVFormatContext is owned by the glue. - av_format_context_ = glue_->format_context(); - if (avformat_find_stream_info(av_format_context_, NULL) < 0) { - LOG(ERROR) << "Cannot find stream information."; - return; - } - - // Prepare FFmpeg decoders. - for (unsigned int i = 0; i < av_format_context_->nb_streams; ++i) { - AVStream* av_stream = av_format_context_->streams[i]; - AVCodecContext* av_codec_context = av_stream->codec; - AVCodec* av_codec = avcodec_find_decoder(av_codec_context->codec_id); - - if (!av_codec) { - LOG(ERROR) << "Cannot find decoder for the codec: " - << av_codec_context->codec_id; - continue; - } - - // Number of threads for decoding. 
- av_codec_context->thread_count = 2; - av_codec_context->error_concealment = FF_EC_GUESS_MVS | FF_EC_DEBLOCK; - - if (avcodec_open2(av_codec_context, av_codec, NULL) < 0) { - LOG(ERROR) << "Cannot open AVCodecContext for the codec: " - << av_codec_context->codec_id; - return; - } - - if (av_codec->type == AVMEDIA_TYPE_AUDIO) { - if (audio_stream_index_ != -1) { - LOG(WARNING) << "Found multiple audio streams."; - } - audio_stream_index_ = static_cast<int>(i); - LOG(INFO) << "Source file has audio."; - } else if (av_codec->type == AVMEDIA_TYPE_VIDEO) { - VideoFrame::Format format = - PixelFormatToVideoFormat(av_codec_context->pix_fmt); - if (format != VideoFrame::YV12) { - LOG(ERROR) << "Cannot handle non YV12 video format: " << format; - return; - } - if (video_stream_index_ != -1) { - LOG(WARNING) << "Found multiple video streams."; - } - video_stream_index_ = static_cast<int>(i); - LOG(INFO) << "Source file has video."; - } else { - LOG(ERROR) << "Unknown stream type; ignore."; + if (ReadFromFile()) { + std::string video_file_name = GetVideoFile(); + video_file_ = fopen(video_file_name.c_str(), "r"); + if (video_file_ == NULL) { + VLOG(1) << "Failed to open file"; + exit(-1); } + } else { + video_file_ = NULL; } - - Rewind(); } ~SendProcess() { + if (video_file_) + fclose(video_file_); } - void SendNextFrame() { + void SendFrame() { + // Make sure that we don't drift. + int num_10ms_blocks = audio_diff_ / 10; + // Avoid drift. 
+ audio_diff_ += kFrameTimerMs - num_10ms_blocks * 10; + + audio_frame_input_->InsertAudio( + audio_bus_factory_->NextAudioBus( + base::TimeDelta::FromMilliseconds(10) * num_10ms_blocks), + clock_->NowTicks()); + gfx::Size size(video_config_.width, video_config_.height); - scoped_refptr<VideoFrame> video_frame = - VideoFrame::CreateBlackFrame(size); - if (is_transcoding_video()) { - scoped_refptr<VideoFrame> decoded_frame = DecodeOneVideoFrame(); - media::CopyPlane(VideoFrame::kYPlane, - decoded_frame->data(VideoFrame::kYPlane), - decoded_frame->stride(VideoFrame::kYPlane), - decoded_frame->rows(VideoFrame::kYPlane), - video_frame); - media::CopyPlane(VideoFrame::kUPlane, - decoded_frame->data(VideoFrame::kUPlane), - decoded_frame->stride(VideoFrame::kUPlane), - decoded_frame->rows(VideoFrame::kUPlane), - video_frame); - media::CopyPlane(VideoFrame::kVPlane, - decoded_frame->data(VideoFrame::kVPlane), - decoded_frame->stride(VideoFrame::kVPlane), - decoded_frame->rows(VideoFrame::kVPlane), - video_frame); + // TODO(mikhal): Use the provided timestamp. + if (start_time_.is_null()) + start_time_ = clock_->NowTicks(); + base::TimeDelta time_diff = clock_->NowTicks() - start_time_; + scoped_refptr<media::VideoFrame> video_frame = + media::VideoFrame::CreateFrame( + VideoFrame::I420, size, gfx::Rect(size), size, time_diff); + if (video_file_) { + if (!PopulateVideoFrameFromFile(video_frame, video_file_)) + return; } else { PopulateVideoFrame(video_frame, synthetic_count_); + ++synthetic_count_; } - ++synthetic_count_; + // Time the sending of the frame to match the set frame rate. + // Sleep if that time has yet to elapse. base::TimeTicks now = clock_->NowTicks(); - if (start_time_.is_null()) - start_time_ = now; - - // Note: We don't care about the framerate of the original file. - // We want the receiver to play it at the rate we desire, i.e. 30FPS. - - // After submitting the current frame update video time for the next - // frame. 
- base::TimeDelta video_time = VideoFrameTime(video_frame_count_); - video_frame->set_timestamp(video_time); - video_frame_input_->InsertRawVideoFrame(video_frame, - start_time_ + video_time); - video_time = VideoFrameTime(++video_frame_count_); - - base::TimeDelta audio_time = AudioFrameTime(audio_frame_count_); - - // Send just enough audio data to match next video frame's time. - while (audio_time < video_time) { - audio_frame_input_->InsertAudio( - audio_bus_factory_->NextAudioBus( - base::TimeDelta::FromMilliseconds(kAudioFrameMs)), - start_time_ + audio_time); - audio_time = AudioFrameTime(++audio_frame_count_); - } - - // This is the time since the stream started. - const base::TimeDelta elapsed_time = now - start_time_; - - // Handle the case when decoding or frame generation cannot keep up. - // Move the time ahead to match the next frame. - while (video_time < elapsed_time) { - LOG(WARNING) << "Skipping one frame."; - video_time = VideoFrameTime(++video_frame_count_); - } - - test_app_thread_proxy_->PostDelayedTask( - FROM_HERE, - base::Bind(&SendProcess::SendNextFrame, weak_factory_.GetWeakPtr()), - video_time - elapsed_time); - } - - private: - bool is_transcoding_audio() { return audio_stream_index_ >= 0; } - bool is_transcoding_video() { return video_stream_index_ >= 0; } - - // Helper methods to compute timestamps for the frame number specified. - base::TimeDelta VideoFrameTime(int frame_number) { - return frame_number * base::TimeDelta::FromSeconds(1) / - video_config_.max_frame_rate; - } - - base::TimeDelta AudioFrameTime(int frame_number) { - return frame_number * base::TimeDelta::FromMilliseconds(kAudioFrameMs); - } - - // Go to the beginning of the stream. - void Rewind() { - CHECK(av_seek_frame(av_format_context_, -1, 0, AVSEEK_FLAG_BACKWARD) >= 0) - << "Failed to rewind to the beginning."; - } - - // Call FFmpeg to fetch one packet. 
- ScopedAVPacket DemuxOnePacket(bool* audio) { - ScopedAVPacket packet(new AVPacket()); - if (av_read_frame(av_format_context_, packet.get()) < 0) { - LOG(ERROR) << "Failed to read one AVPacket"; - packet.reset(); - return packet.Pass(); - } - - int stream_index = static_cast<int>(packet->stream_index); - if (stream_index == audio_stream_index_) { - *audio = true; - } else if (stream_index == video_stream_index_) { - *audio = false; + base::TimeDelta video_frame_time = + base::TimeDelta::FromMilliseconds(kFrameTimerMs); + base::TimeDelta elapsed_time = now - send_time_; + if (elapsed_time < video_frame_time) { + VLOG(1) << "Wait" << (video_frame_time - elapsed_time).InMilliseconds(); + test_app_thread_proxy_->PostDelayedTask( + FROM_HERE, + base::Bind(&SendProcess::SendVideoFrameOnTime, + weak_factory_.GetWeakPtr(), + video_frame), + video_frame_time - elapsed_time); } else { - // Ignore unknown packet. - LOG(INFO) << "Unknown packet."; - packet.reset(); - } - return packet.Pass(); - } - - scoped_refptr<VideoFrame> DecodeOneVideoFrame() { - // Read the stream until one video frame can be decoded. - while (true) { - bool audio = false; - ScopedAVPacket packet = DemuxOnePacket(&audio); - if (!packet) { - LOG(INFO) << "End of stream; Rewind."; - Rewind(); - continue; - } - - if (audio) { - // TODO(hclam): Decode audio packets. - continue; - } - - // Video. 
- int got_picture = 0; - AVFrame* avframe = av_frame_alloc(); - avcodec_get_frame_defaults(avframe); - av_video_context()->reordered_opaque = packet->pts; - CHECK(avcodec_decode_video2( - av_video_context(), avframe, &got_picture, packet.get()) >= 0) - << "Video decode error."; - if (!got_picture) { - continue; - } - - gfx::Size size(av_video_context()->width, av_video_context()->height); - return VideoFrame::WrapExternalYuvData(media::VideoFrame::YV12, - size, - gfx::Rect(size), - size, - avframe->linesize[0], - avframe->linesize[1], - avframe->linesize[2], - avframe->data[0], - avframe->data[1], - avframe->data[2], - base::TimeDelta(), - base::Bind(&AVFreeFrame, avframe)); + test_app_thread_proxy_->PostTask( + FROM_HERE, + base::Bind(&SendProcess::SendVideoFrameOnTime, + weak_factory_.GetWeakPtr(), + video_frame)); } } void SendVideoFrameOnTime(scoped_refptr<media::VideoFrame> video_frame) { + send_time_ = clock_->NowTicks(); + video_frame_input_->InsertRawVideoFrame(video_frame, send_time_); + test_app_thread_proxy_->PostTask( + FROM_HERE, base::Bind(&SendProcess::SendFrame, base::Unretained(this))); } - AVStream* av_audio_stream() { - return av_format_context_->streams[audio_stream_index_]; - } - AVStream* av_video_stream() { - return av_format_context_->streams[video_stream_index_]; - } - AVCodecContext* av_audio_context() { return av_audio_stream()->codec; } - AVCodecContext* av_video_context() { return av_video_stream()->codec; } - + private: scoped_refptr<base::SingleThreadTaskRunner> test_app_thread_proxy_; const VideoSenderConfig video_config_; + int audio_diff_; const scoped_refptr<AudioFrameInput> audio_frame_input_; const scoped_refptr<VideoFrameInput> video_frame_input_; + FILE* video_file_; uint8 synthetic_count_; base::TickClock* const clock_; // Not owned by this class. base::TimeTicks start_time_; - int audio_frame_count_; // Each audio frame is exactly 10ms. 
- int video_frame_count_; + base::TimeTicks send_time_; scoped_ptr<TestAudioBusFactory> audio_bus_factory_; // NOTE: Weak pointers must be invalidated before all other member variables. base::WeakPtrFactory<SendProcess> weak_factory_; - base::MemoryMappedFile file_data_; - scoped_ptr<InMemoryUrlProtocol> protocol_; - scoped_ptr<FFmpegGlue> glue_; - AVFormatContext* av_format_context_; - - int audio_stream_index_; - int video_stream_index_; - DISALLOW_COPY_AND_ASSIGN(SendProcess); }; @@ -448,17 +373,18 @@ net::IPEndPoint CreateUDPAddress(std::string ip_str, int port) { void DumpLoggingData(const media::cast::proto::LogMetadata& log_metadata, const media::cast::FrameEventMap& frame_events, const media::cast::PacketEventMap& packet_events, + bool compress, base::ScopedFILE log_file) { VLOG(0) << "Frame map size: " << frame_events.size(); VLOG(0) << "Packet map size: " << packet_events.size(); - scoped_ptr<char[]> event_log(new char[kMaxSerializedLogBytes]); + scoped_ptr<char[]> event_log(new char[media::cast::kMaxSerializedLogBytes]); int event_log_bytes; if (!media::cast::SerializeEvents(log_metadata, frame_events, packet_events, - true, - kMaxSerializedLogBytes, + compress, + media::cast::kMaxSerializedLogBytes, event_log.get(), &event_log_bytes)) { VLOG(0) << "Failed to serialize events."; @@ -477,7 +403,8 @@ void WriteLogsToFileAndStopSubscribing( scoped_ptr<media::cast::EncodingEventSubscriber> video_event_subscriber, scoped_ptr<media::cast::EncodingEventSubscriber> audio_event_subscriber, base::ScopedFILE video_log_file, - base::ScopedFILE audio_log_file) { + base::ScopedFILE audio_log_file, + bool compress) { cast_environment->Logging()->RemoveRawEventSubscriber( video_event_subscriber.get()); cast_environment->Logging()->RemoveRawEventSubscriber( @@ -493,6 +420,7 @@ void WriteLogsToFileAndStopSubscribing( DumpLoggingData(log_metadata, frame_events, packet_events, + compress, video_log_file.Pass()); VLOG(0) << "Dumping logging data for audio stream."; @@ -502,6 
+430,7 @@ void WriteLogsToFileAndStopSubscribing( DumpLoggingData(log_metadata, frame_events, packet_events, + compress, audio_log_file.Pass()); } @@ -511,15 +440,6 @@ int main(int argc, char** argv) { base::AtExitManager at_exit; CommandLine::Init(argc, argv); InitLogging(logging::LoggingSettings()); - - // Load the media module for FFmpeg decoding. - base::FilePath path; - PathService::Get(base::DIR_MODULE, &path); - if (!media::InitializeMediaLibrary(path)) { - LOG(ERROR) << "Could not initialize media library."; - return 1; - } - base::Thread test_thread("Cast sender test app thread"); base::Thread audio_thread("Cast audio encoder thread"); base::Thread video_thread("Cast video encoder thread"); @@ -529,13 +449,11 @@ int main(int argc, char** argv) { base::MessageLoopForIO io_message_loop; - // Default parameters. - CommandLine* cmd = CommandLine::ForCurrentProcess(); - std::string remote_ip_address = cmd->GetSwitchValueASCII(kSwitchAddress); - if (remote_ip_address.empty()) - remote_ip_address = "127.0.0.1"; - int remote_port = 2344; - base::StringToInt(cmd->GetSwitchValueASCII(kSwitchPort), &remote_port); + int remote_port; + media::cast::GetPort(&remote_port); + + std::string remote_ip_address = + media::cast::GetIpAddress("Enter receiver IP."); media::cast::AudioSenderConfig audio_config = media::cast::GetAudioSenderConfig(); @@ -549,6 +467,7 @@ int main(int argc, char** argv) { net::IPEndPoint remote_endpoint = CreateUDPAddress(remote_ip_address, remote_port); transport_audio_config.base.ssrc = audio_config.sender_ssrc; + VLOG(0) << "Audio ssrc: " << transport_audio_config.base.ssrc; transport_audio_config.base.rtp_config = audio_config.rtp_config; transport_video_config.base.ssrc = video_config.sender_ssrc; transport_video_config.base.rtp_config = video_config.rtp_config; @@ -600,47 +519,51 @@ int main(int argc, char** argv) { video_frame_input)); // Set up event subscribers. 
+ int logging_duration = media::cast::GetLoggingDuration(); scoped_ptr<media::cast::EncodingEventSubscriber> video_event_subscriber; scoped_ptr<media::cast::EncodingEventSubscriber> audio_event_subscriber; - std::string video_log_file_name("/tmp/video_events.log.gz"); - std::string audio_log_file_name("/tmp/audio_events.log.gz"); - LOG(INFO) << "Logging audio events to: " << audio_log_file_name; - LOG(INFO) << "Logging video events to: " << video_log_file_name; - video_event_subscriber.reset(new media::cast::EncodingEventSubscriber( - media::cast::VIDEO_EVENT, 10000)); - audio_event_subscriber.reset(new media::cast::EncodingEventSubscriber( - media::cast::AUDIO_EVENT, 10000)); - cast_environment->Logging()->AddRawEventSubscriber( - video_event_subscriber.get()); - cast_environment->Logging()->AddRawEventSubscriber( - audio_event_subscriber.get()); + if (logging_duration > 0) { + bool compress = media::cast::CompressLogs(); + std::string video_log_file_name( + media::cast::GetVideoLogFileDestination(compress)); + std::string audio_log_file_name( + media::cast::GetAudioLogFileDestination(compress)); + video_event_subscriber.reset(new media::cast::EncodingEventSubscriber( + media::cast::VIDEO_EVENT, 10000)); + audio_event_subscriber.reset(new media::cast::EncodingEventSubscriber( + media::cast::AUDIO_EVENT, 10000)); + cast_environment->Logging()->AddRawEventSubscriber( + video_event_subscriber.get()); + cast_environment->Logging()->AddRawEventSubscriber( + audio_event_subscriber.get()); + + base::ScopedFILE video_log_file(fopen(video_log_file_name.c_str(), "w")); + if (!video_log_file) { + VLOG(1) << "Failed to open video log file for writing."; + exit(-1); + } - base::ScopedFILE video_log_file(fopen(video_log_file_name.c_str(), "w")); - if (!video_log_file) { - VLOG(1) << "Failed to open video log file for writing."; - exit(-1); - } + base::ScopedFILE audio_log_file(fopen(audio_log_file_name.c_str(), "w")); + if (!audio_log_file) { + VLOG(1) << "Failed to open audio 
log file for writing."; + exit(-1); + } - base::ScopedFILE audio_log_file(fopen(audio_log_file_name.c_str(), "w")); - if (!audio_log_file) { - VLOG(1) << "Failed to open audio log file for writing."; - exit(-1); + io_message_loop.message_loop_proxy()->PostDelayedTask( + FROM_HERE, + base::Bind(&WriteLogsToFileAndStopSubscribing, + cast_environment, + base::Passed(&video_event_subscriber), + base::Passed(&audio_event_subscriber), + base::Passed(&video_log_file), + base::Passed(&audio_log_file), + compress), + base::TimeDelta::FromSeconds(logging_duration)); } - const int logging_duration_seconds = 300; - io_message_loop.message_loop_proxy()->PostDelayedTask( - FROM_HERE, - base::Bind(&WriteLogsToFileAndStopSubscribing, - cast_environment, - base::Passed(&video_event_subscriber), - base::Passed(&audio_event_subscriber), - base::Passed(&video_log_file), - base::Passed(&audio_log_file)), - base::TimeDelta::FromSeconds(logging_duration_seconds)); - test_thread.message_loop_proxy()->PostTask( FROM_HERE, - base::Bind(&media::cast::SendProcess::SendNextFrame, + base::Bind(&media::cast::SendProcess::SendFrame, base::Unretained(send_process.get()))); io_message_loop.Run(); diff --git a/media/cast/video_sender/codecs/vp8/vp8_encoder.cc b/media/cast/video_sender/codecs/vp8/vp8_encoder.cc index 38c7dfc..c2f06c5 100644 --- a/media/cast/video_sender/codecs/vp8/vp8_encoder.cc +++ b/media/cast/video_sender/codecs/vp8/vp8_encoder.cc @@ -35,7 +35,7 @@ Vp8Encoder::Vp8Encoder(const VideoSenderConfig& video_config, max_number_of_repeated_buffers_in_a_row_( ComputeMaxNumOfRepeatedBuffes(max_unacked_frames)), key_frame_requested_(true), - first_frame_received_(false), + timestamp_(0), last_encoded_frame_id_(kStartFrameId), number_of_repeated_buffers_(0) { // TODO(pwestin): we need to figure out how to synchronize the acking with the @@ -105,8 +105,8 @@ void Vp8Encoder::InitEncode(int number_of_encode_threads) { config_->g_threads = number_of_encode_threads; // Rate control settings. 
- // Never allow the encoder to drop frame internally. - config_->rc_dropframe_thresh = 0; + // TODO(pwestin): revisit these constants. Currently identical to webrtc. + config_->rc_dropframe_thresh = 30; config_->rc_end_usage = VPX_CBR; config_->g_pass = VPX_RC_ONE_PASS; config_->rc_resize_allowed = 0; @@ -121,6 +121,7 @@ void Vp8Encoder::InitEncode(int number_of_encode_threads) { // set the maximum target size of any key-frame. uint32 rc_max_intra_target = MaxIntraTarget(config_->rc_buf_optimal_sz); vpx_codec_flags_t flags = 0; + // TODO(mikhal): Tune settings. if (vpx_codec_enc_init( encoder_.get(), vpx_codec_vp8_cx(), config_.get(), flags)) { DCHECK(false) << "vpx_codec_enc_init() failed."; @@ -174,27 +175,15 @@ bool Vp8Encoder::Encode(const scoped_refptr<media::VideoFrame>& video_frame, // TODO(miu): This is a semi-hack. We should consider using // |video_frame->timestamp()| instead. uint32 duration = kVideoFrequency / cast_config_.max_frame_rate; - - // Note: Timestamp here is used for bitrate calculation. The absolute value - // is not important. - if (!first_frame_received_) { - first_frame_received_ = true; - first_frame_timestamp_ = video_frame->timestamp(); - } - - vpx_codec_pts_t timestamp = - (video_frame->timestamp() - first_frame_timestamp_).InMicroseconds() * - kVideoFrequency / base::Time::kMicrosecondsPerSecond; - if (vpx_codec_encode(encoder_.get(), raw_image_, - timestamp, + timestamp_, duration, flags, - VPX_DL_REALTIME) != VPX_CODEC_OK) { - LOG(ERROR) << "Failed to encode for once."; + VPX_DL_REALTIME)) { return false; } + timestamp_ += duration; // Get encoded frame. const vpx_codec_cx_pkt_t* pkt = NULL; @@ -300,9 +289,6 @@ uint32 Vp8Encoder::GetLatestFrameIdToReference() { } Vp8Encoder::Vp8Buffers Vp8Encoder::GetNextBufferToUpdate() { - if (!use_multiple_video_buffers_) - return kNoBuffer; - // Update at most one buffer, except for key-frames. 
Vp8Buffers buffer_to_update = kNoBuffer; diff --git a/media/cast/video_sender/codecs/vp8/vp8_encoder.h b/media/cast/video_sender/codecs/vp8/vp8_encoder.h index 396746d..7cc3754 100644 --- a/media/cast/video_sender/codecs/vp8/vp8_encoder.h +++ b/media/cast/video_sender/codecs/vp8/vp8_encoder.h @@ -8,7 +8,6 @@ #include "base/basictypes.h" #include "base/memory/scoped_ptr.h" #include "base/threading/thread_checker.h" -#include "base/time/time.h" #include "media/cast/cast_config.h" #include "third_party/libvpx/source/libvpx/vpx/vpx_encoder.h" @@ -82,8 +81,7 @@ class Vp8Encoder { vpx_image_t* raw_image_; bool key_frame_requested_; - bool first_frame_received_; - base::TimeDelta first_frame_timestamp_; + int64 timestamp_; uint32 last_encoded_frame_id_; uint32 used_buffers_frame_id_[kNumberOfVp8VideoBuffers]; bool acked_frame_buffers_[kNumberOfVp8VideoBuffers]; |