author     miu <miu@chromium.org>                2015-02-02 23:05:15 -0800
committer  Commit bot <commit-bot@chromium.org>  2015-02-03 07:06:07 +0000
commit     9a739084bdf85aba9716205e2185ffb0a278d187
tree       b3f27e3b8d09ad1b32846fa8da91bb0693afd3a2 /media/cast/test
parent     57831c0d68b35843942e19e30d0c9c86d2005099
RELAND: [Cast] Software encoder support for varying video frame sizes.
Adds support for automatic on-line reconfiguration of the VP8 software
encoder whenever the video frame size changes. libvpx supports
shrinking frame sizes without tearing down the encoder, but growing
frame sizes still requires a full tear-down and re-creation.
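(For orientation, here is a minimal libvpx-level sketch of the reconfigure-versus-recreate decision described above. It is illustrative only, not the VP8 encoder code touched by this change: the vpx_codec_* calls are the real libvpx API, but the helper function, its parameters, and the omitted error handling and remaining vpx_codec_enc_cfg_t setup are assumptions.)

#include "vpx/vp8cx.h"
#include "vpx/vpx_encoder.h"

// Sketch: shrink in place with a config update; grow by rebuilding the codec.
void ReconfigureForFrameSize(vpx_codec_ctx_t* ctx,
                             vpx_codec_enc_cfg_t* cfg,
                             unsigned int width,
                             unsigned int height) {
  const bool grew = width > cfg->g_w || height > cfg->g_h;
  cfg->g_w = width;
  cfg->g_h = height;
  if (grew) {
    // Growing the frame size requires a full tear-down and re-creation.
    vpx_codec_destroy(ctx);
    vpx_codec_enc_init(ctx, vpx_codec_vp8_cx(), cfg, 0 /* flags */);
  } else {
    // Shrinking can be applied to the live encoder instance.
    vpx_codec_enc_config_set(ctx, cfg);
  }
}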
Most of this change involves adding extensive unit testing to confirm
media/cast is now capable of handling variable frame sizes end-to-end.
In addition, the cast_sender_app and cast_receiver_app diagnostic tools
have been updated.
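(To illustrate the shape of such an end-to-end check, a schematic assertion is sketched below. LoopbackCastPipeline, SendAndAwaitDecode(), and last_decoded_frame_size() are hypothetical stand-ins for test fixtures, not classes added by this change; the actual coverage lives in media/cast's unit and end-to-end tests.)

#include "base/macros.h"
#include "media/base/video_frame.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/gfx/geometry/size.h"

// Schematic only: the pipeline fixture and its helpers are hypothetical.
TEST(VaryingFrameSizeSketch, SizesSurviveSendAndReceive) {
  LoopbackCastPipeline pipeline;  // Hypothetical sender-to-receiver loopback.
  const gfx::Size kSizes[] = {
      gfx::Size(854, 480), gfx::Size(1280, 720), gfx::Size(320, 180)};
  for (size_t i = 0; i < arraysize(kSizes); ++i) {
    const scoped_refptr<media::VideoFrame> frame =
        media::VideoFrame::CreateBlackFrame(kSizes[i]);
    pipeline.SendAndAwaitDecode(frame);  // Hypothetical helper.
    EXPECT_EQ(kSizes[i].ToString(),
              pipeline.last_decoded_frame_size().ToString());
  }
}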
An upcoming change will eliminate the width and height properties from
media::cast::VideoSenderConfig, both of which are now ignored.
BUG=451277
TBR=hubbe@chromium.org
Review URL: https://codereview.chromium.org/892383002
Cr-Commit-Position: refs/heads/master@{#314281}
Diffstat (limited to 'media/cast/test')
-rw-r--r--  media/cast/test/fake_media_source.cc        92
-rw-r--r--  media/cast/test/fake_media_source.h           9
-rw-r--r--  media/cast/test/linux_output_window.cc       39
-rw-r--r--  media/cast/test/sender.cc                   107
-rw-r--r--  media/cast/test/simulator.cc                 12
-rw-r--r--  media/cast/test/utility/video_utility.cc     18
-rw-r--r--  media/cast/test/utility/video_utility.h       3
7 files changed, 147 insertions, 133 deletions
diff --git a/media/cast/test/fake_media_source.cc b/media/cast/test/fake_media_source.cc
index 2742c05..f17a053 100644
--- a/media/cast/test/fake_media_source.cc
+++ b/media/cast/test/fake_media_source.cc
@@ -7,6 +7,7 @@
 #include "base/files/memory_mapped_file.h"
 #include "base/files/scoped_file.h"
 #include "base/logging.h"
+#include "base/rand_util.h"
 #include "base/strings/string_number_conversions.h"
 #include "media/audio/audio_parameters.h"
 #include "media/base/audio_buffer.h"
@@ -32,11 +33,20 @@ namespace {
 
 static const int kAudioChannels = 2;
 static const int kAudioSamplingFrequency = 48000;
-static const int kSoundFrequency = 1234;  // Frequency of sinusoid wave.
-static const float kSoundVolume = 0.5f;
+static const int kSoundFrequency = 440;  // Frequency of sinusoid wave.
+static const float kSoundVolume = 0.10f;
 static const int kAudioFrameMs = 10;  // Each audio frame is exactly 10ms.
 static const int kAudioPacketsPerSecond = 1000 / kAudioFrameMs;
 
+// Bounds for variable frame size mode.
+static const int kMinFakeFrameWidth = 60;
+static const int kMinFakeFrameHeight = 34;
+static const int kStartingFakeFrameWidth = 854;
+static const int kStartingFakeFrameHeight = 480;
+static const int kMaxFakeFrameWidth = 1280;
+static const int kMaxFakeFrameHeight = 720;
+static const int kMaxFrameSizeChangeMillis = 5000;
+
 void AVFreeFrame(AVFrame* frame) {
   av_frame_free(&frame);
 }
@@ -63,7 +73,8 @@ FakeMediaSource::FakeMediaSource(
     bool keep_frames)
     : task_runner_(task_runner),
       video_config_(video_config),
-      keep_frames_(keep_frames),
+      keep_frames_(keep_frames),
+      variable_frame_size_mode_(false),
       synthetic_count_(0),
       clock_(clock),
       audio_frame_count_(0),
@@ -189,6 +200,10 @@ void FakeMediaSource::SetSourceFile(const base::FilePath& video_file,
   Rewind();
 }
 
+void FakeMediaSource::SetVariableFrameSizeMode(bool enabled) {
+  variable_frame_size_mode_ = enabled;
+}
+
 void FakeMediaSource::Start(scoped_refptr<AudioFrameInput> audio_frame_input,
                             scoped_refptr<VideoFrameInput> video_frame_input) {
   audio_frame_input_ = audio_frame_input;
@@ -207,9 +222,8 @@ void FakeMediaSource::Start(scoped_refptr<AudioFrameInput> audio_frame_input,
     // Send fake patterns.
     task_runner_->PostTask(
         FROM_HERE,
-        base::Bind(
-            &FakeMediaSource::SendNextFakeFrame,
-            base::Unretained(this)));
+        base::Bind(&FakeMediaSource::SendNextFakeFrame,
+                   weak_factory_.GetWeakPtr()));
     return;
   }
 
@@ -228,18 +242,16 @@ void FakeMediaSource::Start(scoped_refptr<AudioFrameInput> audio_frame_input,
       static_cast<double>(audio_params_.sample_rate()) /
          kAudioSamplingFrequency,
       audio_params_.frames_per_buffer(),
-      base::Bind(&FakeMediaSource::ProvideData, base::Unretained(this))));
+      base::Bind(&FakeMediaSource::ProvideData, weak_factory_.GetWeakPtr())));
 
   task_runner_->PostTask(
       FROM_HERE,
-      base::Bind(
-          &FakeMediaSource::SendNextFrame,
-          base::Unretained(this)));
+      base::Bind(&FakeMediaSource::SendNextFrame, weak_factory_.GetWeakPtr()));
 }
 
 void FakeMediaSource::SendNextFakeFrame() {
-  gfx::Size size(video_config_.width, video_config_.height);
+  UpdateNextFrameSize();
   scoped_refptr<VideoFrame> video_frame =
-      VideoFrame::CreateBlackFrame(size);
+      VideoFrame::CreateBlackFrame(current_frame_size_);
   PopulateVideoFrame(video_frame.get(), synthetic_count_);
   ++synthetic_count_;
@@ -288,6 +300,32 @@ void FakeMediaSource::SendNextFakeFrame() {
                            video_time - elapsed_time);
 }
 
+void FakeMediaSource::UpdateNextFrameSize() {
+  if (variable_frame_size_mode_) {
+    bool update_size_change_time = false;
+    if (current_frame_size_.IsEmpty()) {
+      current_frame_size_ = gfx::Size(kStartingFakeFrameWidth,
+                                      kStartingFakeFrameHeight);
+      update_size_change_time = true;
+    } else if (clock_->NowTicks() >= next_frame_size_change_time_) {
+      current_frame_size_ = gfx::Size(
+          base::RandInt(kMinFakeFrameWidth, kMaxFakeFrameWidth),
+          base::RandInt(kMinFakeFrameHeight, kMaxFakeFrameHeight));
+      update_size_change_time = true;
+    }
+
+    if (update_size_change_time) {
+      next_frame_size_change_time_ = clock_->NowTicks() +
+          base::TimeDelta::FromMillisecondsD(
+              base::RandDouble() * kMaxFrameSizeChangeMillis);
+    }
+  } else {
+    current_frame_size_ = gfx::Size(kStartingFakeFrameWidth,
+                                    kStartingFakeFrameHeight);
+    next_frame_size_change_time_ = base::TimeTicks();
+  }
+}
+
 bool FakeMediaSource::SendNextTranscodedVideo(base::TimeDelta elapsed_time) {
   if (!is_transcoding_video())
     return false;
@@ -296,33 +334,13 @@ bool FakeMediaSource::SendNextTranscodedVideo(base::TimeDelta elapsed_time) {
   if (video_frame_queue_.empty())
     return false;
 
-  scoped_refptr<VideoFrame> decoded_frame =
-      video_frame_queue_.front();
-  if (elapsed_time < decoded_frame->timestamp())
+  const scoped_refptr<VideoFrame> video_frame = video_frame_queue_.front();
+  if (elapsed_time < video_frame->timestamp())
     return false;
-
-  gfx::Size size(video_config_.width, video_config_.height);
-  scoped_refptr<VideoFrame> video_frame =
-      VideoFrame::CreateBlackFrame(size);
   video_frame_queue_.pop();
 
-  media::CopyPlane(VideoFrame::kYPlane,
-                   decoded_frame->data(VideoFrame::kYPlane),
-                   decoded_frame->stride(VideoFrame::kYPlane),
-                   decoded_frame->rows(VideoFrame::kYPlane),
-                   video_frame.get());
-  media::CopyPlane(VideoFrame::kUPlane,
-                   decoded_frame->data(VideoFrame::kUPlane),
-                   decoded_frame->stride(VideoFrame::kUPlane),
-                   decoded_frame->rows(VideoFrame::kUPlane),
-                   video_frame.get());
-  media::CopyPlane(VideoFrame::kVPlane,
-                   decoded_frame->data(VideoFrame::kVPlane),
-                   decoded_frame->stride(VideoFrame::kVPlane),
-                   decoded_frame->rows(VideoFrame::kVPlane),
-                   video_frame.get());
   // Use the timestamp from the file if we're transcoding.
-  video_frame->set_timestamp(ScaleTimestamp(decoded_frame->timestamp()));
+  video_frame->set_timestamp(ScaleTimestamp(video_frame->timestamp()));
   if (keep_frames_)
     inserted_video_frame_queue_.push(video_frame);
   video_frame_input_->InsertRawVideoFrame(
@@ -373,9 +391,7 @@ void FakeMediaSource::SendNextFrame() {
   // Send next send.
   task_runner_->PostDelayedTask(
       FROM_HERE,
-      base::Bind(
-          &FakeMediaSource::SendNextFrame,
-          base::Unretained(this)),
+      base::Bind(&FakeMediaSource::SendNextFrame, weak_factory_.GetWeakPtr()),
      base::TimeDelta::FromMilliseconds(kAudioFrameMs));
 }
 
diff --git a/media/cast/test/fake_media_source.h b/media/cast/test/fake_media_source.h
index f0822ce..ef5aa86 100644
--- a/media/cast/test/fake_media_source.h
+++ b/media/cast/test/fake_media_source.h
@@ -57,6 +57,10 @@ class FakeMediaSource {
   // If |override_fps| is non zero then the file is played at the desired rate.
   void SetSourceFile(const base::FilePath& video_file, int override_fps);
 
+  // Set to true to randomly change the frame size at random points in time.
+  // Only applies when SetSourceFile() is not used.
+  void SetVariableFrameSizeMode(bool enabled);
+
   void Start(scoped_refptr<AudioFrameInput> audio_frame_input,
              scoped_refptr<VideoFrameInput> video_frame_input);
 
@@ -71,6 +75,8 @@ class FakeMediaSource {
   void SendNextFrame();
   void SendNextFakeFrame();
 
+  void UpdateNextFrameSize();
+
   // Return true if a frame was sent.
   bool SendNextTranscodedVideo(base::TimeDelta elapsed_time);
 
@@ -104,6 +110,9 @@ class FakeMediaSource {
   const scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
   const VideoSenderConfig video_config_;
   const bool keep_frames_;
+  bool variable_frame_size_mode_;
+  gfx::Size current_frame_size_;
+  base::TimeTicks next_frame_size_change_time_;
   scoped_refptr<AudioFrameInput> audio_frame_input_;
   scoped_refptr<VideoFrameInput> video_frame_input_;
   uint8 synthetic_count_;
diff --git a/media/cast/test/linux_output_window.cc b/media/cast/test/linux_output_window.cc
index 5a934aa..486db98 100644
--- a/media/cast/test/linux_output_window.cc
+++ b/media/cast/test/linux_output_window.cc
@@ -4,6 +4,8 @@
 
 #include "media/cast/test/linux_output_window.h"
 
+#include <algorithm>
+
 #include "base/logging.h"
 #include "media/base/video_frame.h"
 #include "third_party/libyuv/include/libyuv/convert.h"
@@ -118,18 +120,27 @@ void LinuxOutputWindow::CreateWindow(int x_pos,
 
 void LinuxOutputWindow::RenderFrame(
     const scoped_refptr<media::VideoFrame>& video_frame) {
-  CHECK_LE(video_frame->coded_size().width(), image_->width);
-  CHECK_LE(video_frame->coded_size().height(), image_->height);
-  libyuv::I420ToARGB(video_frame->data(VideoFrame::kYPlane),
-                     video_frame->stride(VideoFrame::kYPlane),
-                     video_frame->data(VideoFrame::kUPlane),
-                     video_frame->stride(VideoFrame::kUPlane),
-                     video_frame->data(VideoFrame::kVPlane),
-                     video_frame->stride(VideoFrame::kVPlane),
-                     reinterpret_cast<uint8_t*>(image_->data),
-                     image_->bytes_per_line,
-                     video_frame->coded_size().width(),
-                     video_frame->coded_size().height());
+  const gfx::Size damage_size(std::min(video_frame->visible_rect().width(),
+                                       image_->width),
+                              std::min(video_frame->visible_rect().height(),
+                                       image_->height));
+
+  if (damage_size.width() < image_->width ||
+      damage_size.height() < image_->height)
+    memset(image_->data, 0x00, image_->bytes_per_line * image_->height);
+
+  if (!damage_size.IsEmpty()) {
+    libyuv::I420ToARGB(video_frame->visible_data(VideoFrame::kYPlane),
+                       video_frame->stride(VideoFrame::kYPlane),
+                       video_frame->visible_data(VideoFrame::kUPlane),
+                       video_frame->stride(VideoFrame::kUPlane),
+                       video_frame->visible_data(VideoFrame::kVPlane),
+                       video_frame->stride(VideoFrame::kVPlane),
+                       reinterpret_cast<uint8_t*>(image_->data),
+                       image_->bytes_per_line,
+                       damage_size.width(),
+                       damage_size.height());
+  }
 
   // Place image in window.
   XShmPutImage(display_,
@@ -140,8 +151,8 @@ void LinuxOutputWindow::RenderFrame(
                0,
                0,
                0,
-               video_frame->coded_size().width(),
-               video_frame->coded_size().height(),
+               image_->width,
+               image_->height,
                true);
 
   // Very important for the image to update properly!
diff --git a/media/cast/test/sender.cc b/media/cast/test/sender.cc
index d9cee32..7e6fd38 100644
--- a/media/cast/test/sender.cc
+++ b/media/cast/test/sender.cc
@@ -38,8 +38,6 @@
 #include "media/cast/test/utility/input_builder.h"
 
 namespace {
-static const int kAudioChannels = 2;
-static const int kAudioSamplingFrequency = 48000;
 
 // The max allowed size of serialized log.
 const int kMaxSerializedLogBytes = 10 * 1000 * 1000;
@@ -57,61 +55,15 @@ const int kMaxSerializedLogBytes = 10 * 1000 * 1000;
 //
 // --fps=xx
 //   Override framerate of the video stream.
+//
+// --vary-frame-sizes
+//   Randomly vary the video frame sizes at random points in time.  Has no
+//   effect if --source-file is being used.
 const char kSwitchAddress[] = "address";
 const char kSwitchPort[] = "port";
 const char kSwitchSourceFile[] = "source-file";
 const char kSwitchFps[] = "fps";
-
-media::cast::AudioSenderConfig GetAudioSenderConfig() {
-  media::cast::AudioSenderConfig audio_config;
-
-  audio_config.use_external_encoder = false;
-  audio_config.frequency = kAudioSamplingFrequency;
-  audio_config.channels = kAudioChannels;
-  audio_config.bitrate = 0;  // Use Opus auto-VBR mode.
-  audio_config.codec = media::cast::CODEC_AUDIO_OPUS;
-  audio_config.ssrc = 1;
-  audio_config.receiver_ssrc = 2;
-  audio_config.rtp_payload_type = 127;
-  // TODO(miu): The default in cast_defines.h is 100. Should this be 100, and
-  // should receiver.cc's config also be 100?
-  audio_config.max_playout_delay = base::TimeDelta::FromMilliseconds(300);
-  return audio_config;
-}
-
-media::cast::VideoSenderConfig GetVideoSenderConfig() {
-  media::cast::VideoSenderConfig video_config;
-
-  video_config.use_external_encoder = false;
-
-  // Resolution.
-  video_config.width = 1280;
-  video_config.height = 720;
-  video_config.max_frame_rate = 30;
-
-  // Bitrates.
-  video_config.max_bitrate = 2500000;
-  video_config.min_bitrate = 100000;
-  video_config.start_bitrate = video_config.min_bitrate;
-
-  // Codec.
-  video_config.codec = media::cast::CODEC_VIDEO_VP8;
-  video_config.max_number_of_video_buffers_used = 1;
-  video_config.number_of_encode_threads = 2;
-
-  // Quality options.
-  video_config.min_qp = 4;
-  video_config.max_qp = 40;
-
-  // SSRCs and payload type. Don't change them.
-  video_config.ssrc = 11;
-  video_config.receiver_ssrc = 12;
-  video_config.rtp_payload_type = 96;
-  // TODO(miu): The default in cast_defines.h is 100. Should this be 100, and
-  // should receiver.cc's config also be 100?
-  video_config.max_playout_delay = base::TimeDelta::FromMilliseconds(300);
-  return video_config;
-}
+const char kSwitchVaryFrameSizes[] = "vary-frame-sizes";
 
 void UpdateCastTransportStatus(
     media::cast::CastTransportStatus status) {
@@ -149,10 +101,12 @@ void LogRawEvents(
   }
 }
 
-void InitializationResult(media::cast::CastInitializationStatus result) {
-  bool end_result = result == media::cast::STATUS_AUDIO_INITIALIZED ||
-                    result == media::cast::STATUS_VIDEO_INITIALIZED;
-  CHECK(end_result) << "Cast sender uninitialized";
+void QuitLoopOnInitializationResult(
+    media::cast::CastInitializationStatus result) {
+  CHECK(result == media::cast::STATUS_AUDIO_INITIALIZED ||
+        result == media::cast::STATUS_VIDEO_INITIALIZED)
+      << "Cast sender uninitialized";
+  base::MessageLoop::current()->Quit();
 }
 
 net::IPEndPoint CreateUDPAddress(std::string ip_str, uint16 port) {
@@ -282,8 +236,10 @@ int main(int argc, char** argv) {
   LOG(INFO) << "Sending to " << remote_ip_address << ":" << remote_port
             << ".";
 
-  media::cast::AudioSenderConfig audio_config = GetAudioSenderConfig();
-  media::cast::VideoSenderConfig video_config = GetVideoSenderConfig();
+  media::cast::AudioSenderConfig audio_config =
+      media::cast::GetDefaultAudioSenderConfig();
+  media::cast::VideoSenderConfig video_config =
+      media::cast::GetDefaultVideoSenderConfig();
 
   // Running transport on the main thread.
   // Setting up transport config.
@@ -315,6 +271,8 @@ int main(int argc, char** argv) {
     LOG(INFO) << "Source: " << source_path.value();
     fake_media_source->SetSourceFile(source_path, override_fps);
   }
+  if (cmd->HasSwitch(kSwitchVaryFrameSizes))
+    fake_media_source->SetVariableFrameSizeMode(true);
 
   // CastTransportSender initialization.
   scoped_ptr<media::cast::CastTransportSender> transport_sender =
@@ -330,16 +288,6 @@ int main(int argc, char** argv) {
           media::cast::PacketReceiverCallback(),
          io_message_loop.message_loop_proxy());
 
-  // CastSender initialization.
-  scoped_ptr<media::cast::CastSender> cast_sender =
-      media::cast::CastSender::Create(cast_environment, transport_sender.get());
-  cast_sender->InitializeVideo(
-      fake_media_source->get_video_config(),
-      base::Bind(&InitializationResult),
-      media::cast::CreateDefaultVideoEncodeAcceleratorCallback(),
-      media::cast::CreateDefaultVideoEncodeMemoryCallback());
-  cast_sender->InitializeAudio(audio_config, base::Bind(&InitializationResult));
-
   // Set up event subscribers.
   scoped_ptr<media::cast::EncodingEventSubscriber> video_event_subscriber;
   scoped_ptr<media::cast::EncodingEventSubscriber> audio_event_subscriber;
@@ -405,9 +353,28 @@ int main(int argc, char** argv) {
                  base::Passed(&offset_estimator)),
       base::TimeDelta::FromSeconds(logging_duration_seconds));
 
+  // CastSender initialization.
+  scoped_ptr<media::cast::CastSender> cast_sender =
+      media::cast::CastSender::Create(cast_environment, transport_sender.get());
+  io_message_loop.PostTask(
+      FROM_HERE,
+      base::Bind(&media::cast::CastSender::InitializeVideo,
+                 base::Unretained(cast_sender.get()),
+                 fake_media_source->get_video_config(),
+                 base::Bind(&QuitLoopOnInitializationResult),
+                 media::cast::CreateDefaultVideoEncodeAcceleratorCallback(),
+                 media::cast::CreateDefaultVideoEncodeMemoryCallback()));
+  io_message_loop.Run();  // Wait for video initialization.
+  io_message_loop.PostTask(
+      FROM_HERE,
+      base::Bind(&media::cast::CastSender::InitializeAudio,
+                 base::Unretained(cast_sender.get()),
+                 audio_config,
+                 base::Bind(&QuitLoopOnInitializationResult)));
+  io_message_loop.Run();  // Wait for audio initialization.
+
   fake_media_source->Start(cast_sender->audio_frame_input(),
                            cast_sender->video_frame_input());
-  io_message_loop.Run();
 
   return 0;
 }
diff --git a/media/cast/test/simulator.cc b/media/cast/test/simulator.cc
index d63f6e5..1e01085 100644
--- a/media/cast/test/simulator.cc
+++ b/media/cast/test/simulator.cc
@@ -218,7 +218,11 @@ class EncodedVideoFrameTracker : public RawEventSubscriber {
 void AppendYuvToFile(const base::FilePath& path,
                      scoped_refptr<media::VideoFrame> frame) {
   // Write YUV420 format to file.
-  std::string header = "FRAME\n";
+  std::string header;
+  base::StringAppendF(
+      &header, "FRAME W%d H%d\n",
+      frame->coded_size().width(),
+      frame->coded_size().height());
   AppendToFile(path, header.data(), header.size());
   AppendToFile(path,
               reinterpret_cast<char*>(frame->data(media::VideoFrame::kYPlane)),
@@ -514,11 +518,7 @@ void RunSimulation(const base::FilePath& source_path,
     LOG(INFO) << "Writing YUV output to file: " << yuv_output_path.value();
 
     // Write YUV4MPEG2 header.
-    std::string header;
-    base::StringAppendF(
-        &header, "YUV4MPEG2 W%d H%d F30000:1001 Ip A1:1 C420\n",
-        media_source.get_video_config().width,
-        media_source.get_video_config().height);
+    const std::string header("YUV4MPEG2 W1280 H720 F30000:1001 Ip A1:1 C420\n");
     AppendToFile(yuv_output_path, header.data(), header.size());
   }
 
diff --git a/media/cast/test/utility/video_utility.cc b/media/cast/test/utility/video_utility.cc
index 9741cd0..ff050da 100644
--- a/media/cast/test/utility/video_utility.cc
+++ b/media/cast/test/utility/video_utility.cc
@@ -59,7 +59,11 @@ double I420SSIM(const scoped_refptr<media::VideoFrame>& frame1,
 }
 
 void PopulateVideoFrame(VideoFrame* frame, int start_value) {
-  int height = frame->coded_size().height();
+  const gfx::Size frame_size = frame->coded_size();
+  const int stripe_size =
+      std::max(32, std::min(frame_size.width(), frame_size.height()) / 8) & -2;
+
+  int height = frame_size.height();
   int stride_y = frame->stride(VideoFrame::kYPlane);
   int stride_u = frame->stride(VideoFrame::kUPlane);
   int stride_v = frame->stride(VideoFrame::kVPlane);
@@ -70,24 +74,30 @@ void PopulateVideoFrame(VideoFrame* frame, int start_value) {
 
   // Set Y.
   for (int j = 0; j < height; ++j) {
+    const int stripe_j = (j / stripe_size) * stripe_size;
     for (int i = 0; i < stride_y; ++i) {
-      *y_plane = static_cast<uint8>(start_value + i + j);
+      const int stripe_i = (i / stripe_size) * stripe_size;
+      *y_plane = static_cast<uint8>(start_value + stripe_i + stripe_j);
       ++y_plane;
     }
   }
 
   // Set U.
   for (int j = 0; j < half_height; ++j) {
+    const int stripe_j = (j / stripe_size) * stripe_size;
     for (int i = 0; i < stride_u; ++i) {
-      *u_plane = static_cast<uint8>(start_value + i + j);
+      const int stripe_i = (i / stripe_size) * stripe_size;
+      *u_plane = static_cast<uint8>(start_value + stripe_i + stripe_j);
       ++u_plane;
     }
   }
 
   // Set V.
   for (int j = 0; j < half_height; ++j) {
+    const int stripe_j = (j / stripe_size) * stripe_size;
     for (int i = 0; i < stride_v; ++i) {
-      *v_plane = static_cast<uint8>(start_value + i + j);
+      const int stripe_i = (i / stripe_size) * stripe_size;
+      *v_plane = static_cast<uint8>(start_value + stripe_i + stripe_j);
       ++v_plane;
     }
   }
 }
diff --git a/media/cast/test/utility/video_utility.h b/media/cast/test/utility/video_utility.h
index 32bd5c0..a446a80 100644
--- a/media/cast/test/utility/video_utility.h
+++ b/media/cast/test/utility/video_utility.h
@@ -17,7 +17,8 @@ double I420PSNR(const scoped_refptr<media::VideoFrame>& frame1,
 double I420SSIM(const scoped_refptr<media::VideoFrame>& frame1,
                 const scoped_refptr<media::VideoFrame>& frame2);
 
-// Populate a video frame with values starting with the given start value.
+// Populate a video |frame| with a plaid pattern, cycling from the given
+// |start_value|.
 // Width, height and stride should be set in advance.
 // Memory is allocated within the function.
 void PopulateVideoFrame(VideoFrame* frame, int start_value);