| author | miu <miu@chromium.org> | 2014-10-06 21:54:44 -0700 |
|---|---|---|
| committer | Commit bot <commit-bot@chromium.org> | 2014-10-07 04:55:55 +0000 |
| commit | 7a944362f541a76411ad242be5582337a659f261 (patch) | |
| tree | e2c3e4b9d552be1533bd74ff071f2a96561538fa /media | |
| parent | a0a73e578b6785c23cc945021fc9a955b19f8231 (diff) | |
[Cast] Video encoder clean-ups (stricter interfaces, encapsulation, etc.)
Note: There are no functional or behavioral differences in this change.
Clean up code throughout the media/cast video encoder code paths to
simplify and clarify behavior and to encapsulate functionality in the
right places.
1. Video RTP timestamps are computed from VideoFrame::timestamp(), not the
reference time (see the conversion sketch after the commit message). For
MediaStream sources this has no effect, since both timestamps are currently
fixed to increment at the same rate.
2. Corrected terminology, renaming "capture timestamp" to "reference
timestamp" everywhere. Added a placeholder fix for the logging of a video
frame's "capture begin" event in VideoSender::InsertRawVideoFrame().
3. SoftwareVideoEncoders can no longer fail in a call to Encode(), and
VideoEncoderImpl can no longer silently drop frames it has accepted for
asynchronous encoding. VideoSender depends on this to work correctly.
4. VP8Encoder: Re-ordered vpx_codec_enc_cfg_t field assignments to match
VP8 docs and declaration ordering, and added code comments to document
meanings/decisions. Also, the VP8 "structs" are now local private
members of the VP8Encoder class (composition instead of indirection); a
condensed sketch of the resulting lifetime handling appears after the diff
at the end of this page.
BUG=418323
Review URL: https://codereview.chromium.org/602413003
Cr-Commit-Position: refs/heads/master@{#298375}
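
As a minimal sketch of the conversion behind point 1: the actual code calls TimeDeltaToRtpDelta(video_frame->timestamp(), kVideoFrequency) on a base::TimeDelta, while the helper below uses std::chrono and an illustrative name (MediaTimestampToRtpTimestamp is not a Chromium function) purely to show the same 90 kHz scaling in a self-contained form.

```cpp
#include <chrono>
#include <cstdint>

// RTP clock rate for video, in Hz (matches media/cast's kVideoFrequency).
constexpr int64_t kVideoFrequency = 90000;

// Converts a frame's media timestamp (time elapsed since the first frame)
// into a 90 kHz RTP timestamp. Integer division truncates, and the result
// intentionally wraps at 32 bits, as RTP timestamps do.
uint32_t MediaTimestampToRtpTimestamp(std::chrono::microseconds media_ts) {
  return static_cast<uint32_t>(media_ts.count() * kVideoFrequency /
                               std::chrono::microseconds::period::den);
}
```

For a nominal 30 fps stream, consecutive frames are therefore roughly 3000 RTP ticks apart, independent of what the reference (wall-clock) time says about when each frame was captured.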
Diffstat (limited to 'media')
19 files changed, 333 insertions, 296 deletions
diff --git a/media/cast/net/cast_transport_defines.h b/media/cast/net/cast_transport_defines.h index f7d681c..c8b9fc1 100644 --- a/media/cast/net/cast_transport_defines.h +++ b/media/cast/net/cast_transport_defines.h @@ -84,13 +84,6 @@ class FrameIdWrapHelper { DISALLOW_COPY_AND_ASSIGN(FrameIdWrapHelper); }; -inline uint32 GetVideoRtpTimestamp(const base::TimeTicks& time_ticks) { - base::TimeTicks zero_time; - base::TimeDelta recorded_delta = time_ticks - zero_time; - // Timestamp is in 90 KHz for video. - return static_cast<uint32>(recorded_delta.InMilliseconds() * 90); -} - } // namespace cast } // namespace media diff --git a/media/cast/net/rtp/rtp_packetizer_unittest.cc b/media/cast/net/rtp/rtp_packetizer_unittest.cc index a3aa2ae..4a3ea84 100644 --- a/media/cast/net/rtp/rtp_packetizer_unittest.cc +++ b/media/cast/net/rtp/rtp_packetizer_unittest.cc @@ -37,7 +37,7 @@ class TestRtpPacketTransport : public PacketSender { expected_number_of_packets_(0), expected_packet_id_(0), expected_frame_id_(0), - expectd_rtp_timestamp_(0) {} + expected_rtp_timestamp_(0) {} void VerifyRtpHeader(const RtpCastTestHeader& rtp_header) { VerifyCommonRtpHeader(rtp_header); @@ -47,7 +47,7 @@ class TestRtpPacketTransport : public PacketSender { void VerifyCommonRtpHeader(const RtpCastTestHeader& rtp_header) { EXPECT_EQ(kPayload, rtp_header.payload_type); EXPECT_EQ(sequence_number_, rtp_header.sequence_number); - EXPECT_EQ(expectd_rtp_timestamp_, rtp_header.rtp_timestamp); + EXPECT_EQ(expected_rtp_timestamp_, rtp_header.rtp_timestamp); EXPECT_EQ(config_.ssrc, rtp_header.ssrc); EXPECT_EQ(0, rtp_header.num_csrcs); } @@ -83,7 +83,7 @@ class TestRtpPacketTransport : public PacketSender { } void set_rtp_timestamp(uint32 rtp_timestamp) { - expectd_rtp_timestamp_ = rtp_timestamp; + expected_rtp_timestamp_ = rtp_timestamp; } RtpPacketizerConfig config_; @@ -94,7 +94,7 @@ class TestRtpPacketTransport : public PacketSender { // Assuming packets arrive in sequence. int expected_packet_id_; uint32 expected_frame_id_; - uint32 expectd_rtp_timestamp_; + uint32 expected_rtp_timestamp_; DISALLOW_COPY_AND_ASSIGN(TestRtpPacketTransport); }; @@ -121,8 +121,7 @@ class RtpPacketizerTest : public ::testing::Test { video_frame_.frame_id = 0; video_frame_.referenced_frame_id = kStartFrameId; video_frame_.data.assign(kFrameSize, 123); - video_frame_.rtp_timestamp = - GetVideoRtpTimestamp(testing_clock_.NowTicks()); + video_frame_.rtp_timestamp = 0x0055aa11; } void RunTasks(int during_ms) { diff --git a/media/cast/receiver/video_decoder_unittest.cc b/media/cast/receiver/video_decoder_unittest.cc index b1313c6..1397b57 100644 --- a/media/cast/receiver/video_decoder_unittest.cc +++ b/media/cast/receiver/video_decoder_unittest.cc @@ -83,7 +83,7 @@ class VideoDecoderTest : public ::testing::TestWithParam<Codec> { new EncodedFrame()); // Test only supports VP8, currently. CHECK_EQ(CODEC_VIDEO_VP8, GetParam()); - vp8_encoder_.Encode(video_frame, encoded_frame.get()); + vp8_encoder_.Encode(video_frame, base::TimeTicks(), encoded_frame.get()); // Rewrite frame IDs for testing purposes. encoded_frame->frame_id = last_frame_id_ + 1 + num_dropped_frames; if (last_frame_id_ == 0) diff --git a/media/cast/sender/external_video_encoder.cc b/media/cast/sender/external_video_encoder.cc index bbda6de..a856ed3 100644 --- a/media/cast/sender/external_video_encoder.cc +++ b/media/cast/sender/external_video_encoder.cc @@ -41,14 +41,17 @@ namespace media { namespace cast { // Container for the associated data of a video frame being processed. 
-struct EncodedFrameReturnData { - EncodedFrameReturnData(base::TimeTicks c_time, - VideoEncoder::FrameEncodedCallback callback) { - capture_time = c_time; - frame_encoded_callback = callback; - } - base::TimeTicks capture_time; - VideoEncoder::FrameEncodedCallback frame_encoded_callback; +struct InProgressFrameEncode { + const RtpTimestamp rtp_timestamp; + const base::TimeTicks reference_time; + const VideoEncoder::FrameEncodedCallback frame_encoded_callback; + + InProgressFrameEncode(RtpTimestamp rtp, + base::TimeTicks r_time, + VideoEncoder::FrameEncodedCallback callback) + : rtp_timestamp(rtp), + reference_time(r_time), + frame_encoded_callback(callback) {} }; // The ExternalVideoEncoder class can be deleted directly by cast, while @@ -154,14 +157,16 @@ class LocalVideoEncodeAcceleratorClient void EncodeVideoFrame( const scoped_refptr<media::VideoFrame>& video_frame, - const base::TimeTicks& capture_time, + const base::TimeTicks& reference_time, bool key_frame_requested, const VideoEncoder::FrameEncodedCallback& frame_encoded_callback) { DCHECK(encoder_task_runner_.get()); DCHECK(encoder_task_runner_->RunsTasksOnCurrentThread()); - encoded_frame_data_storage_.push_back( - EncodedFrameReturnData(capture_time, frame_encoded_callback)); + in_progress_frame_encodes_.push_back(InProgressFrameEncode( + TimeDeltaToRtpDelta(video_frame->timestamp(), kVideoFrequency), + reference_time, + frame_encoded_callback)); // BitstreamBufferReady will be called once the encoder is done. video_encode_accelerator_->Encode(video_frame, key_frame_requested); @@ -226,9 +231,10 @@ class LocalVideoEncodeAcceleratorClient // with the first key frame. stream_header_.append(static_cast<const char*>(output_buffer->memory()), payload_size); - } else if (!encoded_frame_data_storage_.empty()) { - scoped_ptr<EncodedFrame> encoded_frame( - new EncodedFrame()); + } else if (!in_progress_frame_encodes_.empty()) { + const InProgressFrameEncode& request = in_progress_frame_encodes_.front(); + + scoped_ptr<EncodedFrame> encoded_frame(new EncodedFrame()); encoded_frame->dependency = key_frame ? EncodedFrame::KEY : EncodedFrame::DEPENDENT; encoded_frame->frame_id = ++last_encoded_frame_id_; @@ -236,10 +242,8 @@ class LocalVideoEncodeAcceleratorClient encoded_frame->referenced_frame_id = encoded_frame->frame_id; else encoded_frame->referenced_frame_id = encoded_frame->frame_id - 1; - encoded_frame->reference_time = - encoded_frame_data_storage_.front().capture_time; - encoded_frame->rtp_timestamp = - GetVideoRtpTimestamp(encoded_frame->reference_time); + encoded_frame->rtp_timestamp = request.rtp_timestamp; + encoded_frame->reference_time = request.reference_time; if (!stream_header_.empty()) { encoded_frame->data = stream_header_; stream_header_.clear(); @@ -259,10 +263,10 @@ class LocalVideoEncodeAcceleratorClient cast_environment_->PostTask( CastEnvironment::MAIN, FROM_HERE, - base::Bind(encoded_frame_data_storage_.front().frame_encoded_callback, + base::Bind(request.frame_encoded_callback, base::Passed(&encoded_frame))); - encoded_frame_data_storage_.pop_front(); + in_progress_frame_encodes_.pop_front(); } else { VLOG(1) << "BitstreamBufferReady(): no encoded frame data available"; } @@ -376,7 +380,7 @@ class LocalVideoEncodeAcceleratorClient ScopedVector<base::SharedMemory> output_buffers_; // FIFO list. 
- std::list<EncodedFrameReturnData> encoded_frame_data_storage_; + std::list<InProgressFrameEncode> in_progress_frame_encodes_; DISALLOW_COPY_AND_ASSIGN(LocalVideoEncodeAcceleratorClient); }; @@ -435,7 +439,7 @@ void ExternalVideoEncoder::OnCreateVideoEncodeAccelerator( bool ExternalVideoEncoder::EncodeVideoFrame( const scoped_refptr<media::VideoFrame>& video_frame, - const base::TimeTicks& capture_time, + const base::TimeTicks& reference_time, const FrameEncodedCallback& frame_encoded_callback) { DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); @@ -447,7 +451,7 @@ bool ExternalVideoEncoder::EncodeVideoFrame( base::Bind(&LocalVideoEncodeAcceleratorClient::EncodeVideoFrame, video_accelerator_client_, video_frame, - capture_time, + reference_time, key_frame_requested_, frame_encoded_callback)); diff --git a/media/cast/sender/external_video_encoder.h b/media/cast/sender/external_video_encoder.h index 4ae21e1..85d9ffd 100644 --- a/media/cast/sender/external_video_encoder.h +++ b/media/cast/sender/external_video_encoder.h @@ -34,19 +34,11 @@ class ExternalVideoEncoder : public VideoEncoder { virtual ~ExternalVideoEncoder(); - // Called from the main cast thread. This function post the encode task to the - // video encoder thread; - // The video_frame must be valid until the closure callback is called. - // The closure callback is called from the video encoder thread as soon as - // the encoder is done with the frame; it does not mean that the encoded frame - // has been sent out. - // Once the encoded frame is ready the frame_encoded_callback is called. + // VideoEncoder implementation. virtual bool EncodeVideoFrame( const scoped_refptr<media::VideoFrame>& video_frame, - const base::TimeTicks& capture_time, + const base::TimeTicks& reference_time, const FrameEncodedCallback& frame_encoded_callback) override; - - // The following functions are called from the main cast thread. 
virtual void SetBitRate(int new_bit_rate) override; virtual void GenerateKeyFrame() override; virtual void LatestFrameIdToReference(uint32 frame_id) override; diff --git a/media/cast/sender/external_video_encoder_unittest.cc b/media/cast/sender/external_video_encoder_unittest.cc index 2f6fa9e..12a6b6a 100644 --- a/media/cast/sender/external_video_encoder_unittest.cc +++ b/media/cast/sender/external_video_encoder_unittest.cc @@ -63,10 +63,12 @@ class TestVideoEncoderCallback void SetExpectedResult(uint32 expected_frame_id, uint32 expected_last_referenced_frame_id, - const base::TimeTicks& expected_capture_time) { + uint32 expected_rtp_timestamp, + const base::TimeTicks& expected_reference_time) { expected_frame_id_ = expected_frame_id; expected_last_referenced_frame_id_ = expected_last_referenced_frame_id; - expected_capture_time_ = expected_capture_time; + expected_rtp_timestamp_ = expected_rtp_timestamp; + expected_reference_time_ = expected_reference_time; } void DeliverEncodedVideoFrame( @@ -80,7 +82,8 @@ class TestVideoEncoderCallback EXPECT_EQ(expected_frame_id_, encoded_frame->frame_id); EXPECT_EQ(expected_last_referenced_frame_id_, encoded_frame->referenced_frame_id); - EXPECT_EQ(expected_capture_time_, encoded_frame->reference_time); + EXPECT_EQ(expected_rtp_timestamp_, encoded_frame->rtp_timestamp); + EXPECT_EQ(expected_reference_time_, encoded_frame->reference_time); } protected: @@ -92,7 +95,8 @@ class TestVideoEncoderCallback bool expected_key_frame_; uint32 expected_frame_id_; uint32 expected_last_referenced_frame_id_; - base::TimeTicks expected_capture_time_; + uint32 expected_rtp_timestamp_; + base::TimeTicks expected_reference_time_; DISALLOW_COPY_AND_ASSIGN(TestVideoEncoderCallback); }; @@ -122,6 +126,7 @@ class ExternalVideoEncoderTest : public ::testing::Test { PopulateVideoFrame(video_frame_.get(), 123); testing_clock_ = new base::SimpleTestTickClock(); + testing_clock_->Advance(base::TimeTicks::Now() - base::TimeTicks()); task_runner_ = new test::FakeSingleThreadTaskRunner(testing_clock_); cast_environment_ = new CastEnvironment(scoped_ptr<base::TickClock>(testing_clock_).Pass(), @@ -145,6 +150,12 @@ class ExternalVideoEncoderTest : public ::testing::Test { virtual ~ExternalVideoEncoderTest() {} + void AdvanceClockAndVideoFrameTimestamp() { + testing_clock_->Advance(base::TimeDelta::FromMilliseconds(33)); + video_frame_->set_timestamp( + video_frame_->timestamp() + base::TimeDelta::FromMilliseconds(33)); + } + base::SimpleTestTickClock* testing_clock_; // Owned by CastEnvironment. test::FakeVideoEncodeAccelerator* fake_vea_; // Owned by video_encoder_. 
std::vector<uint32> stored_bitrates_; @@ -165,19 +176,23 @@ TEST_F(ExternalVideoEncoderTest, EncodePattern30fpsRunningOutOfAck) { base::Bind(&TestVideoEncoderCallback::DeliverEncodedVideoFrame, test_video_encoder_callback_.get()); - base::TimeTicks capture_time; - capture_time += base::TimeDelta::FromMilliseconds(33); - test_video_encoder_callback_->SetExpectedResult(0, 0, capture_time); + test_video_encoder_callback_->SetExpectedResult( + 0, 0, TimeDeltaToRtpDelta(video_frame_->timestamp(), kVideoFrequency), + testing_clock_->NowTicks()); video_encoder_->SetBitRate(2000); EXPECT_TRUE(video_encoder_->EncodeVideoFrame( - video_frame_, capture_time, frame_encoded_callback)); + video_frame_, testing_clock_->NowTicks(), frame_encoded_callback)); task_runner_->RunTasks(); for (int i = 0; i < 6; ++i) { - capture_time += base::TimeDelta::FromMilliseconds(33); - test_video_encoder_callback_->SetExpectedResult(i + 1, i, capture_time); + AdvanceClockAndVideoFrameTimestamp(); + test_video_encoder_callback_->SetExpectedResult( + i + 1, + i, + TimeDeltaToRtpDelta(video_frame_->timestamp(), kVideoFrequency), + testing_clock_->NowTicks()); EXPECT_TRUE(video_encoder_->EncodeVideoFrame( - video_frame_, capture_time, frame_encoded_callback)); + video_frame_, testing_clock_->NowTicks(), frame_encoded_callback)); task_runner_->RunTasks(); } // We need to run the task to cleanup the GPU instance. @@ -199,11 +214,11 @@ TEST_F(ExternalVideoEncoderTest, StreamHeader) { fake_vea_->SendDummyFrameForTesting(false); // Verify the first returned bitstream buffer is still a key frame. - base::TimeTicks capture_time; - capture_time += base::TimeDelta::FromMilliseconds(33); - test_video_encoder_callback_->SetExpectedResult(0, 0, capture_time); + test_video_encoder_callback_->SetExpectedResult( + 0, 0, TimeDeltaToRtpDelta(video_frame_->timestamp(), kVideoFrequency), + testing_clock_->NowTicks()); EXPECT_TRUE(video_encoder_->EncodeVideoFrame( - video_frame_, capture_time, frame_encoded_callback)); + video_frame_, testing_clock_->NowTicks(), frame_encoded_callback)); task_runner_->RunTasks(); // We need to run the task to cleanup the GPU instance. 
diff --git a/media/cast/sender/fake_software_video_encoder.cc b/media/cast/sender/fake_software_video_encoder.cc index bd96f78..12b6b77 100644 --- a/media/cast/sender/fake_software_video_encoder.cc +++ b/media/cast/sender/fake_software_video_encoder.cc @@ -6,6 +6,7 @@ #include "base/json/json_writer.h" #include "base/values.h" +#include "media/base/video_frame.h" #include "media/cast/net/cast_transport_config.h" #ifndef OFFICIAL_BUILD @@ -26,29 +27,34 @@ FakeSoftwareVideoEncoder::~FakeSoftwareVideoEncoder() {} void FakeSoftwareVideoEncoder::Initialize() {} -bool FakeSoftwareVideoEncoder::Encode( +void FakeSoftwareVideoEncoder::Encode( const scoped_refptr<media::VideoFrame>& video_frame, - EncodedFrame* encoded_image) { - encoded_image->frame_id = frame_id_++; + const base::TimeTicks& reference_time, + EncodedFrame* encoded_frame) { + DCHECK(encoded_frame); + + encoded_frame->frame_id = frame_id_++; if (next_frame_is_key_) { - encoded_image->dependency = EncodedFrame::KEY; - encoded_image->referenced_frame_id = encoded_image->frame_id; + encoded_frame->dependency = EncodedFrame::KEY; + encoded_frame->referenced_frame_id = encoded_frame->frame_id; next_frame_is_key_ = false; } else { - encoded_image->dependency = EncodedFrame::DEPENDENT; - encoded_image->referenced_frame_id = encoded_image->frame_id - 1; + encoded_frame->dependency = EncodedFrame::DEPENDENT; + encoded_frame->referenced_frame_id = encoded_frame->frame_id - 1; } + encoded_frame->rtp_timestamp = + TimeDeltaToRtpDelta(video_frame->timestamp(), kVideoFrequency); + encoded_frame->reference_time = reference_time; base::DictionaryValue values; values.SetBoolean("key", - encoded_image->dependency == EncodedFrame::KEY); - values.SetInteger("ref", encoded_image->referenced_frame_id); - values.SetInteger("id", encoded_image->frame_id); + encoded_frame->dependency == EncodedFrame::KEY); + values.SetInteger("ref", encoded_frame->referenced_frame_id); + values.SetInteger("id", encoded_frame->frame_id); values.SetInteger("size", frame_size_); - base::JSONWriter::Write(&values, &encoded_image->data); - encoded_image->data.resize( - std::max<size_t>(encoded_image->data.size(), frame_size_), ' '); - return true; + base::JSONWriter::Write(&values, &encoded_frame->data); + encoded_frame->data.resize( + std::max<size_t>(encoded_frame->data.size(), frame_size_), ' '); } void FakeSoftwareVideoEncoder::UpdateRates(uint32 new_bitrate) { diff --git a/media/cast/sender/fake_software_video_encoder.h b/media/cast/sender/fake_software_video_encoder.h index 26d34aa..ced126d 100644 --- a/media/cast/sender/fake_software_video_encoder.h +++ b/media/cast/sender/fake_software_video_encoder.h @@ -18,8 +18,9 @@ class FakeSoftwareVideoEncoder : public SoftwareVideoEncoder { // SoftwareVideoEncoder implementations. 
virtual void Initialize() override; - virtual bool Encode(const scoped_refptr<media::VideoFrame>& video_frame, - EncodedFrame* encoded_image) override; + virtual void Encode(const scoped_refptr<media::VideoFrame>& video_frame, + const base::TimeTicks& reference_time, + EncodedFrame* encoded_frame) override; virtual void UpdateRates(uint32 new_bitrate) override; virtual void GenerateKeyFrame() override; virtual void LatestFrameIdToReference(uint32 frame_id) override; diff --git a/media/cast/sender/software_video_encoder.h b/media/cast/sender/software_video_encoder.h index 16c8cd3..e3136d5 100644 --- a/media/cast/sender/software_video_encoder.h +++ b/media/cast/sender/software_video_encoder.h @@ -8,6 +8,10 @@ #include "base/basictypes.h" #include "base/memory/ref_counted.h" +namespace base { +class TimeTicks; +} + namespace media { class VideoFrame; } @@ -25,8 +29,9 @@ class SoftwareVideoEncoder { virtual void Initialize() = 0; // Encode a raw image (as a part of a video stream). - virtual bool Encode(const scoped_refptr<media::VideoFrame>& video_frame, - EncodedFrame* encoded_image) = 0; + virtual void Encode(const scoped_refptr<media::VideoFrame>& video_frame, + const base::TimeTicks& reference_time, + EncodedFrame* encoded_frame) = 0; // Update the encoder with a new target bit rate. virtual void UpdateRates(uint32 new_bitrate) = 0; diff --git a/media/cast/sender/video_encoder.h b/media/cast/sender/video_encoder.h index d788c7b..b3bdbe4 100644 --- a/media/cast/sender/video_encoder.h +++ b/media/cast/sender/video_encoder.h @@ -20,19 +20,17 @@ namespace cast { // All these functions are called from the main cast thread. class VideoEncoder { public: - typedef base::Callback<void(scoped_ptr<EncodedFrame>)> - FrameEncodedCallback; + typedef base::Callback<void(scoped_ptr<EncodedFrame>)> FrameEncodedCallback; virtual ~VideoEncoder() {} - // The video_frame must be valid until the closure callback is called. - // The closure callback is called from the video encoder thread as soon as - // the encoder is done with the frame; it does not mean that the encoded frame - // has been sent out. - // Once the encoded frame is ready the frame_encoded_callback is called. + // If true is returned, the Encoder has accepted the request and will process + // it asynchronously, running |frame_encoded_callback| on the MAIN + // CastEnvironment thread with the result. If false is returned, nothing + // happens and the callback will not be run. virtual bool EncodeVideoFrame( const scoped_refptr<media::VideoFrame>& video_frame, - const base::TimeTicks& capture_time, + const base::TimeTicks& reference_time, const FrameEncodedCallback& frame_encoded_callback) = 0; // Inform the encoder about the new target bit rate. 
diff --git a/media/cast/sender/video_encoder_impl.cc b/media/cast/sender/video_encoder_impl.cc index d216497..d27fb10 100644 --- a/media/cast/sender/video_encoder_impl.cc +++ b/media/cast/sender/video_encoder_impl.cc @@ -32,7 +32,7 @@ void EncodeVideoFrameOnEncoderThread( scoped_refptr<CastEnvironment> environment, SoftwareVideoEncoder* encoder, const scoped_refptr<media::VideoFrame>& video_frame, - const base::TimeTicks& capture_time, + const base::TimeTicks& reference_time, const VideoEncoderImpl::CodecDynamicConfig& dynamic_config, const VideoEncoderImpl::FrameEncodedCallback& frame_encoded_callback) { DCHECK(environment->CurrentlyOn(CastEnvironment::VIDEO)); @@ -43,24 +43,12 @@ void EncodeVideoFrameOnEncoderThread( dynamic_config.latest_frame_id_to_reference); encoder->UpdateRates(dynamic_config.bit_rate); - scoped_ptr<EncodedFrame> encoded_frame( - new EncodedFrame()); - if (!encoder->Encode(video_frame, encoded_frame.get())) { - VLOG(1) << "Encoding failed"; - return; - } - if (encoded_frame->data.empty()) { - VLOG(1) << "Encoding resulted in an empty frame"; - return; - } - encoded_frame->rtp_timestamp = GetVideoRtpTimestamp(capture_time); - encoded_frame->reference_time = capture_time; - + scoped_ptr<EncodedFrame> encoded_frame(new EncodedFrame()); + encoder->Encode(video_frame, reference_time, encoded_frame.get()); environment->PostTask( CastEnvironment::MAIN, FROM_HERE, - base::Bind( - frame_encoded_callback, base::Passed(&encoded_frame))); + base::Bind(frame_encoded_callback, base::Passed(&encoded_frame))); } } // namespace @@ -102,7 +90,7 @@ VideoEncoderImpl::~VideoEncoderImpl() { bool VideoEncoderImpl::EncodeVideoFrame( const scoped_refptr<media::VideoFrame>& video_frame, - const base::TimeTicks& capture_time, + const base::TimeTicks& reference_time, const FrameEncodedCallback& frame_encoded_callback) { DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); cast_environment_->PostTask(CastEnvironment::VIDEO, @@ -111,7 +99,7 @@ bool VideoEncoderImpl::EncodeVideoFrame( cast_environment_, encoder_.get(), video_frame, - capture_time, + reference_time, dynamic_config_, frame_encoded_callback)); diff --git a/media/cast/sender/video_encoder_impl.h b/media/cast/sender/video_encoder_impl.h index 7261a2d..ae2b85f 100644 --- a/media/cast/sender/video_encoder_impl.h +++ b/media/cast/sender/video_encoder_impl.h @@ -35,19 +35,11 @@ class VideoEncoderImpl : public VideoEncoder { virtual ~VideoEncoderImpl(); - // Called from the main cast thread. This function post the encode task to the - // video encoder thread; - // The video_frame must be valid until the closure callback is called. - // The closure callback is called from the video encoder thread as soon as - // the encoder is done with the frame; it does not mean that the encoded frame - // has been sent out. - // Once the encoded frame is ready the frame_encoded_callback is called. + // VideoEncoder implementation. virtual bool EncodeVideoFrame( const scoped_refptr<media::VideoFrame>& video_frame, - const base::TimeTicks& capture_time, + const base::TimeTicks& reference_time, const FrameEncodedCallback& frame_encoded_callback) override; - - // The following functions are called from the main cast thread. 
virtual void SetBitRate(int new_bit_rate) override; virtual void GenerateKeyFrame() override; virtual void LatestFrameIdToReference(uint32 frame_id) override; diff --git a/media/cast/sender/video_encoder_impl_unittest.cc b/media/cast/sender/video_encoder_impl_unittest.cc index cc73153..adacdd0 100644 --- a/media/cast/sender/video_encoder_impl_unittest.cc +++ b/media/cast/sender/video_encoder_impl_unittest.cc @@ -35,10 +35,12 @@ class TestVideoEncoderCallback void SetExpectedResult(uint32 expected_frame_id, uint32 expected_last_referenced_frame_id, - const base::TimeTicks& expected_capture_time) { + uint32 expected_rtp_timestamp, + const base::TimeTicks& expected_reference_time) { expected_frame_id_ = expected_frame_id; expected_last_referenced_frame_id_ = expected_last_referenced_frame_id; - expected_capture_time_ = expected_capture_time; + expected_rtp_timestamp_ = expected_rtp_timestamp; + expected_reference_time_ = expected_reference_time; } void DeliverEncodedVideoFrame( @@ -52,8 +54,8 @@ class TestVideoEncoderCallback EXPECT_EQ(expected_last_referenced_frame_id_, encoded_frame->referenced_frame_id) << "frame id: " << expected_frame_id_; - EXPECT_LT(0u, encoded_frame->rtp_timestamp); - EXPECT_EQ(expected_capture_time_, encoded_frame->reference_time); + EXPECT_EQ(expected_rtp_timestamp_, encoded_frame->rtp_timestamp); + EXPECT_EQ(expected_reference_time_, encoded_frame->reference_time); EXPECT_FALSE(encoded_frame->data.empty()); ++count_frames_delivered_; } @@ -67,7 +69,8 @@ class TestVideoEncoderCallback uint32 expected_frame_id_; uint32 expected_last_referenced_frame_id_; - base::TimeTicks expected_capture_time_; + uint32 expected_rtp_timestamp_; + base::TimeTicks expected_reference_time_; DISALLOW_COPY_AND_ASSIGN(TestVideoEncoderCallback); }; @@ -110,6 +113,12 @@ class VideoEncoderImplTest : public ::testing::Test { 0 /* useless arg to be removed in later change */)); } + void AdvanceClockAndVideoFrameTimestamp() { + testing_clock_->Advance(base::TimeDelta::FromMilliseconds(33)); + video_frame_->set_timestamp( + video_frame_->timestamp() + base::TimeDelta::FromMilliseconds(33)); + } + base::SimpleTestTickClock* testing_clock_; // Owned by CastEnvironment. 
scoped_refptr<TestVideoEncoderCallback> test_video_encoder_callback_; VideoSenderConfig video_config_; @@ -132,15 +141,19 @@ TEST_F(VideoEncoderImplTest, GeneratesKeyFrameThenOnlyDeltaFrames) { EXPECT_EQ(0, test_video_encoder_callback_->count_frames_delivered()); test_video_encoder_callback_->SetExpectedResult( - 0, 0, testing_clock_->NowTicks()); + 0, 0, TimeDeltaToRtpDelta(video_frame_->timestamp(), kVideoFrequency), + testing_clock_->NowTicks()); EXPECT_TRUE(video_encoder_->EncodeVideoFrame( video_frame_, testing_clock_->NowTicks(), frame_encoded_callback)); task_runner_->RunTasks(); for (uint32 frame_id = 1; frame_id < 10; ++frame_id) { - testing_clock_->Advance(base::TimeDelta::FromMilliseconds(33)); + AdvanceClockAndVideoFrameTimestamp(); test_video_encoder_callback_->SetExpectedResult( - frame_id, frame_id - 1, testing_clock_->NowTicks()); + frame_id, + frame_id - 1, + TimeDeltaToRtpDelta(video_frame_->timestamp(), kVideoFrequency), + testing_clock_->NowTicks()); EXPECT_TRUE(video_encoder_->EncodeVideoFrame( video_frame_, testing_clock_->NowTicks(), frame_encoded_callback)); task_runner_->RunTasks(); @@ -161,23 +174,26 @@ TEST_F(VideoEncoderImplTest, EXPECT_EQ(0, test_video_encoder_callback_->count_frames_delivered()); test_video_encoder_callback_->SetExpectedResult( - 0, 0, testing_clock_->NowTicks()); + 0, 0, TimeDeltaToRtpDelta(video_frame_->timestamp(), kVideoFrequency), + testing_clock_->NowTicks()); EXPECT_TRUE(video_encoder_->EncodeVideoFrame( video_frame_, testing_clock_->NowTicks(), frame_encoded_callback)); task_runner_->RunTasks(); - testing_clock_->Advance(base::TimeDelta::FromMilliseconds(33)); + AdvanceClockAndVideoFrameTimestamp(); video_encoder_->LatestFrameIdToReference(0); test_video_encoder_callback_->SetExpectedResult( - 1, 0, testing_clock_->NowTicks()); + 1, 0, TimeDeltaToRtpDelta(video_frame_->timestamp(), kVideoFrequency), + testing_clock_->NowTicks()); EXPECT_TRUE(video_encoder_->EncodeVideoFrame( video_frame_, testing_clock_->NowTicks(), frame_encoded_callback)); task_runner_->RunTasks(); - testing_clock_->Advance(base::TimeDelta::FromMilliseconds(33)); + AdvanceClockAndVideoFrameTimestamp(); video_encoder_->LatestFrameIdToReference(1); test_video_encoder_callback_->SetExpectedResult( - 2, 1, testing_clock_->NowTicks()); + 2, 1, TimeDeltaToRtpDelta(video_frame_->timestamp(), kVideoFrequency), + testing_clock_->NowTicks()); EXPECT_TRUE(video_encoder_->EncodeVideoFrame( video_frame_, testing_clock_->NowTicks(), frame_encoded_callback)); task_runner_->RunTasks(); @@ -185,9 +201,11 @@ TEST_F(VideoEncoderImplTest, video_encoder_->LatestFrameIdToReference(2); for (uint32 frame_id = 3; frame_id < 10; ++frame_id) { - testing_clock_->Advance(base::TimeDelta::FromMilliseconds(33)); + AdvanceClockAndVideoFrameTimestamp(); test_video_encoder_callback_->SetExpectedResult( - frame_id, 2, testing_clock_->NowTicks()); + frame_id, 2, + TimeDeltaToRtpDelta(video_frame_->timestamp(), kVideoFrequency), + testing_clock_->NowTicks()); EXPECT_TRUE(video_encoder_->EncodeVideoFrame( video_frame_, testing_clock_->NowTicks(), frame_encoded_callback)); task_runner_->RunTasks(); diff --git a/media/cast/sender/video_sender.cc b/media/cast/sender/video_sender.cc index 784e8c6..dd8e7f5 100644 --- a/media/cast/sender/video_sender.cc +++ b/media/cast/sender/video_sender.cc @@ -116,7 +116,7 @@ VideoSender::~VideoSender() { void VideoSender::InsertRawVideoFrame( const scoped_refptr<media::VideoFrame>& video_frame, - const base::TimeTicks& capture_time) { + const base::TimeTicks& 
reference_time) { DCHECK(cast_environment_->CurrentlyOn(CastEnvironment::MAIN)); if (cast_initialization_status_ != STATUS_VIDEO_INITIALIZED) { NOTREACHED(); @@ -124,30 +124,35 @@ void VideoSender::InsertRawVideoFrame( } DCHECK(video_encoder_.get()) << "Invalid state"; - RtpTimestamp rtp_timestamp = GetVideoRtpTimestamp(capture_time); + const RtpTimestamp rtp_timestamp = + TimeDeltaToRtpDelta(video_frame->timestamp(), kVideoFrequency); + const base::TimeTicks insertion_time = cast_environment_->Clock()->NowTicks(); + // TODO(miu): Plumb in capture timestamps. For now, make it look like capture + // took zero time by setting the BEGIN and END event to the same timestamp. cast_environment_->Logging()->InsertFrameEvent( - capture_time, FRAME_CAPTURE_BEGIN, VIDEO_EVENT, - rtp_timestamp, kFrameIdUnknown); + insertion_time, FRAME_CAPTURE_BEGIN, VIDEO_EVENT, rtp_timestamp, + kFrameIdUnknown); cast_environment_->Logging()->InsertFrameEvent( - cast_environment_->Clock()->NowTicks(), - FRAME_CAPTURE_END, VIDEO_EVENT, - rtp_timestamp, + insertion_time, FRAME_CAPTURE_END, VIDEO_EVENT, rtp_timestamp, kFrameIdUnknown); // Used by chrome/browser/extension/api/cast_streaming/performance_test.cc TRACE_EVENT_INSTANT2( "cast_perf_test", "InsertRawVideoFrame", TRACE_EVENT_SCOPE_THREAD, - "timestamp", capture_time.ToInternalValue(), + "timestamp", reference_time.ToInternalValue(), "rtp_timestamp", rtp_timestamp); - // Drop the frame if its reference timestamp is not an increase over the last - // frame's. This protects: 1) the duration calculations that assume - // timestamps are monotonically non-decreasing, and 2) assumptions made deeper - // in the implementation where each frame's RTP timestamp needs to be unique. + // Drop the frame if either its RTP or reference timestamp is not an increase + // over the last frame's. This protects: 1) the duration calculations that + // assume timestamps are monotonically non-decreasing, and 2) assumptions made + // deeper in the implementation where each frame's RTP timestamp needs to be + // unique. if (!last_enqueued_frame_reference_time_.is_null() && - capture_time <= last_enqueued_frame_reference_time_) { - VLOG(1) << "Dropping video frame: Reference time did not increase."; + (!IsNewerRtpTimestamp(rtp_timestamp, + last_enqueued_frame_rtp_timestamp_) || + reference_time <= last_enqueued_frame_reference_time_)) { + VLOG(1) << "Dropping video frame: RTP or reference time did not increase."; return; } @@ -157,7 +162,7 @@ void VideoSender::InsertRawVideoFrame( // guess will be eliminated when |duration_in_encoder_| is updated in // OnEncodedVideoFrame(). const base::TimeDelta duration_added_by_next_frame = frames_in_encoder_ > 0 ? 
- capture_time - last_enqueued_frame_reference_time_ : + reference_time - last_enqueued_frame_reference_time_ : base::TimeDelta::FromSecondsD(1.0 / max_frame_rate_); if (ShouldDropNextFrame(duration_added_by_next_frame)) { @@ -173,7 +178,7 @@ void VideoSender::InsertRawVideoFrame( } uint32 bitrate = congestion_control_->GetBitrate( - capture_time + target_playout_delay_, target_playout_delay_); + reference_time + target_playout_delay_, target_playout_delay_); if (bitrate != last_bitrate_) { video_encoder_->SetBitRate(bitrate); last_bitrate_ = bitrate; @@ -181,13 +186,14 @@ void VideoSender::InsertRawVideoFrame( if (video_encoder_->EncodeVideoFrame( video_frame, - capture_time, + reference_time, base::Bind(&VideoSender::OnEncodedVideoFrame, weak_factory_.GetWeakPtr(), bitrate))) { frames_in_encoder_++; duration_in_encoder_ += duration_added_by_next_frame; - last_enqueued_frame_reference_time_ = capture_time; + last_enqueued_frame_rtp_timestamp_ = rtp_timestamp; + last_enqueued_frame_reference_time_ = reference_time; } else { VLOG(1) << "Encoder rejected a frame. Skipping..."; } diff --git a/media/cast/sender/video_sender.h b/media/cast/sender/video_sender.h index ebe0b10..6c0bad7 100644 --- a/media/cast/sender/video_sender.h +++ b/media/cast/sender/video_sender.h @@ -54,7 +54,7 @@ class VideoSender : public FrameSender, // Note: It is invalid to call this method if InitializationResult() returns // anything but STATUS_VIDEO_INITIALIZED. void InsertRawVideoFrame(const scoped_refptr<media::VideoFrame>& video_frame, - const base::TimeTicks& capture_time); + const base::TimeTicks& reference_time); protected: virtual int GetNumberOfFramesInEncoder() const override; @@ -83,6 +83,7 @@ class VideoSender : public FrameSender, base::TimeDelta duration_in_encoder_; // The timestamp of the frame that was last enqueued in |video_encoder_|. 
+ RtpTimestamp last_enqueued_frame_rtp_timestamp_; base::TimeTicks last_enqueued_frame_reference_time_; // Remember what we set the bitrate to before, no need to set it again if diff --git a/media/cast/sender/video_sender_unittest.cc b/media/cast/sender/video_sender_unittest.cc index d0cbdb2..29cbfb4 100644 --- a/media/cast/sender/video_sender_unittest.cc +++ b/media/cast/sender/video_sender_unittest.cc @@ -221,19 +221,25 @@ class VideoSenderTest : public ::testing::Test { } scoped_refptr<media::VideoFrame> GetNewVideoFrame() { + if (first_frame_timestamp_.is_null()) + first_frame_timestamp_ = testing_clock_->NowTicks(); gfx::Size size(kWidth, kHeight); scoped_refptr<media::VideoFrame> video_frame = media::VideoFrame::CreateFrame( - VideoFrame::I420, size, gfx::Rect(size), size, base::TimeDelta()); + VideoFrame::I420, size, gfx::Rect(size), size, + testing_clock_->NowTicks() - first_frame_timestamp_); PopulateVideoFrame(video_frame.get(), last_pixel_value_++); return video_frame; } scoped_refptr<media::VideoFrame> GetLargeNewVideoFrame() { + if (first_frame_timestamp_.is_null()) + first_frame_timestamp_ = testing_clock_->NowTicks(); gfx::Size size(kWidth, kHeight); scoped_refptr<media::VideoFrame> video_frame = media::VideoFrame::CreateFrame( - VideoFrame::I420, size, gfx::Rect(size), size, base::TimeDelta()); + VideoFrame::I420, size, gfx::Rect(size), size, + testing_clock_->NowTicks() - first_frame_timestamp_); PopulateVideoFrameWithNoise(video_frame.get()); return video_frame; } @@ -250,6 +256,7 @@ class VideoSenderTest : public ::testing::Test { std::vector<uint32> stored_bitrates_; scoped_refptr<CastEnvironment> cast_environment_; int last_pixel_value_; + base::TimeTicks first_frame_timestamp_; DISALLOW_COPY_AND_ASSIGN(VideoSenderTest); }; @@ -258,8 +265,8 @@ TEST_F(VideoSenderTest, BuiltInEncoder) { EXPECT_EQ(STATUS_VIDEO_INITIALIZED, InitEncoder(false, true)); scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame(); - const base::TimeTicks capture_time = testing_clock_->NowTicks(); - video_sender_->InsertRawVideoFrame(video_frame, capture_time); + const base::TimeTicks reference_time = testing_clock_->NowTicks(); + video_sender_->InsertRawVideoFrame(video_frame, reference_time); task_runner_->RunTasks(); EXPECT_LE(1, transport_.number_of_rtp_packets()); @@ -271,12 +278,12 @@ TEST_F(VideoSenderTest, ExternalEncoder) { scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame(); - const base::TimeTicks capture_time = testing_clock_->NowTicks(); - video_sender_->InsertRawVideoFrame(video_frame, capture_time); + const base::TimeTicks reference_time = testing_clock_->NowTicks(); + video_sender_->InsertRawVideoFrame(video_frame, reference_time); task_runner_->RunTasks(); - video_sender_->InsertRawVideoFrame(video_frame, capture_time); + video_sender_->InsertRawVideoFrame(video_frame, reference_time); task_runner_->RunTasks(); - video_sender_->InsertRawVideoFrame(video_frame, capture_time); + video_sender_->InsertRawVideoFrame(video_frame, reference_time); task_runner_->RunTasks(); // Fixed bitrate is used for external encoder. 
Bitrate is only once @@ -298,8 +305,8 @@ TEST_F(VideoSenderTest, RtcpTimer) { scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame(); - const base::TimeTicks capture_time = testing_clock_->NowTicks(); - video_sender_->InsertRawVideoFrame(video_frame, capture_time); + const base::TimeTicks reference_time = testing_clock_->NowTicks(); + video_sender_->InsertRawVideoFrame(video_frame, reference_time); // Make sure that we send at least one RTCP packet. base::TimeDelta max_rtcp_timeout = @@ -322,8 +329,8 @@ TEST_F(VideoSenderTest, ResendTimer) { scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame(); - const base::TimeTicks capture_time = testing_clock_->NowTicks(); - video_sender_->InsertRawVideoFrame(video_frame, capture_time); + const base::TimeTicks reference_time = testing_clock_->NowTicks(); + video_sender_->InsertRawVideoFrame(video_frame, reference_time); // ACK the key frame. RtcpCastMessage cast_feedback(1); @@ -332,7 +339,7 @@ TEST_F(VideoSenderTest, ResendTimer) { video_sender_->OnReceivedCastFeedback(cast_feedback); video_frame = GetNewVideoFrame(); - video_sender_->InsertRawVideoFrame(video_frame, capture_time); + video_sender_->InsertRawVideoFrame(video_frame, reference_time); base::TimeDelta max_resend_timeout = base::TimeDelta::FromMilliseconds(1 + kDefaultRtpMaxDelayMs); @@ -354,8 +361,8 @@ TEST_F(VideoSenderTest, LogAckReceivedEvent) { for (int i = 0; i < num_frames; i++) { scoped_refptr<media::VideoFrame> video_frame = GetNewVideoFrame(); - const base::TimeTicks capture_time = testing_clock_->NowTicks(); - video_sender_->InsertRawVideoFrame(video_frame, capture_time); + const base::TimeTicks reference_time = testing_clock_->NowTicks(); + video_sender_->InsertRawVideoFrame(video_frame, reference_time); RunTasks(33); } diff --git a/media/cast/sender/vp8_encoder.cc b/media/cast/sender/vp8_encoder.cc index 918b1f4..da42fc0 100644 --- a/media/cast/sender/vp8_encoder.cc +++ b/media/cast/sender/vp8_encoder.cc @@ -4,8 +4,6 @@ #include "media/cast/sender/vp8_encoder.h" -#include <vector> - #include "base/logging.h" #include "media/base/video_frame.h" #include "media/cast/cast_defines.h" @@ -23,12 +21,15 @@ Vp8Encoder::Vp8Encoder(const VideoSenderConfig& video_config, use_multiple_video_buffers_( cast_config_.max_number_of_video_buffers_used == kNumberOfVp8VideoBuffers), + raw_image_(nullptr), key_frame_requested_(true), first_frame_received_(false), last_encoded_frame_id_(kStartFrameId), last_acked_frame_id_(kStartFrameId), frame_id_to_reference_(kStartFrameId - 1), undroppable_frames_(0) { + config_.g_timebase.den = 0; // Not initialized. + // VP8 have 3 buffers available for prediction, with // max_number_of_video_buffers_used set to 1 we maximize the coding efficiency // however in this mode we can not skip frames in the receiver to catch up @@ -44,14 +45,15 @@ Vp8Encoder::Vp8Encoder(const VideoSenderConfig& video_config, } Vp8Encoder::~Vp8Encoder() { - vpx_codec_destroy(encoder_.get()); + DCHECK(thread_checker_.CalledOnValidThread()); + if (is_initialized()) + vpx_codec_destroy(&encoder_); vpx_img_free(raw_image_); } void Vp8Encoder::Initialize() { DCHECK(thread_checker_.CalledOnValidThread()); - config_.reset(new vpx_codec_enc_cfg_t()); - encoder_.reset(new vpx_codec_ctx_t()); + DCHECK(!is_initialized()); // Creating a wrapper to the image - setting image data to NULL. Actual // pointer will be set during encode. 
Setting align to 1, as it is @@ -63,64 +65,65 @@ void Vp8Encoder::Initialize() { buffer_state_[i].frame_id = kStartFrameId; buffer_state_[i].state = kBufferStartState; } - InitEncode(cast_config_.number_of_encode_threads); -} -void Vp8Encoder::InitEncode(int number_of_encode_threads) { - DCHECK(thread_checker_.CalledOnValidThread()); // Populate encoder configuration with default values. - if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), config_.get(), 0)) { - DCHECK(false) << "Invalid return value"; + if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &config_, 0)) { + NOTREACHED() << "Invalid return value"; + config_.g_timebase.den = 0; // Do not call vpx_codec_destroy() in dtor. + return; } - config_->g_w = cast_config_.width; - config_->g_h = cast_config_.height; - config_->rc_target_bitrate = cast_config_.start_bitrate / 1000; // In kbit/s. - - // Setting the codec time base. - config_->g_timebase.num = 1; - config_->g_timebase.den = kVideoFrequency; - config_->g_lag_in_frames = 0; - config_->kf_mode = VPX_KF_DISABLED; + + config_.g_threads = cast_config_.number_of_encode_threads; + config_.g_w = cast_config_.width; + config_.g_h = cast_config_.height; + config_.g_timebase.num = 1; + config_.g_timebase.den = kVideoFrequency; if (use_multiple_video_buffers_) { // We must enable error resilience when we use multiple buffers, due to // codec requirements. - config_->g_error_resilient = 1; + config_.g_error_resilient = 1; } - config_->g_threads = number_of_encode_threads; + config_.g_pass = VPX_RC_ONE_PASS; + config_.g_lag_in_frames = 0; // Immediate data output for each frame. // Rate control settings. - // Never allow the encoder to drop frame internally. - config_->rc_dropframe_thresh = 0; - config_->rc_end_usage = VPX_CBR; - config_->g_pass = VPX_RC_ONE_PASS; - config_->rc_resize_allowed = 0; - config_->rc_min_quantizer = cast_config_.min_qp; - config_->rc_max_quantizer = cast_config_.max_qp; - config_->rc_undershoot_pct = 100; - config_->rc_overshoot_pct = 15; - config_->rc_buf_initial_sz = 500; - config_->rc_buf_optimal_sz = 600; - config_->rc_buf_sz = 1000; + config_.rc_dropframe_thresh = 0; // The encoder may not drop any frames. + config_.rc_resize_allowed = 0; // TODO(miu): Why not? Investigate this. + config_.rc_end_usage = VPX_CBR; + config_.rc_target_bitrate = cast_config_.start_bitrate / 1000; // In kbit/s. + config_.rc_min_quantizer = cast_config_.min_qp; + config_.rc_max_quantizer = cast_config_.max_qp; + config_.rc_undershoot_pct = 100; + config_.rc_overshoot_pct = 15; + config_.rc_buf_initial_sz = 500; + config_.rc_buf_optimal_sz = 600; + config_.rc_buf_sz = 1000; // TODO(miu): Adjust relative to playout delay? + + config_.kf_mode = VPX_KF_DISABLED; // set the maximum target size of any key-frame. - uint32 rc_max_intra_target = MaxIntraTarget(config_->rc_buf_optimal_sz); + uint32 rc_max_intra_target = MaxIntraTarget(config_.rc_buf_optimal_sz); vpx_codec_flags_t flags = 0; - if (vpx_codec_enc_init( - encoder_.get(), vpx_codec_vp8_cx(), config_.get(), flags)) { - DCHECK(false) << "vpx_codec_enc_init() failed."; - encoder_.reset(); + if (vpx_codec_enc_init(&encoder_, vpx_codec_vp8_cx(), &config_, flags)) { + NOTREACHED() << "vpx_codec_enc_init() failed."; + config_.g_timebase.den = 0; // Do not call vpx_codec_destroy() in dtor. 
return; } - vpx_codec_control(encoder_.get(), VP8E_SET_STATIC_THRESHOLD, 1); - vpx_codec_control(encoder_.get(), VP8E_SET_NOISE_SENSITIVITY, 0); - vpx_codec_control(encoder_.get(), VP8E_SET_CPUUSED, -6); + vpx_codec_control(&encoder_, VP8E_SET_STATIC_THRESHOLD, 1); + vpx_codec_control(&encoder_, VP8E_SET_NOISE_SENSITIVITY, 0); + vpx_codec_control(&encoder_, VP8E_SET_CPUUSED, -6); vpx_codec_control( - encoder_.get(), VP8E_SET_MAX_INTRA_BITRATE_PCT, rc_max_intra_target); + &encoder_, VP8E_SET_MAX_INTRA_BITRATE_PCT, rc_max_intra_target); } -bool Vp8Encoder::Encode(const scoped_refptr<media::VideoFrame>& video_frame, - EncodedFrame* encoded_image) { +void Vp8Encoder::Encode(const scoped_refptr<media::VideoFrame>& video_frame, + const base::TimeTicks& reference_time, + EncodedFrame* encoded_frame) { DCHECK(thread_checker_.CalledOnValidThread()); + DCHECK(encoded_frame); + + CHECK(is_initialized()); // No illegal reference to |config_| or |encoder_|. + // Image in vpx_image_t format. // Input image is const. VP8's raw image is not defined as const. raw_image_->planes[VPX_PLANE_Y] = @@ -169,61 +172,59 @@ bool Vp8Encoder::Encode(const scoped_refptr<media::VideoFrame>& video_frame, (video_frame->timestamp() - first_frame_timestamp_).InMicroseconds() * kVideoFrequency / base::Time::kMicrosecondsPerSecond; - if (vpx_codec_encode(encoder_.get(), - raw_image_, - timestamp, - duration, - flags, - VPX_DL_REALTIME) != VPX_CODEC_OK) { - LOG(ERROR) << "Failed to encode for once."; - return false; - } - - // Get encoded frame. + CHECK_EQ(vpx_codec_encode(&encoder_, + raw_image_, + timestamp, + duration, + flags, + VPX_DL_REALTIME), + VPX_CODEC_OK) + << "BUG: Invalid arguments passed to vpx_codec_encode()."; + + // Pull data from the encoder, populating a new EncodedFrame. + encoded_frame->frame_id = ++last_encoded_frame_id_; const vpx_codec_cx_pkt_t* pkt = NULL; vpx_codec_iter_t iter = NULL; - bool is_key_frame = false; - while ((pkt = vpx_codec_get_cx_data(encoder_.get(), &iter)) != NULL) { + while ((pkt = vpx_codec_get_cx_data(&encoder_, &iter)) != NULL) { if (pkt->kind != VPX_CODEC_CX_FRAME_PKT) continue; - encoded_image->data.assign( + if (pkt->data.frame.flags & VPX_FRAME_IS_KEY) { + // TODO(hubbe): Replace "dependency" with a "bool is_key_frame". + encoded_frame->dependency = EncodedFrame::KEY; + encoded_frame->referenced_frame_id = encoded_frame->frame_id; + } else { + encoded_frame->dependency = EncodedFrame::DEPENDENT; + // Frame dependencies could theoretically be relaxed by looking for the + // VPX_FRAME_IS_DROPPABLE flag, but in recent testing (Oct 2014), this + // flag never seems to be set. + encoded_frame->referenced_frame_id = latest_frame_id_to_reference; + } + encoded_frame->rtp_timestamp = timestamp; + encoded_frame->reference_time = reference_time; + encoded_frame->data.assign( static_cast<const uint8*>(pkt->data.frame.buf), static_cast<const uint8*>(pkt->data.frame.buf) + pkt->data.frame.sz); - is_key_frame = !!(pkt->data.frame.flags & VPX_FRAME_IS_KEY); break; // Done, since all data is provided in one CX_FRAME_PKT packet. } - // Don't update frame_id for zero size frames. - if (encoded_image->data.empty()) - return true; - - // Populate the encoded frame. - encoded_image->frame_id = ++last_encoded_frame_id_; - if (is_key_frame) { - // TODO(Hubbe): Replace "dependency" with a "bool is_key_frame". 
- encoded_image->dependency = EncodedFrame::KEY; - encoded_image->referenced_frame_id = encoded_image->frame_id; - } else { - encoded_image->dependency = EncodedFrame::DEPENDENT; - encoded_image->referenced_frame_id = latest_frame_id_to_reference; - } + DCHECK(!encoded_frame->data.empty()) + << "BUG: Encoder must provide data since lagged encoding is disabled."; - DVLOG(1) << "VP8 encoded frame_id " << encoded_image->frame_id - << ", sized:" << encoded_image->data.size(); + DVLOG(1) << "VP8 encoded frame_id " << encoded_frame->frame_id + << ", sized:" << encoded_frame->data.size(); - if (is_key_frame) { + if (encoded_frame->dependency == EncodedFrame::KEY) { key_frame_requested_ = false; for (int i = 0; i < kNumberOfVp8VideoBuffers; ++i) { buffer_state_[i].state = kBufferSent; - buffer_state_[i].frame_id = encoded_image->frame_id; + buffer_state_[i].frame_id = encoded_frame->frame_id; } } else { if (buffer_to_update != kNoBuffer) { buffer_state_[buffer_to_update].state = kBufferSent; - buffer_state_[buffer_to_update].frame_id = encoded_image->frame_id; + buffer_state_[buffer_to_update].frame_id = encoded_frame->frame_id; } } - return true; } uint32 Vp8Encoder::GetCodecReferenceFlags(vpx_codec_flags_t* flags) { @@ -370,15 +371,19 @@ void Vp8Encoder::GetCodecUpdateFlags(Vp8Buffers buffer_to_update, void Vp8Encoder::UpdateRates(uint32 new_bitrate) { DCHECK(thread_checker_.CalledOnValidThread()); + + if (!is_initialized()) + return; + uint32 new_bitrate_kbit = new_bitrate / 1000; - if (config_->rc_target_bitrate == new_bitrate_kbit) + if (config_.rc_target_bitrate == new_bitrate_kbit) return; - config_->rc_target_bitrate = new_bitrate_kbit; + config_.rc_target_bitrate = new_bitrate_kbit; // Update encoder context. - if (vpx_codec_enc_config_set(encoder_.get(), config_.get())) { - DCHECK(false) << "Invalid return value"; + if (vpx_codec_enc_config_set(&encoder_, &config_)) { + NOTREACHED() << "Invalid return value"; } } diff --git a/media/cast/sender/vp8_encoder.h b/media/cast/sender/vp8_encoder.h index 0342703..cb9861f 100644 --- a/media/cast/sender/vp8_encoder.h +++ b/media/cast/sender/vp8_encoder.h @@ -8,7 +8,6 @@ #include "base/basictypes.h" #include "base/memory/scoped_ptr.h" #include "base/threading/thread_checker.h" -#include "base/time/time.h" #include "media/cast/cast_config.h" #include "media/cast/sender/software_video_encoder.h" #include "third_party/libvpx/source/libvpx/vpx/vpx_encoder.h" @@ -17,37 +16,27 @@ namespace media { class VideoFrame; } -// VPX forward declaration. -typedef struct vpx_codec_ctx vpx_enc_ctx_t; - namespace media { namespace cast { -const int kNumberOfVp8VideoBuffers = 3; - class Vp8Encoder : public SoftwareVideoEncoder { public: Vp8Encoder(const VideoSenderConfig& video_config, int max_unacked_frames); virtual ~Vp8Encoder(); - // Initialize the encoder before Encode() can be called. This method - // must be called on the thread that Encode() is called. + // SoftwareVideoEncoder implementations. virtual void Initialize() override; - - // Encode a raw image (as a part of a video stream). - virtual bool Encode(const scoped_refptr<media::VideoFrame>& video_frame, - EncodedFrame* encoded_image) override; - - // Update the encoder with a new target bit rate. + virtual void Encode(const scoped_refptr<media::VideoFrame>& video_frame, + const base::TimeTicks& reference_time, + EncodedFrame* encoded_frame) override; virtual void UpdateRates(uint32 new_bitrate) override; - - // Set the next frame to be a key frame. 
virtual void GenerateKeyFrame() override; - virtual void LatestFrameIdToReference(uint32 frame_id) override; private: + enum { kNumberOfVp8VideoBuffers = 3 }; + enum Vp8Buffers { kAltRefBuffer = 0, kGoldenBuffer = 1, @@ -60,12 +49,17 @@ class Vp8Encoder : public SoftwareVideoEncoder { kBufferSent, kBufferAcked }; + struct BufferState { uint32 frame_id; Vp8BufferState state; }; - void InitEncode(int number_of_cores); + bool is_initialized() const { + // Initialize() sets the timebase denominator value to non-zero if the + // encoder is successfully initialized, and it is zero otherwise. + return config_.g_timebase.den != 0; + } // Calculate the max target in % for a keyframe. uint32 MaxIntraTarget(uint32 optimal_buffer_size) const; @@ -84,9 +78,12 @@ class Vp8Encoder : public SoftwareVideoEncoder { const VideoSenderConfig cast_config_; const bool use_multiple_video_buffers_; - // VP8 internal objects. - scoped_ptr<vpx_codec_enc_cfg_t> config_; - scoped_ptr<vpx_enc_ctx_t> encoder_; + // VP8 internal objects. These are valid for use only while is_initialized() + // returns true. + vpx_codec_enc_cfg_t config_; + vpx_codec_ctx_t encoder_; + + // Wrapper for access to YUV data planes in a media::VideoFrame. vpx_image_t* raw_image_; bool key_frame_requested_; diff --git a/media/cast/test/end2end_unittest.cc b/media/cast/test/end2end_unittest.cc index 6c19be7..7845ca9 100644 --- a/media/cast/test/end2end_unittest.cc +++ b/media/cast/test/end2end_unittest.cc @@ -547,16 +547,16 @@ class End2EndTest : public ::testing::Test { for (int i = 0; i < count; ++i) { scoped_ptr<AudioBus> audio_bus(audio_bus_factory_->NextAudioBus( base::TimeDelta::FromMilliseconds(kAudioFrameDurationMs))); - const base::TimeTicks capture_time = + const base::TimeTicks reference_time = testing_clock_sender_->NowTicks() + i * base::TimeDelta::FromMilliseconds(kAudioFrameDurationMs); if (will_be_checked) { test_receiver_audio_callback_->AddExpectedResult( *audio_bus, - capture_time + + reference_time + base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs)); } - audio_frame_input_->InsertAudio(audio_bus.Pass(), capture_time); + audio_frame_input_->InsertAudio(audio_bus.Pass(), reference_time); } } @@ -565,14 +565,14 @@ class End2EndTest : public ::testing::Test { for (int i = 0; i < count; ++i) { scoped_ptr<AudioBus> audio_bus(audio_bus_factory_->NextAudioBus( base::TimeDelta::FromMilliseconds(kAudioFrameDurationMs))); - const base::TimeTicks capture_time = + const base::TimeTicks reference_time = testing_clock_sender_->NowTicks() + i * base::TimeDelta::FromMilliseconds(kAudioFrameDurationMs); test_receiver_audio_callback_->AddExpectedResult( *audio_bus, - capture_time + delay + + reference_time + delay + base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs)); - audio_frame_input_->InsertAudio(audio_bus.Pass(), capture_time); + audio_frame_input_->InsertAudio(audio_bus.Pass(), reference_time); } } @@ -644,23 +644,32 @@ class End2EndTest : public ::testing::Test { task_runner_->RunTasks(); } - void SendVideoFrame(int start_value, const base::TimeTicks& capture_time) { + void SendVideoFrame(int start_value, const base::TimeTicks& reference_time) { if (start_time_.is_null()) - start_time_ = capture_time; - base::TimeDelta time_diff = capture_time - start_time_; + start_time_ = reference_time; + // TODO(miu): Consider using a slightly skewed clock for the media timestamp + // since the video clock may not be the same as the reference clock. 
+ const base::TimeDelta time_diff = reference_time - start_time_; gfx::Size size(video_sender_config_.width, video_sender_config_.height); EXPECT_TRUE(VideoFrame::IsValidConfig( VideoFrame::I420, size, gfx::Rect(size), size)); scoped_refptr<media::VideoFrame> video_frame = media::VideoFrame::CreateFrame( - VideoFrame::I420, size, gfx::Rect(size), size, time_diff); + VideoFrame::I420, size, gfx::Rect(size), size, + time_diff); PopulateVideoFrame(video_frame.get(), start_value); - video_frame_input_->InsertRawVideoFrame(video_frame, capture_time); + video_frame_input_->InsertRawVideoFrame(video_frame, reference_time); } - void SendFakeVideoFrame(const base::TimeTicks& capture_time) { - video_frame_input_->InsertRawVideoFrame( - media::VideoFrame::CreateBlackFrame(gfx::Size(2, 2)), capture_time); + void SendFakeVideoFrame(const base::TimeTicks& reference_time) { + if (start_time_.is_null()) + start_time_ = reference_time; + const scoped_refptr<media::VideoFrame> black_frame = + media::VideoFrame::CreateBlackFrame(gfx::Size(2, 2)); + // TODO(miu): Consider using a slightly skewed clock for the media timestamp + // since the video clock may not be the same as the reference clock. + black_frame->set_timestamp(reference_time - start_time_); + video_frame_input_->InsertRawVideoFrame(black_frame, reference_time); } void RunTasks(int ms) { @@ -1004,20 +1013,19 @@ TEST_F(End2EndTest, DropEveryOtherFrame3Buffers) { sender_to_receiver_.DropAllPacketsBelongingToOddFrames(); int video_start = kVideoStart; - base::TimeTicks capture_time; + base::TimeTicks reference_time; int i = 0; for (; i < 20; ++i) { - capture_time = testing_clock_sender_->NowTicks(); - SendVideoFrame(video_start, capture_time); + reference_time = testing_clock_sender_->NowTicks(); + SendVideoFrame(video_start, reference_time); if (i % 2 == 0) { test_receiver_video_callback_->AddExpectedResult( video_start, video_sender_config_.width, video_sender_config_.height, - capture_time + - base::TimeDelta::FromMilliseconds(target_delay), + reference_time + base::TimeDelta::FromMilliseconds(target_delay), i == 0); // GetRawVideoFrame will not return the frame until we are close in @@ -1051,14 +1059,15 @@ TEST_F(End2EndTest, CryptoVideo) { int frames_counter = 0; for (; frames_counter < 3; ++frames_counter) { - const base::TimeTicks capture_time = testing_clock_sender_->NowTicks(); - SendVideoFrame(frames_counter, capture_time); + const base::TimeTicks reference_time = testing_clock_sender_->NowTicks(); + SendVideoFrame(frames_counter, reference_time); test_receiver_video_callback_->AddExpectedResult( frames_counter, video_sender_config_.width, video_sender_config_.height, - capture_time + base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs), + reference_time + + base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs), true); RunTasks(kFrameTimerMs); @@ -1108,15 +1117,16 @@ TEST_F(End2EndTest, VideoLogging) { int video_start = kVideoStart; const int num_frames = 5; for (int i = 0; i < num_frames; ++i) { - base::TimeTicks capture_time = testing_clock_sender_->NowTicks(); + base::TimeTicks reference_time = testing_clock_sender_->NowTicks(); test_receiver_video_callback_->AddExpectedResult( video_start, video_sender_config_.width, video_sender_config_.height, - capture_time + base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs), + reference_time + + base::TimeDelta::FromMilliseconds(kTargetPlayoutDelayMs), true); - SendVideoFrame(video_start, capture_time); + SendVideoFrame(video_start, reference_time); RunTasks(kFrameTimerMs); 
cast_receiver_->RequestDecodedVideoFrame( @@ -1489,7 +1499,7 @@ TEST_F(End2EndTest, TestSetPlayoutDelay) { int64 delta = (video_ticks_[i].second - video_ticks_[i-1].second).InMilliseconds(); if (delta > 100) { - EXPECT_EQ(delta, kNewDelay - kTargetPlayoutDelayMs + kFrameTimerMs); + EXPECT_EQ(kNewDelay - kTargetPlayoutDelayMs + kFrameTimerMs, delta); EXPECT_EQ(0u, jump); jump = i; } |
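
To round out point 4 of the commit message, a condensed sketch of the lifetime pattern the diff adopts in Vp8Encoder: the vpx structs become plain members, and a zeroed config_.g_timebase.den acts as the "not initialized" sentinel so vpx_codec_destroy() runs only after a successful vpx_codec_enc_init(). The class and method names below (MinimalVp8Encoder, Initialize) are illustrative; the real class also manages raw_image_, buffer state, and thread checking.

```cpp
#include "vpx/vp8cx.h"
#include "vpx/vpx_encoder.h"

class MinimalVp8Encoder {
 public:
  MinimalVp8Encoder() { config_.g_timebase.den = 0; }  // Not initialized yet.

  ~MinimalVp8Encoder() {
    // Only tear down the codec if Initialize() fully succeeded.
    if (is_initialized())
      vpx_codec_destroy(&encoder_);
  }

  bool Initialize(int width, int height) {
    if (vpx_codec_enc_config_default(vpx_codec_vp8_cx(), &config_, 0)) {
      config_.g_timebase.den = 0;  // Keep the sentinel cleared on failure.
      return false;
    }
    config_.g_w = width;
    config_.g_h = height;
    config_.g_timebase.num = 1;
    config_.g_timebase.den = 90000;  // Non-zero marks "configured".
    if (vpx_codec_enc_init(&encoder_, vpx_codec_vp8_cx(), &config_, 0)) {
      config_.g_timebase.den = 0;  // Init failed; dtor must not destroy.
      return false;
    }
    return true;
  }

  bool is_initialized() const { return config_.g_timebase.den != 0; }

 private:
  // Composition instead of heap indirection: no scoped_ptr wrappers.
  vpx_codec_enc_cfg_t config_;
  vpx_codec_ctx_t encoder_;
};
```

Reusing a field of the config struct as the sentinel avoids a separate bool that could drift out of sync with the codec's actual state.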