author      mcasas <mcasas@chromium.org>            2015-06-10 17:11:39 -0700
committer   Commit bot <commit-bot@chromium.org>    2015-06-11 00:13:11 +0000
commit      4337a747ccb21627fa8c4665550312ed3b560b0f (patch)
tree        e0cae64951c2bc44f035c2b6556456f4fab8ffba /media
parent      584c8eadcb8580822907a11443a9af9aa16d7c96 (diff)
VideoFrame cleanup: change |allow_overlay_| to metadata and |has_alpha| to Format
VideoFrame::WrapNativeTexture() has a parameter
|allow_overlay| that is overwhelmingly |false|
across the codebase. This CL moves the parameter
to a metadata entry, which is false by default
unless explicitly set.
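For illustration, a producer that wants its frames
considered for overlay promotion now opts in after
frame creation (a sketch; |frame| stands for any
texture-backed VideoFrame):

  frame->metadata()->SetBoolean(
      media::VideoFrameMetadata::ALLOW_OVERLAY, true);

while consumers query the flag with the new
convenience helper:

  if (frame->metadata()->IsTrue(
          media::VideoFrameMetadata::ALLOW_OVERLAY)) {
    PromoteToOverlay(frame);  // PromoteToOverlay() is hypothetical.
  }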
VideoFrame::WrapNativeTexture() also loses the
|has_alpha| parameter and instead takes a
VideoFrame::Format; all the call sites I found use
ARGB, so a check (an error log plus early return)
was added to make this assumption evident and to
see if the bots agree. This is in preparation for
texture-backed frames that are not ARGB, e.g. any
4:2:2 format.
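For reference, the new signature keeps the existing
arguments and only prepends the format (argument
names below are illustrative placeholders):

  scoped_refptr<media::VideoFrame> frame =
      media::VideoFrame::WrapNativeTexture(
          media::VideoFrame::ARGB,  // As of this CL, any other
                                    // Format returns nullptr.
          mailbox_holder, release_cb, coded_size,
          visible_rect, natural_size, timestamp);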
BUG=440843, 489744
CQ_INCLUDE_TRYBOTS=tryserver.blink:linux_blink_rel
TBR=bbudge@chromium.org for content/renderer/pepper/video_decoder_shim.cc,
    reveman@chromium.org for cc/resources/video_resource_updater.cc and
    a few associated unittests.
In both cases it is a mechanical change derived
from the VideoFrame method renaming:
  video_frame->allow_overlay()
turns into
  video_frame->metadata()->IsTrue(media::VideoFrameMetadata::ALLOW_OVERLAY)
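The IsTrue() helper introduced by this CL returns
true only when the key is both present and set to
true:

  bool VideoFrameMetadata::IsTrue(Key key) const {
    bool value = false;
    return GetBoolean(key, &value) && value;  // false if unset.
  }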
Review URL: https://codereview.chromium.org/1150863008
Cr-Commit-Position: refs/heads/master@{#333855}
Diffstat (limited to 'media')
-rw-r--r--  media/base/video_frame.cc                          | 30
-rw-r--r--  media/base/video_frame.h                           | 15
-rw-r--r--  media/base/video_frame_metadata.cc                 |  5
-rw-r--r--  media/base/video_frame_metadata.h                  | 12
-rw-r--r--  media/base/video_frame_unittest.cc                 | 14
-rw-r--r--  media/filters/decrypting_video_decoder.cc          |  2
-rw-r--r--  media/filters/fake_video_decoder_unittest.cc       |  2
-rw-r--r--  media/filters/ffmpeg_video_decoder_unittest.cc     |  2
-rw-r--r--  media/filters/gpu_video_decoder.cc                 |  6
-rw-r--r--  media/filters/video_frame_stream_unittest.cc       | 11
-rw-r--r--  media/filters/video_renderer_algorithm.cc          |  2
-rw-r--r--  media/renderers/video_renderer_impl.cc             |  4
-rw-r--r--  media/video/gpu_memory_buffer_video_frame_pool.cc  |  7
13 files changed, 60 insertions, 52 deletions
diff --git a/media/base/video_frame.cc b/media/base/video_frame.cc
index aac26e5..e0219fb 100644
--- a/media/base/video_frame.cc
+++ b/media/base/video_frame.cc
@@ -252,22 +252,24 @@ scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
 
 // static
 scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture(
+    Format format,
     const gpu::MailboxHolder& mailbox_holder,
     const ReleaseMailboxCB& mailbox_holder_release_cb,
     const gfx::Size& coded_size,
     const gfx::Rect& visible_rect,
     const gfx::Size& natural_size,
-    base::TimeDelta timestamp,
-    bool allow_overlay,
-    bool has_alpha) {
+    base::TimeDelta timestamp) {
+  if (format != ARGB) {
+    DLOG(ERROR) << "Only ARGB pixel format supported, got "
+                << FormatToString(format);
+    return nullptr;
+  }
   gpu::MailboxHolder mailbox_holders[kMaxPlanes];
   mailbox_holders[kARGBPlane] = mailbox_holder;
-  Format texture_format = has_alpha ? ARGB : XRGB;
   scoped_refptr<VideoFrame> frame(
-      new VideoFrame(texture_format, STORAGE_TEXTURE, coded_size, visible_rect,
+      new VideoFrame(format, STORAGE_TEXTURE, coded_size, visible_rect,
                      natural_size, mailbox_holders, timestamp));
   frame->mailbox_holders_release_cb_ = mailbox_holder_release_cb;
-  frame->allow_overlay_ = allow_overlay;
   return frame;
 }
 
@@ -280,8 +282,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapYUV420NativeTextures(
     const gfx::Size& coded_size,
     const gfx::Rect& visible_rect,
     const gfx::Size& natural_size,
-    base::TimeDelta timestamp,
-    bool allow_overlay) {
+    base::TimeDelta timestamp) {
   gpu::MailboxHolder mailbox_holders[kMaxPlanes];
   mailbox_holders[kYPlane] = y_mailbox_holder;
   mailbox_holders[kUPlane] = u_mailbox_holder;
@@ -290,7 +291,6 @@ scoped_refptr<VideoFrame> VideoFrame::WrapYUV420NativeTextures(
       new VideoFrame(I420, STORAGE_TEXTURE, coded_size, visible_rect,
                      natural_size, mailbox_holders, timestamp));
   frame->mailbox_holders_release_cb_ = mailbox_holder_release_cb;
-  frame->allow_overlay_ = allow_overlay;
   return frame;
 }
 
@@ -455,7 +455,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
   scoped_refptr<VideoFrame> wrapped_frame(new VideoFrame(
      frame->format(), frame->storage_type(), frame->coded_size(),
      visible_rect, natural_size, frame->timestamp()));
-  if (frame->IsEndOfStream())
+  if (frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM))
     frame->metadata()->SetBoolean(VideoFrameMetadata::END_OF_STREAM, true);
 
   for (size_t i = 0; i < NumPlanes(frame->format()); ++i) {
@@ -613,8 +613,7 @@ VideoFrame::VideoFrame(Format format,
       shared_memory_handle_(base::SharedMemory::NULLHandle()),
       shared_memory_offset_(0),
       timestamp_(timestamp),
-      release_sync_point_(0),
-      allow_overlay_(false) {
+      release_sync_point_(0) {
   DCHECK(IsValidConfig(format_, storage_type, coded_size_, visible_rect_,
                        natural_size_));
   memset(&mailbox_holders_, 0, sizeof(mailbox_holders_));
@@ -850,13 +849,6 @@ void VideoFrame::AddDestructionObserver(const base::Closure& callback) {
   done_callbacks_.push_back(callback);
 }
 
-bool VideoFrame::IsEndOfStream() const {
-  bool end_of_stream;
-  return metadata_.GetBoolean(VideoFrameMetadata::END_OF_STREAM,
-                              &end_of_stream) &&
-         end_of_stream;
-}
-
 void VideoFrame::UpdateReleaseSyncPoint(SyncPointClient* client) {
 #if defined(OS_LINUX)
   DCHECK(storage_type_ == STORAGE_TEXTURE || storage_type_ == STORAGE_DMABUFS);
diff --git a/media/base/video_frame.h b/media/base/video_frame.h
index e7dbc6b..9b9c789 100644
--- a/media/base/video_frame.h
+++ b/media/base/video_frame.h
@@ -134,14 +134,13 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
   // |mailbox_holder|, and |mailbox_holder_release_cb| will be called with
   // a syncpoint as the argument when the VideoFrame is to be destroyed.
   static scoped_refptr<VideoFrame> WrapNativeTexture(
+      Format format,
       const gpu::MailboxHolder& mailbox_holder,
       const ReleaseMailboxCB& mailbox_holder_release_cb,
       const gfx::Size& coded_size,
       const gfx::Rect& visible_rect,
       const gfx::Size& natural_size,
-      base::TimeDelta timestamp,
-      bool allow_overlay,
-      bool has_alpha);
+      base::TimeDelta timestamp);
 
   // Wraps a set of native textures representing YUV data with a VideoFrame.
   // |mailbox_holders_release_cb| will be called with a syncpoint as the
@@ -154,8 +153,7 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
       const gfx::Size& coded_size,
       const gfx::Rect& visible_rect,
       const gfx::Size& natural_size,
-      base::TimeDelta timestamp,
-      bool allow_overlay);
+      base::TimeDelta timestamp);
 
   // Wraps packed image data residing in a memory buffer with a VideoFrame.
   // The image data resides in |data| and is assumed to be packed tightly in a
@@ -355,8 +353,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
   const VideoFrameMetadata* metadata() const { return &metadata_; }
   VideoFrameMetadata* metadata() { return &metadata_; }
 
-  bool allow_overlay() const { return allow_overlay_; }
-
 #if defined(OS_LINUX)
   // Returns backing dmabuf file descriptor for given |plane|, if present.
   int dmabuf_fd(size_t plane) const;
@@ -367,9 +363,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
   CVPixelBufferRef cv_pixel_buffer() const;
 #endif
 
-  // Returns true if this VideoFrame represents the end of the stream.
-  bool IsEndOfStream() const;
-
   base::TimeDelta timestamp() const { return timestamp_; }
   void set_timestamp(base::TimeDelta timestamp) {
     timestamp_ = timestamp;
@@ -498,8 +491,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
 
   VideoFrameMetadata metadata_;
 
-  bool allow_overlay_;
-
   DISALLOW_IMPLICIT_CONSTRUCTORS(VideoFrame);
 };
diff --git a/media/base/video_frame_metadata.cc b/media/base/video_frame_metadata.cc
index d663612..938a018 100644
--- a/media/base/video_frame_metadata.cc
+++ b/media/base/video_frame_metadata.cc
@@ -126,6 +126,11 @@ const base::Value* VideoFrameMetadata::GetValue(Key key) const {
   return result;
 }
 
+bool VideoFrameMetadata::IsTrue(Key key) const {
+  bool value = false;
+  return GetBoolean(key, &value) && value;
+}
+
 void VideoFrameMetadata::MergeInternalValuesInto(
     base::DictionaryValue* out) const {
   out->MergeDictionary(&dictionary_);
diff --git a/media/base/video_frame_metadata.h b/media/base/video_frame_metadata.h
index 10b17be..cee4917 100644
--- a/media/base/video_frame_metadata.h
+++ b/media/base/video_frame_metadata.h
@@ -15,6 +15,12 @@ namespace media {
 class MEDIA_EXPORT VideoFrameMetadata {
  public:
   enum Key {
+    // Sources of VideoFrames use this marker to indicate that the associated
+    // VideoFrame can be overlayed, case in which its contents do not need to
+    // be further composited but displayed directly. Use Get/SetBoolean() for
+    // this Key.
+    ALLOW_OVERLAY,
+
     // Video capture begin/end timestamps. Consumers can use these values for
     // dynamic optimizations, logging stats, etc. Use Get/SetTimeTicks() for
     // these keys.
@@ -25,7 +31,8 @@ class MEDIA_EXPORT VideoFrameMetadata {
     // GetInteger()/SetInteger() and VideoFrame::ColorSpace enumeration.
     COLOR_SPACE,
 
-    // Indicates if the current frame is the End of its current Stream.
+    // Indicates if the current frame is the End of its current Stream. Use
+    // Get/SetBoolean() for this Key.
     END_OF_STREAM,
 
     // The estimated duration of this frame (i.e., the amount of time between
@@ -91,6 +98,9 @@ class MEDIA_EXPORT VideoFrameMetadata {
   // Returns null if |key| was not present.
   const base::Value* GetValue(Key key) const WARN_UNUSED_RESULT;
 
+  // Convenience method that returns true if |key| exists and is set to true.
+  bool IsTrue(Key key) const WARN_UNUSED_RESULT;
+
   // For serialization.
   void MergeInternalValuesInto(base::DictionaryValue* out) const;
   void MergeInternalValuesFrom(const base::DictionaryValue& in);
diff --git a/media/base/video_frame_unittest.cc b/media/base/video_frame_unittest.cc
index d1a09f3..02cefdf 100644
--- a/media/base/video_frame_unittest.cc
+++ b/media/base/video_frame_unittest.cc
@@ -155,7 +155,8 @@ TEST(VideoFrame, CreateFrame) {
 
   // Test an empty frame.
   frame = VideoFrame::CreateEOSFrame();
-  EXPECT_TRUE(frame->IsEndOfStream());
+  EXPECT_TRUE(
+      frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
 }
 
 TEST(VideoFrame, CreateBlackFrame) {
@@ -170,7 +171,8 @@ TEST(VideoFrame, CreateBlackFrame) {
 
   // Test basic properties.
   EXPECT_EQ(0, frame->timestamp().InMicroseconds());
-  EXPECT_FALSE(frame->IsEndOfStream());
+  EXPECT_FALSE(
+      frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
 
   // Test |frame| properties.
   EXPECT_EQ(VideoFrame::YV12, frame->format());
@@ -252,14 +254,13 @@ TEST(VideoFrame, TextureNoLongerNeededCallbackIsCalled) {
 
   {
     scoped_refptr<VideoFrame> frame = VideoFrame::WrapNativeTexture(
+        VideoFrame::ARGB,
         gpu::MailboxHolder(gpu::Mailbox(), 5, 0 /* sync_point */),
         base::Bind(&TextureCallback, &called_sync_point),
         gfx::Size(10, 10),  // coded_size
         gfx::Rect(10, 10),  // visible_rect
         gfx::Size(10, 10),  // natural_size
-        base::TimeDelta(),  // timestamp
-        false,              // allow_overlay
-        true);              // has_alpha
+        base::TimeDelta());  // timestamp
     EXPECT_EQ(VideoFrame::STORAGE_TEXTURE, frame->storage_type());
     EXPECT_EQ(VideoFrame::ARGB, frame->format());
   }
@@ -307,8 +308,7 @@ TEST(VideoFrame,
         gfx::Size(10, 10),  // coded_size
         gfx::Rect(10, 10),  // visible_rect
         gfx::Size(10, 10),  // natural_size
-        base::TimeDelta(),  // timestamp
-        false);             // allow_overlay
+        base::TimeDelta());  // timestamp
 
     EXPECT_EQ(VideoFrame::STORAGE_TEXTURE, frame->storage_type());
     EXPECT_EQ(VideoFrame::I420, frame->format());
diff --git a/media/filters/decrypting_video_decoder.cc b/media/filters/decrypting_video_decoder.cc
index 3627080..c3c4dc9 100644
--- a/media/filters/decrypting_video_decoder.cc
+++ b/media/filters/decrypting_video_decoder.cc
@@ -285,7 +285,7 @@ void DecryptingVideoDecoder::DeliverFrame(
   DCHECK_EQ(status, Decryptor::kSuccess);
 
   // No frame returned with kSuccess should be end-of-stream frame.
-  DCHECK(!frame->IsEndOfStream());
+  DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
   output_cb_.Run(frame);
 
   if (scoped_pending_buffer_to_decode->end_of_stream()) {
diff --git a/media/filters/fake_video_decoder_unittest.cc b/media/filters/fake_video_decoder_unittest.cc
index f335c02..610987d 100644
--- a/media/filters/fake_video_decoder_unittest.cc
+++ b/media/filters/fake_video_decoder_unittest.cc
@@ -78,7 +78,7 @@ class FakeVideoDecoderTest
   }
 
   void FrameReady(const scoped_refptr<VideoFrame>& frame) {
-    DCHECK(!frame->IsEndOfStream());
+    DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
     last_decoded_frame_ = frame;
     num_decoded_frames_++;
   }
diff --git a/media/filters/ffmpeg_video_decoder_unittest.cc b/media/filters/ffmpeg_video_decoder_unittest.cc
index 160d18f..afa3ee3 100644
--- a/media/filters/ffmpeg_video_decoder_unittest.cc
+++ b/media/filters/ffmpeg_video_decoder_unittest.cc
@@ -188,7 +188,7 @@ class FFmpegVideoDecoderTest : public testing::Test {
   }
 
   void FrameReady(const scoped_refptr<VideoFrame>& frame) {
-    DCHECK(!frame->IsEndOfStream());
+    DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
     output_frames_.push_back(frame);
   }
diff --git a/media/filters/gpu_video_decoder.cc b/media/filters/gpu_video_decoder.cc
index 504ed84..20a18c5 100644
--- a/media/filters/gpu_video_decoder.cc
+++ b/media/filters/gpu_video_decoder.cc
@@ -401,13 +401,15 @@ void GpuVideoDecoder::PictureReady(const media::Picture& picture) {
   DCHECK(decoder_texture_target_);
 
   scoped_refptr<VideoFrame> frame(VideoFrame::WrapNativeTexture(
+      VideoFrame::ARGB,
       gpu::MailboxHolder(pb.texture_mailbox(), decoder_texture_target_,
                          0 /* sync_point */),
       BindToCurrentLoop(base::Bind(
          &GpuVideoDecoder::ReleaseMailbox, weak_factory_.GetWeakPtr(),
          factories_, picture.picture_buffer_id(), pb.texture_id())),
-      pb.size(), visible_rect, natural_size, timestamp,
-      picture.allow_overlay(), true /* has_alpha */));
+      pb.size(), visible_rect, natural_size, timestamp));
+  if (picture.allow_overlay())
+    frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY, true);
   CHECK_GT(available_pictures_, 0);
   --available_pictures_;
 
   bool inserted =
diff --git a/media/filters/video_frame_stream_unittest.cc b/media/filters/video_frame_stream_unittest.cc
index 316a538..de09110 100644
--- a/media/filters/video_frame_stream_unittest.cc
+++ b/media/filters/video_frame_stream_unittest.cc
@@ -189,8 +189,10 @@ class VideoFrameStreamTest
     DCHECK(pending_read_);
     frame_read_ = frame;
     last_read_status_ = status;
-    if (frame.get() && !frame->IsEndOfStream())
+    if (frame.get() &&
+        !frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)) {
       num_decoded_frames_++;
+    }
     pending_read_ = false;
   }
 
@@ -223,7 +225,9 @@ class VideoFrameStreamTest
   void ReadAllFrames() {
     do {
       ReadOneFrame();
-    } while (frame_read_.get() && !frame_read_->IsEndOfStream());
+    } while (frame_read_.get() &&
+             !frame_read_->metadata()->IsTrue(
+                 VideoFrameMetadata::END_OF_STREAM));
 
     const int total_num_frames = kNumConfigs * kNumBuffersInOneConfig;
     DCHECK_EQ(num_decoded_frames_, total_num_frames);
@@ -576,7 +580,8 @@ TEST_P(VideoFrameStreamTest, Read_DuringEndOfStreamDecode) {
 
   // The read output should indicate end of stream.
   ASSERT_TRUE(frame_read_.get());
-  EXPECT_TRUE(frame_read_->IsEndOfStream());
+  EXPECT_TRUE(
+      frame_read_->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
 }
 
 // No Reset() before initialization is successfully completed.
diff --git a/media/filters/video_renderer_algorithm.cc b/media/filters/video_renderer_algorithm.cc
index 2670823..233c130 100644
--- a/media/filters/video_renderer_algorithm.cc
+++ b/media/filters/video_renderer_algorithm.cc
@@ -353,7 +353,7 @@ size_t VideoRendererAlgorithm::EffectiveFramesQueued() const {
 void VideoRendererAlgorithm::EnqueueFrame(
     const scoped_refptr<VideoFrame>& frame) {
   DCHECK(frame);
-  DCHECK(!frame->IsEndOfStream());
+  DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
 
   ReadyFrame ready_frame(frame);
   auto it = frame_queue_.empty() ? frame_queue_.end()
diff --git a/media/renderers/video_renderer_impl.cc b/media/renderers/video_renderer_impl.cc
index c89aa1d..95067fa 100644
--- a/media/renderers/video_renderer_impl.cc
+++ b/media/renderers/video_renderer_impl.cc
@@ -486,7 +486,7 @@ void VideoRendererImpl::FrameReady(VideoFrameStream::Status status,
     return;
   }
 
-  if (frame->IsEndOfStream()) {
+  if (frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)) {
     DCHECK(!received_end_of_stream_);
     received_end_of_stream_ = true;
 
@@ -618,7 +618,7 @@ void VideoRendererImpl::AddReadyFrame_Locked(
     const scoped_refptr<VideoFrame>& frame) {
   DCHECK(task_runner_->BelongsToCurrentThread());
   lock_.AssertAcquired();
-  DCHECK(!frame->IsEndOfStream());
+  DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM));
 
   frames_decoded_++;
 
diff --git a/media/video/gpu_memory_buffer_video_frame_pool.cc b/media/video/gpu_memory_buffer_video_frame_pool.cc
index fb5bec0..23a7a94 100644
--- a/media/video/gpu_memory_buffer_video_frame_pool.cc
+++ b/media/video/gpu_memory_buffer_video_frame_pool.cc
@@ -197,13 +197,16 @@ GpuMemoryBufferVideoFramePool::PoolImpl::CreateHardwareFrame(
   }
 
   // Create the VideoFrame backed by native textures.
-  return VideoFrame::WrapYUV420NativeTextures(
+  scoped_refptr<VideoFrame> frame = VideoFrame::WrapYUV420NativeTextures(
       mailbox_holders[VideoFrame::kYPlane],
       mailbox_holders[VideoFrame::kUPlane],
      mailbox_holders[VideoFrame::kVPlane],
      base::Bind(&PoolImpl::MailboxHoldersReleased, this, frame_resources),
       size, video_frame->visible_rect(), video_frame->natural_size(),
-      video_frame->timestamp(), video_frame->allow_overlay());
+      video_frame->timestamp());
+  if (video_frame->metadata()->IsTrue(VideoFrameMetadata::ALLOW_OVERLAY))
+    frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY, true);
+  return frame;
 }
 
 // Destroy all the resources posting one task per FrameResources