diff options
author | mcasas <mcasas@chromium.org> | 2015-06-10 17:11:39 -0700 |
---|---|---|
committer | Commit bot <commit-bot@chromium.org> | 2015-06-11 00:13:11 +0000 |
commit | 4337a747ccb21627fa8c4665550312ed3b560b0f (patch) | |
tree | e0cae64951c2bc44f035c2b6556456f4fab8ffba | |
parent | 584c8eadcb8580822907a11443a9af9aa16d7c96 (diff) | |
download | chromium_src-4337a747ccb21627fa8c4665550312ed3b560b0f.zip chromium_src-4337a747ccb21627fa8c4665550312ed3b560b0f.tar.gz chromium_src-4337a747ccb21627fa8c4665550312ed3b560b0f.tar.bz2 |
VideoFrame cleanup: change |allow_overlay_| to metadata and |has_alpha| to Format
VideoFrame::WrapNativeTexture() has a parameter
|allow_overlay_| that is overwhelmingly |false|
around the codebase. This CL moves the parameter
into a metadata entry, which is false by default
unless explicitly set.
VideoFrame::WrapNativeTexture() loses
the parameter |has_alpha| and instead takes a
VideoFrame::Format; all the call sites I found
use ARGB. A DCHECK was added to see if the bots
agree and to make this assumption evident. This
is in preparation for Texture frames that are
not ARGB, e.g. any 4:2:2 format.
BUG=440843, 489744
CQ_INCLUDE_TRYBOTS=tryserver.blink:linux_blink_rel
TBR=
bbudge@chromium.org for content/renderer/pepper/video_decoder_shim.cc
reveman@chromium.org for cc/resources/video_resource_updater.cc and
a few associated unittests.
In both cases it is just a mechanical change derived
from the renaming of the VideoFrame accessor:
video_frame->allow_overlay()
turns into
video_frame->metadata()->IsTrue(media::VideoFrameMetadata::ALLOW_OVERLAY)
Review URL: https://codereview.chromium.org/1150863008
Cr-Commit-Position: refs/heads/master@{#333855}
24 files changed, 102 insertions, 84 deletions
diff --git a/cc/layers/video_layer_impl_unittest.cc b/cc/layers/video_layer_impl_unittest.cc index 56d43a7..35a07df 100644 --- a/cc/layers/video_layer_impl_unittest.cc +++ b/cc/layers/video_layer_impl_unittest.cc @@ -363,7 +363,9 @@ TEST(VideoLayerImplTest, NativeYUVFrameGeneratesYUVQuad) { media::VideoFrame::WrapYUV420NativeTextures( mailbox_holder, mailbox_holder, mailbox_holder, base::Bind(EmptyCallback), gfx::Size(10, 10), gfx::Rect(10, 10), - gfx::Size(10, 10), base::TimeDelta(), true); + gfx::Size(10, 10), base::TimeDelta()); + video_frame->metadata()->SetBoolean(media::VideoFrameMetadata::ALLOW_OVERLAY, + true); FakeVideoFrameProvider provider; provider.set_frame(video_frame); diff --git a/cc/resources/video_resource_updater.cc b/cc/resources/video_resource_updater.cc index 7c4746f..ba63431 100644 --- a/cc/resources/video_resource_updater.cc +++ b/cc/resources/video_resource_updater.cc @@ -418,7 +418,8 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForHardwarePlanes( external_resources.mailboxes.push_back( TextureMailbox(mailbox_holder.mailbox, mailbox_holder.texture_target, mailbox_holder.sync_point, video_frame->coded_size(), - video_frame->allow_overlay())); + video_frame->metadata()->IsTrue( + media::VideoFrameMetadata::ALLOW_OVERLAY))); external_resources.release_callbacks.push_back( base::Bind(&ReturnTexture, AsWeakPtr(), video_frame)); } diff --git a/cc/resources/video_resource_updater_unittest.cc b/cc/resources/video_resource_updater_unittest.cc index 17095e6..094116e 100644 --- a/cc/resources/video_resource_updater_unittest.cc +++ b/cc/resources/video_resource_updater_unittest.cc @@ -112,14 +112,13 @@ class VideoResourceUpdaterTest : public testing::Test { const unsigned sync_point = 7; const unsigned target = GL_TEXTURE_2D; return media::VideoFrame::WrapNativeTexture( + media::VideoFrame::ARGB, gpu::MailboxHolder(mailbox, target, sync_point), base::Bind(&ReleaseMailboxCB), - size, // coded_size - gfx::Rect(size), // visible_rect - size, 
// natural_size - base::TimeDelta(), // timestamp - false, // allow_overlay - true); // has_alpha + size, // coded_size + gfx::Rect(size), // visible_rect + size, // natural_size + base::TimeDelta()); // timestamp } scoped_refptr<media::VideoFrame> CreateTestYUVHardareVideoFrame() { @@ -141,11 +140,10 @@ class VideoResourceUpdaterTest : public testing::Test { gpu::MailboxHolder(mailbox[media::VideoFrame::kVPlane], target, sync_point), base::Bind(&ReleaseMailboxCB), - size, // coded_size - gfx::Rect(size), // visible_rect - size, // natural_size - base::TimeDelta(), // timestamp - false); // allow_overlay + size, // coded_size + gfx::Rect(size), // visible_rect + size, // natural_size + base::TimeDelta()); // timestamp } WebGraphicsContext3DUploadCounter* context3d_; diff --git a/cc/trees/layer_tree_host_unittest_context.cc b/cc/trees/layer_tree_host_unittest_context.cc index 6f561e5..8eb4dc8 100644 --- a/cc/trees/layer_tree_host_unittest_context.cc +++ b/cc/trees/layer_tree_host_unittest_context.cc @@ -937,15 +937,15 @@ class LayerTreeHostContextTestDontUseLostResources color_video_frame_ = VideoFrame::CreateColorFrame( gfx::Size(4, 4), 0x80, 0x80, 0x80, base::TimeDelta()); hw_video_frame_ = VideoFrame::WrapNativeTexture( + media::VideoFrame::ARGB, gpu::MailboxHolder(mailbox, GL_TEXTURE_2D, sync_point), media::VideoFrame::ReleaseMailboxCB(), gfx::Size(4, 4), - gfx::Rect(0, 0, 4, 4), gfx::Size(4, 4), base::TimeDelta(), - false /* allow_overlay */, true /* has_alpha */); + gfx::Rect(0, 0, 4, 4), gfx::Size(4, 4), base::TimeDelta()); scaled_hw_video_frame_ = VideoFrame::WrapNativeTexture( + media::VideoFrame::ARGB, gpu::MailboxHolder(mailbox, GL_TEXTURE_2D, sync_point), media::VideoFrame::ReleaseMailboxCB(), gfx::Size(4, 4), - gfx::Rect(0, 0, 3, 2), gfx::Size(4, 4), base::TimeDelta(), - false /* allow_overlay */, true /* has_alpha */); + gfx::Rect(0, 0, 3, 2), gfx::Size(4, 4), base::TimeDelta()); color_frame_provider_.set_frame(color_video_frame_); 
hw_frame_provider_.set_frame(hw_video_frame_); diff --git a/content/browser/media/capture/aura_window_capture_machine.cc b/content/browser/media/capture/aura_window_capture_machine.cc index b17f31d..1546f65 100644 --- a/content/browser/media/capture/aura_window_capture_machine.cc +++ b/content/browser/media/capture/aura_window_capture_machine.cc @@ -288,11 +288,12 @@ bool AuraWindowCaptureMachine::ProcessCopyOutputResponse( if (!texture_mailbox.IsTexture()) return false; video_frame = media::VideoFrame::WrapNativeTexture( + media::VideoFrame::ARGB, gpu::MailboxHolder(texture_mailbox.mailbox(), texture_mailbox.target(), texture_mailbox.sync_point()), base::Bind(&RunSingleReleaseCallback, base::Passed(&release_callback)), result->size(), gfx::Rect(result->size()), result->size(), - base::TimeDelta(), false /* allow_overlay */, true /* has_alpha */); + base::TimeDelta()); capture_frame_cb.Run(video_frame, start_time, true); return true; } else { diff --git a/content/browser/renderer_host/media/video_capture_controller_unittest.cc b/content/browser/renderer_host/media/video_capture_controller_unittest.cc index 4dbec58..bbdc096 100644 --- a/content/browser/renderer_host/media/video_capture_controller_unittest.cc +++ b/content/browser/renderer_host/media/video_capture_controller_unittest.cc @@ -155,8 +155,8 @@ class VideoCaptureControllerTest : public testing::Test { const media::VideoFrame::ReleaseMailboxCB& release_cb, gfx::Size dimensions) { return media::VideoFrame::WrapNativeTexture( - holder, release_cb, dimensions, gfx::Rect(dimensions), dimensions, - base::TimeDelta(), false /* allow_overlay */, true /* has_alpha */); + media::VideoFrame::ARGB, holder, release_cb, dimensions, + gfx::Rect(dimensions), dimensions, base::TimeDelta()); } TestBrowserThreadBundle bundle_; diff --git a/content/browser/renderer_host/media/video_capture_device_client.cc b/content/browser/renderer_host/media/video_capture_device_client.cc index b739aca..465c0c7 100644 --- 
a/content/browser/renderer_host/media/video_capture_device_client.cc +++ b/content/browser/renderer_host/media/video_capture_device_client.cc @@ -28,6 +28,7 @@ using media::VideoCaptureFormat; using media::VideoFrame; +using media::VideoFrameMetadata; namespace content { @@ -559,14 +560,15 @@ VideoCaptureDeviceClient::TextureWrapHelper::OnIncomingCapturedGpuMemoryBuffer( scoped_refptr<media::VideoFrame> video_frame = media::VideoFrame::WrapNativeTexture( + media::VideoFrame::ARGB, mailbox_holder, media::BindToCurrentLoop(base::Bind( &VideoCaptureDeviceClient::TextureWrapHelper::ReleaseCallback, this, image_id, texture_id)), frame_format.frame_size, gfx::Rect(frame_format.frame_size), - frame_format.frame_size, base::TimeDelta(), true /* allow_overlay */, - true /* has_alpha */); - video_frame->metadata()->SetDouble(media::VideoFrameMetadata::FRAME_RATE, + frame_format.frame_size, base::TimeDelta()); + video_frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY, true); + video_frame->metadata()->SetDouble(VideoFrameMetadata::FRAME_RATE, frame_format.frame_rate); BrowserThread::PostTask( diff --git a/content/renderer/media/android/webmediaplayer_android.cc b/content/renderer/media/android/webmediaplayer_android.cc index ccab496..da4c064 100644 --- a/content/renderer/media/android/webmediaplayer_android.cc +++ b/content/renderer/media/android/webmediaplayer_android.cc @@ -1205,14 +1205,14 @@ void WebMediaPlayerAndroid::DrawRemotePlaybackText( GLuint texture_mailbox_sync_point = gl->InsertSyncPointCHROMIUM(); scoped_refptr<VideoFrame> new_frame = VideoFrame::WrapNativeTexture( + VideoFrame::ARGB, gpu::MailboxHolder(texture_mailbox, texture_target, texture_mailbox_sync_point), media::BindToCurrentLoop(base::Bind(&OnReleaseTexture, stream_texture_factory_, remote_playback_texture_id)), canvas_size /* coded_size */, gfx::Rect(canvas_size) /* visible_rect */, - canvas_size /* natural_size */, base::TimeDelta() /* timestamp */, - false /* allow overlay */, true /* 
has_alpha */); + canvas_size /* natural_size */, base::TimeDelta() /* timestamp */); SetCurrentFrameInternal(new_frame); } @@ -1243,12 +1243,13 @@ void WebMediaPlayerAndroid::ReallocateVideoFrame() { GLuint texture_mailbox_sync_point = gl->InsertSyncPointCHROMIUM(); scoped_refptr<VideoFrame> new_frame = VideoFrame::WrapNativeTexture( + VideoFrame::ARGB, gpu::MailboxHolder(texture_mailbox_, texture_target, texture_mailbox_sync_point), media::BindToCurrentLoop(base::Bind( &OnReleaseTexture, stream_texture_factory_, texture_id_ref)), natural_size_, gfx::Rect(natural_size_), natural_size_, - base::TimeDelta(), false /* allow_overlay */, true /* has_alpha */); + base::TimeDelta()); SetCurrentFrameInternal(new_frame); } } diff --git a/content/renderer/media/rtc_video_decoder.cc b/content/renderer/media/rtc_video_decoder.cc index 2556dc6..16b76eb 100644 --- a/content/renderer/media/rtc_video_decoder.cc +++ b/content/renderer/media/rtc_video_decoder.cc @@ -422,13 +422,18 @@ scoped_refptr<media::VideoFrame> RTCVideoDecoder::CreateVideoFrame( // Convert timestamp from 90KHz to ms. 
base::TimeDelta timestamp_ms = base::TimeDelta::FromInternalValue( base::checked_cast<uint64_t>(timestamp) * 1000 / 90); - return media::VideoFrame::WrapNativeTexture( + scoped_refptr<media::VideoFrame> frame(media::VideoFrame::WrapNativeTexture( + media::VideoFrame::ARGB, gpu::MailboxHolder(pb.texture_mailbox(), decoder_texture_target_, 0), media::BindToCurrentLoop(base::Bind( &RTCVideoDecoder::ReleaseMailbox, weak_factory_.GetWeakPtr(), factories_, picture.picture_buffer_id(), pb.texture_id())), - pb.size(), visible_rect, visible_rect.size(), timestamp_ms, - picture.allow_overlay(), true /* has_alpha */); + pb.size(), visible_rect, visible_rect.size(), timestamp_ms)); + if (picture.allow_overlay()) { + frame->metadata()->SetBoolean(media::VideoFrameMetadata::ALLOW_OVERLAY, + true); + } + return frame; } void RTCVideoDecoder::NotifyEndOfBitstreamBuffer(int32 id) { diff --git a/content/renderer/media/video_capture_impl.cc b/content/renderer/media/video_capture_impl.cc index e5791356..23ae799 100644 --- a/content/renderer/media/video_capture_impl.cc +++ b/content/renderer/media/video_capture_impl.cc @@ -296,11 +296,11 @@ void VideoCaptureImpl::OnMailboxBufferReceived( uint32* const release_sync_point_storage = new uint32(0); // Deleted in DidFinishConsumingFrame(). 
scoped_refptr<media::VideoFrame> frame = media::VideoFrame::WrapNativeTexture( + media::VideoFrame::ARGB, mailbox_holder, base::Bind(&SaveReleaseSyncPoint, release_sync_point_storage), packed_frame_size, gfx::Rect(packed_frame_size), packed_frame_size, - timestamp - first_frame_timestamp_, false /* allow_overlay */, - true /* has_alpha */); + timestamp - first_frame_timestamp_); frame->AddDestructionObserver( base::Bind(&VideoCaptureImpl::DidFinishConsumingFrame, frame->metadata(), diff --git a/content/renderer/pepper/video_decoder_shim.cc b/content/renderer/pepper/video_decoder_shim.cc index b7bd4ce..ec176bf 100644 --- a/content/renderer/pepper/video_decoder_shim.cc +++ b/content/renderer/pepper/video_decoder_shim.cc @@ -788,7 +788,7 @@ void VideoDecoderShim::DecoderImpl::OnOutputComplete( DCHECK(awaiting_decoder_); scoped_ptr<PendingFrame> pending_frame; - if (!frame->IsEndOfStream()) + if (!frame->metadata()->IsTrue(media::VideoFrameMetadata::END_OF_STREAM)) pending_frame.reset(new PendingFrame(decode_id_, frame)); else pending_frame.reset(new PendingFrame(decode_id_)); diff --git a/media/base/video_frame.cc b/media/base/video_frame.cc index aac26e5..e0219fb 100644 --- a/media/base/video_frame.cc +++ b/media/base/video_frame.cc @@ -252,22 +252,24 @@ scoped_refptr<VideoFrame> VideoFrame::CreateFrame( // static scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture( + Format format, const gpu::MailboxHolder& mailbox_holder, const ReleaseMailboxCB& mailbox_holder_release_cb, const gfx::Size& coded_size, const gfx::Rect& visible_rect, const gfx::Size& natural_size, - base::TimeDelta timestamp, - bool allow_overlay, - bool has_alpha) { + base::TimeDelta timestamp) { + if (format != ARGB) { + DLOG(ERROR) << "Only ARGB pixel format supported, got " + << FormatToString(format); + return nullptr; + } gpu::MailboxHolder mailbox_holders[kMaxPlanes]; mailbox_holders[kARGBPlane] = mailbox_holder; - Format texture_format = has_alpha ? 
ARGB : XRGB; scoped_refptr<VideoFrame> frame( - new VideoFrame(texture_format, STORAGE_TEXTURE, coded_size, visible_rect, + new VideoFrame(format, STORAGE_TEXTURE, coded_size, visible_rect, natural_size, mailbox_holders, timestamp)); frame->mailbox_holders_release_cb_ = mailbox_holder_release_cb; - frame->allow_overlay_ = allow_overlay; return frame; } @@ -280,8 +282,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapYUV420NativeTextures( const gfx::Size& coded_size, const gfx::Rect& visible_rect, const gfx::Size& natural_size, - base::TimeDelta timestamp, - bool allow_overlay) { + base::TimeDelta timestamp) { gpu::MailboxHolder mailbox_holders[kMaxPlanes]; mailbox_holders[kYPlane] = y_mailbox_holder; mailbox_holders[kUPlane] = u_mailbox_holder; @@ -290,7 +291,6 @@ scoped_refptr<VideoFrame> VideoFrame::WrapYUV420NativeTextures( new VideoFrame(I420, STORAGE_TEXTURE, coded_size, visible_rect, natural_size, mailbox_holders, timestamp)); frame->mailbox_holders_release_cb_ = mailbox_holder_release_cb; - frame->allow_overlay_ = allow_overlay; return frame; } @@ -455,7 +455,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame( scoped_refptr<VideoFrame> wrapped_frame(new VideoFrame( frame->format(), frame->storage_type(), frame->coded_size(), visible_rect, natural_size, frame->timestamp())); - if (frame->IsEndOfStream()) + if (frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)) frame->metadata()->SetBoolean(VideoFrameMetadata::END_OF_STREAM, true); for (size_t i = 0; i < NumPlanes(frame->format()); ++i) { @@ -613,8 +613,7 @@ VideoFrame::VideoFrame(Format format, shared_memory_handle_(base::SharedMemory::NULLHandle()), shared_memory_offset_(0), timestamp_(timestamp), - release_sync_point_(0), - allow_overlay_(false) { + release_sync_point_(0) { DCHECK(IsValidConfig(format_, storage_type, coded_size_, visible_rect_, natural_size_)); memset(&mailbox_holders_, 0, sizeof(mailbox_holders_)); @@ -850,13 +849,6 @@ void VideoFrame::AddDestructionObserver(const 
base::Closure& callback) { done_callbacks_.push_back(callback); } -bool VideoFrame::IsEndOfStream() const { - bool end_of_stream; - return metadata_.GetBoolean(VideoFrameMetadata::END_OF_STREAM, - &end_of_stream) && - end_of_stream; -} - void VideoFrame::UpdateReleaseSyncPoint(SyncPointClient* client) { #if defined(OS_LINUX) DCHECK(storage_type_ == STORAGE_TEXTURE || storage_type_ == STORAGE_DMABUFS); diff --git a/media/base/video_frame.h b/media/base/video_frame.h index e7dbc6b..9b9c789 100644 --- a/media/base/video_frame.h +++ b/media/base/video_frame.h @@ -134,14 +134,13 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> { // |mailbox_holder|, and |mailbox_holder_release_cb| will be called with // a syncpoint as the argument when the VideoFrame is to be destroyed. static scoped_refptr<VideoFrame> WrapNativeTexture( + Format format, const gpu::MailboxHolder& mailbox_holder, const ReleaseMailboxCB& mailbox_holder_release_cb, const gfx::Size& coded_size, const gfx::Rect& visible_rect, const gfx::Size& natural_size, - base::TimeDelta timestamp, - bool allow_overlay, - bool has_alpha); + base::TimeDelta timestamp); // Wraps a set of native textures representing YUV data with a VideoFrame. // |mailbox_holders_release_cb| will be called with a syncpoint as the @@ -154,8 +153,7 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> { const gfx::Size& coded_size, const gfx::Rect& visible_rect, const gfx::Size& natural_size, - base::TimeDelta timestamp, - bool allow_overlay); + base::TimeDelta timestamp); // Wraps packed image data residing in a memory buffer with a VideoFrame. 
// The image data resides in |data| and is assumed to be packed tightly in a @@ -355,8 +353,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> { const VideoFrameMetadata* metadata() const { return &metadata_; } VideoFrameMetadata* metadata() { return &metadata_; } - bool allow_overlay() const { return allow_overlay_; } - #if defined(OS_LINUX) // Returns backing dmabuf file descriptor for given |plane|, if present. int dmabuf_fd(size_t plane) const; @@ -367,9 +363,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> { CVPixelBufferRef cv_pixel_buffer() const; #endif - // Returns true if this VideoFrame represents the end of the stream. - bool IsEndOfStream() const; - base::TimeDelta timestamp() const { return timestamp_; } void set_timestamp(base::TimeDelta timestamp) { timestamp_ = timestamp; @@ -498,8 +491,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> { VideoFrameMetadata metadata_; - bool allow_overlay_; - DISALLOW_IMPLICIT_CONSTRUCTORS(VideoFrame); }; diff --git a/media/base/video_frame_metadata.cc b/media/base/video_frame_metadata.cc index d663612..938a018 100644 --- a/media/base/video_frame_metadata.cc +++ b/media/base/video_frame_metadata.cc @@ -126,6 +126,11 @@ const base::Value* VideoFrameMetadata::GetValue(Key key) const { return result; } +bool VideoFrameMetadata::IsTrue(Key key) const { + bool value = false; + return GetBoolean(key, &value) && value; +} + void VideoFrameMetadata::MergeInternalValuesInto( base::DictionaryValue* out) const { out->MergeDictionary(&dictionary_); diff --git a/media/base/video_frame_metadata.h b/media/base/video_frame_metadata.h index 10b17be..cee4917 100644 --- a/media/base/video_frame_metadata.h +++ b/media/base/video_frame_metadata.h @@ -15,6 +15,12 @@ namespace media { class MEDIA_EXPORT VideoFrameMetadata { public: enum Key { + // Sources of VideoFrames use this marker to indicate that the associated + // VideoFrame can be 
overlayed, case in which its contents do not need to be + // further composited but displayed directly. Use Get/SetBoolean() for + // this Key. + ALLOW_OVERLAY, + // Video capture begin/end timestamps. Consumers can use these values for // dynamic optimizations, logging stats, etc. Use Get/SetTimeTicks() for // these keys. @@ -25,7 +31,8 @@ class MEDIA_EXPORT VideoFrameMetadata { // GetInteger()/SetInteger() and VideoFrame::ColorSpace enumeration. COLOR_SPACE, - // Indicates if the current frame is the End of its current Stream. + // Indicates if the current frame is the End of its current Stream. Use + // Get/SetBoolean() for this Key. END_OF_STREAM, // The estimated duration of this frame (i.e., the amount of time between @@ -91,6 +98,9 @@ class MEDIA_EXPORT VideoFrameMetadata { // Returns null if |key| was not present. const base::Value* GetValue(Key key) const WARN_UNUSED_RESULT; + // Convenience method that returns true if |key| exists and is set to true. + bool IsTrue(Key key) const WARN_UNUSED_RESULT; + // For serialization. void MergeInternalValuesInto(base::DictionaryValue* out) const; void MergeInternalValuesFrom(const base::DictionaryValue& in); diff --git a/media/base/video_frame_unittest.cc b/media/base/video_frame_unittest.cc index d1a09f3..02cefdf 100644 --- a/media/base/video_frame_unittest.cc +++ b/media/base/video_frame_unittest.cc @@ -155,7 +155,8 @@ TEST(VideoFrame, CreateFrame) { // Test an empty frame. frame = VideoFrame::CreateEOSFrame(); - EXPECT_TRUE(frame->IsEndOfStream()); + EXPECT_TRUE( + frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)); } TEST(VideoFrame, CreateBlackFrame) { @@ -170,7 +171,8 @@ TEST(VideoFrame, CreateBlackFrame) { // Test basic properties. EXPECT_EQ(0, frame->timestamp().InMicroseconds()); - EXPECT_FALSE(frame->IsEndOfStream()); + EXPECT_FALSE( + frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)); // Test |frame| properties. 
EXPECT_EQ(VideoFrame::YV12, frame->format()); @@ -252,14 +254,13 @@ TEST(VideoFrame, TextureNoLongerNeededCallbackIsCalled) { { scoped_refptr<VideoFrame> frame = VideoFrame::WrapNativeTexture( + VideoFrame::ARGB, gpu::MailboxHolder(gpu::Mailbox(), 5, 0 /* sync_point */), base::Bind(&TextureCallback, &called_sync_point), gfx::Size(10, 10), // coded_size gfx::Rect(10, 10), // visible_rect gfx::Size(10, 10), // natural_size - base::TimeDelta(), // timestamp - false, // allow_overlay - true); // has_alpha + base::TimeDelta()); // timestamp EXPECT_EQ(VideoFrame::STORAGE_TEXTURE, frame->storage_type()); EXPECT_EQ(VideoFrame::ARGB, frame->format()); } @@ -307,8 +308,7 @@ TEST(VideoFrame, gfx::Size(10, 10), // coded_size gfx::Rect(10, 10), // visible_rect gfx::Size(10, 10), // natural_size - base::TimeDelta(), // timestamp - false); // allow_overlay + base::TimeDelta()); // timestamp EXPECT_EQ(VideoFrame::STORAGE_TEXTURE, frame->storage_type()); EXPECT_EQ(VideoFrame::I420, frame->format()); diff --git a/media/filters/decrypting_video_decoder.cc b/media/filters/decrypting_video_decoder.cc index 3627080..c3c4dc9 100644 --- a/media/filters/decrypting_video_decoder.cc +++ b/media/filters/decrypting_video_decoder.cc @@ -285,7 +285,7 @@ void DecryptingVideoDecoder::DeliverFrame( DCHECK_EQ(status, Decryptor::kSuccess); // No frame returned with kSuccess should be end-of-stream frame. 
- DCHECK(!frame->IsEndOfStream()); + DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)); output_cb_.Run(frame); if (scoped_pending_buffer_to_decode->end_of_stream()) { diff --git a/media/filters/fake_video_decoder_unittest.cc b/media/filters/fake_video_decoder_unittest.cc index f335c02..610987d 100644 --- a/media/filters/fake_video_decoder_unittest.cc +++ b/media/filters/fake_video_decoder_unittest.cc @@ -78,7 +78,7 @@ class FakeVideoDecoderTest } void FrameReady(const scoped_refptr<VideoFrame>& frame) { - DCHECK(!frame->IsEndOfStream()); + DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)); last_decoded_frame_ = frame; num_decoded_frames_++; } diff --git a/media/filters/ffmpeg_video_decoder_unittest.cc b/media/filters/ffmpeg_video_decoder_unittest.cc index 160d18f..afa3ee3 100644 --- a/media/filters/ffmpeg_video_decoder_unittest.cc +++ b/media/filters/ffmpeg_video_decoder_unittest.cc @@ -188,7 +188,7 @@ class FFmpegVideoDecoderTest : public testing::Test { } void FrameReady(const scoped_refptr<VideoFrame>& frame) { - DCHECK(!frame->IsEndOfStream()); + DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)); output_frames_.push_back(frame); } diff --git a/media/filters/gpu_video_decoder.cc b/media/filters/gpu_video_decoder.cc index 504ed84..20a18c5 100644 --- a/media/filters/gpu_video_decoder.cc +++ b/media/filters/gpu_video_decoder.cc @@ -401,13 +401,15 @@ void GpuVideoDecoder::PictureReady(const media::Picture& picture) { DCHECK(decoder_texture_target_); scoped_refptr<VideoFrame> frame(VideoFrame::WrapNativeTexture( + VideoFrame::ARGB, gpu::MailboxHolder(pb.texture_mailbox(), decoder_texture_target_, 0 /* sync_point */), BindToCurrentLoop(base::Bind( &GpuVideoDecoder::ReleaseMailbox, weak_factory_.GetWeakPtr(), factories_, picture.picture_buffer_id(), pb.texture_id())), - pb.size(), visible_rect, natural_size, timestamp, picture.allow_overlay(), - true /* has_alpha */)); + pb.size(), visible_rect, natural_size, 
timestamp)); + if (picture.allow_overlay()) + frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY, true); CHECK_GT(available_pictures_, 0); --available_pictures_; bool inserted = diff --git a/media/filters/video_frame_stream_unittest.cc b/media/filters/video_frame_stream_unittest.cc index 316a538..de09110 100644 --- a/media/filters/video_frame_stream_unittest.cc +++ b/media/filters/video_frame_stream_unittest.cc @@ -189,8 +189,10 @@ class VideoFrameStreamTest DCHECK(pending_read_); frame_read_ = frame; last_read_status_ = status; - if (frame.get() && !frame->IsEndOfStream()) + if (frame.get() && + !frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)) { num_decoded_frames_++; + } pending_read_ = false; } @@ -223,7 +225,9 @@ class VideoFrameStreamTest void ReadAllFrames() { do { ReadOneFrame(); - } while (frame_read_.get() && !frame_read_->IsEndOfStream()); + } while (frame_read_.get() && + !frame_read_->metadata()->IsTrue( + VideoFrameMetadata::END_OF_STREAM)); const int total_num_frames = kNumConfigs * kNumBuffersInOneConfig; DCHECK_EQ(num_decoded_frames_, total_num_frames); @@ -576,7 +580,8 @@ TEST_P(VideoFrameStreamTest, Read_DuringEndOfStreamDecode) { // The read output should indicate end of stream. ASSERT_TRUE(frame_read_.get()); - EXPECT_TRUE(frame_read_->IsEndOfStream()); + EXPECT_TRUE( + frame_read_->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)); } // No Reset() before initialization is successfully completed. 
diff --git a/media/filters/video_renderer_algorithm.cc b/media/filters/video_renderer_algorithm.cc index 2670823..233c130 100644 --- a/media/filters/video_renderer_algorithm.cc +++ b/media/filters/video_renderer_algorithm.cc @@ -353,7 +353,7 @@ size_t VideoRendererAlgorithm::EffectiveFramesQueued() const { void VideoRendererAlgorithm::EnqueueFrame( const scoped_refptr<VideoFrame>& frame) { DCHECK(frame); - DCHECK(!frame->IsEndOfStream()); + DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)); ReadyFrame ready_frame(frame); auto it = frame_queue_.empty() ? frame_queue_.end() diff --git a/media/renderers/video_renderer_impl.cc b/media/renderers/video_renderer_impl.cc index c89aa1d..95067fa 100644 --- a/media/renderers/video_renderer_impl.cc +++ b/media/renderers/video_renderer_impl.cc @@ -486,7 +486,7 @@ void VideoRendererImpl::FrameReady(VideoFrameStream::Status status, return; } - if (frame->IsEndOfStream()) { + if (frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)) { DCHECK(!received_end_of_stream_); received_end_of_stream_ = true; @@ -618,7 +618,7 @@ void VideoRendererImpl::AddReadyFrame_Locked( const scoped_refptr<VideoFrame>& frame) { DCHECK(task_runner_->BelongsToCurrentThread()); lock_.AssertAcquired(); - DCHECK(!frame->IsEndOfStream()); + DCHECK(!frame->metadata()->IsTrue(VideoFrameMetadata::END_OF_STREAM)); frames_decoded_++; diff --git a/media/video/gpu_memory_buffer_video_frame_pool.cc b/media/video/gpu_memory_buffer_video_frame_pool.cc index fb5bec0..23a7a94 100644 --- a/media/video/gpu_memory_buffer_video_frame_pool.cc +++ b/media/video/gpu_memory_buffer_video_frame_pool.cc @@ -197,13 +197,16 @@ GpuMemoryBufferVideoFramePool::PoolImpl::CreateHardwareFrame( } // Create the VideoFrame backed by native textures. 
- return VideoFrame::WrapYUV420NativeTextures( + scoped_refptr<VideoFrame> frame = VideoFrame::WrapYUV420NativeTextures( mailbox_holders[VideoFrame::kYPlane], mailbox_holders[VideoFrame::kUPlane], mailbox_holders[VideoFrame::kVPlane], base::Bind(&PoolImpl::MailboxHoldersReleased, this, frame_resources), size, video_frame->visible_rect(), video_frame->natural_size(), - video_frame->timestamp(), video_frame->allow_overlay()); + video_frame->timestamp()); + if (video_frame->metadata()->IsTrue(VideoFrameMetadata::ALLOW_OVERLAY)) + frame->metadata()->SetBoolean(VideoFrameMetadata::ALLOW_OVERLAY, true); + return frame; } // Destroy all the resources posting one task per FrameResources |