author    rileya@chromium.org <rileya@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-11-08 05:32:29 +0000
committer rileya@chromium.org <rileya@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-11-08 05:32:29 +0000
commit  876937d2934cbd46110d9847bfb121b10cc51c95 (patch)
tree    9c91480b2775307c2d42b4c5fda68a142872ccd5 /media/base
parent  fd47e1bcecd99252a9ec27858f6420905ba06f46 (diff)
Remove EMPTY from VideoFrame::Format.
It was being used to indicate end-of-stream, which was something of an abuse
of the format enum. Instead, we now have CreateEOSFrame(), rather than
CreateEmptyFrame(), and an |end_of_stream_| member.

BUG=313827
TBR=danakj

Review URL: https://codereview.chromium.org/57403003

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@233776 0039d316-1c4b-4281-b951-d872f2087c98
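For readers of this change, a minimal caller-side sketch of the new API follows. OnFrameReady() and SignalEndOfStream() are hypothetical call sites used only for illustration; only VideoFrame::CreateEOSFrame() and end_of_stream() come from this CL.

#include "base/logging.h"
#include "base/memory/ref_counted.h"
#include "media/base/video_frame.h"

// Hypothetical consumer: before this CL it would have called
// frame->IsEndOfStream(), which tested format_ == VideoFrame::EMPTY.
void OnFrameReady(const scoped_refptr<media::VideoFrame>& frame) {
  if (frame->end_of_stream()) {
    // EOS frames carry no pixel data; flush the pipeline instead of drawing.
    return;
  }
  // ... hand |frame| to the renderer ...
}

// Hypothetical producer: signals end-of-stream with the dedicated factory
// instead of constructing an EMPTY-format frame.
void SignalEndOfStream() {
  scoped_refptr<media::VideoFrame> eos = media::VideoFrame::CreateEOSFrame();
  DCHECK(eos->end_of_stream());
  OnFrameReady(eos);
}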
Diffstat (limited to 'media/base')
-rw-r--r--  media/base/video_frame.cc           | 41
-rw-r--r--  media/base/video_frame.h             | 13
-rw-r--r--  media/base/video_frame_unittest.cc   |  6
3 files changed, 31 insertions(+), 29 deletions(-)
diff --git a/media/base/video_frame.cc b/media/base/video_frame.cc
index f706cde..8057e46 100644
--- a/media/base/video_frame.cc
+++ b/media/base/video_frame.cc
@@ -26,7 +26,7 @@ scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
base::TimeDelta timestamp) {
DCHECK(IsValidConfig(format, coded_size, visible_rect, natural_size));
scoped_refptr<VideoFrame> frame(new VideoFrame(
- format, coded_size, visible_rect, natural_size, timestamp));
+ format, coded_size, visible_rect, natural_size, timestamp, false));
switch (format) {
case VideoFrame::YV12:
case VideoFrame::YV12A:
@@ -49,8 +49,6 @@ std::string VideoFrame::FormatToString(VideoFrame::Format format) {
return "YV12";
case VideoFrame::YV16:
return "YV16";
- case VideoFrame::EMPTY:
- return "EMPTY";
case VideoFrame::I420:
return "I420";
case VideoFrame::NATIVE_TEXTURE:
@@ -96,8 +94,12 @@ scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture(
base::TimeDelta timestamp,
const ReadPixelsCB& read_pixels_cb,
const base::Closure& no_longer_needed_cb) {
- scoped_refptr<VideoFrame> frame(new VideoFrame(
- NATIVE_TEXTURE, coded_size, visible_rect, natural_size, timestamp));
+ scoped_refptr<VideoFrame> frame(new VideoFrame(NATIVE_TEXTURE,
+ coded_size,
+ visible_rect,
+ natural_size,
+ timestamp,
+ false));
frame->texture_mailbox_holder_ = mailbox_holder;
frame->texture_target_ = texture_target;
frame->read_pixels_cb_ = read_pixels_cb;
@@ -129,7 +131,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalSharedMemory(
switch (format) {
case I420: {
scoped_refptr<VideoFrame> frame(new VideoFrame(
- format, coded_size, visible_rect, natural_size, timestamp));
+ format, coded_size, visible_rect, natural_size, timestamp, false));
frame->shared_memory_handle_ = handle;
frame->strides_[kYPlane] = coded_size.width();
frame->strides_[kUPlane] = coded_size.width() / 2;
@@ -162,7 +164,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData(
const base::Closure& no_longer_needed_cb) {
DCHECK(format == YV12 || format == YV16 || format == I420) << format;
scoped_refptr<VideoFrame> frame(new VideoFrame(
- format, coded_size, visible_rect, natural_size, timestamp));
+ format, coded_size, visible_rect, natural_size, timestamp, false));
frame->strides_[kYPlane] = y_stride;
frame->strides_[kUPlane] = u_stride;
frame->strides_[kVPlane] = v_stride;
@@ -174,10 +176,13 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData(
}
// static
-scoped_refptr<VideoFrame> VideoFrame::CreateEmptyFrame() {
- return new VideoFrame(
- VideoFrame::EMPTY, gfx::Size(), gfx::Rect(), gfx::Size(),
- base::TimeDelta());
+scoped_refptr<VideoFrame> VideoFrame::CreateEOSFrame() {
+ return new VideoFrame(VideoFrame::UNKNOWN,
+ gfx::Size(),
+ gfx::Rect(),
+ gfx::Size(),
+ kNoTimestamp(),
+ true);
}
// static
@@ -213,7 +218,7 @@ scoped_refptr<VideoFrame> VideoFrame::CreateHoleFrame(
const gfx::Size& size) {
DCHECK(IsValidConfig(VideoFrame::HOLE, size, gfx::Rect(size), size));
scoped_refptr<VideoFrame> frame(new VideoFrame(
- VideoFrame::HOLE, size, gfx::Rect(size), size, base::TimeDelta()));
+ VideoFrame::HOLE, size, gfx::Rect(size), size, base::TimeDelta(), false));
return frame;
}
#endif
@@ -232,7 +237,6 @@ size_t VideoFrame::NumPlanes(Format format) {
return 3;
case VideoFrame::YV12A:
return 4;
- case VideoFrame::EMPTY:
case VideoFrame::UNKNOWN:
break;
}
@@ -266,7 +270,6 @@ size_t VideoFrame::AllocationSize(Format format, const gfx::Size& coded_size) {
return rounded_size * 2;
}
case VideoFrame::UNKNOWN:
- case VideoFrame::EMPTY:
case VideoFrame::NATIVE_TEXTURE:
#if defined(GOOGLE_TV)
case VideoFrame::HOLE:
@@ -340,14 +343,16 @@ VideoFrame::VideoFrame(VideoFrame::Format format,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- base::TimeDelta timestamp)
+ base::TimeDelta timestamp,
+ bool end_of_stream)
: format_(format),
coded_size_(coded_size),
visible_rect_(visible_rect),
natural_size_(natural_size),
texture_target_(0),
shared_memory_handle_(base::SharedMemory::NULLHandle()),
- timestamp_(timestamp) {
+ timestamp_(timestamp),
+ end_of_stream_(end_of_stream) {
memset(&strides_, 0, sizeof(strides_));
memset(&data_, 0, sizeof(data_));
}
@@ -437,10 +442,6 @@ base::SharedMemoryHandle VideoFrame::shared_memory_handle() const {
return shared_memory_handle_;
}
-bool VideoFrame::IsEndOfStream() const {
- return format_ == VideoFrame::EMPTY;
-}
-
void VideoFrame::HashFrameForTesting(base::MD5Context* context) {
for (int plane = 0; plane < kMaxPlanes; ++plane) {
if (!IsValidPlane(plane))
diff --git a/media/base/video_frame.h b/media/base/video_frame.h
index df0ed23..494e645 100644
--- a/media/base/video_frame.h
+++ b/media/base/video_frame.h
@@ -41,7 +41,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
UNKNOWN = 0, // Unknown format value.
YV12 = 6, // 12bpp YVU planar 1x1 Y, 2x2 VU samples
YV16 = 7, // 16bpp YVU planar 1x1 Y, 2x1 VU samples
- EMPTY = 9, // An empty frame.
I420 = 11, // 12bpp YVU planar 1x1 Y, 2x2 UV samples.
NATIVE_TEXTURE = 12, // Native texture. Pixel-format agnostic.
#if defined(GOOGLE_TV)
@@ -169,9 +168,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
base::TimeDelta timestamp,
const base::Closure& no_longer_needed_cb);
- // Creates a frame with format equals to VideoFrame::EMPTY, width, height,
- // and timestamp are all 0.
- static scoped_refptr<VideoFrame> CreateEmptyFrame();
+ // Creates a frame which indicates end-of-stream.
+ static scoped_refptr<VideoFrame> CreateEOSFrame();
// Allocates YV12 frame based on |size|, and sets its data to the YUV(y,u,v).
static scoped_refptr<VideoFrame> CreateColorFrame(
@@ -225,7 +223,7 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
base::SharedMemoryHandle shared_memory_handle() const;
// Returns true if this VideoFrame represents the end of the stream.
- bool IsEndOfStream() const;
+ bool end_of_stream() const { return end_of_stream_; }
base::TimeDelta GetTimestamp() const {
return timestamp_;
@@ -245,7 +243,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- base::TimeDelta timestamp);
+ base::TimeDelta timestamp,
+ bool end_of_stream);
virtual ~VideoFrame();
void AllocateYUV();
@@ -286,6 +285,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
base::TimeDelta timestamp_;
+ const bool end_of_stream_;
+
DISALLOW_IMPLICIT_CONSTRUCTORS(VideoFrame);
};
diff --git a/media/base/video_frame_unittest.cc b/media/base/video_frame_unittest.cc
index 710c69f..20210e5 100644
--- a/media/base/video_frame_unittest.cc
+++ b/media/base/video_frame_unittest.cc
@@ -159,8 +159,8 @@ TEST(VideoFrame, CreateFrame) {
EXPECT_EQ(MD5DigestToBase16(digest), "911991d51438ad2e1a40ed5f6fc7c796");
// Test an empty frame.
- frame = VideoFrame::CreateEmptyFrame();
- EXPECT_TRUE(frame->IsEndOfStream());
+ frame = VideoFrame::CreateEOSFrame();
+ EXPECT_TRUE(frame->end_of_stream());
}
TEST(VideoFrame, CreateBlackFrame) {
@@ -175,7 +175,7 @@ TEST(VideoFrame, CreateBlackFrame) {
// Test basic properties.
EXPECT_EQ(0, frame->GetTimestamp().InMicroseconds());
- EXPECT_FALSE(frame->IsEndOfStream());
+ EXPECT_FALSE(frame->end_of_stream());
// Test |frame| properties.
EXPECT_EQ(VideoFrame::YV12, frame->format());