summaryrefslogtreecommitdiffstats
path: root/media
diff options
context:
space:
mode:
Diffstat (limited to 'media')
-rw-r--r--media/base/mac/video_frame_mac.cc2
-rw-r--r--media/base/mac/video_frame_mac_unittests.cc2
-rw-r--r--media/base/video_decoder_config.cc6
-rw-r--r--media/base/video_frame.cc478
-rw-r--r--media/base/video_frame.h173
-rw-r--r--media/base/video_frame_unittest.cc13
-rw-r--r--media/blink/skcanvas_video_renderer.cc95
-rw-r--r--media/blink/video_frame_compositor.cc11
-rw-r--r--media/blink/webmediaplayer_impl.cc13
-rw-r--r--media/cast/test/end2end_unittest.cc5
-rw-r--r--media/cast/test/utility/video_utility.cc45
-rw-r--r--media/filters/ffmpeg_video_decoder.cc5
-rw-r--r--media/mojo/interfaces/media_types.mojom5
-rw-r--r--media/mojo/services/media_type_converters.cc9
-rw-r--r--media/video/gpu_memory_buffer_video_frame_pool.cc19
15 files changed, 486 insertions, 395 deletions
diff --git a/media/base/mac/video_frame_mac.cc b/media/base/mac/video_frame_mac.cc
index e532ddc..9fd290465 100644
--- a/media/base/mac/video_frame_mac.cc
+++ b/media/base/mac/video_frame_mac.cc
@@ -43,7 +43,7 @@ WrapVideoFrameInCVPixelBuffer(const VideoFrame& frame) {
// represent I420 and NV12 frames. In addition, VideoFrame does not carry
// colorimetric information, so this function assumes standard video range
// and ITU Rec 709 primaries.
- VideoFrame::Format video_frame_format = frame.format();
+ const VideoFrame::Format video_frame_format = frame.format();
OSType cv_format;
if (video_frame_format == VideoFrame::Format::I420) {
cv_format = kCVPixelFormatType_420YpCbCr8Planar;
diff --git a/media/base/mac/video_frame_mac_unittests.cc b/media/base/mac/video_frame_mac_unittests.cc
index 4a5953f..aa03795 100644
--- a/media/base/mac/video_frame_mac_unittests.cc
+++ b/media/base/mac/video_frame_mac_unittests.cc
@@ -61,9 +61,9 @@ TEST(VideoFrameMac, CheckBasicAttributes) {
}
TEST(VideoFrameMac, CheckFormats) {
+ // CreateFrame() does not support non planar YUV, e.g. NV12.
const FormatPair format_pairs[] = {
{VideoFrame::I420, kCVPixelFormatType_420YpCbCr8Planar},
-
{VideoFrame::YV12, 0},
{VideoFrame::YV16, 0},
{VideoFrame::YV12A, 0},
diff --git a/media/base/video_decoder_config.cc b/media/base/video_decoder_config.cc
index dc135d8..6f1c02f 100644
--- a/media/base/video_decoder_config.cc
+++ b/media/base/video_decoder_config.cc
@@ -78,7 +78,7 @@ void VideoDecoderConfig::Initialize(VideoCodec codec,
UmaHistogramAspectRatio("Media.VideoCodedAspectRatio", coded_size);
UMA_HISTOGRAM_COUNTS_10000("Media.VideoVisibleWidth", visible_rect.width());
UmaHistogramAspectRatio("Media.VideoVisibleAspectRatio", visible_rect);
- UMA_HISTOGRAM_ENUMERATION("Media.VideoFormat", format,
+ UMA_HISTOGRAM_ENUMERATION("Media.VideoFramePixelFormat", format,
VideoFrame::FORMAT_MAX + 1);
UMA_HISTOGRAM_ENUMERATION("Media.VideoFrameColorSpace", color_space,
VideoFrame::COLOR_SPACE_MAX + 1);
@@ -98,8 +98,8 @@ bool VideoDecoderConfig::IsValidConfig() const {
return codec_ != kUnknownVideoCodec &&
natural_size_.width() > 0 &&
natural_size_.height() > 0 &&
- VideoFrame::IsValidConfig(format_, coded_size_, visible_rect_,
- natural_size_);
+ VideoFrame::IsValidConfig(format_, VideoFrame::STORAGE_UNKNOWN,
+ coded_size_, visible_rect_, natural_size_);
}
bool VideoDecoderConfig::Matches(const VideoDecoderConfig& config) const {
diff --git a/media/base/video_frame.cc b/media/base/video_frame.cc
index 12b46d5..8e323a6 100644
--- a/media/base/video_frame.cc
+++ b/media/base/video_frame.cc
@@ -53,15 +53,14 @@ static gfx::Size SampleSize(VideoFrame::Format format, size_t plane) {
case VideoFrame::YV12:
case VideoFrame::I420:
case VideoFrame::YV12A:
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
case VideoFrame::NV12:
+#endif
return gfx::Size(2, 2);
case VideoFrame::UNKNOWN:
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
-#endif // defined(VIDEO_HOLE)
- case VideoFrame::NATIVE_TEXTURE:
case VideoFrame::ARGB:
+ case VideoFrame::XRGB:
break;
}
}
@@ -82,16 +81,11 @@ static gfx::Size CommonAlignment(VideoFrame::Format format) {
return gfx::Size(max_sample_width, max_sample_height);
}
-// Returns the number of bytes per element for given |plane| and |format|. E.g.
-// 2 for the UV plane in NV12.
+// Returns the number of bytes per element for given |plane| and |format|.
static int BytesPerElement(VideoFrame::Format format, size_t plane) {
DCHECK(VideoFrame::IsValidPlane(plane, format));
- if (format == VideoFrame::ARGB)
+ if (format == VideoFrame::ARGB || format == VideoFrame::XRGB)
return 4;
-
- if (format == VideoFrame::NV12 && plane == VideoFrame::kUVPlane)
- return 2;
-
return 1;
}
@@ -103,78 +97,70 @@ static gfx::Size AdjustCodedSize(VideoFrame::Format format,
RoundUp(coded_size.height(), alignment.height()));
}
-// static
-scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
- VideoFrame::Format format,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- base::TimeDelta timestamp) {
- switch (format) {
- case VideoFrame::YV12:
- case VideoFrame::YV16:
- case VideoFrame::I420:
- case VideoFrame::YV12A:
- case VideoFrame::YV24:
- break;
+// Release data allocated by AllocateYUV().
+static void ReleaseData(uint8* data) {
+ DCHECK(data);
+ base::AlignedFree(data);
+}
- case VideoFrame::UNKNOWN:
- case VideoFrame::NV12:
- case VideoFrame::NATIVE_TEXTURE:
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
-#endif // defined(VIDEO_HOLE)
- case VideoFrame::ARGB:
- NOTIMPLEMENTED();
- return nullptr;
+//static
+bool VideoFrame::IsYuvPlanar(Format format) {
+ switch (format) {
+ case YV12:
+ case I420:
+ case YV16:
+ case YV12A:
+ case YV24:
+ return true;
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+ case NV12:
+#endif
+ case UNKNOWN:
+ case ARGB:
+ case XRGB:
+ return false;
}
+ return false;
+}
- // Since we're creating a new YUV frame (and allocating memory for it
- // ourselves), we can pad the requested |coded_size| if necessary if the
- // request does not line up on sample boundaries.
- const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);
- DCHECK(IsValidConfig(format, new_coded_size, visible_rect, natural_size));
-
- gpu::MailboxHolder mailboxes[kMaxPlanes];
- scoped_refptr<VideoFrame> frame(
- new VideoFrame(format, new_coded_size, visible_rect, natural_size,
- mailboxes, TEXTURE_RGBA, timestamp, false));
- frame->AllocateYUV();
- return frame;
+//static
+bool VideoFrame::IsMappable(StorageType storage_type) {
+ return storage_type == STORAGE_SHMEM ||
+ storage_type == STORAGE_OWNED_MEMORY ||
+ storage_type == STORAGE_UNOWNED_MEMORY;
}
// static
-std::string VideoFrame::FormatToString(VideoFrame::Format format) {
+std::string VideoFrame::FormatToString(Format format) {
switch (format) {
- case VideoFrame::UNKNOWN:
+ case UNKNOWN:
return "UNKNOWN";
- case VideoFrame::YV12:
+ case YV12:
return "YV12";
- case VideoFrame::YV16:
+ case YV16:
return "YV16";
- case VideoFrame::I420:
+ case I420:
return "I420";
- case VideoFrame::NATIVE_TEXTURE:
- return "NATIVE_TEXTURE";
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
- return "HOLE";
-#endif // defined(VIDEO_HOLE)
- case VideoFrame::YV12A:
+ case YV12A:
return "YV12A";
- case VideoFrame::NV12:
- return "NV12";
- case VideoFrame::YV24:
+ case YV24:
return "YV24";
- case VideoFrame::ARGB:
+ case ARGB:
return "ARGB";
+ case XRGB:
+ return "XRGB";
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+ case NV12:
+ return "NV12";
+#endif
}
- NOTREACHED() << "Invalid videoframe format provided: " << format;
+ NOTREACHED() << "Invalid VideoFrame format provided: " << format;
return "";
}
// static
-bool VideoFrame::IsValidConfig(VideoFrame::Format format,
+bool VideoFrame::IsValidConfig(Format format,
+ StorageType storage_type,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size) {
@@ -190,27 +176,30 @@ bool VideoFrame::IsValidConfig(VideoFrame::Format format,
natural_size.height() > limits::kMaxDimension)
return false;
+// TODO(mcasas): Remove parameter |storage_type| when STORAGE_HOLE and
+// STORAGE_TEXTURE comply with the checks below. Right now we skip them.
+#if defined(VIDEO_HOLE)
+ if (storage_type == STORAGE_HOLE)
+ return true;
+#endif
+ if(storage_type == STORAGE_TEXTURE)
+ return true;
+
// Check format-specific width/height requirements.
switch (format) {
- case VideoFrame::UNKNOWN:
+ case UNKNOWN:
return (coded_size.IsEmpty() && visible_rect.IsEmpty() &&
natural_size.IsEmpty());
-
- // NATIVE_TEXTURE and HOLE have no software-allocated buffers and are
- // allowed to skip the below check.
- case VideoFrame::NATIVE_TEXTURE:
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
-#endif // defined(VIDEO_HOLE)
- return true;
-
- case VideoFrame::YV24:
- case VideoFrame::YV12:
- case VideoFrame::I420:
- case VideoFrame::YV12A:
- case VideoFrame::NV12:
- case VideoFrame::YV16:
- case VideoFrame::ARGB:
+ case YV24:
+ case YV12:
+ case I420:
+ case YV12A:
+ case YV16:
+ case ARGB:
+ case XRGB:
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+ case NV12:
+#endif
// Check that software-allocated buffer formats are aligned correctly and
// not empty.
const gfx::Size alignment = CommonAlignment(format);
@@ -222,11 +211,39 @@ bool VideoFrame::IsValidConfig(VideoFrame::Format format,
!natural_size.IsEmpty();
}
+ // TODO(mcasas): Check that storage type and underlying mailboxes/dataptr are
+ // matching.
NOTREACHED();
return false;
}
// static
+scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
+ Format format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp) {
+ if (!IsYuvPlanar(format)) {
+ NOTIMPLEMENTED();
+ return nullptr;
+ }
+
+ // Since we're creating a new YUV frame (and allocating memory for it
+ // ourselves), we can pad the requested |coded_size| if necessary if the
+ // request does not line up on sample boundaries.
+ const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);
+ DCHECK(IsValidConfig(format, STORAGE_OWNED_MEMORY, new_coded_size,
+ visible_rect, natural_size));
+
+ scoped_refptr<VideoFrame> frame(
+ new VideoFrame(format, STORAGE_OWNED_MEMORY, new_coded_size, visible_rect,
+ natural_size, timestamp, false));
+ frame->AllocateYUV();
+ return frame;
+}
+
+// static
scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture(
const gpu::MailboxHolder& mailbox_holder,
const ReleaseMailboxCB& mailbox_holder_release_cb,
@@ -238,10 +255,10 @@ scoped_refptr<VideoFrame> VideoFrame::WrapNativeTexture(
bool has_alpha) {
gpu::MailboxHolder mailbox_holders[kMaxPlanes];
mailbox_holders[kARGBPlane] = mailbox_holder;
- TextureFormat texture_format = has_alpha ? TEXTURE_RGBA : TEXTURE_RGB;
+ Format texture_format = has_alpha ? ARGB : XRGB;
scoped_refptr<VideoFrame> frame(
- new VideoFrame(NATIVE_TEXTURE, coded_size, visible_rect, natural_size,
- mailbox_holders, texture_format, timestamp, false));
+ new VideoFrame(texture_format, STORAGE_TEXTURE, coded_size, visible_rect,
+ natural_size, mailbox_holders, timestamp, false));
frame->mailbox_holders_release_cb_ = mailbox_holder_release_cb;
frame->allow_overlay_ = allow_overlay;
return frame;
@@ -263,15 +280,29 @@ scoped_refptr<VideoFrame> VideoFrame::WrapYUV420NativeTextures(
mailbox_holders[kUPlane] = u_mailbox_holder;
mailbox_holders[kVPlane] = v_mailbox_holder;
scoped_refptr<VideoFrame> frame(
- new VideoFrame(NATIVE_TEXTURE, coded_size, visible_rect, natural_size,
- mailbox_holders, TEXTURE_YUV_420, timestamp, false));
+ new VideoFrame(I420, STORAGE_TEXTURE, coded_size, visible_rect,
+ natural_size, mailbox_holders, timestamp, false));
frame->mailbox_holders_release_cb_ = mailbox_holder_release_cb;
frame->allow_overlay_ = allow_overlay;
return frame;
}
// static
-scoped_refptr<VideoFrame> VideoFrame::WrapExternalPackedMemory(
+scoped_refptr<VideoFrame> VideoFrame::WrapExternalData(
+ Format format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8* data,
+ size_t data_size,
+ base::TimeDelta timestamp) {
+ return WrapExternalStorage(format, STORAGE_UNOWNED_MEMORY, coded_size,
+ visible_rect, natural_size, data, data_size,
+ timestamp, base::SharedMemory::NULLHandle(), 0);
+}
+
+// static
+scoped_refptr<VideoFrame> VideoFrame::WrapExternalSharedMemory(
Format format,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
@@ -281,33 +312,9 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalPackedMemory(
base::SharedMemoryHandle handle,
size_t data_offset,
base::TimeDelta timestamp) {
- const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);
-
- if (!IsValidConfig(format, new_coded_size, visible_rect, natural_size))
- return NULL;
- if (data_size < AllocationSize(format, new_coded_size))
- return NULL;
-
- switch (format) {
- case VideoFrame::I420: {
- gpu::MailboxHolder mailbox_holders[kMaxPlanes];
- scoped_refptr<VideoFrame> frame(
- new VideoFrame(format, new_coded_size, visible_rect, natural_size,
- mailbox_holders, TEXTURE_RGBA, timestamp, false));
- frame->shared_memory_handle_ = handle;
- frame->shared_memory_offset_ = data_offset;
- frame->strides_[kYPlane] = new_coded_size.width();
- frame->strides_[kUPlane] = new_coded_size.width() / 2;
- frame->strides_[kVPlane] = new_coded_size.width() / 2;
- frame->data_[kYPlane] = data;
- frame->data_[kUPlane] = data + new_coded_size.GetArea();
- frame->data_[kVPlane] = data + (new_coded_size.GetArea() * 5 / 4);
- return frame;
- }
- default:
- NOTIMPLEMENTED();
- return NULL;
- }
+ return WrapExternalStorage(format, STORAGE_SHMEM, coded_size, visible_rect,
+ natural_size, data, data_size, timestamp, handle,
+ data_offset);
}
// static
@@ -324,12 +331,12 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData(
uint8* v_data,
base::TimeDelta timestamp) {
const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);
- CHECK(IsValidConfig(format, new_coded_size, visible_rect, natural_size));
+ CHECK(IsValidConfig(format, STORAGE_UNOWNED_MEMORY, new_coded_size,
+ visible_rect, natural_size));
- gpu::MailboxHolder mailbox_holders[kMaxPlanes];
scoped_refptr<VideoFrame> frame(
- new VideoFrame(format, new_coded_size, visible_rect, natural_size,
- mailbox_holders, TEXTURE_RGBA, timestamp, false));
+ new VideoFrame(format, STORAGE_UNOWNED_MEMORY, new_coded_size,
+ visible_rect, natural_size, timestamp, false));
frame->strides_[kYPlane] = y_stride;
frame->strides_[kUPlane] = u_stride;
frame->strides_[kVPlane] = v_stride;
@@ -339,7 +346,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalYuvData(
return frame;
}
-#if defined(OS_POSIX)
+#if defined(OS_LINUX)
// static
scoped_refptr<VideoFrame> VideoFrame::WrapExternalDmabufs(
Format format,
@@ -348,8 +355,10 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalDmabufs(
const gfx::Size& natural_size,
const std::vector<int> dmabuf_fds,
base::TimeDelta timestamp) {
- if (!IsValidConfig(format, coded_size, visible_rect, natural_size))
+ if (!IsValidConfig(format, STORAGE_DMABUFS, coded_size, visible_rect,
+ natural_size)) {
return NULL;
+ }
// TODO(posciak): This is not exactly correct, it's possible for one
// buffer to contain more than one plane.
@@ -358,10 +367,10 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalDmabufs(
return NULL;
}
- gpu::MailboxHolder mailbox_holders[kMaxPlanes];
+ DCHECK_EQ(format, ARGB);
scoped_refptr<VideoFrame> frame(
- new VideoFrame(format, coded_size, visible_rect, natural_size,
- mailbox_holders, TEXTURE_RGBA, timestamp, false));
+ new VideoFrame(format, STORAGE_DMABUFS, coded_size, visible_rect,
+ natural_size, timestamp, false));
for (size_t i = 0; i < dmabuf_fds.size(); ++i) {
int duped_fd = HANDLE_EINTR(dup(dmabuf_fds[i]));
@@ -394,13 +403,13 @@ scoped_refptr<VideoFrame> VideoFrame::WrapCVPixelBuffer(
Format format;
// There are very few compatible CV pixel formats, so just check each.
if (cv_format == kCVPixelFormatType_420YpCbCr8Planar) {
- format = Format::I420;
+ format = I420;
} else if (cv_format == kCVPixelFormatType_444YpCbCr8) {
- format = Format::YV24;
+ format = YV24;
} else if (cv_format == '420v') {
// TODO(jfroy): Use kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange when the
// minimum OS X and iOS SDKs permits it.
- format = Format::NV12;
+ format = NV12;
} else {
DLOG(ERROR) << "CVPixelBuffer format not supported: " << cv_format;
return NULL;
@@ -410,13 +419,14 @@ scoped_refptr<VideoFrame> VideoFrame::WrapCVPixelBuffer(
const gfx::Rect visible_rect(CVImageBufferGetCleanRect(cv_pixel_buffer));
const gfx::Size natural_size(CVImageBufferGetDisplaySize(cv_pixel_buffer));
- if (!IsValidConfig(format, coded_size, visible_rect, natural_size))
+ if (!IsValidConfig(format, STORAGE_UNOWNED_MEMORY, coded_size, visible_rect,
+ natural_size)) {
return NULL;
+ }
- gpu::MailboxHolder mailbox_holders[kMaxPlanes];
scoped_refptr<VideoFrame> frame(
- new VideoFrame(format, coded_size, visible_rect, natural_size,
- mailbox_holders, TEXTURE_RGBA, timestamp, false));
+ new VideoFrame(format, STORAGE_UNOWNED_MEMORY, coded_size, visible_rect,
+ natural_size, timestamp, false));
frame->cv_pixel_buffer_.reset(cv_pixel_buffer, base::scoped_policy::RETAIN);
return frame;
@@ -428,15 +438,14 @@ scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
const scoped_refptr<VideoFrame>& frame,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size) {
- // NATIVE_TEXTURE frames need mailbox info propagated, and there's no support
+ // STORAGE_TEXTURE frames need mailbox info propagated, and there's no support
// for that here yet, see http://crbug/362521.
- CHECK_NE(frame->format(), NATIVE_TEXTURE);
+ CHECK_NE(frame->storage_type(), STORAGE_TEXTURE);
DCHECK(frame->visible_rect().Contains(visible_rect));
- gpu::MailboxHolder mailbox_holders[kMaxPlanes];
scoped_refptr<VideoFrame> wrapped_frame(
- new VideoFrame(frame->format(), frame->coded_size(), visible_rect,
- natural_size, mailbox_holders, TEXTURE_RGBA,
+ new VideoFrame(frame->format(), frame->storage_type(),
+ frame->coded_size(), visible_rect, natural_size,
frame->timestamp(), frame->end_of_stream()));
for (size_t i = 0; i < NumPlanes(frame->format()); ++i) {
@@ -449,10 +458,8 @@ scoped_refptr<VideoFrame> VideoFrame::WrapVideoFrame(
// static
scoped_refptr<VideoFrame> VideoFrame::CreateEOSFrame() {
- gpu::MailboxHolder mailbox_holders[kMaxPlanes];
- return new VideoFrame(VideoFrame::UNKNOWN, gfx::Size(), gfx::Rect(),
- gfx::Size(), mailbox_holders, TEXTURE_RGBA,
- kNoTimestamp(), true);
+ return new VideoFrame(UNKNOWN, STORAGE_UNKNOWN, gfx::Size(), gfx::Rect(),
+ gfx::Size(), kNoTimestamp(), true);
}
// static
@@ -460,8 +467,8 @@ scoped_refptr<VideoFrame> VideoFrame::CreateColorFrame(
const gfx::Size& size,
uint8 y, uint8 u, uint8 v,
base::TimeDelta timestamp) {
- scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
- VideoFrame::YV12, size, gfx::Rect(size), size, timestamp);
+ scoped_refptr<VideoFrame> frame =
+ CreateFrame(YV12, size, gfx::Rect(size), size, timestamp);
FillYUV(frame.get(), y, u, v);
return frame;
}
@@ -481,28 +488,24 @@ scoped_refptr<VideoFrame> VideoFrame::CreateTransparentFrame(
const uint8 kBlackUV = 0x00;
const uint8 kTransparentA = 0x00;
const base::TimeDelta kZero;
- scoped_refptr<VideoFrame> frame = VideoFrame::CreateFrame(
- VideoFrame::YV12A, size, gfx::Rect(size), size, kZero);
+ scoped_refptr<VideoFrame> frame =
+ CreateFrame(YV12A, size, gfx::Rect(size), size, kZero);
FillYUVA(frame.get(), kBlackY, kBlackUV, kBlackUV, kTransparentA);
return frame;
}
#if defined(VIDEO_HOLE)
// This block and other blocks wrapped around #if defined(VIDEO_HOLE) is not
-// maintained by the general compositor team. Please contact the following
-// people instead:
-//
-// wonsik@chromium.org
-// ycheo@chromium.org
+// maintained by the general compositor team. Please contact
+// wonsik@chromium.org .
// static
scoped_refptr<VideoFrame> VideoFrame::CreateHoleFrame(
const gfx::Size& size) {
- DCHECK(IsValidConfig(VideoFrame::HOLE, size, gfx::Rect(size), size));
- gpu::MailboxHolder mailboxes[kMaxPlanes];
+ DCHECK(IsValidConfig(UNKNOWN, STORAGE_HOLE, size, gfx::Rect(size), size));
scoped_refptr<VideoFrame> frame(
- new VideoFrame(VideoFrame::HOLE, size, gfx::Rect(size), size, mailboxes,
- TEXTURE_RGBA, base::TimeDelta(), false));
+ new VideoFrame(UNKNOWN, STORAGE_HOLE, size, gfx::Rect(size), size,
+ base::TimeDelta(), false));
return frame;
}
#endif // defined(VIDEO_HOLE)
@@ -510,23 +513,21 @@ scoped_refptr<VideoFrame> VideoFrame::CreateHoleFrame(
// static
size_t VideoFrame::NumPlanes(Format format) {
switch (format) {
- case VideoFrame::NATIVE_TEXTURE:
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
-#endif // defined(VIDEO_HOLE)
- return 0;
- case VideoFrame::ARGB:
+ case ARGB:
+ case XRGB:
return 1;
- case VideoFrame::NV12:
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+ case NV12:
return 2;
- case VideoFrame::YV12:
- case VideoFrame::YV16:
- case VideoFrame::I420:
- case VideoFrame::YV24:
+#endif
+ case YV12:
+ case YV16:
+ case I420:
+ case YV24:
return 3;
- case VideoFrame::YV12A:
+ case YV12A:
return 4;
- case VideoFrame::UNKNOWN:
+ case UNKNOWN:
break;
}
NOTREACHED() << "Unsupported video frame format: " << format;
@@ -534,20 +535,6 @@ size_t VideoFrame::NumPlanes(Format format) {
}
// static
-size_t VideoFrame::NumTextures(TextureFormat texture_format) {
- switch (texture_format) {
- case TEXTURE_RGBA:
- case TEXTURE_RGB:
- return 1;
- case TEXTURE_YUV_420:
- return 3;
- }
-
- NOTREACHED();
- return 0;
-}
-
-// static
size_t VideoFrame::AllocationSize(Format format, const gfx::Size& coded_size) {
size_t total = 0;
for (size_t i = 0; i < NumPlanes(format); ++i)
@@ -563,7 +550,7 @@ gfx::Size VideoFrame::PlaneSize(Format format,
int width = coded_size.width();
int height = coded_size.height();
- if (format != VideoFrame::ARGB) {
+ if (format != ARGB) {
// Align to multiple-of-two size overall. This ensures that non-subsampled
// planes can be addressed by pixel with the same scaling as the subsampled
// planes.
@@ -600,20 +587,13 @@ int VideoFrame::PlaneBitsPerPixel(Format format, size_t plane) {
SampleSize(format, plane).height();
}
-// Release data allocated by AllocateYUV().
-static void ReleaseData(uint8* data) {
- DCHECK(data);
- base::AlignedFree(data);
-}
-
void VideoFrame::AllocateYUV() {
- DCHECK(format_ == YV12 || format_ == YV16 || format_ == YV12A ||
- format_ == I420 || format_ == YV24);
+ DCHECK_EQ(storage_type_, STORAGE_OWNED_MEMORY);
static_assert(0 == kYPlane, "y plane data must be index 0");
size_t data_size = 0;
size_t offset[kMaxPlanes];
- for (size_t plane = 0; plane < VideoFrame::NumPlanes(format_); ++plane) {
+ for (size_t plane = 0; plane < NumPlanes(format_); ++plane) {
// The *2 in alignment for height is because some formats (e.g. h264) allow
// interlaced coding, and then the size needs to be a multiple of two
// macroblocks (vertically). See
@@ -637,22 +617,21 @@ void VideoFrame::AllocateYUV() {
base::AlignedAlloc(data_size, kFrameAddressAlignment));
memset(data, 0, data_size);
- for (size_t plane = 0; plane < VideoFrame::NumPlanes(format_); ++plane)
+ for (size_t plane = 0; plane < NumPlanes(format_); ++plane)
data_[plane] = data + offset[plane];
AddDestructionObserver(base::Bind(&ReleaseData, data));
}
-VideoFrame::VideoFrame(VideoFrame::Format format,
+VideoFrame::VideoFrame(Format format,
+ StorageType storage_type,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
- const gpu::MailboxHolder(&mailbox_holders)[kMaxPlanes],
- VideoFrame::TextureFormat texture_format,
base::TimeDelta timestamp,
bool end_of_stream)
: format_(format),
- texture_format_(texture_format),
+ storage_type_(storage_type),
coded_size_(coded_size),
visible_rect_(visible_rect),
natural_size_(natural_size),
@@ -662,12 +641,41 @@ VideoFrame::VideoFrame(VideoFrame::Format format,
release_sync_point_(0),
end_of_stream_(end_of_stream),
allow_overlay_(false) {
- DCHECK(IsValidConfig(format_, coded_size_, visible_rect_, natural_size_));
- memcpy(&mailbox_holders_, mailbox_holders, sizeof(mailbox_holders_));
+ DCHECK(IsValidConfig(format_, storage_type, coded_size_, visible_rect_,
+ natural_size_));
+ memset(&mailbox_holders_, 0, sizeof(mailbox_holders_));
memset(&strides_, 0, sizeof(strides_));
memset(&data_, 0, sizeof(data_));
}
+VideoFrame::VideoFrame(Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp,
+ bool end_of_stream,
+ base::SharedMemoryHandle handle,
+ size_t shared_memory_offset)
+ : VideoFrame(format, storage_type, coded_size, visible_rect, natural_size,
+ timestamp, end_of_stream) {
+ shared_memory_handle_ = handle;
+ shared_memory_offset_ = shared_memory_offset;
+}
+
+VideoFrame::VideoFrame(Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ const gpu::MailboxHolder(&mailbox_holders)[kMaxPlanes],
+ base::TimeDelta timestamp,
+ bool end_of_stream)
+ : VideoFrame(format, storage_type, coded_size, visible_rect, natural_size,
+ timestamp, end_of_stream) {
+ memcpy(&mailbox_holders_, mailbox_holders, sizeof(mailbox_holders_));
+}
+
VideoFrame::~VideoFrame() {
if (!mailbox_holders_release_cb_.is_null()) {
uint32 release_sync_point;
@@ -685,7 +693,48 @@ VideoFrame::~VideoFrame() {
}
// static
-bool VideoFrame::IsValidPlane(size_t plane, VideoFrame::Format format) {
+scoped_refptr<VideoFrame> VideoFrame::WrapExternalStorage(
+ Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8* data,
+ size_t data_size,
+ base::TimeDelta timestamp,
+ base::SharedMemoryHandle handle,
+ size_t data_offset) {
+ const gfx::Size new_coded_size = AdjustCodedSize(format, coded_size);
+
+ if (!IsValidConfig(format, storage_type, new_coded_size, visible_rect,
+ natural_size) ||
+ data_size < AllocationSize(format, new_coded_size)) {
+ return NULL;
+ }
+ DLOG_IF(ERROR, format != I420) << "Only I420 format supported: "
+ << FormatToString(format);
+ if (format != I420)
+ return NULL;
+
+ scoped_refptr<VideoFrame> frame;
+ if (storage_type == STORAGE_SHMEM) {
+ frame = new VideoFrame(format, storage_type, new_coded_size, visible_rect,
+ natural_size, timestamp, false, handle, data_offset);
+ } else {
+ frame = new VideoFrame(format, storage_type, new_coded_size, visible_rect,
+ natural_size, timestamp, false);
+ }
+ frame->strides_[kYPlane] = new_coded_size.width();
+ frame->strides_[kUPlane] = new_coded_size.width() / 2;
+ frame->strides_[kVPlane] = new_coded_size.width() / 2;
+ frame->data_[kYPlane] = data;
+ frame->data_[kUPlane] = data + new_coded_size.GetArea();
+ frame->data_[kVPlane] = data + (new_coded_size.GetArea() * 5 / 4);
+ return frame;
+}
+
+// static
+bool VideoFrame::IsValidPlane(size_t plane, Format format) {
return (plane < NumPlanes(format));
}
@@ -695,9 +744,7 @@ int VideoFrame::stride(size_t plane) const {
}
// static
-size_t VideoFrame::RowBytes(size_t plane,
- VideoFrame::Format format,
- int width) {
+size_t VideoFrame::RowBytes(size_t plane, Format format, int width) {
DCHECK(IsValidPlane(plane, format));
return BytesPerElement(format, plane) * Columns(plane, format, width);
}
@@ -707,7 +754,7 @@ int VideoFrame::row_bytes(size_t plane) const {
}
// static
-size_t VideoFrame::Rows(size_t plane, VideoFrame::Format format, int height) {
+size_t VideoFrame::Rows(size_t plane, Format format, int height) {
DCHECK(IsValidPlane(plane, format));
const int sample_height = SampleSize(format, plane).height();
return RoundUp(height, sample_height) / sample_height;
@@ -726,16 +773,19 @@ int VideoFrame::rows(size_t plane) const {
const uint8* VideoFrame::data(size_t plane) const {
DCHECK(IsValidPlane(plane, format_));
+ DCHECK(IsMappable(storage_type_));
return data_[plane];
}
uint8* VideoFrame::data(size_t plane) {
DCHECK(IsValidPlane(plane, format_));
+ DCHECK(IsMappable(storage_type_));
return data_[plane];
}
const uint8* VideoFrame::visible_data(size_t plane) const {
DCHECK(IsValidPlane(plane, format_));
+ DCHECK(IsMappable(storage_type_));
// Calculate an offset that is properly aligned for all planes.
const gfx::Size alignment = CommonAlignment(format_);
@@ -756,17 +806,26 @@ uint8* VideoFrame::visible_data(size_t plane) {
static_cast<const VideoFrame*>(this)->visible_data(plane));
}
-const gpu::MailboxHolder& VideoFrame::mailbox_holder(size_t texture) const {
- DCHECK_EQ(format_, NATIVE_TEXTURE);
- DCHECK_LT(texture, NumTextures(texture_format_));
- return mailbox_holders_[texture];
+const gpu::MailboxHolder&
+VideoFrame::mailbox_holder(size_t texture_index) const {
+#if defined(OS_LINUX)
+ DCHECK(storage_type_ == STORAGE_TEXTURE || storage_type_ == STORAGE_DMABUFS);
+#else
+ DCHECK(storage_type_ == STORAGE_TEXTURE);
+#endif
+ DCHECK_LT(texture_index, NumPlanes(format_));
+ return mailbox_holders_[texture_index];
}
base::SharedMemoryHandle VideoFrame::shared_memory_handle() const {
+ DCHECK_EQ(storage_type_, STORAGE_SHMEM);
+ DCHECK(shared_memory_handle_ != base::SharedMemory::NULLHandle());
return shared_memory_handle_;
}
size_t VideoFrame::shared_memory_offset() const {
+ DCHECK_EQ(storage_type_, STORAGE_SHMEM);
+ DCHECK(shared_memory_handle_ != base::SharedMemory::NULLHandle());
return shared_memory_offset_;
}
@@ -776,7 +835,11 @@ void VideoFrame::AddDestructionObserver(const base::Closure& callback) {
}
void VideoFrame::UpdateReleaseSyncPoint(SyncPointClient* client) {
- DCHECK_EQ(format_, NATIVE_TEXTURE);
+#if defined(OS_LINUX)
+ DCHECK(storage_type_ == STORAGE_TEXTURE || storage_type_ == STORAGE_DMABUFS);
+#else
+ DCHECK(storage_type_ == STORAGE_TEXTURE);
+#endif
base::AutoLock locker(release_sync_point_lock_);
// Must wait on the previous sync point before inserting a new sync point so
// that |mailbox_holders_release_cb_| guarantees the previous sync point
@@ -786,8 +849,9 @@ void VideoFrame::UpdateReleaseSyncPoint(SyncPointClient* client) {
release_sync_point_ = client->InsertSyncPoint();
}
-#if defined(OS_POSIX)
+#if defined(OS_LINUX)
int VideoFrame::dmabuf_fd(size_t plane) const {
+ DCHECK_EQ(storage_type_, STORAGE_DMABUFS);
return dmabuf_fds_[plane].get();
}
#endif
diff --git a/media/base/video_frame.h b/media/base/video_frame.h
index dd3fcc0..90d29f4 100644
--- a/media/base/video_frame.h
+++ b/media/base/video_frame.h
@@ -48,33 +48,23 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// http://www.fourcc.org/yuv.php
// Logged to UMA, so never reuse values. Leave gaps if necessary.
enum Format {
- UNKNOWN = 0, // Unknown format value.
+ UNKNOWN = 0, // Unknown or unspecified format value.
YV12 = 1, // 12bpp YVU planar 1x1 Y, 2x2 VU samples.
- I420 = 2, // 12bpp YVU planar 1x1 Y, 2x2 UV samples, a.k.a. YU12.
+ I420 = 2, // 12bpp YUV planar 1x1 Y, 2x2 UV samples, a.k.a. YU12.
YV16 = 3, // 16bpp YVU planar 1x1 Y, 2x1 VU samples.
- YV12A = 4, // 20bpp YUVA planar 1x1 Y, 2x2 VU, 1x1 A samples.
+ YV12A = 4, // 20bpp YUVA planar 1x1 Y, 2x2 VU, 1x1 A samples.
YV24 = 5, // 24bpp YUV planar, no subsampling.
- NV12 = 6, // 12bpp 1x1 Y plane followed by an interleaved 2x2 UV plane.
- ARGB = 7, // 32bpp ARGB, 1 plane.
-#if defined(VIDEO_HOLE)
- HOLE = 8, // Hole frame.
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+ NV12 = 6, // 12bpp with Y plane followed by a 2x2 interleaved UV plane.
#endif
- NATIVE_TEXTURE = 9, // Native texture. Pixel-format agnostic.
+ ARGB = 7, // 32bpp ARGB, 1 plane.
+ XRGB = 8, // 24bpp XRGB, 1 plane.
// Please update UMA histogram enumeration when adding new formats here.
- // Must always be equal to largest entry logged.
- FORMAT_MAX = NATIVE_TEXTURE,
- };
-
- // Defines the internal format and the number of the textures in the mailbox
- // holders.
- enum TextureFormat {
- TEXTURE_RGBA, // One RGBA texture.
- TEXTURE_RGB, // One RGB texture.
- TEXTURE_YUV_420, // 3 RED textures one per channel. UV are 2x2 subsampled.
+ FORMAT_MAX = XRGB, // Must always be equal to largest entry logged.
};
// Color space or color range used for the pixels, in general this is left
- // unspecified, meaning SD is assumed.
+ // unspecified, meaning Rec601 (SD) is assumed.
enum ColorSpace {
COLOR_SPACE_UNSPECIFIED = 0, // In general this is Rec601.
COLOR_SPACE_JPEG = 1, // JPEG color range.
@@ -82,33 +72,62 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
COLOR_SPACE_MAX = COLOR_SPACE_HD_REC709,
};
+ // Defines the pixel storage type. STORAGE_{OWNED, UNOWNED}_MEMORY and
+ // STORAGE_SHMEM have in common that are mappable, i.e. can be accessed from
+ // the VideoFrame memory space, whereas the rest, in principle, can't.
+ enum StorageType {
+ STORAGE_UNKNOWN = 0,
+ STORAGE_UNOWNED_MEMORY = 1, // External, non owned data pointers.
+ STORAGE_OWNED_MEMORY = 2, // VideoFrame has allocated its own data buffer.
+ STORAGE_SHMEM = 3, // Pixels are backed by Shared Memory.
+#if defined(OS_LINUX)
+ STORAGE_DMABUFS = 4, // Each plane is stored into a DmaBuf.
+#endif
+#if defined(VIDEO_HOLE)
+ // Opaque storage.
+ STORAGE_HOLE = 5,
+#endif
+ // Pixels are stored in textures, one per plane, referred by MailboxHolders.
+ STORAGE_TEXTURE = 6,
+ STORAGE_MAX = STORAGE_TEXTURE, // Always equal to the last StorageType.
+ };
+
+ // CB to be called on the mailbox backing this frame when the frame is
+ // destroyed.
+ typedef base::Callback<void(uint32)> ReleaseMailboxCB;
+
// Returns the name of a Format as a string.
static std::string FormatToString(Format format);
- // Creates a new frame in system memory with given parameters. Buffers for
- // the frame are allocated but not initialized.
- static scoped_refptr<VideoFrame> CreateFrame(
- Format format,
- const gfx::Size& coded_size,
- const gfx::Rect& visible_rect,
- const gfx::Size& natural_size,
- base::TimeDelta timestamp);
+ // Returns true if |format| is a YUV non interleaved format.
+ static bool IsYuvPlanar(Format format);
- // Returns true if |plane| is a valid plane number for the given format. This
- // can be used to DCHECK() plane parameters.
+ // Returns true if |storage_type| is mappable in the VideoFrame memory space.
+ static bool IsMappable(StorageType storage_type);
+
+ // Returns true if |plane| is a valid plane number for the given format.
static bool IsValidPlane(size_t plane, VideoFrame::Format format);
// Call prior to CreateFrame to ensure validity of frame configuration. Called
// automatically by VideoDecoderConfig::IsValidConfig().
// TODO(scherkus): VideoDecoderConfig shouldn't call this method
static bool IsValidConfig(Format format,
+ StorageType storage_type,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size);
- // CB to be called on the mailbox backing this frame when the frame is
- // destroyed.
- typedef base::Callback<void(uint32)> ReleaseMailboxCB;
+ // Creates a new YUV frame in system memory with given parameters (|format|
+ // must be YUV). Buffers for the frame are allocated but not initialized. The
+ // caller most not make assumptions about the actual underlying size(s), but
+ // check the returned VideoFrame instead.
+ // TODO(mcasas): implement the RGB version of this factory method.
+ static scoped_refptr<VideoFrame> CreateFrame(
+ Format format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp);
// Wraps a native texture of the given parameters with a VideoFrame.
// The backing of the VideoFrame is held in the mailbox held by
@@ -141,10 +160,18 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// Wraps packed image data residing in a memory buffer with a VideoFrame.
// The image data resides in |data| and is assumed to be packed tightly in a
// buffer of logical dimensions |coded_size| with the appropriate bit depth
- // and plane count as given by |format|. The shared memory handle of the
- // backing allocation, if present, can be passed in with |handle|.
- // Returns NULL on failure.
- static scoped_refptr<VideoFrame> WrapExternalPackedMemory(
+ // and plane count as given by |format|. Returns NULL on failure.
+ static scoped_refptr<VideoFrame> WrapExternalData(
+ Format format,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8* data,
+ size_t data_size,
+ base::TimeDelta timestamp);
+
+ // Same as WrapExternalData() with SharedMemoryHandle and its offset.
+ static scoped_refptr<VideoFrame> WrapExternalSharedMemory(
Format format,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
@@ -170,7 +197,7 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
uint8* v_data,
base::TimeDelta timestamp);
-#if defined(OS_POSIX)
+#if defined(OS_LINUX)
// Wraps provided dmabufs
// (https://www.kernel.org/doc/Documentation/dma-buf-sharing.txt) with a
// VideoFrame. The dmabuf fds are dup()ed on creation, so that the VideoFrame
@@ -236,8 +263,6 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
static size_t NumPlanes(Format format);
- static size_t NumTextures(TextureFormat texture_format);
-
// Returns the required allocation size for a (tightly packed) frame of the
// given coded size and format.
static size_t AllocationSize(Format format, const gfx::Size& coded_size);
@@ -274,7 +299,7 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
Format format() const { return format_; }
- TextureFormat texture_format() const { return texture_format_; }
+ StorageType storage_type() const { return storage_type_; }
const gfx::Size& coded_size() const { return coded_size_; }
const gfx::Rect& visible_rect() const { return visible_rect_; }
@@ -289,22 +314,23 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
int row_bytes(size_t plane) const;
int rows(size_t plane) const;
- // Returns pointer to the buffer for a given plane. The memory is owned by
- // VideoFrame object and must not be freed by the caller.
+ // Returns pointer to the buffer for a given plane, if this is an IsMappable()
+ // frame type. The memory is owned by VideoFrame object and must not be freed
+ // by the caller.
const uint8* data(size_t plane) const;
uint8* data(size_t plane);
- // Returns pointer to the data in the visible region of the frame. I.e. the
- // returned pointer is offsetted into the plane buffer specified by
- // visible_rect().origin(). Memory is owned by VideoFrame object and must not
- // be freed by the caller.
+ // Returns pointer to the data in the visible region of the frame, for
+ // IsMappable() storage types. The returned pointer is offsetted into the
+ // plane buffer specified by visible_rect().origin(). Memory is owned by
+ // VideoFrame object and must not be freed by the caller.
const uint8* visible_data(size_t plane) const;
uint8* visible_data(size_t plane);
// Returns a mailbox holder for a given texture.
// Only valid to call if this is a NATIVE_TEXTURE frame. Before using the
// mailbox, the caller must wait for the included sync point.
- const gpu::MailboxHolder& mailbox_holder(size_t texture) const;
+ const gpu::MailboxHolder& mailbox_holder(size_t texture_index) const;
// Returns the shared-memory handle, if present
base::SharedMemoryHandle shared_memory_handle() const;
@@ -331,7 +357,7 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
bool allow_overlay() const { return allow_overlay_; }
-#if defined(OS_POSIX)
+#if defined(OS_LINUX)
// Returns backing dmabuf file descriptor for given |plane|, if present.
int dmabuf_fd(size_t plane) const;
#endif
@@ -344,10 +370,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// Returns true if this VideoFrame represents the end of the stream.
bool end_of_stream() const { return end_of_stream_; }
- base::TimeDelta timestamp() const {
- return timestamp_;
- }
- void set_timestamp(const base::TimeDelta& timestamp) {
+ base::TimeDelta timestamp() const { return timestamp_; }
+ void set_timestamp(base::TimeDelta timestamp) {
timestamp_ = timestamp;
}
@@ -377,22 +401,50 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
// Clients must use the static CreateFrame() method to create a new frame.
VideoFrame(Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp,
+ bool end_of_stream);
+ VideoFrame(Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ base::TimeDelta timestamp,
+ bool end_of_stream,
+ base::SharedMemoryHandle handle,
+ size_t shared_memory_offset);
+ VideoFrame(Format format,
+ StorageType storage_type,
const gfx::Size& coded_size,
const gfx::Rect& visible_rect,
const gfx::Size& natural_size,
const gpu::MailboxHolder(&mailbox_holders)[kMaxPlanes],
- TextureFormat texture_format,
base::TimeDelta timestamp,
bool end_of_stream);
virtual ~VideoFrame();
+ static scoped_refptr<VideoFrame> WrapExternalStorage(
+ Format format,
+ StorageType storage_type,
+ const gfx::Size& coded_size,
+ const gfx::Rect& visible_rect,
+ const gfx::Size& natural_size,
+ uint8* data,
+ size_t data_size,
+ base::TimeDelta timestamp,
+ base::SharedMemoryHandle handle,
+ size_t data_offset);
+
void AllocateYUV();
// Frame format.
const Format format_;
- // Format of the native textures associated with this frame.
- const TextureFormat texture_format_;
+ // Storage type for the different planes.
+ const StorageType storage_type_;
// Width and height of the video frame, in pixels. This must include pixel
// data for the whole image; i.e. for YUV formats with subsampled chroma
@@ -416,19 +468,20 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
int32 strides_[kMaxPlanes];
// Array of data pointers to each plane.
+ // TODO(mcasas): we don't know on ctor if we own |data_| or not. After
+ // refactoring VideoFrame, change to scoped_ptr<uint8, AlignedFreeDeleter>.
uint8* data_[kMaxPlanes];
- // Native texture mailboxes, if this is a NATIVE_TEXTURE frame.
+ // Native texture mailboxes, if this is a STORAGE_TEXTURE frame.
gpu::MailboxHolder mailbox_holders_[kMaxPlanes];
ReleaseMailboxCB mailbox_holders_release_cb_;
- // Shared memory handle, if this frame was allocated from shared memory.
+ // Shared memory handle and associated offset inside it, if this frame was
+ // constructed as STORAGE_SHMEM.
base::SharedMemoryHandle shared_memory_handle_;
-
- // Offset in shared memory buffer.
size_t shared_memory_offset_;
-#if defined(OS_POSIX)
+#if defined(OS_LINUX)
// Dmabufs for each plane, if this frame is wrapping memory
// acquired via dmabuf.
base::ScopedFD dmabuf_fds_[kMaxPlanes];
diff --git a/media/base/video_frame_unittest.cc b/media/base/video_frame_unittest.cc
index 213d41e..6e67668 100644
--- a/media/base/video_frame_unittest.cc
+++ b/media/base/video_frame_unittest.cc
@@ -76,8 +76,7 @@ void ExpectFrameColor(media::VideoFrame* yv12_frame, uint32 expect_rgb_color) {
uint32* rgb_row_data = reinterpret_cast<uint32*>(
rgb_data + (bytes_per_row * row));
for (int col = 0; col < yv12_frame->coded_size().width(); ++col) {
- SCOPED_TRACE(
- base::StringPrintf("Checking (%d, %d)", row, col));
+ SCOPED_TRACE(base::StringPrintf("Checking (%d, %d)", row, col));
EXPECT_EQ(expect_rgb_color, rgb_row_data[col]);
}
}
@@ -261,7 +260,8 @@ TEST(VideoFrame, TextureNoLongerNeededCallbackIsCalled) {
base::TimeDelta(), // timestamp
false, // allow_overlay
true); // has_alpha
- EXPECT_EQ(VideoFrame::TEXTURE_RGBA, frame->texture_format());
+ EXPECT_EQ(VideoFrame::STORAGE_TEXTURE, frame->storage_type());
+ EXPECT_EQ(VideoFrame::ARGB, frame->format());
}
// Nobody set a sync point to |frame|, so |frame| set |called_sync_point| to 0
// as default value.
@@ -310,9 +310,10 @@ TEST(VideoFrame,
base::TimeDelta(), // timestamp
false); // allow_overlay
- EXPECT_EQ(VideoFrame::TEXTURE_YUV_420, frame->texture_format());
- EXPECT_EQ(3u, VideoFrame::NumTextures(frame->texture_format()));
- for (size_t i = 0; i < VideoFrame::NumTextures(frame->texture_format());
+ EXPECT_EQ(VideoFrame::STORAGE_TEXTURE, frame->storage_type());
+ EXPECT_EQ(VideoFrame::I420, frame->format());
+ EXPECT_EQ(3u, VideoFrame::NumPlanes(frame->format()));
+ for (size_t i = 0; i < VideoFrame::NumPlanes(frame->format());
++i) {
const gpu::MailboxHolder& mailbox_holder = frame->mailbox_holder(i);
EXPECT_EQ(mailbox[i].name[0], mailbox_holder.mailbox.name[0]);
diff --git a/media/blink/skcanvas_video_renderer.cc b/media/blink/skcanvas_video_renderer.cc
index f86e118..2d3a416 100644
--- a/media/blink/skcanvas_video_renderer.cc
+++ b/media/blink/skcanvas_video_renderer.cc
@@ -42,27 +42,6 @@ namespace {
// a temporary resource if it is not used for 3 sec.
const int kTemporaryResourceDeletionDelay = 3; // Seconds;
-bool IsYUV(media::VideoFrame::Format format) {
- switch (format) {
- case VideoFrame::YV12:
- case VideoFrame::YV16:
- case VideoFrame::I420:
- case VideoFrame::YV12A:
- case VideoFrame::YV24:
- case VideoFrame::NV12:
- return true;
- case VideoFrame::UNKNOWN:
- case VideoFrame::NATIVE_TEXTURE:
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
-#endif // defined(VIDEO_HOLE)
- case VideoFrame::ARGB:
- return false;
- }
- NOTREACHED() << "Invalid videoframe format provided: " << format;
- return false;
-}
-
bool CheckColorSpace(const scoped_refptr<VideoFrame>& video_frame,
VideoFrame::ColorSpace color_space) {
int result;
@@ -71,10 +50,6 @@ bool CheckColorSpace(const scoped_refptr<VideoFrame>& video_frame,
result == color_space;
}
-bool IsYUVOrNative(media::VideoFrame::Format format) {
- return IsYUV(format) || format == media::VideoFrame::NATIVE_TEXTURE;
-}
-
bool IsSkBitmapProperlySizedTexture(const SkBitmap* bitmap,
const gfx::Size& size) {
return bitmap->getTexture() && bitmap->width() == size.width() &&
@@ -184,7 +159,7 @@ class VideoImageGenerator : public SkImageGenerator {
void* planes[3],
size_t row_bytes[3],
SkYUVColorSpace* color_space) override {
- if (!frame_.get() || !IsYUV(frame_->format()) ||
+ if (!frame_.get() || !media::VideoFrame::IsYuvPlanar(frame_->format()) ||
// TODO(rileya): Skia currently doesn't support Rec709 YUV conversion,
// or YUVA conversion. Remove this case once it does. As-is we will
// fall back on the pure-software path in this case.
@@ -294,7 +269,8 @@ void SkCanvasVideoRenderer::Paint(const scoped_refptr<VideoFrame>& video_frame,
// Paint black rectangle if there isn't a frame available or the
// frame has an unexpected format.
if (!video_frame.get() || video_frame->natural_size().IsEmpty() ||
- !IsYUVOrNative(video_frame->format())) {
+ !(media::VideoFrame::IsYuvPlanar(video_frame->format()) ||
+ (video_frame->storage_type() == media::VideoFrame::STORAGE_TEXTURE))) {
canvas->drawRect(dest, paint);
canvas->flush();
return;
@@ -302,7 +278,7 @@ void SkCanvasVideoRenderer::Paint(const scoped_refptr<VideoFrame>& video_frame,
SkBitmap* target_frame = nullptr;
- if (video_frame->format() == VideoFrame::NATIVE_TEXTURE) {
+ if (video_frame->storage_type() == VideoFrame::STORAGE_TEXTURE) {
// Draw HW Video on both SW and HW Canvas.
// In SW Canvas case, rely on skia drawing Ganesh SkBitmap on SW SkCanvas.
if (accelerated_last_frame_.isNull() ||
@@ -329,7 +305,6 @@ void SkCanvasVideoRenderer::Paint(const scoped_refptr<VideoFrame>& video_frame,
target_frame = &accelerated_last_frame_;
accelerated_frame_deleting_timer_.Reset();
} else if (canvas->getGrContext()) {
- DCHECK(video_frame->format() != VideoFrame::NATIVE_TEXTURE);
if (accelerated_last_frame_.isNull() ||
video_frame->timestamp() != accelerated_last_frame_timestamp_) {
// Draw SW Video on HW Canvas.
@@ -358,7 +333,7 @@ void SkCanvasVideoRenderer::Paint(const scoped_refptr<VideoFrame>& video_frame,
accelerated_frame_deleting_timer_.Reset();
} else {
// Draw SW Video on SW Canvas.
- DCHECK(video_frame->format() != VideoFrame::NATIVE_TEXTURE);
+ DCHECK(VideoFrame::IsMappable(video_frame->storage_type()));
if (last_frame_.isNull() ||
video_frame->timestamp() != last_frame_timestamp_) {
// Check if |bitmap| needs to be (re)allocated.
@@ -442,29 +417,31 @@ void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
const scoped_refptr<media::VideoFrame>& video_frame,
void* rgb_pixels,
size_t row_bytes) {
- DCHECK(IsYUVOrNative(video_frame->format()))
- << video_frame->format();
- if (IsYUV(video_frame->format())) {
- DCHECK_EQ(video_frame->stride(media::VideoFrame::kUPlane),
- video_frame->stride(media::VideoFrame::kVPlane));
+ if (!VideoFrame::IsMappable(video_frame->storage_type())) {
+ NOTREACHED() << "Cannot extract pixels from non-CPU frame formats.";
+ return;
}
-
- size_t y_offset = 0;
- size_t uv_offset = 0;
- if (IsYUV(video_frame->format())) {
- int y_shift = (video_frame->format() == media::VideoFrame::YV16) ? 0 : 1;
- // Use the "left" and "top" of the destination rect to locate the offset
- // in Y, U and V planes.
- y_offset = (video_frame->stride(media::VideoFrame::kYPlane) *
- video_frame->visible_rect().y()) +
- video_frame->visible_rect().x();
- // For format YV12, there is one U, V value per 2x2 block.
- // For format YV16, there is one U, V value per 2x1 block.
- uv_offset = (video_frame->stride(media::VideoFrame::kUPlane) *
- (video_frame->visible_rect().y() >> y_shift)) +
- (video_frame->visible_rect().x() >> 1);
+ if (!media::VideoFrame::IsYuvPlanar(video_frame->format())) {
+ NOTREACHED() << "Non YUV formats are not supported";
+ return;
}
+ DCHECK_EQ(video_frame->stride(media::VideoFrame::kUPlane),
+ video_frame->stride(media::VideoFrame::kVPlane));
+
+ const int y_shift =
+ (video_frame->format() == media::VideoFrame::YV16) ? 0 : 1;
+ // Use the "left" and "top" of the destination rect to locate the offset
+ // in Y, U and V planes.
+ const size_t y_offset = (video_frame->stride(media::VideoFrame::kYPlane) *
+ video_frame->visible_rect().y()) +
+ video_frame->visible_rect().x();
+ // For format YV12, there is one U, V value per 2x2 block.
+ // For format YV16, there is one U, V value per 2x1 block.
+ const size_t uv_offset = (video_frame->stride(media::VideoFrame::kUPlane) *
+ (video_frame->visible_rect().y() >> y_shift)) +
+ (video_frame->visible_rect().x() >> 1);
+
switch (video_frame->format()) {
case VideoFrame::YV12:
case VideoFrame::I420:
@@ -560,16 +537,12 @@ void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
video_frame->visible_rect().height());
#endif
break;
-
- case VideoFrame::NATIVE_TEXTURE:
- NOTREACHED();
- break;
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
-#endif // defined(VIDEO_HOLE)
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+ case VideoFrame::NV12:
+#endif
case VideoFrame::ARGB:
+ case VideoFrame::XRGB:
case VideoFrame::UNKNOWN:
- case VideoFrame::NV12:
NOTREACHED();
}
}
@@ -583,8 +556,10 @@ void SkCanvasVideoRenderer::CopyVideoFrameTextureToGLTexture(
unsigned int type,
bool premultiply_alpha,
bool flip_y) {
- DCHECK(video_frame && video_frame->format() == VideoFrame::NATIVE_TEXTURE);
- DCHECK_EQ(1u, VideoFrame::NumTextures(video_frame->texture_format()));
+ DCHECK(video_frame);
+ DCHECK_EQ(video_frame->storage_type(), VideoFrame::STORAGE_TEXTURE);
+ DCHECK_EQ(1u, VideoFrame::NumPlanes(video_frame->format()));
+
const gpu::MailboxHolder& mailbox_holder = video_frame->mailbox_holder(0);
DCHECK(mailbox_holder.texture_target == GL_TEXTURE_2D ||
mailbox_holder.texture_target == GL_TEXTURE_RECTANGLE_ARB ||
diff --git a/media/blink/video_frame_compositor.cc b/media/blink/video_frame_compositor.cc
index 2be84ad..d815fd1 100644
--- a/media/blink/video_frame_compositor.cc
+++ b/media/blink/video_frame_compositor.cc
@@ -16,21 +16,20 @@ namespace media {
// background rendering to keep the Render() callbacks moving.
const int kBackgroundRenderingTimeoutMs = 250;
+// Returns true if the format has no Alpha channel (hence is always opaque).
static bool IsOpaque(const scoped_refptr<VideoFrame>& frame) {
switch (frame->format()) {
case VideoFrame::UNKNOWN:
case VideoFrame::YV12:
- case VideoFrame::YV16:
case VideoFrame::I420:
+ case VideoFrame::YV16:
case VideoFrame::YV24:
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
case VideoFrame::NV12:
+#endif
+ case VideoFrame::XRGB:
return true;
-
case VideoFrame::YV12A:
-#if defined(VIDEO_HOLE)
- case VideoFrame::HOLE:
-#endif // defined(VIDEO_HOLE)
- case VideoFrame::NATIVE_TEXTURE:
case VideoFrame::ARGB:
break;
}
diff --git a/media/blink/webmediaplayer_impl.cc b/media/blink/webmediaplayer_impl.cc
index f48f982..da8feec 100644
--- a/media/blink/webmediaplayer_impl.cc
+++ b/media/blink/webmediaplayer_impl.cc
@@ -510,16 +510,14 @@ void WebMediaPlayerImpl::paint(blink::WebCanvas* canvas,
// - We haven't reached HAVE_CURRENT_DATA and need to paint black
// - We're painting to a canvas
// See http://crbug.com/341225 http://crbug.com/342621 for details.
- scoped_refptr<VideoFrame> video_frame =
- GetCurrentFrameFromCompositor();
+ scoped_refptr<VideoFrame> video_frame = GetCurrentFrameFromCompositor();
gfx::Rect gfx_rect(rect);
Context3D context_3d;
if (video_frame.get() &&
- video_frame->format() == VideoFrame::NATIVE_TEXTURE) {
- if (!context_3d_cb_.is_null()) {
+ video_frame->storage_type() == VideoFrame::STORAGE_TEXTURE) {
+ if (!context_3d_cb_.is_null())
context_3d = context_3d_cb_.Run();
- }
// GPU Process crashed.
if (!context_3d.gl)
return;
@@ -594,11 +592,10 @@ bool WebMediaPlayerImpl::copyVideoTextureToPlatformTexture(
bool flip_y) {
TRACE_EVENT0("media", "WebMediaPlayerImpl:copyVideoTextureToPlatformTexture");
- scoped_refptr<VideoFrame> video_frame =
- GetCurrentFrameFromCompositor();
+ scoped_refptr<VideoFrame> video_frame = GetCurrentFrameFromCompositor();
if (!video_frame.get() ||
- video_frame->format() != VideoFrame::NATIVE_TEXTURE) {
+ video_frame->storage_type() != VideoFrame::STORAGE_TEXTURE) {
return false;
}
diff --git a/media/cast/test/end2end_unittest.cc b/media/cast/test/end2end_unittest.cc
index 15fdf74..4b3bbba 100644
--- a/media/cast/test/end2end_unittest.cc
+++ b/media/cast/test/end2end_unittest.cc
@@ -656,8 +656,9 @@ class End2EndTest : public ::testing::Test {
// since the video clock may not be the same as the reference clock.
const base::TimeDelta time_diff = reference_time - start_time_;
const gfx::Size size(kVideoHdWidth, kVideoHdHeight);
- EXPECT_TRUE(VideoFrame::IsValidConfig(
- VideoFrame::I420, size, gfx::Rect(size), size));
+ EXPECT_TRUE(VideoFrame::IsValidConfig(VideoFrame::I420,
+ VideoFrame::STORAGE_UNKNOWN, size,
+ gfx::Rect(size), size));
scoped_refptr<media::VideoFrame> video_frame =
media::VideoFrame::CreateFrame(
VideoFrame::I420, size, gfx::Rect(size), size,
diff --git a/media/cast/test/utility/video_utility.cc b/media/cast/test/utility/video_utility.cc
index 46469563..8c4b0cb 100644
--- a/media/cast/test/utility/video_utility.cc
+++ b/media/cast/test/utility/video_utility.cc
@@ -77,6 +77,7 @@ void PopulateVideoFrame(VideoFrame* frame, int start_value) {
}
const int half_height = (height + 1) / 2;
+#if defined(OS_MACOSX)
if (frame->format() == VideoFrame::NV12) {
const int stride_uv = frame->stride(VideoFrame::kUVPlane);
uint8* uv_plane = frame->data(VideoFrame::kUVPlane);
@@ -91,7 +92,11 @@ void PopulateVideoFrame(VideoFrame* frame, int start_value) {
uv_plane += 2;
}
}
- } else { // I420, YV12, etc.
+ } else
+#endif
+ {
+ DCHECK(frame->format() == VideoFrame::I420 ||
+ frame->format() == VideoFrame::YV12);
const int stride_u = frame->stride(VideoFrame::kUPlane);
const int stride_v = frame->stride(VideoFrame::kVPlane);
uint8* u_plane = frame->data(VideoFrame::kUPlane);
@@ -112,7 +117,7 @@ void PopulateVideoFrame(VideoFrame* frame, int start_value) {
const int stripe_j = (j / stripe_size) * stripe_size;
for (int i = 0; i < stride_v; ++i) {
const int stripe_i = (i / stripe_size) * stripe_size;
- *u_plane = static_cast<uint8>(start_value + stripe_i + stripe_j);
+ *v_plane = static_cast<uint8>(start_value + stripe_i + stripe_j);
++v_plane;
}
}
@@ -120,14 +125,14 @@ void PopulateVideoFrame(VideoFrame* frame, int start_value) {
}
void PopulateVideoFrameWithNoise(VideoFrame* frame) {
- int height = frame->coded_size().height();
- int stride_y = frame->stride(VideoFrame::kYPlane);
- int stride_u = frame->stride(VideoFrame::kUPlane);
- int stride_v = frame->stride(VideoFrame::kVPlane);
- int half_height = (height + 1) / 2;
- uint8* y_plane = frame->data(VideoFrame::kYPlane);
- uint8* u_plane = frame->data(VideoFrame::kUPlane);
- uint8* v_plane = frame->data(VideoFrame::kVPlane);
+ const int height = frame->coded_size().height();
+ const int stride_y = frame->stride(VideoFrame::kYPlane);
+ const int stride_u = frame->stride(VideoFrame::kUPlane);
+ const int stride_v = frame->stride(VideoFrame::kVPlane);
+ const int half_height = (height + 1) / 2;
+ uint8* const y_plane = frame->data(VideoFrame::kYPlane);
+ uint8* const u_plane = frame->data(VideoFrame::kUPlane);
+ uint8* const v_plane = frame->data(VideoFrame::kVPlane);
base::RandBytes(y_plane, height * stride_y);
base::RandBytes(u_plane, half_height * stride_u);
@@ -135,17 +140,17 @@ void PopulateVideoFrameWithNoise(VideoFrame* frame) {
}
bool PopulateVideoFrameFromFile(VideoFrame* frame, FILE* video_file) {
- int width = frame->coded_size().width();
- int height = frame->coded_size().height();
- int half_width = (width + 1) / 2;
- int half_height = (height + 1) / 2;
- size_t frame_size = width * height + 2 * half_width * half_height;
- uint8* y_plane = frame->data(VideoFrame::kYPlane);
- uint8* u_plane = frame->data(VideoFrame::kUPlane);
- uint8* v_plane = frame->data(VideoFrame::kVPlane);
+ const int width = frame->coded_size().width();
+ const int height = frame->coded_size().height();
+ const int half_width = (width + 1) / 2;
+ const int half_height = (height + 1) / 2;
+ const size_t frame_size = width * height + 2 * half_width * half_height;
+ uint8* const y_plane = frame->data(VideoFrame::kYPlane);
+ uint8* const u_plane = frame->data(VideoFrame::kUPlane);
+ uint8* const v_plane = frame->data(VideoFrame::kVPlane);
- uint8* raw_data = new uint8[frame_size];
- size_t count = fread(raw_data, 1, frame_size, video_file);
+ uint8* const raw_data = new uint8[frame_size];
+ const size_t count = fread(raw_data, 1, frame_size, video_file);
if (count != frame_size)
return false;
diff --git a/media/filters/ffmpeg_video_decoder.cc b/media/filters/ffmpeg_video_decoder.cc
index 54f1808..5d03c80 100644
--- a/media/filters/ffmpeg_video_decoder.cc
+++ b/media/filters/ffmpeg_video_decoder.cc
@@ -120,9 +120,10 @@ int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context,
RoundUp(std::max(size.width(), codec_context->coded_width), 2),
RoundUp(std::max(size.height(), codec_context->coded_height), 2));
- if (!VideoFrame::IsValidConfig(
- format, coded_size, gfx::Rect(size), natural_size))
+ if (!VideoFrame::IsValidConfig(format, VideoFrame::STORAGE_UNKNOWN,
+ coded_size, gfx::Rect(size), natural_size)) {
return AVERROR(EINVAL);
+ }
scoped_refptr<VideoFrame> video_frame = frame_pool_.CreateFrame(
format, coded_size, gfx::Rect(size), natural_size, kNoTimestamp());
diff --git a/media/mojo/interfaces/media_types.mojom b/media/mojo/interfaces/media_types.mojom
index 47fea66..ff87cec 100644
--- a/media/mojo/interfaces/media_types.mojom
+++ b/media/mojo/interfaces/media_types.mojom
@@ -100,9 +100,8 @@ enum VideoFormat {
YV24,
NV12,
ARGB,
- HOLE,
- NATIVE_TEXTURE,
- FORMAT_MAX = NATIVE_TEXTURE,
+ XRGB,
+ FORMAT_MAX = XRGB,
};
// See media/base/video_decoder_config.h for descriptions.
diff --git a/media/mojo/services/media_type_converters.cc b/media/mojo/services/media_type_converters.cc
index 69d3123..d1630df 100644
--- a/media/mojo/services/media_type_converters.cc
+++ b/media/mojo/services/media_type_converters.cc
@@ -134,14 +134,11 @@ ASSERT_ENUM_EQ_RAW(VideoFrame::Format, VideoFrame::I420, VIDEO_FORMAT_I420);
ASSERT_ENUM_EQ_RAW(VideoFrame::Format, VideoFrame::YV16, VIDEO_FORMAT_YV16);
ASSERT_ENUM_EQ_RAW(VideoFrame::Format, VideoFrame::YV12A, VIDEO_FORMAT_YV12A);
ASSERT_ENUM_EQ_RAW(VideoFrame::Format, VideoFrame::YV24, VIDEO_FORMAT_YV24);
+#if defined(OS_MACOSX)
ASSERT_ENUM_EQ_RAW(VideoFrame::Format, VideoFrame::NV12, VIDEO_FORMAT_NV12);
-ASSERT_ENUM_EQ_RAW(VideoFrame::Format, VideoFrame::ARGB, VIDEO_FORMAT_ARGB);
-#if defined(VIDEO_HOLE)
-ASSERT_ENUM_EQ_RAW(VideoFrame::Format, VideoFrame::HOLE, VIDEO_FORMAT_HOLE);
#endif
-ASSERT_ENUM_EQ_RAW(VideoFrame::Format,
- VideoFrame::NATIVE_TEXTURE,
- VIDEO_FORMAT_NATIVE_TEXTURE);
+ASSERT_ENUM_EQ_RAW(VideoFrame::Format, VideoFrame::ARGB, VIDEO_FORMAT_ARGB);
+ASSERT_ENUM_EQ_RAW(VideoFrame::Format, VideoFrame::XRGB, VIDEO_FORMAT_XRGB);
ASSERT_ENUM_EQ_RAW(VideoFrame::Format,
VideoFrame::FORMAT_MAX,
VIDEO_FORMAT_FORMAT_MAX);
diff --git a/media/video/gpu_memory_buffer_video_frame_pool.cc b/media/video/gpu_memory_buffer_video_frame_pool.cc
index 6704e01..fb5bec0 100644
--- a/media/video/gpu_memory_buffer_video_frame_pool.cc
+++ b/media/video/gpu_memory_buffer_video_frame_pool.cc
@@ -332,16 +332,15 @@ GpuMemoryBufferVideoFramePool::MaybeCreateHardwareFrame(
case VideoFrame::I420:
return pool_impl_->CreateHardwareFrame(video_frame);
// Unsupported cases.
- case media::VideoFrame::YV12A:
- case media::VideoFrame::YV16:
- case media::VideoFrame::YV24:
-#if defined(VIDEO_HOLE)
- case media::VideoFrame::HOLE:
-#endif // defined(VIDEO_HOLE)
- case media::VideoFrame::ARGB:
- case media::VideoFrame::NATIVE_TEXTURE:
- case media::VideoFrame::UNKNOWN:
- case media::VideoFrame::NV12:
+ case VideoFrame::YV12A:
+ case VideoFrame::YV16:
+ case VideoFrame::YV24:
+#if defined(OS_MACOSX) || defined(OS_CHROMEOS)
+ case VideoFrame::NV12:
+#endif
+ case VideoFrame::ARGB:
+ case VideoFrame::XRGB:
+ case VideoFrame::UNKNOWN:
break;
}
return video_frame;