summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--chrome/renderer/media/ipc_video_renderer.cc44
-rw-r--r--media/base/buffers.h64
-rw-r--r--media/base/video_frame.cc170
-rw-r--r--media/base/video_frame.h111
-rw-r--r--media/base/video_frame_impl.cc194
-rw-r--r--media/base/video_frame_impl.h58
-rw-r--r--media/base/video_frame_impl_unittest.cc187
-rw-r--r--media/base/video_frame_unittest.cc176
-rw-r--r--media/filters/ffmpeg_video_decode_engine.cc10
-rw-r--r--media/filters/ffmpeg_video_decode_engine.h8
-rw-r--r--media/filters/ffmpeg_video_decode_engine_unittest.cc12
-rw-r--r--media/filters/omx_video_decode_engine.cc4
-rw-r--r--media/filters/omx_video_decode_engine.h2
-rw-r--r--media/filters/video_decode_engine.h10
-rw-r--r--media/filters/video_decoder_impl.cc43
-rw-r--r--media/filters/video_decoder_impl.h7
-rw-r--r--media/filters/video_decoder_impl_unittest.cc10
-rw-r--r--media/filters/video_renderer_base.cc6
-rw-r--r--media/filters/video_renderer_base_unittest.cc8
-rw-r--r--media/media.gyp6
-rw-r--r--media/tools/player_wtl/view.h76
-rw-r--r--media/tools/player_x11/x11_video_renderer.cc47
-rw-r--r--webkit/glue/media/video_renderer_impl.cc236
23 files changed, 708 insertions, 781 deletions
diff --git a/chrome/renderer/media/ipc_video_renderer.cc b/chrome/renderer/media/ipc_video_renderer.cc
index 0a98489..c5bac86 100644
--- a/chrome/renderer/media/ipc_video_renderer.cc
+++ b/chrome/renderer/media/ipc_video_renderer.cc
@@ -6,7 +6,7 @@
#include "chrome/common/render_messages.h"
#include "chrome/renderer/render_thread.h"
-#include "media/base/buffers.h"
+#include "media/base/video_frame.h"
#include "media/base/media_format.h"
IPCVideoRenderer::IPCVideoRenderer(
@@ -123,39 +123,37 @@ void IPCVideoRenderer::DoUpdateVideo() {
return;
}
- media::VideoSurface surface;
- CHECK(frame->Lock(&surface));
- CHECK(surface.width == static_cast<size_t>(video_size_.width()));
- CHECK(surface.height == static_cast<size_t>(video_size_.height()));
- CHECK(surface.format == media::VideoSurface::YV12);
- CHECK(surface.planes == 3);
+ CHECK(frame->width() == static_cast<size_t>(video_size_.width()));
+ CHECK(frame->height() == static_cast<size_t>(video_size_.height()));
+ CHECK(frame->format() == media::VideoFrame::YV12);
+ CHECK(frame->planes() == 3);
uint8* dest = reinterpret_cast<uint8*>(transport_dib_->memory());
// Copy Y plane.
- const uint8* src = surface.data[media::VideoSurface::kYPlane];
- size_t stride = surface.strides[media::VideoSurface::kYPlane];
- for (size_t row = 0; row < surface.height; ++row) {
- memcpy(dest, src, surface.width);
- dest += surface.width;
+ const uint8* src = frame->data(media::VideoFrame::kYPlane);
+ size_t stride = frame->stride(media::VideoFrame::kYPlane);
+ for (size_t row = 0; row < frame->height(); ++row) {
+ memcpy(dest, src, frame->width());
+ dest += frame->width();
src += stride;
}
// Copy U plane.
- src = surface.data[media::VideoSurface::kUPlane];
- stride = surface.strides[media::VideoSurface::kUPlane];
- for (size_t row = 0; row < surface.height / 2; ++row) {
- memcpy(dest, src, surface.width / 2);
- dest += surface.width / 2;
+ src = frame->data(media::VideoFrame::kUPlane);
+ stride = frame->stride(media::VideoFrame::kUPlane);
+ for (size_t row = 0; row < frame->height() / 2; ++row) {
+ memcpy(dest, src, frame->width() / 2);
+ dest += frame->width() / 2;
src += stride;
}
// Copy V plane.
- src = surface.data[media::VideoSurface::kVPlane];
- stride = surface.strides[media::VideoSurface::kVPlane];
- for (size_t row = 0; row < surface.height / 2; ++row) {
- memcpy(dest, src, surface.width / 2);
- dest += surface.width / 2;
+ src = frame->data(media::VideoFrame::kVPlane);
+ stride = frame->stride(media::VideoFrame::kVPlane);
+ for (size_t row = 0; row < frame->height() / 2; ++row) {
+ memcpy(dest, src, frame->width() / 2);
+ dest += frame->width() / 2;
src += stride;
}
@@ -167,8 +165,6 @@ void IPCVideoRenderer::DoUpdateVideo() {
Send(new ViewHostMsg_UpdateVideo(routing_id_,
transport_dib_->id(),
video_rect_));
-
- frame->Unlock();
}
void IPCVideoRenderer::DoDestroyVideo() {
diff --git a/media/base/buffers.h b/media/base/buffers.h
index 6102ea9..d03717d 100644
--- a/media/base/buffers.h
+++ b/media/base/buffers.h
@@ -1,4 +1,4 @@
-// Copyright (c) 2008-2009 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@@ -122,68 +122,6 @@ class WritableBuffer : public Buffer {
virtual ~WritableBuffer() {}
};
-
-struct VideoSurface {
- static const size_t kMaxPlanes = 3;
-
- static const size_t kNumRGBPlanes = 1;
- static const size_t kRGBPlane = 0;
-
- static const size_t kNumYUVPlanes = 3;
- static const size_t kYPlane = 0;
- static const size_t kUPlane = 1;
- static const size_t kVPlane = 2;
-
- // Surface formats roughly based on FOURCC labels, see:
- // http://www.fourcc.org/rgb.php
- // http://www.fourcc.org/yuv.php
- enum Format {
- INVALID, // Invalid format value. Used for error reporting.
- RGB555, // 16bpp RGB packed 5:5:5
- RGB565, // 16bpp RGB packed 5:6:5
- RGB24, // 24bpp RGB packed 8:8:8
- RGB32, // 32bpp RGB packed with extra byte 8:8:8
- RGBA, // 32bpp RGBA packed 8:8:8:8
- YV12, // 12bpp YVU planar 1x1 Y, 2x2 VU samples
- YV16, // 16bpp YVU planar 1x1 Y, 2x1 VU samples
- EMPTY, // An empty frame.
- };
-
- // Surface format.
- Format format;
-
- // Width and height of surface.
- size_t width;
- size_t height;
-
- // Number of planes, typically 1 for packed RGB formats and 3 for planar
- // YUV formats.
- size_t planes;
-
- // Array of strides for each plane, typically greater or equal to the width
- // of the surface divided by the horizontal sampling period. Note that
- // strides can be negative.
- int32 strides[kMaxPlanes];
-
- // Array of data pointers to each plane.
- uint8* data[kMaxPlanes];
-};
-
-
-class VideoFrame : public StreamSample {
- public:
- // Locks the underlying surface and fills out the given VideoSurface and
- // returns true if successful, false otherwise. Any additional calls to Lock
- // will fail.
- virtual bool Lock(VideoSurface* surface) = 0;
-
- // Unlocks the underlying surface, the VideoSurface acquired from Lock is no
- // longer guaranteed to be valid.
- virtual void Unlock() = 0;
-
- virtual bool IsEndOfStream() const = 0;
-};
-
} // namespace media
#endif // MEDIA_BASE_BUFFERS_H_
diff --git a/media/base/video_frame.cc b/media/base/video_frame.cc
new file mode 100644
index 0000000..0b8239c
--- /dev/null
+++ b/media/base/video_frame.cc
@@ -0,0 +1,170 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/video_frame.h"
+
+namespace media {
+
+// static
+void VideoFrame::CreateFrame(VideoFrame::Format format,
+ size_t width,
+ size_t height,
+ base::TimeDelta timestamp,
+ base::TimeDelta duration,
+ scoped_refptr<VideoFrame>* frame_out) {
+ DCHECK(width > 0 && height > 0);
+ DCHECK(width * height < 100000000);
+ DCHECK(frame_out);
+ bool alloc_worked = false;
+ scoped_refptr<VideoFrame> frame =
+ new VideoFrame(format, width, height);
+ if (frame) {
+ frame->SetTimestamp(timestamp);
+ frame->SetDuration(duration);
+ switch (format) {
+ case VideoFrame::RGB555:
+ case VideoFrame::RGB565:
+ alloc_worked = frame->AllocateRGB(2u);
+ break;
+ case VideoFrame::RGB24:
+ alloc_worked = frame->AllocateRGB(3u);
+ break;
+ case VideoFrame::RGB32:
+ case VideoFrame::RGBA:
+ alloc_worked = frame->AllocateRGB(4u);
+ break;
+ case VideoFrame::YV12:
+ case VideoFrame::YV16:
+ alloc_worked = frame->AllocateYUV();
+ break;
+ default:
+ NOTREACHED();
+ alloc_worked = false;
+ break;
+ }
+ }
+ *frame_out = alloc_worked ? frame : NULL;
+}
+
+// static
+void VideoFrame::CreateEmptyFrame(scoped_refptr<VideoFrame>* frame_out) {
+ *frame_out = new VideoFrame(VideoFrame::EMPTY, 0, 0);
+}
+
+// static
+void VideoFrame::CreateBlackFrame(int width, int height,
+ scoped_refptr<VideoFrame>* frame_out) {
+ DCHECK_GT(width, 0);
+ DCHECK_GT(height, 0);
+
+ // Create our frame.
+ scoped_refptr<VideoFrame> frame;
+ const base::TimeDelta kZero;
+ VideoFrame::CreateFrame(VideoFrame::YV12, width, height, kZero, kZero,
+ &frame);
+ DCHECK(frame);
+
+ // Now set the data to YUV(0,128,128).
+ const uint8 kBlackY = 0x00;
+ const uint8 kBlackUV = 0x80;
+
+ // Fill the Y plane.
+ uint8* y_plane = frame->data(VideoFrame::kYPlane);
+ for (size_t i = 0; i < frame->height_; ++i) {
+ memset(y_plane, kBlackY, frame->width_);
+ y_plane += frame->stride(VideoFrame::kYPlane);
+ }
+
+ // Fill the U and V planes.
+ uint8* u_plane = frame->data(VideoFrame::kUPlane);
+ uint8* v_plane = frame->data(VideoFrame::kVPlane);
+ for (size_t i = 0; i < (frame->height_ / 2); ++i) {
+ memset(u_plane, kBlackUV, frame->width_ / 2);
+ memset(v_plane, kBlackUV, frame->width_ / 2);
+ u_plane += frame->stride(VideoFrame::kUPlane);
+ v_plane += frame->stride(VideoFrame::kVPlane);
+ }
+
+ // Success!
+ *frame_out = frame;
+}
+
+static inline size_t RoundUp(size_t value, size_t alignment) {
+ // Check that |alignment| is a power of 2.
+ DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
+ return ((value + (alignment - 1)) & ~(alignment-1));
+}
+
+bool VideoFrame::AllocateRGB(size_t bytes_per_pixel) {
+ // Round up to align at a 64-bit (8 byte) boundary for each row. This
+ // is sufficient for MMX reads (movq).
+ size_t bytes_per_row = RoundUp(width_ * bytes_per_pixel, 8);
+ planes_ = VideoFrame::kNumRGBPlanes;
+ strides_[VideoFrame::kRGBPlane] = bytes_per_row;
+ data_[VideoFrame::kRGBPlane] = new uint8[bytes_per_row * height_];
+ DCHECK(data_[VideoFrame::kRGBPlane]);
+ DCHECK(!(reinterpret_cast<intptr_t>(data_[VideoFrame::kRGBPlane]) & 7));
+ COMPILE_ASSERT(0 == VideoFrame::kRGBPlane, RGB_data_must_be_index_0);
+ return (NULL != data_[VideoFrame::kRGBPlane]);
+}
+
+bool VideoFrame::AllocateYUV() {
+ DCHECK(format_ == VideoFrame::YV12 ||
+ format_ == VideoFrame::YV16);
+ // Align Y rows at 32-bit (4 byte) boundaries. The stride for both YV12 and
+ // YV16 is 1/2 of the stride of Y. For YV12, every row of bytes for U and V
+ // applies to two rows of Y (one byte of UV for 4 bytes of Y), so in the
+ // case of YV12 the strides are identical for the same width surface, but the
+ // number of bytes allocated for YV12 is 1/2 the amount for U & V as YV16.
+ // We also round the height of the surface allocated to be an even number
+ // to avoid any potential of faulting by code that attempts to access the Y
+ // values of the final row, but assumes that the last row of U & V applies to
+ // a full two rows of Y.
+ size_t alloc_height = RoundUp(height_, 2);
+ size_t y_bytes_per_row = RoundUp(width_, 4);
+ size_t uv_stride = RoundUp(y_bytes_per_row / 2, 4);
+ size_t y_bytes = alloc_height * y_bytes_per_row;
+ size_t uv_bytes = alloc_height * uv_stride;
+ if (format_ == VideoFrame::YV12) {
+ uv_bytes /= 2;
+ }
+ uint8* data = new uint8[y_bytes + (uv_bytes * 2)];
+ if (data) {
+ planes_ = VideoFrame::kNumYUVPlanes;
+ COMPILE_ASSERT(0 == VideoFrame::kYPlane, y_plane_data_must_be_index_0);
+ data_[VideoFrame::kYPlane] = data;
+ data_[VideoFrame::kUPlane] = data + y_bytes;
+ data_[VideoFrame::kVPlane] = data + y_bytes + uv_bytes;
+ strides_[VideoFrame::kYPlane] = y_bytes_per_row;
+ strides_[VideoFrame::kUPlane] = uv_stride;
+ strides_[VideoFrame::kVPlane] = uv_stride;
+ return true;
+ }
+ NOTREACHED();
+ return false;
+}
+
+VideoFrame::VideoFrame(VideoFrame::Format format,
+ size_t width,
+ size_t height) {
+ format_ = format;
+ width_ = width;
+ height_ = height;
+ planes_ = 0;
+ memset(&strides_, 0, sizeof(strides_));
+ memset(&data_, 0, sizeof(data_));
+}
+
+VideoFrame::~VideoFrame() {
+ // In multi-plane allocations, only a single block of memory is allocated
+ // on the heap, and other |data| pointers point inside the same, single block
+ // so just delete index 0.
+ delete[] data_[0];
+}
+
+bool VideoFrame::IsEndOfStream() const {
+ return format_ == VideoFrame::EMPTY;
+}
+
+} // namespace media
diff --git a/media/base/video_frame.h b/media/base/video_frame.h
new file mode 100644
index 0000000..b84d77c
--- /dev/null
+++ b/media/base/video_frame.h
@@ -0,0 +1,111 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef MEDIA_BASE_VIDEO_FRAME_H_
+#define MEDIA_BASE_VIDEO_FRAME_H_
+
+#include "media/base/buffers.h"
+
+namespace media {
+
+class VideoFrame : public StreamSample {
+ public:
+ static const size_t kMaxPlanes = 3;
+
+ static const size_t kNumRGBPlanes = 1;
+ static const size_t kRGBPlane = 0;
+
+ static const size_t kNumYUVPlanes = 3;
+ static const size_t kYPlane = 0;
+ static const size_t kUPlane = 1;
+ static const size_t kVPlane = 2;
+
+ // Surface formats roughly based on FOURCC labels, see:
+ // http://www.fourcc.org/rgb.php
+ // http://www.fourcc.org/yuv.php
+ enum Format {
+ INVALID, // Invalid format value. Used for error reporting.
+ RGB555, // 16bpp RGB packed 5:5:5
+ RGB565, // 16bpp RGB packed 5:6:5
+ RGB24, // 24bpp RGB packed 8:8:8
+ RGB32, // 32bpp RGB packed with extra byte 8:8:8
+ RGBA, // 32bpp RGBA packed 8:8:8:8
+ YV12, // 12bpp YVU planar 1x1 Y, 2x2 VU samples
+ YV16, // 16bpp YVU planar 1x1 Y, 2x1 VU samples
+ EMPTY, // An empty frame.
+ };
+
+ public:
+ // Creates a new frame with given parameters. Buffers for the frame are
+ // allocated but not initialized.
+ static void CreateFrame(Format format,
+ size_t width,
+ size_t height,
+ base::TimeDelta timestamp,
+ base::TimeDelta duration,
+ scoped_refptr<VideoFrame>* frame_out);
+
+ // Creates a frame with format equal to VideoFrame::EMPTY, width, height
+ // timestamp and duration are all 0.
+ static void CreateEmptyFrame(scoped_refptr<VideoFrame>* frame_out);
+
+ // Allocates YV12 frame based on |width| and |height|, and sets its data to
+ // the YUV equivalent of RGB(0,0,0).
+ static void CreateBlackFrame(int width, int height,
+ scoped_refptr<VideoFrame>* frame_out);
+
+ Format format() const { return format_; }
+
+ size_t width() const { return width_; }
+
+ size_t height() const { return height_; }
+
+ size_t planes() const { return planes_; }
+
+ int32 stride(size_t plane) const { return strides_[plane]; }
+
+ // Returns pointer to the buffer for a given plane. The memory is owned by
+ // VideoFrame object and must not be freed by the caller.
+ uint8* data(size_t plane) const { return data_[plane]; }
+
+ // StreamSample interface.
+ virtual bool IsEndOfStream() const;
+
+ private:
+ // Clients must use the static CreateFrame() method to create a new frame.
+ VideoFrame(Format format,
+ size_t video_width,
+ size_t video_height);
+
+ virtual ~VideoFrame();
+
+ // Used internally by CreateFrame().
+ bool AllocateRGB(size_t bytes_per_pixel);
+ bool AllocateYUV();
+
+ // Frame format.
+ Format format_;
+
+ // Width and height of surface.
+ size_t width_;
+ size_t height_;
+
+ // Number of planes, typically 1 for packed RGB formats and 3 for planar
+ // YUV formats.
+ size_t planes_;
+
+ // Array of strides for each plane, typically greater or equal to the width
+ // of the surface divided by the horizontal sampling period. Note that
+ // strides can be negative.
+ int32 strides_[kMaxPlanes];
+
+ // Array of data pointers to each plane.
+ uint8* data_[kMaxPlanes];
+
+ DISALLOW_COPY_AND_ASSIGN(VideoFrame);
+};
+
+} // namespace media
+
+#endif // MEDIA_BASE_VIDEO_FRAME_H_
diff --git a/media/base/video_frame_impl.cc b/media/base/video_frame_impl.cc
deleted file mode 100644
index 470e297..0000000
--- a/media/base/video_frame_impl.cc
+++ /dev/null
@@ -1,194 +0,0 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/base/video_frame_impl.h"
-
-namespace media {
-
-// static
-void VideoFrameImpl::CreateFrame(VideoSurface::Format format,
- size_t width,
- size_t height,
- base::TimeDelta timestamp,
- base::TimeDelta duration,
- scoped_refptr<VideoFrame>* frame_out) {
- DCHECK(width > 0 && height > 0);
- DCHECK(width * height < 100000000);
- DCHECK(frame_out);
- bool alloc_worked = false;
- scoped_refptr<VideoFrameImpl> frame =
- new VideoFrameImpl(format, width, height);
- if (frame) {
- frame->SetTimestamp(timestamp);
- frame->SetDuration(duration);
- switch (format) {
- case VideoSurface::RGB555:
- case VideoSurface::RGB565:
- alloc_worked = frame->AllocateRGB(2u);
- break;
- case VideoSurface::RGB24:
- alloc_worked = frame->AllocateRGB(3u);
- break;
- case VideoSurface::RGB32:
- case VideoSurface::RGBA:
- alloc_worked = frame->AllocateRGB(4u);
- break;
- case VideoSurface::YV12:
- case VideoSurface::YV16:
- alloc_worked = frame->AllocateYUV();
- break;
- default:
- NOTREACHED();
- alloc_worked = false;
- break;
- }
- }
- *frame_out = alloc_worked ? frame : NULL;
-}
-
-// static
-void VideoFrameImpl::CreateEmptyFrame(scoped_refptr<VideoFrame>* frame_out) {
- *frame_out = new VideoFrameImpl(VideoSurface::EMPTY, 0, 0);
-}
-
-// static
-void VideoFrameImpl::CreateBlackFrame(int width, int height,
- scoped_refptr<VideoFrame>* frame_out) {
- DCHECK_GT(width, 0);
- DCHECK_GT(height, 0);
-
- // Create our frame.
- scoped_refptr<VideoFrame> frame;
- const base::TimeDelta kZero;
- VideoFrameImpl::CreateFrame(VideoSurface::YV12, width, height, kZero, kZero,
- &frame);
- DCHECK(frame);
-
- // Now set the data to YUV(0,128,128).
- const uint8 kBlackY = 0x00;
- const uint8 kBlackUV = 0x80;
- VideoSurface surface;
- frame->Lock(&surface);
- DCHECK_EQ(VideoSurface::YV12, surface.format) << "Expected YV12 surface";
-
- // Fill the Y plane.
- for (size_t i = 0; i < surface.height; ++i) {
- memset(surface.data[VideoSurface::kYPlane], kBlackY, surface.width);
- surface.data[VideoSurface::kYPlane]
- += surface.strides[VideoSurface::kYPlane];
- }
-
- // Fill the U and V planes.
- for (size_t i = 0; i < (surface.height / 2); ++i) {
- memset(surface.data[VideoSurface::kUPlane], kBlackUV, surface.width / 2);
- memset(surface.data[VideoSurface::kVPlane], kBlackUV, surface.width / 2);
- surface.data[VideoSurface::kUPlane] +=
- surface.strides[VideoSurface::kUPlane];
- surface.data[VideoSurface::kVPlane] +=
- surface.strides[VideoSurface::kVPlane];
- }
- frame->Unlock();
-
- // Success!
- *frame_out = frame;
-}
-
-static inline size_t RoundUp(size_t value, size_t alignment) {
- // Check that |alignment| is a power of 2.
- DCHECK((alignment + (alignment - 1)) == (alignment | (alignment - 1)));
- return ((value + (alignment - 1)) & ~(alignment-1));
-}
-
-bool VideoFrameImpl::AllocateRGB(size_t bytes_per_pixel) {
- // Round up to align at a 64-bit (8 byte) boundary for each row. This
- // is sufficient for MMX reads (movq).
- size_t bytes_per_row = RoundUp(surface_.width * bytes_per_pixel, 8);
- surface_.planes = VideoSurface::kNumRGBPlanes;
- surface_.strides[VideoSurface::kRGBPlane] = bytes_per_row;
- surface_.data[VideoSurface::kRGBPlane] = new uint8[bytes_per_row *
- surface_.height];
- DCHECK(surface_.data[VideoSurface::kRGBPlane]);
- DCHECK(!(reinterpret_cast<intptr_t>(
- surface_.data[VideoSurface::kRGBPlane]) & 7));
- COMPILE_ASSERT(0 == VideoSurface::kRGBPlane, RGB_data_must_be_index_0);
- return (NULL != surface_.data[VideoSurface::kRGBPlane]);
-}
-
-bool VideoFrameImpl::AllocateYUV() {
- DCHECK(surface_.format == VideoSurface::YV12 ||
- surface_.format == VideoSurface::YV16);
- // Align Y rows at 32-bit (4 byte) boundaries. The stride for both YV12 and
- // YV16 is 1/2 of the stride of Y. For YV12, every row of bytes for U and V
- // applies to two rows of Y (one byte of UV for 4 bytes of Y), so in the
- // case of YV12 the strides are identical for the same width surface, but the
- // number of bytes allocated for YV12 is 1/2 the amount for U & V as YV16.
- // We also round the height of the surface allocated to be an even number
- // to avoid any potential of faulting by code that attempts to access the Y
- // values of the final row, but assumes that the last row of U & V applies to
- // a full two rows of Y.
- size_t alloc_height = RoundUp(surface_.height, 2);
- size_t y_bytes_per_row = RoundUp(surface_.width, 4);
- size_t uv_stride = RoundUp(y_bytes_per_row / 2, 4);
- size_t y_bytes = alloc_height * y_bytes_per_row;
- size_t uv_bytes = alloc_height * uv_stride;
- if (surface_.format == VideoSurface::YV12) {
- uv_bytes /= 2;
- }
- uint8* data = new uint8[y_bytes + (uv_bytes * 2)];
- if (data) {
- surface_.planes = VideoSurface::kNumYUVPlanes;
- COMPILE_ASSERT(0 == VideoSurface::kYPlane, y_plane_data_must_be_index_0);
- surface_.data[VideoSurface::kYPlane] = data;
- surface_.data[VideoSurface::kUPlane] = data + y_bytes;
- surface_.data[VideoSurface::kVPlane] = data + y_bytes + uv_bytes;
- surface_.strides[VideoSurface::kYPlane] = y_bytes_per_row;
- surface_.strides[VideoSurface::kUPlane] = uv_stride;
- surface_.strides[VideoSurface::kVPlane] = uv_stride;
- return true;
- }
- NOTREACHED();
- return false;
-}
-
-VideoFrameImpl::VideoFrameImpl(VideoSurface::Format format,
- size_t width,
- size_t height) {
- locked_ = false;
- memset(&surface_, 0, sizeof(surface_));
- surface_.format = format;
- surface_.width = width;
- surface_.height = height;
-}
-
-VideoFrameImpl::~VideoFrameImpl() {
- // In multi-plane allocations, only a single block of memory is allocated
- // on the heap, and other |data| pointers point inside the same, single block
- // so just delete index 0.
- delete[] surface_.data[0];
-}
-
-bool VideoFrameImpl::Lock(VideoSurface* surface) {
- DCHECK(!locked_);
- DCHECK_NE(surface_.format, VideoSurface::EMPTY);
- if (locked_) {
- memset(surface, 0, sizeof(*surface));
- return false;
- }
- locked_ = true;
- COMPILE_ASSERT(sizeof(*surface) == sizeof(surface_), surface_size_mismatch);
- memcpy(surface, &surface_, sizeof(*surface));
- return true;
-}
-
-void VideoFrameImpl::Unlock() {
- DCHECK(locked_);
- DCHECK_NE(surface_.format, VideoSurface::EMPTY);
- locked_ = false;
-}
-
-bool VideoFrameImpl::IsEndOfStream() const {
- return surface_.format == VideoSurface::EMPTY;
-}
-
-} // namespace media
diff --git a/media/base/video_frame_impl.h b/media/base/video_frame_impl.h
deleted file mode 100644
index cf660ea..0000000
--- a/media/base/video_frame_impl.h
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-// Simple class that implements the VideoFrame interface with memory allocated
-// on the system heap. This class supports every format defined in the
-// VideoSurface::Format enum. The implementation attempts to properly align
-// allocations for maximum system bus efficency.
-#ifndef MEDIA_BASE_VIDEO_FRAME_IMPL_H_
-#define MEDIA_BASE_VIDEO_FRAME_IMPL_H_
-
-#include "media/base/buffers.h"
-
-namespace media {
-
-class VideoFrameImpl : public VideoFrame {
- public:
- static void CreateFrame(VideoSurface::Format format,
- size_t width,
- size_t height,
- base::TimeDelta timestamp,
- base::TimeDelta duration,
- scoped_refptr<VideoFrame>* frame_out);
-
- // Creates a frame with format equals to VideoSurface::EMPTY, width, height
- // timestamp and duration are all 0.
- static void CreateEmptyFrame(scoped_refptr<VideoFrame>* frame_out);
-
- // Allocates YV12 frame based on |width| and |height|, and sets its data to
- // the YUV equivalent of RGB(0,0,0).
- static void CreateBlackFrame(int width, int height,
- scoped_refptr<VideoFrame>* frame_out);
-
- // Implementation of VideoFrame.
- virtual bool Lock(VideoSurface* surface);
- virtual void Unlock();
- virtual bool IsEndOfStream() const;
-
- private:
- // Clients must use the static CreateFrame() method to create a new frame.
- VideoFrameImpl(VideoSurface::Format format,
- size_t video_width,
- size_t video_height);
-
- virtual ~VideoFrameImpl();
-
- bool AllocateRGB(size_t bytes_per_pixel);
- bool AllocateYUV();
-
- bool locked_;
- VideoSurface surface_;
-
- DISALLOW_COPY_AND_ASSIGN(VideoFrameImpl);
-};
-
-} // namespace media
-
-#endif // MEDIA_BASE_VIDEO_FRAME_IMPL_H_
diff --git a/media/base/video_frame_impl_unittest.cc b/media/base/video_frame_impl_unittest.cc
deleted file mode 100644
index d50db36..0000000
--- a/media/base/video_frame_impl_unittest.cc
+++ /dev/null
@@ -1,187 +0,0 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "media/base/video_frame_impl.h"
-
-#include "base/format_macros.h"
-#include "base/string_util.h"
-#include "media/base/buffers.h"
-#include "media/base/mock_filters.h"
-#include "media/base/yuv_convert.h"
-#include "testing/gtest/include/gtest/gtest.h"
-
-namespace media {
-
-// Helper function that initializes a YV12 frame with white and black scan
-// lines based on the |white_to_black| parameter. If 0, then the entire
-// frame will be black, if 1 then the entire frame will be white.
-void InitializeYV12Frame(VideoFrame* frame, double white_to_black) {
- VideoSurface surface;
- if (!frame->Lock(&surface)) {
- ADD_FAILURE();
- return;
- }
- EXPECT_EQ(VideoSurface::YV12, surface.format);
- size_t first_black_row = static_cast<size_t>(surface.height * white_to_black);
- uint8* y_plane = surface.data[VideoSurface::kYPlane];
- for (size_t row = 0; row < surface.height; ++row) {
- int color = (row < first_black_row) ? 0xFF : 0x00;
- memset(y_plane, color, surface.width);
- y_plane += surface.strides[VideoSurface::kYPlane];
- }
- uint8* u_plane = surface.data[VideoSurface::kUPlane];
- uint8* v_plane = surface.data[VideoSurface::kVPlane];
- for (size_t row = 0; row < surface.height; row += 2) {
- memset(u_plane, 0x80, surface.width / 2);
- memset(v_plane, 0x80, surface.width / 2);
- u_plane += surface.strides[VideoSurface::kUPlane];
- v_plane += surface.strides[VideoSurface::kVPlane];
- }
- frame->Unlock();
-}
-
-// Given a |yv12_frame| this method converts the YV12 frame to RGBA and
-// makes sure that all the pixels of the RBG frame equal |expect_rgb_color|.
-void ExpectFrameColor(media::VideoFrame* yv12_frame, uint32 expect_rgb_color) {
- // On linux and mac builds if you directly compare using EXPECT_EQ and use
- // the VideoSurface::kNumxxxPlanes constants, it generates an error when
- // linking. These are declared so that we can compare against locals.
- const size_t expect_yuv_planes = VideoSurface::kNumYUVPlanes;
- const size_t expect_rgb_planes = VideoSurface::kNumRGBPlanes;
-
- VideoSurface yuv_surface;
- ASSERT_TRUE(yv12_frame->Lock(&yuv_surface));
- ASSERT_EQ(VideoSurface::YV12, yuv_surface.format);
- ASSERT_EQ(expect_yuv_planes, yuv_surface.planes);
- ASSERT_EQ(yuv_surface.strides[VideoSurface::kUPlane],
- yuv_surface.strides[VideoSurface::kVPlane]);
-
- scoped_refptr<media::VideoFrame> rgb_frame;
- media::VideoFrameImpl::CreateFrame(VideoSurface::RGBA,
- yuv_surface.width,
- yuv_surface.height,
- yv12_frame->GetTimestamp(),
- yv12_frame->GetDuration(),
- &rgb_frame);
- media::VideoSurface rgb_surface;
- ASSERT_TRUE(rgb_frame->Lock(&rgb_surface));
- ASSERT_EQ(yuv_surface.width, rgb_surface.width);
- ASSERT_EQ(yuv_surface.height, rgb_surface.height);
- ASSERT_EQ(expect_rgb_planes, rgb_surface.planes);
-
- media::ConvertYUVToRGB32(yuv_surface.data[VideoSurface::kYPlane],
- yuv_surface.data[VideoSurface::kUPlane],
- yuv_surface.data[VideoSurface::kVPlane],
- rgb_surface.data[VideoSurface::kRGBPlane],
- rgb_surface.width,
- rgb_surface.height,
- yuv_surface.strides[VideoSurface::kYPlane],
- yuv_surface.strides[VideoSurface::kUPlane],
- rgb_surface.strides[VideoSurface::kRGBPlane],
- media::YV12);
-
- for (size_t row = 0; row < rgb_surface.height; ++row) {
- uint32* rgb_row_data = reinterpret_cast<uint32*>(
- rgb_surface.data[VideoSurface::kRGBPlane] +
- (rgb_surface.strides[VideoSurface::kRGBPlane] * row));
- for (size_t col = 0; col < rgb_surface.width; ++col) {
- SCOPED_TRACE(StringPrintf("Checking (%" PRIuS ", %" PRIuS ")", row, col));
- EXPECT_EQ(expect_rgb_color, rgb_row_data[col]);
- }
- }
- rgb_frame->Unlock();
- yv12_frame->Unlock();
-}
-
-TEST(VideoFrameImpl, CreateFrame) {
- const size_t kWidth = 64;
- const size_t kHeight = 48;
- const base::TimeDelta kTimestampA = base::TimeDelta::FromMicroseconds(1337);
- const base::TimeDelta kDurationA = base::TimeDelta::FromMicroseconds(1667);
- const base::TimeDelta kTimestampB = base::TimeDelta::FromMicroseconds(1234);
- const base::TimeDelta kDurationB = base::TimeDelta::FromMicroseconds(5678);
-
- // Create a YV12 Video Frame.
- scoped_refptr<media::VideoFrame> frame;
- VideoFrameImpl::CreateFrame(media::VideoSurface::YV12, kWidth, kHeight,
- kTimestampA, kDurationA, &frame);
- ASSERT_TRUE(frame);
-
- // Test StreamSample implementation.
- EXPECT_EQ(kTimestampA.InMicroseconds(),
- frame->GetTimestamp().InMicroseconds());
- EXPECT_EQ(kDurationA.InMicroseconds(), frame->GetDuration().InMicroseconds());
- EXPECT_FALSE(frame->IsEndOfStream());
- EXPECT_FALSE(frame->IsDiscontinuous());
- frame->SetTimestamp(kTimestampB);
- frame->SetDuration(kDurationB);
- EXPECT_EQ(kTimestampB.InMicroseconds(),
- frame->GetTimestamp().InMicroseconds());
- EXPECT_EQ(kDurationB.InMicroseconds(), frame->GetDuration().InMicroseconds());
- EXPECT_FALSE(frame->IsEndOfStream());
- frame->SetDiscontinuous(true);
- EXPECT_TRUE(frame->IsDiscontinuous());
- frame->SetDiscontinuous(false);
- EXPECT_FALSE(frame->IsDiscontinuous());
-
- // Test VideoFrame implementation.
- {
- SCOPED_TRACE("");
- InitializeYV12Frame(frame, 0.0f);
- ExpectFrameColor(frame, 0xFF000000);
- }
- {
- SCOPED_TRACE("");
- InitializeYV12Frame(frame, 1.0f);
- ExpectFrameColor(frame, 0xFFFFFFFF);
- }
-
- // Test an empty frame.
- VideoFrameImpl::CreateEmptyFrame(&frame);
- EXPECT_TRUE(frame->IsEndOfStream());
-}
-
-TEST(VideoFrameImpl, CreateBlackFrame) {
- const size_t kWidth = 2;
- const size_t kHeight = 2;
- const uint8 kExpectedYRow[] = { 0, 0 };
- const uint8 kExpectedUVRow[] = { 128 };
-
- scoped_refptr<media::VideoFrame> frame;
- VideoFrameImpl::CreateBlackFrame(kWidth, kHeight, &frame);
- ASSERT_TRUE(frame);
-
- // Test basic properties.
- EXPECT_EQ(0, frame->GetTimestamp().InMicroseconds());
- EXPECT_EQ(0, frame->GetDuration().InMicroseconds());
- EXPECT_FALSE(frame->IsEndOfStream());
-
- // Test surface properties.
- VideoSurface surface;
- EXPECT_TRUE(frame->Lock(&surface));
- EXPECT_EQ(VideoSurface::YV12, surface.format);
- EXPECT_EQ(kWidth, surface.width);
- EXPECT_EQ(kHeight, surface.height);
- EXPECT_EQ(3u, surface.planes);
-
- // Test surfaces themselves.
- for (size_t y = 0; y < surface.height; ++y) {
- EXPECT_EQ(0, memcmp(kExpectedYRow, surface.data[VideoSurface::kYPlane],
- arraysize(kExpectedYRow)));
- surface.data[VideoSurface::kYPlane] +=
- surface.strides[VideoSurface::kYPlane];
- }
- for (size_t y = 0; y < surface.height / 2; ++y) {
- EXPECT_EQ(0, memcmp(kExpectedUVRow, surface.data[VideoSurface::kUPlane],
- arraysize(kExpectedUVRow)));
- EXPECT_EQ(0, memcmp(kExpectedUVRow, surface.data[VideoSurface::kVPlane],
- arraysize(kExpectedUVRow)));
- surface.data[VideoSurface::kUPlane] +=
- surface.strides[VideoSurface::kUPlane];
- surface.data[VideoSurface::kVPlane] +=
- surface.strides[VideoSurface::kVPlane];
- }
-}
-
-} // namespace media
diff --git a/media/base/video_frame_unittest.cc b/media/base/video_frame_unittest.cc
new file mode 100644
index 0000000..27e15dc
--- /dev/null
+++ b/media/base/video_frame_unittest.cc
@@ -0,0 +1,176 @@
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "media/base/video_frame.h"
+
+#include "base/format_macros.h"
+#include "base/string_util.h"
+#include "media/base/buffers.h"
+#include "media/base/mock_filters.h"
+#include "media/base/yuv_convert.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace media {
+
+// Helper function that initializes a YV12 frame with white and black scan
+// lines based on the |white_to_black| parameter. If 0, then the entire
+// frame will be black, if 1 then the entire frame will be white.
+void InitializeYV12Frame(VideoFrame* frame, double white_to_black) {
+ EXPECT_EQ(VideoFrame::YV12, frame->format());
+ size_t first_black_row = static_cast<size_t>(frame->height() *
+ white_to_black);
+ uint8* y_plane = frame->data(VideoFrame::kYPlane);
+ for (size_t row = 0; row < frame->height(); ++row) {
+ int color = (row < first_black_row) ? 0xFF : 0x00;
+ memset(y_plane, color, frame->width());
+ y_plane += frame->stride(VideoFrame::kYPlane);
+ }
+ uint8* u_plane = frame->data(VideoFrame::kUPlane);
+ uint8* v_plane = frame->data(VideoFrame::kVPlane);
+ for (size_t row = 0; row < frame->height(); row += 2) {
+ memset(u_plane, 0x80, frame->width() / 2);
+ memset(v_plane, 0x80, frame->width() / 2);
+ u_plane += frame->stride(VideoFrame::kUPlane);
+ v_plane += frame->stride(VideoFrame::kVPlane);
+ }
+}
+
+// Given a |yv12_frame| this method converts the YV12 frame to RGBA and
+// makes sure that all the pixels of the RBG frame equal |expect_rgb_color|.
+void ExpectFrameColor(media::VideoFrame* yv12_frame, uint32 expect_rgb_color) {
+ // On linux and mac builds if you directly compare using EXPECT_EQ and use
+ // the VideoFrame::kNumxxxPlanes constants, it generates an error when
+ // linking. These are declared so that we can compare against locals.
+ const size_t expect_yuv_planes = VideoFrame::kNumYUVPlanes;
+ const size_t expect_rgb_planes = VideoFrame::kNumRGBPlanes;
+
+ ASSERT_EQ(VideoFrame::YV12, yv12_frame->format());
+ ASSERT_EQ(expect_yuv_planes, yv12_frame->planes());
+ ASSERT_EQ(yv12_frame->stride(VideoFrame::kUPlane),
+ yv12_frame->stride(VideoFrame::kVPlane));
+
+ scoped_refptr<media::VideoFrame> rgb_frame;
+ media::VideoFrame::CreateFrame(VideoFrame::RGBA,
+ yv12_frame->width(),
+ yv12_frame->height(),
+ yv12_frame->GetTimestamp(),
+ yv12_frame->GetDuration(),
+ &rgb_frame);
+
+ ASSERT_EQ(yv12_frame->width(), rgb_frame->width());
+ ASSERT_EQ(yv12_frame->height(), rgb_frame->height());
+ ASSERT_EQ(expect_rgb_planes, rgb_frame->planes());
+
+ media::ConvertYUVToRGB32(yv12_frame->data(VideoFrame::kYPlane),
+ yv12_frame->data(VideoFrame::kUPlane),
+ yv12_frame->data(VideoFrame::kVPlane),
+ rgb_frame->data(VideoFrame::kRGBPlane),
+ rgb_frame->width(),
+ rgb_frame->height(),
+ yv12_frame->stride(VideoFrame::kYPlane),
+ yv12_frame->stride(VideoFrame::kUPlane),
+ rgb_frame->stride(VideoFrame::kRGBPlane),
+ media::YV12);
+
+ for (size_t row = 0; row < rgb_frame->height(); ++row) {
+ uint32* rgb_row_data = reinterpret_cast<uint32*>(
+ rgb_frame->data(VideoFrame::kRGBPlane) +
+ (rgb_frame->stride(VideoFrame::kRGBPlane) * row));
+ for (size_t col = 0; col < rgb_frame->width(); ++col) {
+ SCOPED_TRACE(StringPrintf("Checking (%" PRIuS ", %" PRIuS ")",
+ row, col));
+ EXPECT_EQ(expect_rgb_color, rgb_row_data[col]);
+ }
+ }
+}
+
+TEST(VideoFrame, CreateFrame) {
+ const size_t kWidth = 64;
+ const size_t kHeight = 48;
+ const base::TimeDelta kTimestampA = base::TimeDelta::FromMicroseconds(1337);
+ const base::TimeDelta kDurationA = base::TimeDelta::FromMicroseconds(1667);
+ const base::TimeDelta kTimestampB = base::TimeDelta::FromMicroseconds(1234);
+ const base::TimeDelta kDurationB = base::TimeDelta::FromMicroseconds(5678);
+
+ // Create a YV12 Video Frame.
+ scoped_refptr<media::VideoFrame> frame;
+ VideoFrame::CreateFrame(media::VideoFrame::YV12, kWidth, kHeight,
+ kTimestampA, kDurationA, &frame);
+ ASSERT_TRUE(frame);
+
+ // Test StreamSample implementation.
+ EXPECT_EQ(kTimestampA.InMicroseconds(),
+ frame->GetTimestamp().InMicroseconds());
+ EXPECT_EQ(kDurationA.InMicroseconds(),
+ frame->GetDuration().InMicroseconds());
+ EXPECT_FALSE(frame->IsEndOfStream());
+ EXPECT_FALSE(frame->IsDiscontinuous());
+ frame->SetTimestamp(kTimestampB);
+ frame->SetDuration(kDurationB);
+ EXPECT_EQ(kTimestampB.InMicroseconds(),
+ frame->GetTimestamp().InMicroseconds());
+ EXPECT_EQ(kDurationB.InMicroseconds(),
+ frame->GetDuration().InMicroseconds());
+ EXPECT_FALSE(frame->IsEndOfStream());
+ frame->SetDiscontinuous(true);
+ EXPECT_TRUE(frame->IsDiscontinuous());
+ frame->SetDiscontinuous(false);
+ EXPECT_FALSE(frame->IsDiscontinuous());
+
+ // Test VideoFrame implementation.
+ {
+ SCOPED_TRACE("");
+ InitializeYV12Frame(frame, 0.0f);
+ ExpectFrameColor(frame, 0xFF000000);
+ }
+ {
+ SCOPED_TRACE("");
+ InitializeYV12Frame(frame, 1.0f);
+ ExpectFrameColor(frame, 0xFFFFFFFF);
+ }
+
+ // Test an empty frame.
+ VideoFrame::CreateEmptyFrame(&frame);
+ EXPECT_TRUE(frame->IsEndOfStream());
+}
+
+TEST(VideoFrame, CreateBlackFrame) {
+ const size_t kWidth = 2;
+ const size_t kHeight = 2;
+ const uint8 kExpectedYRow[] = { 0, 0 };
+ const uint8 kExpectedUVRow[] = { 128 };
+
+ scoped_refptr<media::VideoFrame> frame;
+ VideoFrame::CreateBlackFrame(kWidth, kHeight, &frame);
+ ASSERT_TRUE(frame);
+
+ // Test basic properties.
+ EXPECT_EQ(0, frame->GetTimestamp().InMicroseconds());
+ EXPECT_EQ(0, frame->GetDuration().InMicroseconds());
+ EXPECT_FALSE(frame->IsEndOfStream());
+
+ // Test |frame| properties.
+ EXPECT_EQ(VideoFrame::YV12, frame->format());
+ EXPECT_EQ(kWidth, frame->width());
+ EXPECT_EQ(kHeight, frame->height());
+ EXPECT_EQ(3u, frame->planes());
+
+ // Test frames themselves.
+ uint8* y_plane = frame->data(VideoFrame::kYPlane);
+ for (size_t y = 0; y < frame->height(); ++y) {
+ EXPECT_EQ(0, memcmp(kExpectedYRow, y_plane, arraysize(kExpectedYRow)));
+ y_plane += frame->stride(VideoFrame::kYPlane);
+ }
+
+ uint8* u_plane = frame->data(VideoFrame::kUPlane);
+ uint8* v_plane = frame->data(VideoFrame::kVPlane);
+ for (size_t y = 0; y < frame->height() / 2; ++y) {
+ EXPECT_EQ(0, memcmp(kExpectedUVRow, u_plane, arraysize(kExpectedUVRow)));
+ EXPECT_EQ(0, memcmp(kExpectedUVRow, v_plane, arraysize(kExpectedUVRow)));
+ u_plane += frame->stride(VideoFrame::kUPlane);
+ v_plane += frame->stride(VideoFrame::kVPlane);
+ }
+}
+
+} // namespace media
diff --git a/media/filters/ffmpeg_video_decode_engine.cc b/media/filters/ffmpeg_video_decode_engine.cc
index b6adec2..223e4fa 100644
--- a/media/filters/ffmpeg_video_decode_engine.cc
+++ b/media/filters/ffmpeg_video_decode_engine.cc
@@ -6,7 +6,7 @@
#include "base/task.h"
#include "media/base/callback.h"
-#include "media/base/video_frame_impl.h"
+#include "media/base/buffers.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/ffmpeg/ffmpeg_util.h"
#include "media/filters/ffmpeg_demuxer.h"
@@ -103,22 +103,22 @@ void FFmpegVideoDecodeEngine::Flush(Task* done_cb) {
avcodec_flush_buffers(codec_context_);
}
-VideoSurface::Format FFmpegVideoDecodeEngine::GetSurfaceFormat() const {
+VideoFrame::Format FFmpegVideoDecodeEngine::GetSurfaceFormat() const {
// J (Motion JPEG) versions of YUV are full range 0..255.
// Regular (MPEG) YUV is 16..240.
// For now we will ignore the distinction and treat them the same.
switch (codec_context_->pix_fmt) {
case PIX_FMT_YUV420P:
case PIX_FMT_YUVJ420P:
- return VideoSurface::YV12;
+ return VideoFrame::YV12;
break;
case PIX_FMT_YUV422P:
case PIX_FMT_YUVJ422P:
- return VideoSurface::YV16;
+ return VideoFrame::YV16;
break;
default:
// TODO(scherkus): More formats here?
- return VideoSurface::INVALID;
+ return VideoFrame::INVALID;
}
}
diff --git a/media/filters/ffmpeg_video_decode_engine.h b/media/filters/ffmpeg_video_decode_engine.h
index b8338ab..b4fe69d 100644
--- a/media/filters/ffmpeg_video_decode_engine.h
+++ b/media/filters/ffmpeg_video_decode_engine.h
@@ -1,6 +1,6 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved. Use of this
-// source code is governed by a BSD-style license that can be found in the
-// LICENSE file.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
#ifndef MEDIA_FILTERS_FFMPEG_VIDEO_DECODE_ENGINE_H_
#define MEDIA_FILTERS_FFMPEG_VIDEO_DECODE_ENGINE_H_
@@ -27,7 +27,7 @@ class FFmpegVideoDecodeEngine : public VideoDecodeEngine {
virtual void DecodeFrame(const Buffer& buffer, AVFrame* yuv_frame,
bool* got_result, Task* done_cb);
virtual void Flush(Task* done_cb);
- virtual VideoSurface::Format GetSurfaceFormat() const;
+ virtual VideoFrame::Format GetSurfaceFormat() const;
virtual State state() const { return state_; }
diff --git a/media/filters/ffmpeg_video_decode_engine_unittest.cc b/media/filters/ffmpeg_video_decode_engine_unittest.cc
index 4d56866..449a67c 100644
--- a/media/filters/ffmpeg_video_decode_engine_unittest.cc
+++ b/media/filters/ffmpeg_video_decode_engine_unittest.cc
@@ -1,4 +1,4 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@@ -167,19 +167,19 @@ TEST_F(FFmpegVideoDecodeEngineTest, DecodeFrame_DecodeError) {
TEST_F(FFmpegVideoDecodeEngineTest, GetSurfaceFormat) {
// YV12 formats.
codec_context_.pix_fmt = PIX_FMT_YUV420P;
- EXPECT_EQ(VideoSurface::YV12, test_engine_->GetSurfaceFormat());
+ EXPECT_EQ(VideoFrame::YV12, test_engine_->GetSurfaceFormat());
codec_context_.pix_fmt = PIX_FMT_YUVJ420P;
- EXPECT_EQ(VideoSurface::YV12, test_engine_->GetSurfaceFormat());
+ EXPECT_EQ(VideoFrame::YV12, test_engine_->GetSurfaceFormat());
// YV16 formats.
codec_context_.pix_fmt = PIX_FMT_YUV422P;
- EXPECT_EQ(VideoSurface::YV16, test_engine_->GetSurfaceFormat());
+ EXPECT_EQ(VideoFrame::YV16, test_engine_->GetSurfaceFormat());
codec_context_.pix_fmt = PIX_FMT_YUVJ422P;
- EXPECT_EQ(VideoSurface::YV16, test_engine_->GetSurfaceFormat());
+ EXPECT_EQ(VideoFrame::YV16, test_engine_->GetSurfaceFormat());
// Invalid value.
codec_context_.pix_fmt = PIX_FMT_NONE;
- EXPECT_EQ(VideoSurface::INVALID, test_engine_->GetSurfaceFormat());
+ EXPECT_EQ(VideoFrame::INVALID, test_engine_->GetSurfaceFormat());
}
} // namespace media
diff --git a/media/filters/omx_video_decode_engine.cc b/media/filters/omx_video_decode_engine.cc
index 4b6d081..be4bd8d 100644
--- a/media/filters/omx_video_decode_engine.cc
+++ b/media/filters/omx_video_decode_engine.cc
@@ -136,8 +136,8 @@ void OmxVideoDecodeEngine::Flush(Task* done_cb) {
omx_codec_->Flush(TaskToCallbackAdapter::NewCallback(done_cb));
}
-VideoSurface::Format OmxVideoDecodeEngine::GetSurfaceFormat() const {
- return VideoSurface::YV12;
+VideoFrame::Format OmxVideoDecodeEngine::GetSurfaceFormat() const {
+ return VideoFrame::YV12;
}
void OmxVideoDecodeEngine::Stop(Callback0::Type* done_cb) {
diff --git a/media/filters/omx_video_decode_engine.h b/media/filters/omx_video_decode_engine.h
index 43bb65d..3ddc244 100644
--- a/media/filters/omx_video_decode_engine.h
+++ b/media/filters/omx_video_decode_engine.h
@@ -38,7 +38,7 @@ class OmxVideoDecodeEngine : public VideoDecodeEngine,
virtual void DecodeFrame(const Buffer& buffer, AVFrame* yuv_frame,
bool* got_result, Task* done_cb);
virtual void Flush(Task* done_cb);
- virtual VideoSurface::Format GetSurfaceFormat() const;
+ virtual VideoFrame::Format GetSurfaceFormat() const;
virtual State state() const { return state_; }
diff --git a/media/filters/video_decode_engine.h b/media/filters/video_decode_engine.h
index 9cfbe97..bfdc13c 100644
--- a/media/filters/video_decode_engine.h
+++ b/media/filters/video_decode_engine.h
@@ -1,11 +1,11 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved. Use of this
-// source code is governed by a BSD-style license that can be found in the
-// LICENSE file.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
#ifndef MEDIA_FILTERS_VIDEO_DECODE_ENGINE_H_
#define MEDIA_FILTERS_VIDEO_DECODE_ENGINE_H_
-#include "media/base/buffers.h" // For VideoSurface.
+#include "media/base/video_frame.h"
// FFmpeg types.
//
@@ -48,7 +48,7 @@ class VideoDecodeEngine {
// Returns the VideoSurface::Format of the resulting |yuv_frame| from
// DecodeFrame().
- virtual VideoSurface::Format GetSurfaceFormat() const = 0;
+ virtual VideoFrame::Format GetSurfaceFormat() const = 0;
// Returns the current state of the decode engine.
virtual State state() const = 0;
diff --git a/media/filters/video_decoder_impl.cc b/media/filters/video_decoder_impl.cc
index 870e160..4bd7abc 100644
--- a/media/filters/video_decoder_impl.cc
+++ b/media/filters/video_decoder_impl.cc
@@ -7,7 +7,7 @@
#include "base/task.h"
#include "media/base/filters.h"
#include "media/base/limits.h"
-#include "media/base/video_frame_impl.h"
+#include "media/base/video_frame.h"
#include "media/ffmpeg/ffmpeg_common.h"
#include "media/filters/ffmpeg_interfaces.h"
#include "media/filters/video_decode_engine.h"
@@ -195,21 +195,21 @@ void VideoDecoderImpl::OnDecodeComplete(AVFrame* yuv_frame, bool* got_frame,
}
}
-bool VideoDecoderImpl::EnqueueVideoFrame(VideoSurface::Format surface_format,
+bool VideoDecoderImpl::EnqueueVideoFrame(VideoFrame::Format surface_format,
const TimeTuple& time,
const AVFrame* frame) {
// TODO(fbarchard): Work around for FFmpeg http://crbug.com/27675
// The decoder is in a bad state and not decoding correctly.
// Checking for NULL avoids a crash in CopyPlane().
- if (!frame->data[VideoSurface::kYPlane] ||
- !frame->data[VideoSurface::kUPlane] ||
- !frame->data[VideoSurface::kVPlane]) {
+ if (!frame->data[VideoFrame::kYPlane] ||
+ !frame->data[VideoFrame::kUPlane] ||
+ !frame->data[VideoFrame::kVPlane]) {
return true;
}
scoped_refptr<VideoFrame> video_frame;
- VideoFrameImpl::CreateFrame(surface_format, width_, height_,
- time.timestamp, time.duration, &video_frame);
+ VideoFrame::CreateFrame(surface_format, width_, height_,
+ time.timestamp, time.duration, &video_frame);
if (!video_frame) {
return false;
}
@@ -219,31 +219,26 @@ bool VideoDecoderImpl::EnqueueVideoFrame(VideoSurface::Format surface_format,
// avcodec_decode_video() call.
// TODO(scherkus): figure out pre-allocation/buffer cycling scheme.
// TODO(scherkus): is there a cleaner way to figure out the # of planes?
- VideoSurface surface;
- if (!video_frame->Lock(&surface)) {
- return false;
- }
- CopyPlane(VideoSurface::kYPlane, surface, frame);
- CopyPlane(VideoSurface::kUPlane, surface, frame);
- CopyPlane(VideoSurface::kVPlane, surface, frame);
- video_frame->Unlock();
+ CopyPlane(VideoFrame::kYPlane, *video_frame, frame);
+ CopyPlane(VideoFrame::kUPlane, *video_frame, frame);
+ CopyPlane(VideoFrame::kVPlane, *video_frame, frame);
EnqueueResult(video_frame);
return true;
}
void VideoDecoderImpl::CopyPlane(size_t plane,
- const VideoSurface& surface,
+ const VideoFrame& video_frame,
const AVFrame* frame) {
- DCHECK(surface.width % 2 == 0);
+ DCHECK(video_frame.width() % 2 == 0);
const uint8* source = frame->data[plane];
const size_t source_stride = frame->linesize[plane];
- uint8* dest = surface.data[plane];
- const size_t dest_stride = surface.strides[plane];
- size_t bytes_per_line = surface.width;
- size_t copy_lines = surface.height;
- if (plane != VideoSurface::kYPlane) {
+ uint8* dest = video_frame.data(plane);
+ const size_t dest_stride = video_frame.stride(plane);
+ size_t bytes_per_line = video_frame.width();
+ size_t copy_lines = video_frame.height();
+ if (plane != VideoFrame::kYPlane) {
bytes_per_line /= 2;
- if (surface.format == VideoSurface::YV12) {
+ if (video_frame.format() == VideoFrame::YV12) {
copy_lines = (copy_lines + 1) / 2;
}
}
@@ -257,7 +252,7 @@ void VideoDecoderImpl::CopyPlane(size_t plane,
void VideoDecoderImpl::EnqueueEmptyFrame() {
scoped_refptr<VideoFrame> video_frame;
- VideoFrameImpl::CreateEmptyFrame(&video_frame);
+ VideoFrame::CreateEmptyFrame(&video_frame);
EnqueueResult(video_frame);
}
diff --git a/media/filters/video_decoder_impl.h b/media/filters/video_decoder_impl.h
index 959b5b0..7f34912 100644
--- a/media/filters/video_decoder_impl.h
+++ b/media/filters/video_decoder_impl.h
@@ -1,4 +1,4 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@@ -7,6 +7,7 @@
#include "base/time.h"
#include "media/base/pts_heap.h"
+#include "media/base/video_frame.h"
#include "media/filters/decoder_base.h"
#include "testing/gtest/include/gtest/gtest_prod.h"
@@ -60,14 +61,14 @@ class VideoDecoderImpl : public DecoderBase<VideoDecoder, VideoFrame> {
virtual void DoSeek(base::TimeDelta time, Task* done_cb);
virtual void DoDecode(Buffer* buffer, Task* done_cb);
- virtual bool EnqueueVideoFrame(VideoSurface::Format surface_format,
+ virtual bool EnqueueVideoFrame(VideoFrame::Format surface_format,
const TimeTuple& time,
const AVFrame* frame);
// Create an empty video frame and queue it.
virtual void EnqueueEmptyFrame();
- virtual void CopyPlane(size_t plane, const VideoSurface& surface,
+ virtual void CopyPlane(size_t plane, const VideoFrame& video_frame,
const AVFrame* frame);
// Methods that pickup after the decode engine has finished its action.
diff --git a/media/filters/video_decoder_impl_unittest.cc b/media/filters/video_decoder_impl_unittest.cc
index a1b1cf0..4742951 100644
--- a/media/filters/video_decoder_impl_unittest.cc
+++ b/media/filters/video_decoder_impl_unittest.cc
@@ -53,7 +53,7 @@ class MockVideoDecodeEngine : public VideoDecodeEngine {
bool* got_result, Task* done_cb));
MOCK_METHOD1(Flush, void(Task* done_cb));
MOCK_CONST_METHOD0(state, State());
- MOCK_CONST_METHOD0(GetSurfaceFormat, VideoSurface::Format());
+ MOCK_CONST_METHOD0(GetSurfaceFormat, VideoFrame::Format());
};
// Class that just mocks the private functions.
@@ -63,11 +63,11 @@ class DecoderPrivateMock : public VideoDecoderImpl {
: VideoDecoderImpl(engine) {
}
- MOCK_METHOD3(EnqueueVideoFrame, bool(VideoSurface::Format surface_format,
+ MOCK_METHOD3(EnqueueVideoFrame, bool(VideoFrame::Format surface_format,
const TimeTuple& time,
const AVFrame* frame));
MOCK_METHOD0(EnqueueEmptyFrame, void());
- MOCK_METHOD3(CopyPlane, void(size_t plane, const VideoSurface& surface,
+ MOCK_METHOD3(CopyPlane, void(size_t plane, const VideoFrame* video_frame,
const AVFrame* frame));
MOCK_METHOD4(FindPtsAndDuration, TimeTuple(const AVRational& time_base,
const PtsHeap& pts_heap,
@@ -341,7 +341,7 @@ TEST_F(VideoDecoderImplTest, DoDecode_TestStateTransition) {
WithArg<3>(InvokeRunnable())));
EXPECT_CALL(*mock_engine, GetSurfaceFormat())
.Times(3)
- .WillRepeatedly(Return(VideoSurface::YV16));
+ .WillRepeatedly(Return(VideoFrame::YV16));
EXPECT_CALL(*mock_decoder, FindPtsAndDuration(_, _, _, _))
.WillOnce(Return(kTestPts1))
.WillOnce(Return(kTestPts2))
@@ -427,7 +427,7 @@ TEST_F(VideoDecoderImplTest, DoDecode_EnqueueVideoFrameError) {
SetArgumentPointee<2>(true),
WithArg<3>(InvokeRunnable())));
EXPECT_CALL(*mock_engine, GetSurfaceFormat())
- .WillOnce(Return(VideoSurface::YV16));
+ .WillOnce(Return(VideoFrame::YV16));
EXPECT_CALL(*mock_decoder, FindPtsAndDuration(_, _, _, _))
.WillOnce(Return(kTestPts1));
EXPECT_CALL(*mock_decoder, EnqueueVideoFrame(_, _, _))
diff --git a/media/filters/video_renderer_base.cc b/media/filters/video_renderer_base.cc
index e8c4966..e58868d 100644
--- a/media/filters/video_renderer_base.cc
+++ b/media/filters/video_renderer_base.cc
@@ -5,7 +5,7 @@
#include "base/callback.h"
#include "media/base/buffers.h"
#include "media/base/filter_host.h"
-#include "media/base/video_frame_impl.h"
+#include "media/base/video_frame.h"
#include "media/filters/video_renderer_base.h"
namespace media {
@@ -157,7 +157,7 @@ void VideoRendererBase::Initialize(VideoDecoder* decoder,
// Create a black frame so clients have something to render before we finish
// prerolling.
- VideoFrameImpl::CreateBlackFrame(width_, height_, &current_frame_);
+ VideoFrame::CreateBlackFrame(width_, height_, &current_frame_);
// We're all good! Consider ourselves paused (ThreadMain() should never
// see us in the kUninitialized state).
@@ -322,7 +322,7 @@ void VideoRendererBase::OnReadComplete(VideoFrame* frame) {
// our new location.
state_ = kPaused;
if (frames_.front()->IsEndOfStream()) {
- VideoFrameImpl::CreateBlackFrame(width_, height_, &current_frame_);
+ VideoFrame::CreateBlackFrame(width_, height_, &current_frame_);
} else {
current_frame_ = frames_.front();
}
diff --git a/media/filters/video_renderer_base_unittest.cc b/media/filters/video_renderer_base_unittest.cc
index 17b8b13..d5eaf99 100644
--- a/media/filters/video_renderer_base_unittest.cc
+++ b/media/filters/video_renderer_base_unittest.cc
@@ -1,4 +1,4 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
@@ -7,7 +7,7 @@
#include "media/base/data_buffer.h"
#include "media/base/mock_filter_host.h"
#include "media/base/mock_filters.h"
-#include "media/base/video_frame_impl.h"
+#include "media/base/video_frame.h"
#include "media/filters/video_renderer_base.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -185,8 +185,8 @@ TEST_F(VideoRendererBaseTest, Initialize_Successful) {
while (!read_queue_.empty()) {
const base::TimeDelta kZero;
scoped_refptr<VideoFrame> frame;
- VideoFrameImpl::CreateFrame(VideoSurface::RGB32, kWidth, kHeight, kZero,
- kZero, &frame);
+ VideoFrame::CreateFrame(VideoFrame::RGB32, kWidth, kHeight, kZero,
+ kZero, &frame);
read_queue_.front()->Run(frame);
delete read_queue_.front();
read_queue_.pop_front();
diff --git a/media/media.gyp b/media/media.gyp
index 20e1f87..08c2f2b 100644
--- a/media/media.gyp
+++ b/media/media.gyp
@@ -72,8 +72,8 @@
'base/seekable_buffer.h',
'base/synchronizer.cc',
'base/synchronizer.h',
- 'base/video_frame_impl.cc',
- 'base/video_frame_impl.h',
+ 'base/video_frame.cc',
+ 'base/video_frame.h',
'base/yuv_convert.cc',
'base/yuv_convert.h',
'base/yuv_row_win.cc',
@@ -185,7 +185,7 @@
'base/pts_heap_unittest.cc',
'base/run_all_unittests.cc',
'base/seekable_buffer_unittest.cc',
- 'base/video_frame_impl_unittest.cc',
+ 'base/video_frame_unittest.cc',
'base/yuv_convert_unittest.cc',
'filters/audio_renderer_algorithm_ola_unittest.cc',
'filters/audio_renderer_base_unittest.cc',
diff --git a/media/tools/player_wtl/view.h b/media/tools/player_wtl/view.h
index ce55c10..a22dea6 100644
--- a/media/tools/player_wtl/view.h
+++ b/media/tools/player_wtl/view.h
@@ -1,6 +1,6 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved. Use of this
-// source code is governed by a BSD-style license that can be found in the
-// LICENSE file.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
#ifndef MEDIA_TOOLS_PLAYER_WTL_VIEW_H_
#define MEDIA_TOOLS_PLAYER_WTL_VIEW_H_
@@ -9,7 +9,7 @@
#include <process.h>
#include <string.h>
-#include "media/base/buffers.h"
+#include "media/base/video_frame.h"
#include "media/base/yuv_convert.h"
#include "media/tools/player_wtl/movie.h"
#include "media/tools/player_wtl/player_wtl.h"
@@ -121,10 +121,7 @@ class WtlVideoWindow : public CScrollWindowImpl<WtlVideoWindow> {
}
// Convert the video frame to RGB and Blit.
- void ConvertFrame(media::VideoFrame * video_frame) {
- media::VideoSurface frame_in;
- bool lock_result = video_frame->Lock(&frame_in);
- DCHECK(lock_result);
+ void ConvertFrame(media::VideoFrame* video_frame) {
BITMAP bm;
bmp_.GetBitmap(&bm);
int dibwidth = bm.bmWidth;
@@ -133,11 +130,11 @@ class WtlVideoWindow : public CScrollWindowImpl<WtlVideoWindow> {
uint8 *movie_dib_bits = reinterpret_cast<uint8 *>(bm.bmBits) +
bm.bmWidthBytes * (bm.bmHeight - 1);
int dibrowbytes = -bm.bmWidthBytes;
- int clipped_width = frame_in.width;
+ int clipped_width = video_frame->width();
if (dibwidth < clipped_width) {
clipped_width = dibwidth;
}
- int clipped_height = frame_in.height;
+ int clipped_height = video_frame->height();
if (dibheight < clipped_height) {
clipped_height = dibheight;
}
@@ -186,7 +183,7 @@ class WtlVideoWindow : public CScrollWindowImpl<WtlVideoWindow> {
// Append each frame to end of file.
bool enable_dump_yuv_file = media::Movie::get()->GetDumpYuvFileEnable();
if (enable_dump_yuv_file) {
- DumpYUV(frame_in);
+ DumpYUV(video_frame);
}
#ifdef TESTING
@@ -196,7 +193,7 @@ class WtlVideoWindow : public CScrollWindowImpl<WtlVideoWindow> {
bool enable_draw = media::Movie::get()->GetDrawEnable();
if (enable_draw) {
DCHECK(bm.bmBitsPixel == 32);
- DrawYUV(frame_in,
+ DrawYUV(video_frame,
movie_dib_bits,
dibrowbytes,
clipped_width,
@@ -344,38 +341,39 @@ class WtlVideoWindow : public CScrollWindowImpl<WtlVideoWindow> {
// Draw a frame of YUV to an RGB buffer with scaling.
// Handles different YUV formats.
- void DrawYUV(const media::VideoSurface &frame_in,
+ void DrawYUV(const media::VideoFrame* video_frame,
uint8 *movie_dib_bits,
int dibrowbytes,
int clipped_width,
int clipped_height,
int scaled_width,
int scaled_height) {
- media::YUVType yuv_type = (frame_in.format == media::VideoSurface::YV12) ?
- media::YV12 : media::YV16;
+ media::YUVType yuv_type =
+ (video_frame->format() == media::VideoFrame::YV12) ?
+ media::YV12 : media::YV16;
// Simple convert is not necessary for performance, but allows
// easier alternative implementations.
if ((view_rotate_ == media::ROTATE_0) && // Not scaled or rotated
(view_size_ == 2)) {
- media::ConvertYUVToRGB32(frame_in.data[0],
- frame_in.data[1],
- frame_in.data[2],
+ media::ConvertYUVToRGB32(video_frame->data(0),
+ video_frame->data(1),
+ video_frame->data(2),
movie_dib_bits,
scaled_width, scaled_height,
- frame_in.strides[0],
- frame_in.strides[1],
+ video_frame->stride(0),
+ video_frame->stride(1),
dibrowbytes,
yuv_type);
} else {
- media::ScaleYUVToRGB32(frame_in.data[0],
- frame_in.data[1],
- frame_in.data[2],
+ media::ScaleYUVToRGB32(video_frame->data(0),
+ video_frame->data(1),
+ video_frame->data(2),
movie_dib_bits,
clipped_width, clipped_height,
scaled_width, scaled_height,
- frame_in.strides[0],
- frame_in.strides[1],
+ video_frame->stride(0),
+ video_frame->stride(1),
dibrowbytes,
yuv_type,
view_rotate_);
@@ -383,28 +381,30 @@ class WtlVideoWindow : public CScrollWindowImpl<WtlVideoWindow> {
}
// Diagnostic function to write out YUV in format compatible with PYUV tool.
- void DumpYUV(const media::VideoSurface &frame_in) {
+ void DumpYUV(const media::VideoFrame* video_frame) {
FILE * file_yuv = fopen("raw.yuv", "ab+"); // Open for append binary.
if (file_yuv != NULL) {
fseek(file_yuv, 0, SEEK_END);
- const size_t frame_size = frame_in.width * frame_in.height;
- for (size_t y = 0; y < frame_in.height; ++y)
- fwrite(frame_in.data[0]+frame_in.strides[0]*y,
- frame_in.width, sizeof(uint8), file_yuv);
- for (size_t y = 0; y < frame_in.height/2; ++y)
- fwrite(frame_in.data[1]+frame_in.strides[1]*y,
- frame_in.width/2, sizeof(uint8), file_yuv);
- for (size_t y = 0; y < frame_in.height/2; ++y)
- fwrite(frame_in.data[2]+frame_in.strides[2]*y,
- frame_in.width/2, sizeof(uint8), file_yuv);
+ const size_t frame_size =
+ video_frame->width() * video_frame->height();
+ for (size_t y = 0; y < video_frame->height(); ++y)
+ fwrite(video_frame->data(0) + video_frame->stride(0)*y,
+ video_frame->width(), sizeof(uint8), file_yuv);
+ for (size_t y = 0; y < video_frame->height()/2; ++y)
+ fwrite(video_frame->data(1) + video_frame->stride(1)*y,
+ video_frame->width() / 2, sizeof(uint8), file_yuv);
+ for (size_t y = 0; y < video_frame->height()/2; ++y)
+ fwrite(video_frame->data(2) + video_frame->stride(2)*y,
+ video_frame->width() / 2, sizeof(uint8), file_yuv);
fclose(file_yuv);
#if TESTING
static int frame_dump_count = 0;
char outputbuf[512];
_snprintf_s(outputbuf, sizeof(outputbuf), "yuvdump %4d %dx%d stride %d\n",
- frame_dump_count, frame_in.width, frame_in.height,
- frame_in.strides[0]);
+ frame_dump_count, video_frame->width(),
+ video_frame->height(),
+ video_frame->stride(0));
OutputDebugStringA(outputbuf);
++frame_dump_count;
#endif
diff --git a/media/tools/player_x11/x11_video_renderer.cc b/media/tools/player_x11/x11_video_renderer.cc
index 068b971..87d1c2f 100644
--- a/media/tools/player_x11/x11_video_renderer.cc
+++ b/media/tools/player_x11/x11_video_renderer.cc
@@ -9,7 +9,7 @@
#include <X11/extensions/Xrender.h>
#include <X11/extensions/Xcomposite.h>
-#include "media/base/buffers.h"
+#include "media/base/video_frame.h"
#include "media/base/yuv_convert.h"
X11VideoRenderer* X11VideoRenderer::instance_ = NULL;
@@ -153,31 +153,26 @@ void X11VideoRenderer::Paint() {
return;
// Convert YUV frame to RGB.
- media::VideoSurface frame_in;
- if (video_frame->Lock(&frame_in)) {
- DCHECK(frame_in.format == media::VideoSurface::YV12 ||
- frame_in.format == media::VideoSurface::YV16);
- DCHECK(frame_in.strides[media::VideoSurface::kUPlane] ==
- frame_in.strides[media::VideoSurface::kVPlane]);
- DCHECK(frame_in.planes == media::VideoSurface::kNumYUVPlanes);
-
- DCHECK(image_->data);
- media::YUVType yuv_type = (frame_in.format == media::VideoSurface::YV12) ?
- media::YV12 : media::YV16;
- media::ConvertYUVToRGB32(frame_in.data[media::VideoSurface::kYPlane],
- frame_in.data[media::VideoSurface::kUPlane],
- frame_in.data[media::VideoSurface::kVPlane],
- (uint8*)image_->data,
- frame_in.width,
- frame_in.height,
- frame_in.strides[media::VideoSurface::kYPlane],
- frame_in.strides[media::VideoSurface::kUPlane],
- image_->bytes_per_line,
- yuv_type);
- video_frame->Unlock();
- } else {
- NOTREACHED();
- }
+ DCHECK(video_frame->format() == media::VideoFrame::YV12 ||
+ video_frame->format() == media::VideoFrame::YV16);
+ DCHECK(video_frame->stride(media::VideoFrame::kUPlane) ==
+ video_frame->stride(media::VideoFrame::kVPlane));
+ DCHECK(video_frame->planes() == media::VideoFrame::kNumYUVPlanes);
+
+ DCHECK(image_->data);
+ media::YUVType yuv_type =
+ (video_frame->format() == media::VideoFrame::YV12) ?
+ media::YV12 : media::YV16;
+ media::ConvertYUVToRGB32(video_frame->data(media::VideoFrame::kYPlane),
+ video_frame->data(media::VideoFrame::kUPlane),
+ video_frame->data(media::VideoFrame::kVPlane),
+ (uint8*)image_->data,
+ video_frame->width(),
+ video_frame->height(),
+ video_frame->stride(media::VideoFrame::kYPlane),
+ video_frame->stride(media::VideoFrame::kUPlane),
+ image_->bytes_per_line,
+ yuv_type);
if (use_render_) {
// If XRender is used, we'll upload the image to a pixmap. And then
diff --git a/webkit/glue/media/video_renderer_impl.cc b/webkit/glue/media/video_renderer_impl.cc
index ba06d1c..50c29ca 100644
--- a/webkit/glue/media/video_renderer_impl.cc
+++ b/webkit/glue/media/video_renderer_impl.cc
@@ -1,8 +1,8 @@
-// Copyright (c) 2009 The Chromium Authors. All rights reserved. Use of this
-// source code is governed by a BSD-style license that can be found in the
-// LICENSE file.
+// Copyright (c) 2010 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
-#include "media/base/buffers.h"
+#include "media/base/video_frame.h"
#include "media/base/yuv_convert.h"
#include "webkit/glue/media/video_renderer_impl.h"
#include "webkit/glue/webmediaplayer_impl.h"
@@ -129,31 +129,26 @@ void VideoRendererImpl::SlowPaint(media::VideoFrame* video_frame,
timestamp != last_converted_timestamp_) {
last_converted_frame_ = video_frame;
last_converted_timestamp_ = timestamp;
- media::VideoSurface frame_in;
- if (video_frame->Lock(&frame_in)) {
- DCHECK(frame_in.format == media::VideoSurface::YV12 ||
- frame_in.format == media::VideoSurface::YV16);
- DCHECK(frame_in.strides[media::VideoSurface::kUPlane] ==
- frame_in.strides[media::VideoSurface::kVPlane]);
- DCHECK(frame_in.planes == media::VideoSurface::kNumYUVPlanes);
- bitmap_.lockPixels();
- media::YUVType yuv_type = (frame_in.format == media::VideoSurface::YV12) ?
- media::YV12 : media::YV16;
- media::ConvertYUVToRGB32(frame_in.data[media::VideoSurface::kYPlane],
- frame_in.data[media::VideoSurface::kUPlane],
- frame_in.data[media::VideoSurface::kVPlane],
- static_cast<uint8*>(bitmap_.getPixels()),
- frame_in.width,
- frame_in.height,
- frame_in.strides[media::VideoSurface::kYPlane],
- frame_in.strides[media::VideoSurface::kUPlane],
- bitmap_.rowBytes(),
- yuv_type);
- bitmap_.unlockPixels();
- video_frame->Unlock();
- } else {
- NOTREACHED();
- }
+ DCHECK(video_frame->format() == media::VideoFrame::YV12 ||
+ video_frame->format() == media::VideoFrame::YV16);
+ DCHECK(video_frame->stride(media::VideoFrame::kUPlane) ==
+ video_frame->stride(media::VideoFrame::kVPlane));
+ DCHECK(video_frame->planes() == media::VideoFrame::kNumYUVPlanes);
+ bitmap_.lockPixels();
+ media::YUVType yuv_type =
+ (video_frame->format() == media::VideoFrame::YV12) ?
+ media::YV12 : media::YV16;
+ media::ConvertYUVToRGB32(video_frame->data(media::VideoFrame::kYPlane),
+ video_frame->data(media::VideoFrame::kUPlane),
+ video_frame->data(media::VideoFrame::kVPlane),
+ static_cast<uint8*>(bitmap_.getPixels()),
+ video_frame->width(),
+ video_frame->height(),
+ video_frame->stride(media::VideoFrame::kYPlane),
+ video_frame->stride(media::VideoFrame::kUPlane),
+ bitmap_.rowBytes(),
+ yuv_type);
+ bitmap_.unlockPixels();
}
// 2. Paint the bitmap to canvas.
@@ -173,112 +168,101 @@ void VideoRendererImpl::SlowPaint(media::VideoFrame* video_frame,
void VideoRendererImpl::FastPaint(media::VideoFrame* video_frame,
skia::PlatformCanvas* canvas,
const gfx::Rect& dest_rect) {
- media::VideoSurface frame_in;
- if (video_frame->Lock(&frame_in)) {
- DCHECK(frame_in.format == media::VideoSurface::YV12 ||
- frame_in.format == media::VideoSurface::YV16);
- DCHECK(frame_in.strides[media::VideoSurface::kUPlane] ==
- frame_in.strides[media::VideoSurface::kVPlane]);
- DCHECK(frame_in.planes == media::VideoSurface::kNumYUVPlanes);
- const SkBitmap& bitmap = canvas->getDevice()->accessBitmap(true);
- media::YUVType yuv_type = (frame_in.format == media::VideoSurface::YV12) ?
- media::YV12 : media::YV16;
- int y_shift = yuv_type; // 1 for YV12, 0 for YV16.
+ DCHECK(video_frame->format() == media::VideoFrame::YV12 ||
+ video_frame->format() == media::VideoFrame::YV16);
+ DCHECK(video_frame->stride(media::VideoFrame::kUPlane) ==
+ video_frame->stride(media::VideoFrame::kVPlane));
+ DCHECK(video_frame->planes() == media::VideoFrame::kNumYUVPlanes);
+ const SkBitmap& bitmap = canvas->getDevice()->accessBitmap(true);
+ media::YUVType yuv_type = (video_frame->format() == media::VideoFrame::YV12) ?
+ media::YV12 : media::YV16;
+ int y_shift = yuv_type; // 1 for YV12, 0 for YV16.
- // Create a rectangle backed by SkScalar.
- SkRect scalar_dest_rect;
- scalar_dest_rect.iset(dest_rect.x(), dest_rect.y(),
- dest_rect.right(), dest_rect.bottom());
+ // Create a rectangle backed by SkScalar.
+ SkRect scalar_dest_rect;
+ scalar_dest_rect.iset(dest_rect.x(), dest_rect.y(),
+ dest_rect.right(), dest_rect.bottom());
- // Transform the destination rectangle to local coordinates.
- const SkMatrix& local_matrix = canvas->getTotalMatrix();
- SkRect local_dest_rect;
- local_matrix.mapRect(&local_dest_rect, scalar_dest_rect);
+ // Transform the destination rectangle to local coordinates.
+ const SkMatrix& local_matrix = canvas->getTotalMatrix();
+ SkRect local_dest_rect;
+ local_matrix.mapRect(&local_dest_rect, scalar_dest_rect);
- // After projecting the destination rectangle to local coordinates, round
- // the projected rectangle to integer values, this will give us pixel values
- // of the rectangle.
- SkIRect local_dest_irect, local_dest_irect_saved;
- local_dest_rect.round(&local_dest_irect);
- local_dest_rect.round(&local_dest_irect_saved);
+ // After projecting the destination rectangle to local coordinates, round
+ // the projected rectangle to integer values, this will give us pixel values
+ // of the rectangle.
+ SkIRect local_dest_irect, local_dest_irect_saved;
+ local_dest_rect.round(&local_dest_irect);
+ local_dest_rect.round(&local_dest_irect_saved);
- // Only does the paint if the destination rect intersects with the clip
- // rect.
- if (local_dest_irect.intersect(canvas->getTotalClip().getBounds())) {
- // At this point |local_dest_irect| contains the rect that we should draw
- // to within the clipping rect.
+  // Only paint if the destination rect intersects with the clip
+ // rect.
+ if (local_dest_irect.intersect(canvas->getTotalClip().getBounds())) {
+ // At this point |local_dest_irect| contains the rect that we should draw
+ // to within the clipping rect.
- // Calculate the address for the top left corner of destination rect in
- // the canvas that we will draw to. The address is obtained by the base
- // address of the canvas shifted by "left" and "top" of the rect.
- uint8* dest_rect_pointer = static_cast<uint8*>(bitmap.getPixels()) +
- local_dest_irect.fTop * bitmap.rowBytes() +
- local_dest_irect.fLeft * 4;
+ // Calculate the address for the top left corner of destination rect in
+ // the canvas that we will draw to. The address is obtained by the base
+ // address of the canvas shifted by "left" and "top" of the rect.
+ uint8* dest_rect_pointer = static_cast<uint8*>(bitmap.getPixels()) +
+ local_dest_irect.fTop * bitmap.rowBytes() +
+ local_dest_irect.fLeft * 4;
- // Project the clip rect to the original video frame, obtains the
- // dimensions of the projected clip rect, "left" and "top" of the rect.
- // The math here are all integer math so we won't have rounding error and
- // write outside of the canvas.
- // We have the assumptions of dest_rect.width() and dest_rect.height()
- // being non-zero, these are valid assumptions since finding intersection
- // above rejects empty rectangle so we just do a DCHECK here.
- DCHECK_NE(0, dest_rect.width());
- DCHECK_NE(0, dest_rect.height());
- size_t frame_clip_width = local_dest_irect.width() *
- frame_in.width /
- local_dest_irect_saved.width();
- size_t frame_clip_height = local_dest_irect.height() *
- frame_in.height /
- local_dest_irect_saved.height();
+      // Project the clip rect to the original video frame to obtain the
+ // dimensions of the projected clip rect, "left" and "top" of the rect.
+      // The math here is all integer math so we won't have rounding error and
+ // write outside of the canvas.
+ // We have the assumptions of dest_rect.width() and dest_rect.height()
+ // being non-zero, these are valid assumptions since finding intersection
+ // above rejects empty rectangle so we just do a DCHECK here.
+ DCHECK_NE(0, dest_rect.width());
+ DCHECK_NE(0, dest_rect.height());
+ size_t frame_clip_width = local_dest_irect.width() *
+ video_frame->width() / local_dest_irect_saved.width();
+ size_t frame_clip_height = local_dest_irect.height() *
+ video_frame->height() / local_dest_irect_saved.height();
- // Project the "left" and "top" of the final destination rect to local
- // coordinates of the video frame, use these values to find the offsets
- // in the video frame to start reading.
- size_t frame_clip_left = (local_dest_irect.fLeft -
- local_dest_irect_saved.fLeft) *
- frame_in.width /
- local_dest_irect_saved.width();
- size_t frame_clip_top = (local_dest_irect.fTop -
- local_dest_irect_saved.fTop) *
- frame_in.height /
- local_dest_irect_saved.height();
+ // Project the "left" and "top" of the final destination rect to local
+ // coordinates of the video frame, use these values to find the offsets
+ // in the video frame to start reading.
+ size_t frame_clip_left =
+ (local_dest_irect.fLeft - local_dest_irect_saved.fLeft) *
+ video_frame->width() / local_dest_irect_saved.width();
+ size_t frame_clip_top =
+ (local_dest_irect.fTop - local_dest_irect_saved.fTop) *
+ video_frame->height() / local_dest_irect_saved.height();
- // Use the "left" and "top" of the destination rect to locate the offset
- // in Y, U and V planes.
- size_t y_offset = frame_in.strides[media::VideoSurface::kYPlane] *
- frame_clip_top + frame_clip_left;
- // For format YV12, there is one U, V value per 2x2 block.
- // For format YV16, there is one u, V value per 2x1 block.
- size_t uv_offset = (frame_in.strides[media::VideoSurface::kUPlane] *
- (frame_clip_top >> y_shift)) +
- (frame_clip_left >> 1);
- uint8* frame_clip_y = frame_in.data[media::VideoSurface::kYPlane] +
- y_offset;
- uint8* frame_clip_u = frame_in.data[media::VideoSurface::kUPlane] +
- uv_offset;
- uint8* frame_clip_v = frame_in.data[media::VideoSurface::kVPlane] +
- uv_offset;
- bitmap.lockPixels();
+ // Use the "left" and "top" of the destination rect to locate the offset
+ // in Y, U and V planes.
+ size_t y_offset = video_frame->stride(media::VideoFrame::kYPlane) *
+ frame_clip_top + frame_clip_left;
+ // For format YV12, there is one U, V value per 2x2 block.
+      // For format YV16, there is one U, V value per 2x1 block.
+ size_t uv_offset = (video_frame->stride(media::VideoFrame::kUPlane) *
+ (frame_clip_top >> y_shift)) + (frame_clip_left >> 1);
+ uint8* frame_clip_y =
+ video_frame->data(media::VideoFrame::kYPlane) + y_offset;
+ uint8* frame_clip_u =
+ video_frame->data(media::VideoFrame::kUPlane) + uv_offset;
+ uint8* frame_clip_v =
+ video_frame->data(media::VideoFrame::kVPlane) + uv_offset;
+ bitmap.lockPixels();
- // TODO(hclam): do rotation and mirroring here.
- media::ScaleYUVToRGB32(frame_clip_y,
- frame_clip_u,
- frame_clip_v,
- dest_rect_pointer,
- frame_clip_width,
- frame_clip_height,
- local_dest_irect.width(),
- local_dest_irect.height(),
- frame_in.strides[media::VideoSurface::kYPlane],
- frame_in.strides[media::VideoSurface::kUPlane],
- bitmap.rowBytes(),
- yuv_type,
- media::ROTATE_0);
- bitmap.unlockPixels();
- }
- video_frame->Unlock();
- } else {
- NOTREACHED();
+ // TODO(hclam): do rotation and mirroring here.
+ media::ScaleYUVToRGB32(frame_clip_y,
+ frame_clip_u,
+ frame_clip_v,
+ dest_rect_pointer,
+ frame_clip_width,
+ frame_clip_height,
+ local_dest_irect.width(),
+ local_dest_irect.height(),
+ video_frame->stride(media::VideoFrame::kYPlane),
+ video_frame->stride(media::VideoFrame::kUPlane),
+ bitmap.rowBytes(),
+ yuv_type,
+ media::ROTATE_0);
+ bitmap.unlockPixels();
}
}