author    penghuang@chromium.org <penghuang@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2014-02-26 00:44:05 +0000
committer penghuang@chromium.org <penghuang@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>    2014-02-26 00:44:05 +0000
commit    61ebd74e979d3be1daea3569e9e7411401afdf40 (patch)
tree      a781bafd3cc5980b2f3a4bea95f9e71c70abf047
parent    679f6679273c9cafd8f49fe18359c1774f50b744 (diff)
[PPAPI][MediaStream] Support configure for video input.
Support configuring frame format and size for video input.

BUG=330851
Review URL: https://codereview.chromium.org/150403006

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@253307 0039d316-1c4b-4281-b951-d872f2087c98
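For orientation, a minimal sketch of how a plugin might use the new support (the
attribute constants and pp::MediaStreamVideoTrack::Configure() come from the
PPAPI changes below; the 320x240/I420 values and the OnConfigure callback name
are illustrative assumptions):

  // Sketch only: ask the track for 320x240 I420 frames before pulling frames.
  const int32_t attrib_list[] = {
    PP_MEDIASTREAMVIDEOTRACK_ATTRIB_FORMAT, PP_VIDEOFRAME_FORMAT_I420,
    PP_MEDIASTREAMVIDEOTRACK_ATTRIB_WIDTH, 320,
    PP_MEDIASTREAMVIDEOTRACK_ATTRIB_HEIGHT, 240,
    PP_MEDIASTREAMVIDEOTRACK_ATTRIB_NONE  // Terminates the (attrib, value) pairs.
  };
  video_track.Configure(attrib_list, callback_factory.NewCallback(
      &MyInstance::OnConfigure));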
-rw-r--r--content/content_renderer.gypi1
-rw-r--r--content/renderer/pepper/DEPS1
-rw-r--r--content/renderer/pepper/pepper_media_stream_track_host_base.h8
-rw-r--r--content/renderer/pepper/pepper_media_stream_video_track_host.cc244
-rw-r--r--content/renderer/pepper/pepper_media_stream_video_track_host.h31
-rw-r--r--ppapi/api/ppb_media_stream_video_track.idl27
-rw-r--r--ppapi/api/ppb_video_frame.idl17
-rw-r--r--ppapi/c/ppb_media_stream_video_track.h29
-rw-r--r--ppapi/c/ppb_video_frame.h18
-rw-r--r--ppapi/cpp/media_stream_video_track.h10
-rw-r--r--ppapi/examples/media_stream_video/media_stream_video.cc305
-rw-r--r--ppapi/examples/media_stream_video/media_stream_video.html37
-rw-r--r--ppapi/ppapi_shared.gypi2
-rw-r--r--ppapi/proxy/media_stream_video_track_resource.cc63
-rw-r--r--ppapi/proxy/media_stream_video_track_resource.h6
-rw-r--r--ppapi/proxy/ppapi_messages.h13
-rw-r--r--ppapi/proxy/ppapi_param_traits.h1
-rw-r--r--ppapi/shared_impl/media_stream_buffer_manager.cc16
-rw-r--r--ppapi/shared_impl/media_stream_video_track_shared.cc40
-rw-r--r--ppapi/shared_impl/media_stream_video_track_shared.h32
20 files changed, 708 insertions, 193 deletions
diff --git a/content/content_renderer.gypi b/content/content_renderer.gypi
index 2bf4e49..7291021 100644
--- a/content/content_renderer.gypi
+++ b/content/content_renderer.gypi
@@ -595,6 +595,7 @@
'dependencies': [
'../third_party/libjingle/libjingle.gyp:libjingle_webrtc',
'../third_party/libjingle/libjingle.gyp:libpeerconnection',
+ '../third_party/libyuv/libyuv.gyp:libyuv',
'../third_party/webrtc/modules/modules.gyp:audio_device',
'../third_party/webrtc/modules/modules.gyp:audio_processing',
'<(DEPTH)/crypto/crypto.gyp:crypto',
diff --git a/content/renderer/pepper/DEPS b/content/renderer/pepper/DEPS
index 0b8e33d..677f309 100644
--- a/content/renderer/pepper/DEPS
+++ b/content/renderer/pepper/DEPS
@@ -6,6 +6,7 @@ include_rules = [
"+media/audio",
"+media/base",
"+media/video",
+ "+third_party/libyuv",
"+ui/base/ime",
"+ui/base/range",
]
diff --git a/content/renderer/pepper/pepper_media_stream_track_host_base.h b/content/renderer/pepper/pepper_media_stream_track_host_base.h
index a31c1e1..8298591 100644
--- a/content/renderer/pepper/pepper_media_stream_track_host_base.h
+++ b/content/renderer/pepper/pepper_media_stream_track_host_base.h
@@ -33,15 +33,15 @@ class PepperMediaStreamTrackHostBase
// Also see |MediaStreamBufferManager|.
void SendEnqueueBufferMessageToPlugin(int32_t index);
- private:
- // Subclasses must implement this method to clean up when the track is closed.
- virtual void OnClose() = 0;
-
// ResourceMessageHandler overrides:
virtual int32_t OnResourceMessageReceived(
const IPC::Message& msg,
ppapi::host::HostMessageContext* context) OVERRIDE;
+ private:
+ // Subclasses must implement this method to clean up when the track is closed.
+ virtual void OnClose() = 0;
+
// Message handlers:
int32_t OnHostMsgEnqueueBuffer(ppapi::host::HostMessageContext* context,
int32_t index);
diff --git a/content/renderer/pepper/pepper_media_stream_video_track_host.cc b/content/renderer/pepper/pepper_media_stream_video_track_host.cc
index 7512c5a..618d044 100644
--- a/content/renderer/pepper/pepper_media_stream_video_track_host.cc
+++ b/content/renderer/pepper/pepper_media_stream_video_track_host.cc
@@ -5,35 +5,135 @@
#include "content/renderer/pepper/pepper_media_stream_video_track_host.h"
#include "base/logging.h"
+#include "media/base/yuv_convert.h"
#include "ppapi/c/pp_errors.h"
#include "ppapi/c/ppb_video_frame.h"
+#include "ppapi/host/dispatch_host_message.h"
+#include "ppapi/host/host_message_context.h"
+#include "ppapi/proxy/ppapi_messages.h"
#include "ppapi/shared_impl/media_stream_buffer.h"
+#include "third_party/libyuv/include/libyuv.h"
using media::VideoFrame;
+using ppapi::host::HostMessageContext;
+using ppapi::MediaStreamVideoTrackShared;
namespace {
-// TODO(penghuang): make it configurable.
-const int32_t kNumberOfFrames = 4;
+const int32_t kDefaultNumberOfBuffers = 4;
+const int32_t kMaxNumberOfBuffers = 8;
+// Filter mode for scaling frames.
+const libyuv::FilterMode kFilterMode = libyuv::kFilterBox;
PP_VideoFrame_Format ToPpapiFormat(VideoFrame::Format format) {
switch (format) {
case VideoFrame::YV12:
return PP_VIDEOFRAME_FORMAT_YV12;
- case VideoFrame::YV16:
- return PP_VIDEOFRAME_FORMAT_YV16;
case VideoFrame::I420:
return PP_VIDEOFRAME_FORMAT_I420;
- case VideoFrame::YV12A:
- return PP_VIDEOFRAME_FORMAT_YV12A;
- case VideoFrame::YV12J:
- return PP_VIDEOFRAME_FORMAT_YV12J;
default:
DVLOG(1) << "Unsupported pixel format " << format;
return PP_VIDEOFRAME_FORMAT_UNKNOWN;
}
}
+VideoFrame::Format FromPpapiFormat(PP_VideoFrame_Format format) {
+ switch (format) {
+ case PP_VIDEOFRAME_FORMAT_YV12:
+ return VideoFrame::YV12;
+ case PP_VIDEOFRAME_FORMAT_I420:
+ return VideoFrame::I420;
+ default:
+ DVLOG(1) << "Unsupported pixel format " << format;
+ return VideoFrame::UNKNOWN;
+ }
+}
+
+// Computes the target size based on the size of frames received from
+// MediaStreamVideoSink and the size specified by the plugin.
+gfx::Size GetTargetSize(const gfx::Size& source,
+ const gfx::Size& plugin) {
+ return gfx::Size(plugin.width() ? plugin.width() : source.width(),
+ plugin.height() ? plugin.height() : source.height());
+}
+
+// Computes the target format based on the format of frames received from
+// MediaStreamVideoSink and the format specified by the plugin.
+PP_VideoFrame_Format GetTargetFormat(PP_VideoFrame_Format source,
+ PP_VideoFrame_Format plugin) {
+ return plugin != PP_VIDEOFRAME_FORMAT_UNKNOWN ? plugin : source;
+}
+
+void ConvertFromMediaVideoFrame(const scoped_refptr<media::VideoFrame>& src,
+ PP_VideoFrame_Format dst_format,
+ const gfx::Size& dst_size,
+ uint8_t* dst) {
+ CHECK(src->format() == VideoFrame::YV12 ||
+ src->format() == VideoFrame::I420);
+ if (dst_format == PP_VIDEOFRAME_FORMAT_BGRA) {
+ if (src->coded_size() == dst_size) {
+ libyuv::I420ToARGB(src->data(VideoFrame::kYPlane),
+ src->stride(VideoFrame::kYPlane),
+ src->data(VideoFrame::kUPlane),
+ src->stride(VideoFrame::kUPlane),
+ src->data(VideoFrame::kVPlane),
+ src->stride(VideoFrame::kVPlane),
+ dst,
+ dst_size.width() * 4,
+ dst_size.width(),
+ dst_size.height());
+ } else {
+ media::ScaleYUVToRGB32(src->data(VideoFrame::kYPlane),
+ src->data(VideoFrame::kUPlane),
+ src->data(VideoFrame::kVPlane),
+ dst,
+ src->coded_size().width(),
+ src->coded_size().height(),
+ dst_size.width(),
+ dst_size.height(),
+ src->stride(VideoFrame::kYPlane),
+ src->stride(VideoFrame::kUPlane),
+ dst_size.width() * 4,
+ media::YV12,
+ media::ROTATE_0,
+ media::FILTER_BILINEAR);
+ }
+ } else if (dst_format == PP_VIDEOFRAME_FORMAT_YV12 ||
+ dst_format == PP_VIDEOFRAME_FORMAT_I420) {
+ static const size_t kPlanesOrder[][3] = {
+ { VideoFrame::kYPlane, VideoFrame::kVPlane, VideoFrame::kUPlane }, // YV12
+ { VideoFrame::kYPlane, VideoFrame::kUPlane, VideoFrame::kVPlane }, // I420
+ };
+ const int plane_order = (dst_format == PP_VIDEOFRAME_FORMAT_YV12) ? 0 : 1;
+ int dst_width = dst_size.width();
+ int dst_height = dst_size.height();
+ libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][0]),
+ src->stride(kPlanesOrder[plane_order][0]),
+ src->coded_size().width(),
+ src->coded_size().height(),
+ dst, dst_width, dst_width, dst_height,
+ kFilterMode);
+ dst += dst_width * dst_height;
+ const int src_halfwidth = (src->coded_size().width() + 1) >> 1;
+ const int src_halfheight = (src->coded_size().height() + 1) >> 1;
+ const int dst_halfwidth = (dst_width + 1) >> 1;
+ const int dst_halfheight = (dst_height + 1) >> 1;
+ libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][1]),
+ src->stride(kPlanesOrder[plane_order][1]),
+ src_halfwidth, src_halfheight,
+ dst, dst_halfwidth, dst_halfwidth, dst_halfheight,
+ kFilterMode);
+ dst += dst_halfwidth * dst_halfheight;
+ libyuv::ScalePlane(src->data(kPlanesOrder[plane_order][2]),
+ src->stride(kPlanesOrder[plane_order][2]),
+ src_halfwidth, src_halfheight,
+ dst, dst_halfwidth, dst_halfwidth, dst_halfheight,
+ kFilterMode);
+ } else {
+ NOTREACHED();
+ }
+}
+
} // namespace
namespace content {
@@ -46,7 +146,9 @@ PepperMediaStreamVideoTrackHost::PepperMediaStreamVideoTrackHost(
: PepperMediaStreamTrackHostBase(host, instance, resource),
track_(track),
connected_(false),
- frame_format_(VideoFrame::UNKNOWN),
+ number_of_buffers_(kDefaultNumberOfBuffers),
+ source_frame_format_(PP_VIDEOFRAME_FORMAT_UNKNOWN),
+ plugin_frame_format_(PP_VIDEOFRAME_FORMAT_UNKNOWN),
frame_data_size_(0) {
DCHECK(!track_.isNull());
}
@@ -55,6 +157,29 @@ PepperMediaStreamVideoTrackHost::~PepperMediaStreamVideoTrackHost() {
OnClose();
}
+void PepperMediaStreamVideoTrackHost::InitBuffers() {
+ gfx::Size size = GetTargetSize(source_frame_size_, plugin_frame_size_);
+ DCHECK(!size.IsEmpty());
+
+ PP_VideoFrame_Format format =
+ GetTargetFormat(source_frame_format_, plugin_frame_format_);
+ DCHECK_NE(format, PP_VIDEOFRAME_FORMAT_UNKNOWN);
+
+ if (format == PP_VIDEOFRAME_FORMAT_BGRA) {
+ frame_data_size_ = size.width() * size.height() * 4;
+ } else {
+ frame_data_size_ = VideoFrame::AllocationSize(FromPpapiFormat(format),
+ size);
+ }
+
+ DCHECK_GT(frame_data_size_, 0U);
+ int32_t buffer_size =
+ sizeof(ppapi::MediaStreamBuffer::Video) + frame_data_size_;
+ bool result = PepperMediaStreamTrackHostBase::InitBuffers(number_of_buffers_,
+ buffer_size);
+ CHECK(result);
+}
+
void PepperMediaStreamVideoTrackHost::OnClose() {
if (connected_) {
MediaStreamVideoSink::RemoveFromVideoTrack(this, track_);
@@ -70,53 +195,35 @@ void PepperMediaStreamVideoTrackHost::OnVideoFrame(
if (ppformat == PP_VIDEOFRAME_FORMAT_UNKNOWN)
return;
- if (frame_size_ != frame->coded_size() || frame_format_ != frame->format()) {
- frame_size_ = frame->coded_size();
- frame_format_ = frame->format();
- // TODO(penghuang): Support changing |frame_size_| & |frame_format_| more
- // than once.
- DCHECK(!frame_data_size_);
- frame_data_size_ = VideoFrame::AllocationSize(frame_format_, frame_size_);
- int32_t size = sizeof(ppapi::MediaStreamBuffer::Video) + frame_data_size_;
- bool result = InitBuffers(kNumberOfFrames, size);
- // TODO(penghuang): Send PP_ERROR_NOMEMORY to plugin.
- CHECK(result);
+ if (source_frame_size_.IsEmpty()) {
+ source_frame_size_ = frame->coded_size();
+ source_frame_format_ = ppformat;
+ InitBuffers();
}
int32_t index = buffer_manager()->DequeueBuffer();
// Drop frames if the underlying buffer is full.
- if (index < 0)
+ if (index < 0) {
+ DVLOG(1) << "A frame is dropped.";
return;
+ }
+
+  CHECK(frame->coded_size() == source_frame_size_) << "Frame size has changed.";
+  CHECK_EQ(ppformat, source_frame_format_) << "Frame format has changed.";
- // TODO(penghuang): support format conversion and size scaling.
+ gfx::Size size = GetTargetSize(source_frame_size_, plugin_frame_size_);
+ PP_VideoFrame_Format format = GetTargetFormat(source_frame_format_,
+ plugin_frame_format_);
ppapi::MediaStreamBuffer::Video* buffer =
&(buffer_manager()->GetBufferPointer(index)->video);
buffer->header.size = buffer_manager()->buffer_size();
buffer->header.type = ppapi::MediaStreamBuffer::TYPE_VIDEO;
buffer->timestamp = frame->GetTimestamp().InSecondsF();
- buffer->format = ppformat;
- buffer->size.width = frame->coded_size().width();
- buffer->size.height = frame->coded_size().height();
+ buffer->format = format;
+ buffer->size.width = size.width();
+ buffer->size.height = size.height();
buffer->data_size = frame_data_size_;
-
- COMPILE_ASSERT(VideoFrame::kYPlane == 0, y_plane_should_be_0);
- COMPILE_ASSERT(VideoFrame::kUPlane == 1, u_plane_should_be_1);
- COMPILE_ASSERT(VideoFrame::kVPlane == 2, v_plane_should_be_2);
-
- uint8_t* dst = buffer->data;
- size_t num_planes = VideoFrame::NumPlanes(frame->format());
- for (size_t i = 0; i < num_planes; ++i) {
- const uint8_t* src = frame->data(i);
- const size_t row_bytes = frame->row_bytes(i);
- const size_t src_stride = frame->stride(i);
- int rows = frame->rows(i);
- for (int j = 0; j < rows; ++j) {
- memcpy(dst, src, row_bytes);
- dst += row_bytes;
- src += src_stride;
- }
- }
-
+ ConvertFromMediaVideoFrame(frame, format, size, buffer->data);
SendEnqueueBufferMessageToPlugin(index);
}
@@ -127,4 +234,53 @@ void PepperMediaStreamVideoTrackHost::DidConnectPendingHostToResource() {
}
}
+int32_t PepperMediaStreamVideoTrackHost::OnResourceMessageReceived(
+ const IPC::Message& msg,
+ HostMessageContext* context) {
+ IPC_BEGIN_MESSAGE_MAP(PepperMediaStreamVideoTrackHost, msg)
+ PPAPI_DISPATCH_HOST_RESOURCE_CALL(
+ PpapiHostMsg_MediaStreamVideoTrack_Configure,
+ OnHostMsgConfigure)
+ IPC_END_MESSAGE_MAP()
+ return PepperMediaStreamTrackHostBase::OnResourceMessageReceived(msg,
+ context);
+}
+
+int32_t PepperMediaStreamVideoTrackHost::OnHostMsgConfigure(
+ HostMessageContext* context,
+ const MediaStreamVideoTrackShared::Attributes& attributes) {
+ CHECK(MediaStreamVideoTrackShared::VerifyAttributes(attributes));
+
+ bool changed = false;
+ gfx::Size new_size(attributes.width, attributes.height);
+ if (GetTargetSize(source_frame_size_, plugin_frame_size_) !=
+ GetTargetSize(source_frame_size_, new_size)) {
+ changed = true;
+ }
+ plugin_frame_size_ = new_size;
+
+ int32_t buffers = attributes.buffers ?
+ std::min(kMaxNumberOfBuffers, attributes.buffers) :
+ kDefaultNumberOfBuffers;
+ if (buffers != number_of_buffers_)
+ changed = true;
+ number_of_buffers_ = buffers;
+
+ if (GetTargetFormat(source_frame_format_, plugin_frame_format_) !=
+ GetTargetFormat(source_frame_format_, attributes.format)) {
+ changed = true;
+ }
+ plugin_frame_format_ = attributes.format;
+
+  // If the first frame has already been received, re-initialize the buffers
+  // with the new settings. Otherwise, initialize the buffers when the first
+  // frame arrives, because the plugin can only provide some of the
+  // attributes, which is not enough to initialize buffers.
+ if (changed && !source_frame_size_.IsEmpty())
+ InitBuffers();
+
+ context->reply_msg = PpapiPluginMsg_MediaStreamVideoTrack_ConfigureReply();
+ return PP_OK;
+}
+
} // namespace content
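For clarity, the per-frame data size that InitBuffers() above computes follows
the usual packed-plane arithmetic; a standalone sketch (the helper name is an
assumption, not part of this CL):

  // Sketch: bytes needed for one frame, mirroring InitBuffers() above.
  uint32_t FrameDataSize(PP_VideoFrame_Format format, int width, int height) {
    if (format == PP_VIDEOFRAME_FORMAT_BGRA)
      return width * height * 4;  // 32bpp packed BGRA.
    // YV12/I420: a full-size Y plane plus two half-size chroma planes (12bpp),
    // i.e. roughly width * height * 3 / 2.
    return width * height + 2 * (((width + 1) / 2) * ((height + 1) / 2));
  }

Each shared-memory buffer additionally carries a ppapi::MediaStreamBuffer::Video
header, as the buffer_size computation in InitBuffers() shows.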
diff --git a/content/renderer/pepper/pepper_media_stream_video_track_host.h b/content/renderer/pepper/pepper_media_stream_video_track_host.h
index 588c7eda..3b374ef 100644
--- a/content/renderer/pepper/pepper_media_stream_video_track_host.h
+++ b/content/renderer/pepper/pepper_media_stream_video_track_host.h
@@ -9,6 +9,8 @@
#include "content/public/renderer/media_stream_video_sink.h"
#include "content/renderer/pepper/pepper_media_stream_track_host_base.h"
#include "media/base/video_frame.h"
+#include "ppapi/c/ppb_video_frame.h"
+#include "ppapi/shared_impl/media_stream_video_track_shared.h"
#include "third_party/WebKit/public/platform/WebMediaStreamTrack.h"
#include "ui/gfx/size.h"
@@ -25,6 +27,8 @@ class PepperMediaStreamVideoTrackHost : public PepperMediaStreamTrackHostBase,
private:
virtual ~PepperMediaStreamVideoTrackHost();
+ void InitBuffers();
+
// PepperMediaStreamTrackHostBase overrides:
virtual void OnClose() OVERRIDE;
@@ -35,16 +39,35 @@ class PepperMediaStreamVideoTrackHost : public PepperMediaStreamTrackHostBase,
// ResourceHost overrides:
virtual void DidConnectPendingHostToResource() OVERRIDE;
+ // ResourceMessageHandler overrides:
+ virtual int32_t OnResourceMessageReceived(
+ const IPC::Message& msg,
+ ppapi::host::HostMessageContext* context) OVERRIDE;
+
+ // Message handlers:
+ int32_t OnHostMsgConfigure(
+ ppapi::host::HostMessageContext* context,
+ const ppapi::MediaStreamVideoTrackShared::Attributes& attributes);
+
blink::WebMediaStreamTrack track_;
// True if it has been added to |blink::WebMediaStreamTrack| as a sink.
bool connected_;
- // Frame size.
- gfx::Size frame_size_;
+ // Number of buffers.
+ int32_t number_of_buffers_;
+
+ // Size of frames which are received from MediaStreamVideoSink.
+ gfx::Size source_frame_size_;
+
+ // Plugin specified frame size.
+ gfx::Size plugin_frame_size_;
+
+ // Format of frames which are received from MediaStreamVideoSink.
+ PP_VideoFrame_Format source_frame_format_;
- // Frame format.
- media::VideoFrame::Format frame_format_;
+ // Plugin specified frame format.
+ PP_VideoFrame_Format plugin_frame_format_;
// The size of frame pixels in bytes.
uint32_t frame_data_size_;
diff --git a/ppapi/api/ppb_media_stream_video_track.idl b/ppapi/api/ppb_media_stream_video_track.idl
index 1f331c0..199ebb2 100644
--- a/ppapi/api/ppb_media_stream_video_track.idl
+++ b/ppapi/api/ppb_media_stream_video_track.idl
@@ -32,11 +32,17 @@ enum PP_MediaStreamVideoTrack_Attrib {
* request at least 2 to make sure latency doesn't cause lost frames. If
* the plugin expects to hold on to more than one frame at a time (e.g. to do
* multi-frame processing), it should request that many more.
+ * If this attribute is not specified, or 0 is specified, the default value
+ * will be used.
*/
PP_MEDIASTREAMVIDEOTRACK_ATTRIB_BUFFERED_FRAMES = 1,
/**
* The width of video frames in pixels. It should be a multiple of 4.
+ * If the specified size is different from the video source (webcam),
+ * frames will be scaled to the specified size.
+ * If this attribute is not specified, or 0 is specified, the original
+ * frame size of the video track will be used.
*
* Maximum value: 4096 (4K resolution).
*/
@@ -44,6 +50,10 @@ enum PP_MediaStreamVideoTrack_Attrib {
/**
* The height of video frames in pixels. It should be a multiple of 4.
+ * If the specified size is different from the video source (webcam),
+ * frames will be scaled to the specified size.
+ * If this attribute is not specified, or 0 is specified, the original
+ * frame size of the video track will be used.
*
* Maximum value: 4096 (4K resolution).
*/
@@ -51,7 +61,12 @@ enum PP_MediaStreamVideoTrack_Attrib {
/**
* The format of video frames. The attribute value is
- * a <code>PP_VideoFrame_Format</code>.
+ * a <code>PP_VideoFrame_Format</code>. If the specified format is different
+ * from the video source (webcam), frames will be converted to the specified
+ * format.
+ * If this attribute is not specified, or
+ * <code>PP_VIDEOFRAME_FORMAT_UNKNOWN</code> is specified, the original frame
+ * format of the video track will be used.
*/
PP_MEDIASTREAMVIDEOTRACK_ATTRIB_FORMAT = 4
};
@@ -75,8 +90,9 @@ interface PPB_MediaStreamVideoTrack {
* chosen such that inter-frame processing time variability won't overrun the
* input buffer. If the buffer is overfilled, then frames will be dropped.
* The application can detect this by examining the timestamp on returned
- * frames. If <code>Configure()</code> is not called, default settings will be
- * used.
+ * frames. If some attributes are not specified, default values will be used
+ * for those unspecified attributes. If <code>Configure()</code> is not
+ * called, default settings will be used.
* Example usage from plugin code:
* @code
* int32_t attribs[] = {
@@ -95,6 +111,11 @@ interface PPB_MediaStreamVideoTrack {
* completion of <code>Configure()</code>.
*
* @return An int32_t containing a result code from <code>pp_errors.h</code>.
+ * Returns <code>PP_ERROR_INPROGRESS</code> if there is a pending call to
+ * <code>Configure()</code> or <code>GetFrame()</code>, or if the plugin
+ * holds frames that have not been recycled with <code>RecycleFrame()</code>.
+ * If an error is returned, neither the attributes nor the underlying buffer
+ * will be changed.
*/
int32_t Configure([in] PP_Resource video_track,
[in] int32_t[] attrib_list,
diff --git a/ppapi/api/ppb_video_frame.idl b/ppapi/api/ppb_video_frame.idl
index 0ca6419..fc2d523 100644
--- a/ppapi/api/ppb_video_frame.idl
+++ b/ppapi/api/ppb_video_frame.idl
@@ -25,24 +25,19 @@ enum PP_VideoFrame_Format {
PP_VIDEOFRAME_FORMAT_YV12 = 1,
/**
- * 16bpp YVU planar 1x1 Y, 2x1 VU samples.
+ * 12bpp YUV planar 1x1 Y, 2x2 UV samples.
*/
- PP_VIDEOFRAME_FORMAT_YV16 = 2,
+ PP_VIDEOFRAME_FORMAT_I420 = 2,
/**
- * 12bpp YVU planar 1x1 Y, 2x2 UV samples.
+ * 32bpp BGRA.
*/
- PP_VIDEOFRAME_FORMAT_I420 = 3,
+ PP_VIDEOFRAME_FORMAT_BGRA = 3,
/**
- * 20bpp YVU planar 1x1 Y, 2x2 VU, 1x1 A samples.
+ * The last format.
*/
- PP_VIDEOFRAME_FORMAT_YV12A = 4,
-
- /**
- * JPEG color range version of YV12.
- */
- PP_VIDEOFRAME_FORMAT_YV12J = 5
+ PP_VIDEOFRAME_FORMAT_LAST = PP_VIDEOFRAME_FORMAT_BGRA
};
interface PPB_VideoFrame {
diff --git a/ppapi/c/ppb_media_stream_video_track.h b/ppapi/c/ppb_media_stream_video_track.h
index 650ccd2..ce4f9c8 100644
--- a/ppapi/c/ppb_media_stream_video_track.h
+++ b/ppapi/c/ppb_media_stream_video_track.h
@@ -3,7 +3,7 @@
* found in the LICENSE file.
*/
-/* From ppb_media_stream_video_track.idl modified Thu Jan 23 14:09:56 2014. */
+/* From ppb_media_stream_video_track.idl modified Wed Feb 19 11:06:48 2014. */
#ifndef PPAPI_C_PPB_MEDIA_STREAM_VIDEO_TRACK_H_
#define PPAPI_C_PPB_MEDIA_STREAM_VIDEO_TRACK_H_
@@ -45,23 +45,38 @@ typedef enum {
* request at least 2 to make sure latency doesn't cause lost frames. If
* the plugin expects to hold on to more than one frame at a time (e.g. to do
* multi-frame processing), it should request that many more.
+ * If this attribute is not specified, or 0 is specified, the default value
+ * will be used.
*/
PP_MEDIASTREAMVIDEOTRACK_ATTRIB_BUFFERED_FRAMES = 1,
/**
* The width of video frames in pixels. It should be a multiple of 4.
+ * If the specified size is different from the video source (webcam),
+ * frames will be scaled to the specified size.
+ * If this attribute is not specified, or 0 is specified, the original
+ * frame size of the video track will be used.
*
* Maximum value: 4096 (4K resolution).
*/
PP_MEDIASTREAMVIDEOTRACK_ATTRIB_WIDTH = 2,
/**
* The height of video frames in pixels. It should be a multiple of 4.
+ * If the specified size is different from the video source (webcam),
+ * frames will be scaled to the specified size.
+ * If this attribute is not specified, or 0 is specified, the original
+ * frame size of the video track will be used.
*
* Maximum value: 4096 (4K resolution).
*/
PP_MEDIASTREAMVIDEOTRACK_ATTRIB_HEIGHT = 3,
/**
* The format of video frames. The attribute value is
- * a <code>PP_VideoFrame_Format</code>.
+ * a <code>PP_VideoFrame_Format</code>. If the specified format is different
+ * from the video source (webcam), frames will be converted to the specified
+ * format.
+ * If this attribute is not specified, or
+ * <code>PP_VIDEOFRAME_FORMAT_UNKNOWN</code> is specified, the original frame
+ * format of the video track will be used.
*/
PP_MEDIASTREAMVIDEOTRACK_ATTRIB_FORMAT = 4
} PP_MediaStreamVideoTrack_Attrib;
@@ -91,8 +106,9 @@ struct PPB_MediaStreamVideoTrack_0_1 { /* dev */
* chosen such that inter-frame processing time variability won't overrun the
* input buffer. If the buffer is overfilled, then frames will be dropped.
* The application can detect this by examining the timestamp on returned
- * frames. If <code>Configure()</code> is not called, default settings will be
- * used.
+ * frames. If some attributes are not specified, default values will be used
+ * for those unspecified attributes. If <code>Configure()</code> is not
+ * called, default settings will be used.
* Example usage from plugin code:
* @code
* int32_t attribs[] = {
@@ -111,6 +127,11 @@ struct PPB_MediaStreamVideoTrack_0_1 { /* dev */
* completion of <code>Configure()</code>.
*
* @return An int32_t containing a result code from <code>pp_errors.h</code>.
+ * Returns <code>PP_ERROR_INPROGRESS</code> if there is a pending call to
+ * <code>Configure()</code> or <code>GetFrame()</code>, or if the plugin
+ * holds frames that have not been recycled with <code>RecycleFrame()</code>.
+ * If an error is returned, neither the attributes nor the underlying buffer
+ * will be changed.
*/
int32_t (*Configure)(PP_Resource video_track,
const int32_t attrib_list[],
diff --git a/ppapi/c/ppb_video_frame.h b/ppapi/c/ppb_video_frame.h
index 3acfdc7..6dafc0c 100644
--- a/ppapi/c/ppb_video_frame.h
+++ b/ppapi/c/ppb_video_frame.h
@@ -3,7 +3,7 @@
* found in the LICENSE file.
*/
-/* From ppb_video_frame.idl modified Wed Jan 22 21:25:01 2014. */
+/* From ppb_video_frame.idl modified Tue Feb 11 14:41:52 2014. */
#ifndef PPAPI_C_PPB_VIDEO_FRAME_H_
#define PPAPI_C_PPB_VIDEO_FRAME_H_
@@ -36,21 +36,17 @@ typedef enum {
*/
PP_VIDEOFRAME_FORMAT_YV12 = 1,
/**
- * 16bpp YVU planar 1x1 Y, 2x1 VU samples.
+ * 12bpp YUV planar 1x1 Y, 2x2 UV samples.
*/
- PP_VIDEOFRAME_FORMAT_YV16 = 2,
+ PP_VIDEOFRAME_FORMAT_I420 = 2,
/**
- * 12bpp YVU planar 1x1 Y, 2x2 UV samples.
+ * 32bpp BGRA.
*/
- PP_VIDEOFRAME_FORMAT_I420 = 3,
+ PP_VIDEOFRAME_FORMAT_BGRA = 3,
/**
- * 20bpp YVU planar 1x1 Y, 2x2 VU, 1x1 A samples.
+ * The last format.
*/
- PP_VIDEOFRAME_FORMAT_YV12A = 4,
- /**
- * JPEG color range version of YV12.
- */
- PP_VIDEOFRAME_FORMAT_YV12J = 5
+ PP_VIDEOFRAME_FORMAT_LAST = PP_VIDEOFRAME_FORMAT_BGRA
} PP_VideoFrame_Format;
/**
* @}
diff --git a/ppapi/cpp/media_stream_video_track.h b/ppapi/cpp/media_stream_video_track.h
index f4beabb5..6975986 100644
--- a/ppapi/cpp/media_stream_video_track.h
+++ b/ppapi/cpp/media_stream_video_track.h
@@ -55,8 +55,9 @@ class MediaStreamVideoTrack : public Resource {
/// chosen such that inter-frame processing time variability won't overrun the
/// input buffer. If the buffer is overfilled, then frames will be dropped.
/// The application can detect this by examining the timestamp on returned
- /// frames. If <code>Configure()</code> is not called, default settings will
- /// be used.
+ /// frames. If some attributes are not specified, default values will be used
+ /// for those unspecified attributes. If <code>Configure()</code> is not
+ /// called, default settings will be used.
/// Example usage from plugin code:
/// @code
/// int32_t attribs[] = {
@@ -73,6 +74,11 @@ class MediaStreamVideoTrack : public Resource {
/// completion of <code>Configure()</code>.
///
/// @return An int32_t containing a result code from <code>pp_errors.h</code>.
+ /// Returns <code>PP_ERROR_INPROGRESS</code> if there is a pending call to
+ /// <code>Configure()</code> or <code>GetFrame()</code>, or if the plugin
+ /// holds frames that have not been recycled with <code>RecycleFrame()</code>.
+ /// If an error is returned, neither the attributes nor the underlying buffer
+ /// will be changed.
int32_t Configure(const int32_t attributes[],
const CompletionCallback& callback);
diff --git a/ppapi/examples/media_stream_video/media_stream_video.cc b/ppapi/examples/media_stream_video/media_stream_video.cc
index b219712..adb1eba 100644
--- a/ppapi/examples/media_stream_video/media_stream_video.cc
+++ b/ppapi/examples/media_stream_video/media_stream_video.cc
@@ -2,6 +2,8 @@
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
+#include <vector>
+
#include "ppapi/c/pp_errors.h"
#include "ppapi/c/ppb_opengles2.h"
#include "ppapi/cpp/completion_callback.h"
@@ -14,6 +16,7 @@
#include "ppapi/cpp/var.h"
#include "ppapi/cpp/video_frame.h"
#include "ppapi/lib/gl/include/GLES2/gl2.h"
+#include "ppapi/lib/gl/include/GLES2/gl2ext.h"
#include "ppapi/utility/completion_callback_factory.h"
// When compiling natively on Windows, PostMessage can be #define-d to
@@ -53,32 +56,49 @@ class MediaStreamVideoDemoInstance : public pp::Instance,
// pp::Graphics3DClient implementation.
virtual void Graphics3DContextLost() {
InitGL();
- CreateYUVTextures();
+ CreateTextures();
Render();
}
private:
+ void DrawYUV();
+ void DrawRGB();
void Render();
// GL-related functions.
void InitGL();
- GLuint CreateTexture(int32_t width, int32_t height, int unit);
+ GLuint CreateTexture(int32_t width, int32_t height, int unit, bool rgba);
void CreateGLObjects();
void CreateShader(GLuint program, GLenum type, const char* source, int size);
void PaintFinished(int32_t result);
- void CreateYUVTextures();
+ void CreateTextures();
+ void ConfigureTrack();
+
- // Callback that is invoked when new frames are recevied.
+ // MediaStreamVideoTrack callbacks.
+ void OnConfigure(int32_t result);
void OnGetFrame(int32_t result, pp::VideoFrame frame);
pp::Size position_size_;
bool is_painting_;
bool needs_paint_;
+ bool is_bgra_;
+ GLuint program_yuv_;
+ GLuint program_rgb_;
+ GLuint buffer_;
GLuint texture_y_;
GLuint texture_u_;
GLuint texture_v_;
+ GLuint texture_rgb_;
pp::MediaStreamVideoTrack video_track_;
pp::CompletionCallbackFactory<MediaStreamVideoDemoInstance> callback_factory_;
+ std::vector<int32_t> attrib_list_;
+
+ // MediaStreamVideoTrack attributes:
+ bool need_config_;
+ PP_VideoFrame_Format attrib_format_;
+ int32_t attrib_width_;
+ int32_t attrib_height_;
// Unowned pointers.
const struct PPB_OpenGLES2* gles2_if_;
@@ -95,10 +115,16 @@ MediaStreamVideoDemoInstance::MediaStreamVideoDemoInstance(
pp::Graphics3DClient(this),
is_painting_(false),
needs_paint_(false),
+ is_bgra_(false),
texture_y_(0),
texture_u_(0),
texture_v_(0),
+ texture_rgb_(0),
callback_factory_(this),
+ need_config_(false),
+ attrib_format_(PP_VIDEOFRAME_FORMAT_I420),
+ attrib_width_(0),
+ attrib_height_(0),
context_(NULL) {
gles2_if_ = static_cast<const struct PPB_OpenGLES2*>(
module->GetBrowserInterface(PPB_OPENGLES2_INTERFACE));
@@ -126,17 +152,34 @@ void MediaStreamVideoDemoInstance::DidChangeView(
void MediaStreamVideoDemoInstance::HandleMessage(const pp::Var& var_message) {
if (!var_message.is_dictionary())
return;
- pp::VarDictionary var_dictionary_message(var_message);
- pp::Var var_track = var_dictionary_message.Get("track");
- if (!var_track.is_resource())
- return;
-
- pp::Resource resource_track = var_track.AsResource();
- video_track_ = pp::MediaStreamVideoTrack(resource_track);
-
- video_track_.GetFrame(callback_factory_.NewCallbackWithOutput(
- &MediaStreamVideoDemoInstance::OnGetFrame));
+ pp::VarDictionary var_dictionary_message(var_message);
+ std::string command = var_dictionary_message.Get("command").AsString();
+
+ if (command == "init") {
+ pp::Var var_track = var_dictionary_message.Get("track");
+ if (!var_track.is_resource())
+ return;
+ pp::Resource resource_track = var_track.AsResource();
+ video_track_ = pp::MediaStreamVideoTrack(resource_track);
+ ConfigureTrack();
+ } else if (command == "format") {
+ std::string str_format = var_dictionary_message.Get("format").AsString();
+ if (str_format == "YV12") {
+ attrib_format_ = PP_VIDEOFRAME_FORMAT_YV12;
+ } else if (str_format == "I420") {
+ attrib_format_ = PP_VIDEOFRAME_FORMAT_I420;
+ } else if (str_format == "BGRA") {
+ attrib_format_ = PP_VIDEOFRAME_FORMAT_BGRA;
+ } else {
+ attrib_format_ = PP_VIDEOFRAME_FORMAT_UNKNOWN;
+ }
+ need_config_ = true;
+ } else if (command == "size") {
+ attrib_width_ = var_dictionary_message.Get("width").AsInt();
+ attrib_height_ = var_dictionary_message.Get("height").AsInt();
+ need_config_ = true;
+ }
}
void MediaStreamVideoDemoInstance::InitGL() {
@@ -172,12 +215,77 @@ void MediaStreamVideoDemoInstance::InitGL() {
CreateGLObjects();
}
+void MediaStreamVideoDemoInstance::DrawYUV() {
+ PP_Resource context = context_->pp_resource();
+ static const float kColorMatrix[9] = {
+ 1.1643828125f, 1.1643828125f, 1.1643828125f,
+ 0.0f, -0.39176171875f, 2.017234375f,
+ 1.59602734375f, -0.81296875f, 0.0f
+ };
+
+ gles2_if_->UseProgram(context, program_yuv_);
+ gles2_if_->Uniform1i(context, gles2_if_->GetUniformLocation(
+ context, program_yuv_, "y_texture"), 0);
+ gles2_if_->Uniform1i(context, gles2_if_->GetUniformLocation(
+ context, program_yuv_, "u_texture"), 1);
+ gles2_if_->Uniform1i(context, gles2_if_->GetUniformLocation(
+ context, program_yuv_, "v_texture"), 2);
+ gles2_if_->UniformMatrix3fv(
+ context,
+ gles2_if_->GetUniformLocation(context, program_yuv_, "color_matrix"),
+ 1, GL_FALSE, kColorMatrix);
+ AssertNoGLError();
+
+ GLint pos_location = gles2_if_->GetAttribLocation(
+ context, program_yuv_, "a_position");
+ GLint tc_location = gles2_if_->GetAttribLocation(
+ context, program_yuv_, "a_texCoord");
+ AssertNoGLError();
+ gles2_if_->EnableVertexAttribArray(context, pos_location);
+ gles2_if_->VertexAttribPointer(context, pos_location, 2,
+ GL_FLOAT, GL_FALSE, 0, 0);
+ gles2_if_->EnableVertexAttribArray(context, tc_location);
+ gles2_if_->VertexAttribPointer(
+ context, tc_location, 2, GL_FLOAT, GL_FALSE, 0,
+ static_cast<float*>(0) + 16); // Skip position coordinates.
+ AssertNoGLError();
+
+ gles2_if_->DrawArrays(context, GL_TRIANGLE_STRIP, 0, 4);
+ AssertNoGLError();
+}
+
+void MediaStreamVideoDemoInstance::DrawRGB() {
+ PP_Resource context = context_->pp_resource();
+ gles2_if_->UseProgram(context, program_rgb_);
+ gles2_if_->Uniform1i(context,
+ gles2_if_->GetUniformLocation(context, program_rgb_, "rgb_texture"), 3);
+ AssertNoGLError();
+
+ GLint pos_location = gles2_if_->GetAttribLocation(
+ context, program_rgb_, "a_position");
+ GLint tc_location = gles2_if_->GetAttribLocation(
+ context, program_rgb_, "a_texCoord");
+ AssertNoGLError();
+ gles2_if_->EnableVertexAttribArray(context, pos_location);
+ gles2_if_->VertexAttribPointer(context, pos_location, 2,
+ GL_FLOAT, GL_FALSE, 0, 0);
+ gles2_if_->EnableVertexAttribArray(context, tc_location);
+ gles2_if_->VertexAttribPointer(
+ context, tc_location, 2, GL_FLOAT, GL_FALSE, 0,
+ static_cast<float*>(0) + 16); // Skip position coordinates.
+ AssertNoGLError();
+
+ gles2_if_->DrawArrays(context, GL_TRIANGLE_STRIP, 4, 4);
+}
+
void MediaStreamVideoDemoInstance::Render() {
PP_DCHECK(!is_painting_);
is_painting_ = true;
needs_paint_ = false;
+
if (texture_y_) {
- gles2_if_->DrawArrays(context_->pp_resource(), GL_TRIANGLE_STRIP, 0, 4);
+ DrawRGB();
+ DrawYUV();
} else {
gles2_if_->Clear(context_->pp_resource(), GL_COLOR_BUFFER_BIT);
}
@@ -193,10 +301,11 @@ void MediaStreamVideoDemoInstance::PaintFinished(int32_t result) {
}
GLuint MediaStreamVideoDemoInstance::CreateTexture(
- int32_t width, int32_t height, int unit) {
+ int32_t width, int32_t height, int unit, bool rgba) {
GLuint texture_id;
gles2_if_->GenTextures(context_->pp_resource(), 1, &texture_id);
AssertNoGLError();
+
// Assign parameters.
gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0 + unit);
gles2_if_->BindTexture(context_->pp_resource(), GL_TEXTURE_2D, texture_id);
@@ -212,11 +321,12 @@ GLuint MediaStreamVideoDemoInstance::CreateTexture(
gles2_if_->TexParameterf(
context_->pp_resource(), GL_TEXTURE_2D, GL_TEXTURE_WRAP_T,
GL_CLAMP_TO_EDGE);
-
// Allocate texture.
gles2_if_->TexImage2D(
- context_->pp_resource(), GL_TEXTURE_2D, 0, GL_LUMINANCE, width, height, 0,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
+ context_->pp_resource(), GL_TEXTURE_2D, 0,
+ rgba ? GL_BGRA_EXT : GL_LUMINANCE,
+ width, height, 0,
+ rgba ? GL_BGRA_EXT : GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
AssertNoGLError();
return texture_id;
}
@@ -233,7 +343,7 @@ void MediaStreamVideoDemoInstance::CreateGLObjects() {
" gl_Position = a_position; \n"
"}";
- static const char kFragmentShader[] =
+ static const char kFragmentShaderYUV[] =
"precision mediump float; \n"
"varying vec2 v_texCoord; \n"
"uniform sampler2D y_texture; \n"
@@ -250,60 +360,48 @@ void MediaStreamVideoDemoInstance::CreateGLObjects() {
" gl_FragColor = vec4(rgb, 1.0); \n"
"}";
- static const float kColorMatrix[9] = {
- 1.1643828125f, 1.1643828125f, 1.1643828125f,
- 0.0f, -0.39176171875f, 2.017234375f,
- 1.59602734375f, -0.81296875f, 0.0f
- };
+ static const char kFragmentShaderRGB[] =
+ "precision mediump float; \n"
+ "varying vec2 v_texCoord; \n"
+ "uniform sampler2D rgb_texture; \n"
+ "void main() \n"
+ "{ \n"
+ " gl_FragColor = texture2D(rgb_texture, v_texCoord); \n"
+ "}";
PP_Resource context = context_->pp_resource();
- // Create shader program.
- GLuint program = gles2_if_->CreateProgram(context);
- CreateShader(program, GL_VERTEX_SHADER, kVertexShader, sizeof(kVertexShader));
- CreateShader(
- program, GL_FRAGMENT_SHADER, kFragmentShader, sizeof(kFragmentShader));
- gles2_if_->LinkProgram(context, program);
- gles2_if_->UseProgram(context, program);
- gles2_if_->DeleteProgram(context, program);
- gles2_if_->Uniform1i(
- context, gles2_if_->GetUniformLocation(context, program, "y_texture"), 0);
- gles2_if_->Uniform1i(
- context, gles2_if_->GetUniformLocation(context, program, "u_texture"), 1);
- gles2_if_->Uniform1i(
- context, gles2_if_->GetUniformLocation(context, program, "v_texture"), 2);
- gles2_if_->UniformMatrix3fv(
- context,
- gles2_if_->GetUniformLocation(context, program, "color_matrix"),
- 1, GL_FALSE, kColorMatrix);
+ // Create shader programs.
+ program_yuv_ = gles2_if_->CreateProgram(context);
+ CreateShader(program_yuv_, GL_VERTEX_SHADER,
+ kVertexShader, sizeof(kVertexShader));
+ CreateShader(program_yuv_, GL_FRAGMENT_SHADER,
+ kFragmentShaderYUV, sizeof(kFragmentShaderYUV));
+ gles2_if_->LinkProgram(context, program_yuv_);
+ AssertNoGLError();
+
+ program_rgb_ = gles2_if_->CreateProgram(context);
+ CreateShader(program_rgb_, GL_VERTEX_SHADER,
+ kVertexShader, sizeof(kVertexShader));
+ CreateShader(program_rgb_, GL_FRAGMENT_SHADER,
+ kFragmentShaderRGB, sizeof(kFragmentShaderRGB));
+ gles2_if_->LinkProgram(context, program_rgb_);
AssertNoGLError();
// Assign vertex positions and texture coordinates to buffers for use in
// shader program.
static const float kVertices[] = {
- -1, 1, -1, -1, 1, 1, 1, -1, // Position coordinates.
+ -1, 1, -1, -1, 0, 1, 0, -1, // Position coordinates.
+ 0, 1, 0, -1, 1, 1, 1, -1, // Position coordinates.
+ 0, 0, 0, 1, 1, 0, 1, 1, // Texture coordinates.
0, 0, 0, 1, 1, 0, 1, 1, // Texture coordinates.
};
- GLuint buffer;
- gles2_if_->GenBuffers(context, 1, &buffer);
- gles2_if_->BindBuffer(context, GL_ARRAY_BUFFER, buffer);
+ gles2_if_->GenBuffers(context, 1, &buffer_);
+ gles2_if_->BindBuffer(context, GL_ARRAY_BUFFER, buffer_);
gles2_if_->BufferData(context, GL_ARRAY_BUFFER,
sizeof(kVertices), kVertices, GL_STATIC_DRAW);
AssertNoGLError();
- GLint pos_location = gles2_if_->GetAttribLocation(
- context, program, "a_position");
- GLint tc_location = gles2_if_->GetAttribLocation(
- context, program, "a_texCoord");
- AssertNoGLError();
- gles2_if_->EnableVertexAttribArray(context, pos_location);
- gles2_if_->VertexAttribPointer(context, pos_location, 2,
- GL_FLOAT, GL_FALSE, 0, 0);
- gles2_if_->EnableVertexAttribArray(context, tc_location);
- gles2_if_->VertexAttribPointer(
- context, tc_location, 2, GL_FLOAT, GL_FALSE, 0,
- static_cast<float*>(0) + 8); // Skip position coordinates.
- AssertNoGLError();
}
void MediaStreamVideoDemoInstance::CreateShader(
@@ -316,7 +414,7 @@ void MediaStreamVideoDemoInstance::CreateShader(
gles2_if_->DeleteShader(context, shader);
}
-void MediaStreamVideoDemoInstance::CreateYUVTextures() {
+void MediaStreamVideoDemoInstance::CreateTextures() {
int32_t width = frame_size_.width();
int32_t height = frame_size_.height();
if (width == 0 || height == 0)
@@ -327,12 +425,29 @@ void MediaStreamVideoDemoInstance::CreateYUVTextures() {
gles2_if_->DeleteTextures(context_->pp_resource(), 1, &texture_u_);
if (texture_v_)
gles2_if_->DeleteTextures(context_->pp_resource(), 1, &texture_v_);
- texture_y_ = CreateTexture(width, height, 0);
+ if (texture_rgb_)
+ gles2_if_->DeleteTextures(context_->pp_resource(), 1, &texture_rgb_);
+ texture_y_ = CreateTexture(width, height, 0, false);
+
+ texture_u_ = CreateTexture(width / 2, height / 2, 1, false);
+ texture_v_ = CreateTexture(width / 2, height / 2, 2, false);
+ texture_rgb_ = CreateTexture(width, height, 3, true);
+}
- width /= 2;
- height /= 2;
- texture_u_ = CreateTexture(width, height, 1);
- texture_v_ = CreateTexture(width, height, 2);
+void MediaStreamVideoDemoInstance::ConfigureTrack() {
+ const int32_t attrib_list[] = {
+ PP_MEDIASTREAMVIDEOTRACK_ATTRIB_FORMAT, attrib_format_,
+ PP_MEDIASTREAMVIDEOTRACK_ATTRIB_WIDTH, attrib_width_,
+ PP_MEDIASTREAMVIDEOTRACK_ATTRIB_HEIGHT, attrib_height_,
+ PP_MEDIASTREAMVIDEOTRACK_ATTRIB_NONE
+ };
+ video_track_.Configure(attrib_list, callback_factory_.NewCallback(
+ &MediaStreamVideoDemoInstance::OnConfigure));
+}
+
+void MediaStreamVideoDemoInstance::OnConfigure(int32_t result) {
+ video_track_.GetFrame(callback_factory_.NewCallbackWithOutput(
+ &MediaStreamVideoDemoInstance::OnGetFrame));
}
void MediaStreamVideoDemoInstance::OnGetFrame(
@@ -345,30 +460,39 @@ void MediaStreamVideoDemoInstance::OnGetFrame(
if (size != frame_size_) {
frame_size_ = size;
- CreateYUVTextures();
+ CreateTextures();
}
+ is_bgra_ = (frame.GetFormat() == PP_VIDEOFRAME_FORMAT_BGRA);
+
int32_t width = frame_size_.width();
int32_t height = frame_size_.height();
- gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0);
- gles2_if_->TexSubImage2D(
- context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
-
- data += width * height;
- width /= 2;
- height /= 2;
-
- gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE1);
- gles2_if_->TexSubImage2D(
- context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
-
- data += width * height;
- gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE2);
- gles2_if_->TexSubImage2D(
- context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
+ if (!is_bgra_) {
+ gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE0);
+ gles2_if_->TexSubImage2D(
+ context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
+ GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
+
+ data += width * height;
+ width /= 2;
+ height /= 2;
+
+ gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE1);
+ gles2_if_->TexSubImage2D(
+ context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
+ GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
+
+ data += width * height;
+ gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE2);
+ gles2_if_->TexSubImage2D(
+ context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
+ GL_LUMINANCE, GL_UNSIGNED_BYTE, data);
+ } else {
+ gles2_if_->ActiveTexture(context_->pp_resource(), GL_TEXTURE3);
+ gles2_if_->TexSubImage2D(
+ context_->pp_resource(), GL_TEXTURE_2D, 0, 0, 0, width, height,
+ GL_BGRA_EXT, GL_UNSIGNED_BYTE, data);
+ }
if (is_painting_)
needs_paint_ = true;
@@ -376,8 +500,13 @@ void MediaStreamVideoDemoInstance::OnGetFrame(
Render();
video_track_.RecycleFrame(frame);
- video_track_.GetFrame(callback_factory_.NewCallbackWithOutput(
- &MediaStreamVideoDemoInstance::OnGetFrame));
+ if (need_config_) {
+ ConfigureTrack();
+ need_config_ = false;
+ } else {
+ video_track_.GetFrame(callback_factory_.NewCallbackWithOutput(
+ &MediaStreamVideoDemoInstance::OnGetFrame));
+ }
}
pp::Instance* MediaStreamVideoModule::CreateInstance(PP_Instance instance) {
diff --git a/ppapi/examples/media_stream_video/media_stream_video.html b/ppapi/examples/media_stream_video/media_stream_video.html
index a20c284..ba7a325 100644
--- a/ppapi/examples/media_stream_video/media_stream_video.html
+++ b/ppapi/examples/media_stream_video/media_stream_video.html
@@ -17,7 +17,7 @@
function success(s) {
stream = s;
- plugin.postMessage({track: stream.getVideoTracks()[0]});
+ plugin.postMessage({command: 'init', track: stream.getVideoTracks()[0]});
}
function failure(e) {
@@ -28,19 +28,27 @@
plugin = document.getElementById('plugin');
plugin.addEventListener('message', handleMessage, false);
var constraints = {
- "audio": false,
- "video": {
- "mandatory": {
- "minWidth": "1280",
- "minHeight": "720",
- "minFrameRate": "30"
+ audio: false,
+ video: {
+ mandatory: {
+ minWidth: 640,
+ minHeight: 320,
+ minFrameRate: 30
},
- "optional": []
+ optional: []
}
};
+
navigator.webkitGetUserMedia(constraints, success, failure);
}
+ function changeFormat(format) {
+      plugin.postMessage({command: 'format', format: format});
+ }
+
+ function changeSize(width, height) {
+      plugin.postMessage({command: 'size', width: width, height: height});
+ }
document.addEventListener('DOMContentLoaded', initialize, false);
</script>
</head>
@@ -49,7 +57,18 @@
<h1>Pepper MediaStream Video API Example</h1><br>
This example demonstrates receiving frames from a video MediaStreamTrack and
rendering them in a plugin.<br>
+ Left side shows YUV frames. Right side shows BGRA frames.
<embed id="plugin" type="application/x-ppapi-example-media-stream-video"
- width="320" height="240"/>
+ width="640" height="240"/>
+ <h2>Format:</h2><br>
+ <button onclick="changeFormat('YV12')" >YV12</button>
+ <button onclick="changeFormat('I420')" >I420</button>
+ <button onclick="changeFormat('BGRA')" >BGRA</button>
+ <button onclick="changeFormat('DEFAULT')" >DEFAULT</button>
+ <h2>Size:</h2><br>
+ <button onclick="changeSize(72, 72)" >72 x 72</button>
+ <button onclick="changeSize(640, 360)" >640 x 360</button>
+ <button onclick="changeSize(1280, 720)" >1280 x 720</button>
+ <button onclick="changeSize(0, 0)" >DEFAULT</button>
</body>
</html>
diff --git a/ppapi/ppapi_shared.gypi b/ppapi/ppapi_shared.gypi
index 83ba96d..620a6eb 100644
--- a/ppapi/ppapi_shared.gypi
+++ b/ppapi/ppapi_shared.gypi
@@ -42,6 +42,8 @@
'shared_impl/media_stream_buffer.h',
'shared_impl/media_stream_buffer_manager.cc',
'shared_impl/media_stream_buffer_manager.h',
+ 'shared_impl/media_stream_video_track_shared.h',
+ 'shared_impl/media_stream_video_track_shared.cc',
'shared_impl/platform_file.cc',
'shared_impl/platform_file.h',
'shared_impl/ppapi_globals.cc',
diff --git a/ppapi/proxy/media_stream_video_track_resource.cc b/ppapi/proxy/media_stream_video_track_resource.cc
index 451f259..aaa5b52 100644
--- a/ppapi/proxy/media_stream_video_track_resource.cc
+++ b/ppapi/proxy/media_stream_video_track_resource.cc
@@ -5,8 +5,10 @@
#include "ppapi/proxy/media_stream_video_track_resource.h"
#include "base/logging.h"
+#include "ppapi/proxy/ppapi_messages.h"
#include "ppapi/proxy/video_frame_resource.h"
#include "ppapi/shared_impl/media_stream_buffer.h"
+#include "ppapi/shared_impl/media_stream_video_track_shared.h"
#include "ppapi/shared_impl/var.h"
namespace ppapi {
@@ -39,13 +41,53 @@ PP_Bool MediaStreamVideoTrackResource::HasEnded() {
return PP_FromBool(has_ended());
}
-
int32_t MediaStreamVideoTrackResource::Configure(
const int32_t attrib_list[],
scoped_refptr<TrackedCallback> callback) {
- // TODO(penghuang): redesign and implement Configure() to support format,
- // size, etc.
- return PP_ERROR_NOTSUPPORTED;
+ if (has_ended())
+ return PP_ERROR_FAILED;
+
+ if (TrackedCallback::IsPending(configure_callback_) ||
+ TrackedCallback::IsPending(get_frame_callback_)) {
+ return PP_ERROR_INPROGRESS;
+ }
+
+  // Configure() is not supported while the plugin is holding frames.
+ if (!frames_.empty())
+ return PP_ERROR_INPROGRESS;
+
+ MediaStreamVideoTrackShared::Attributes attributes;
+ int i = 0;
+  for (; attrib_list[i] != PP_MEDIASTREAMVIDEOTRACK_ATTRIB_NONE; i += 2) {
+ switch (attrib_list[i]) {
+ case PP_MEDIASTREAMVIDEOTRACK_ATTRIB_BUFFERED_FRAMES:
+ attributes.buffers = attrib_list[i + 1];
+ break;
+ case PP_MEDIASTREAMVIDEOTRACK_ATTRIB_WIDTH:
+ attributes.width = attrib_list[i + 1];
+ break;
+ case PP_MEDIASTREAMVIDEOTRACK_ATTRIB_HEIGHT:
+ attributes.height = attrib_list[i + 1];
+ break;
+ case PP_MEDIASTREAMVIDEOTRACK_ATTRIB_FORMAT:
+ attributes.format = static_cast<PP_VideoFrame_Format>(attrib_list[i + 1]);
+ break;
+ default:
+ return PP_ERROR_BADARGUMENT;
+ }
+ }
+
+ if (!MediaStreamVideoTrackShared::VerifyAttributes(attributes))
+ return PP_ERROR_BADARGUMENT;
+
+ configure_callback_ = callback;
+ Call<PpapiPluginMsg_MediaStreamVideoTrack_ConfigureReply>(
+ RENDERER,
+ PpapiHostMsg_MediaStreamVideoTrack_Configure(attributes),
+ base::Bind(&MediaStreamVideoTrackResource::OnPluginMsgConfigureReply,
+ base::Unretained(this)),
+ callback);
+ return PP_OK_COMPLETIONPENDING;
}
int32_t MediaStreamVideoTrackResource::GetAttrib(
@@ -61,8 +103,10 @@ int32_t MediaStreamVideoTrackResource::GetFrame(
if (has_ended())
return PP_ERROR_FAILED;
- if (TrackedCallback::IsPending(get_frame_callback_))
+ if (TrackedCallback::IsPending(configure_callback_) ||
+ TrackedCallback::IsPending(get_frame_callback_)) {
return PP_ERROR_INPROGRESS;
+ }
*frame = GetVideoFrame();
if (*frame)
@@ -144,5 +188,14 @@ void MediaStreamVideoTrackResource::ReleaseFrames() {
}
}
+void MediaStreamVideoTrackResource::OnPluginMsgConfigureReply(
+ const ResourceMessageReplyParams& params) {
+ if (TrackedCallback::IsPending(configure_callback_)) {
+ scoped_refptr<TrackedCallback> callback;
+ callback.swap(configure_callback_);
+ callback->Run(params.result());
+ }
+}
+
} // namespace proxy
} // namespace ppapi
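As the guard clauses in Configure() above imply, a plugin must let any pending
Configure()/GetFrame() call complete and recycle every outstanding frame before
reconfiguring; otherwise Configure() returns PP_ERROR_INPROGRESS. A hedged
sketch of that ordering (method names from the C++ API in this CL; the member
names are assumptions):

  // Sketch: recycle the frame first, then reconfigure or pull the next frame.
  // |pending_reconfigure_| and |new_attribs_| (an ATTRIB_NONE-terminated
  // int32_t array of (attribute, value) pairs) are assumed members.
  void MyInstance::OnGetFrame(int32_t result, pp::VideoFrame frame) {
    // ... consume |frame| ...
    video_track_.RecycleFrame(frame);  // Return the buffer to the track.
    if (pending_reconfigure_) {
      pending_reconfigure_ = false;
      video_track_.Configure(new_attribs_, callback_factory_.NewCallback(
          &MyInstance::OnConfigure));
    } else {
      video_track_.GetFrame(callback_factory_.NewCallbackWithOutput(
          &MyInstance::OnGetFrame));
    }
  }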
diff --git a/ppapi/proxy/media_stream_video_track_resource.h b/ppapi/proxy/media_stream_video_track_resource.h
index ee301d2..d1b3bb5 100644
--- a/ppapi/proxy/media_stream_video_track_resource.h
+++ b/ppapi/proxy/media_stream_video_track_resource.h
@@ -52,14 +52,18 @@ class PPAPI_PROXY_EXPORT MediaStreamVideoTrackResource
void ReleaseFrames();
+ // IPC message handlers.
+ void OnPluginMsgConfigureReply(const ResourceMessageReplyParams& params);
+
// Allocated frame resources by |GetFrame()|.
typedef std::map<PP_Resource, scoped_refptr<VideoFrameResource> > FrameMap;
FrameMap frames_;
PP_Resource* get_frame_output_;
-
scoped_refptr<TrackedCallback> get_frame_callback_;
+ scoped_refptr<TrackedCallback> configure_callback_;
+
DISALLOW_COPY_AND_ASSIGN(MediaStreamVideoTrackResource);
};
diff --git a/ppapi/proxy/ppapi_messages.h b/ppapi/proxy/ppapi_messages.h
index 629347b..fb677cf 100644
--- a/ppapi/proxy/ppapi_messages.h
+++ b/ppapi/proxy/ppapi_messages.h
@@ -62,6 +62,7 @@
#include "ppapi/shared_impl/file_growth.h"
#include "ppapi/shared_impl/file_path.h"
#include "ppapi/shared_impl/file_ref_create_info.h"
+#include "ppapi/shared_impl/media_stream_video_track_shared.h"
#include "ppapi/shared_impl/ppapi_nacl_plugin_args.h"
#include "ppapi/shared_impl/ppapi_preferences.h"
#include "ppapi/shared_impl/ppb_device_ref_shared.h"
@@ -121,6 +122,7 @@ IPC_ENUM_TRAITS_MAX_VALUE(PP_UDPSocket_Option,
PP_UDPSOCKET_OPTION_RECV_BUFFER_SIZE)
IPC_ENUM_TRAITS(PP_VideoDecodeError_Dev)
IPC_ENUM_TRAITS(PP_VideoDecoder_Profile)
+IPC_ENUM_TRAITS_MAX_VALUE(PP_VideoFrame_Format, PP_VIDEOFRAME_FORMAT_LAST)
IPC_STRUCT_TRAITS_BEGIN(PP_Point)
IPC_STRUCT_TRAITS_MEMBER(x)
@@ -241,6 +243,13 @@ IPC_STRUCT_TRAITS_BEGIN(ppapi::FlashSiteSetting)
IPC_STRUCT_TRAITS_MEMBER(permission)
IPC_STRUCT_TRAITS_END()
+IPC_STRUCT_TRAITS_BEGIN(ppapi::MediaStreamVideoTrackShared::Attributes)
+ IPC_STRUCT_TRAITS_MEMBER(buffers)
+ IPC_STRUCT_TRAITS_MEMBER(width)
+ IPC_STRUCT_TRAITS_MEMBER(height)
+ IPC_STRUCT_TRAITS_MEMBER(format)
+IPC_STRUCT_TRAITS_END()
+
IPC_STRUCT_TRAITS_BEGIN(ppapi::ViewData)
IPC_STRUCT_TRAITS_MEMBER(rect)
IPC_STRUCT_TRAITS_MEMBER(is_fullscreen)
@@ -1445,6 +1454,10 @@ IPC_MESSAGE_CONTROL1(PpapiPluginMsg_MediaStreamAudioTrack_CreateFromPendingHost,
std::string /* track_id */)
IPC_MESSAGE_CONTROL1(PpapiPluginMsg_MediaStreamVideoTrack_CreateFromPendingHost,
std::string /* track_id */)
+IPC_MESSAGE_CONTROL1(
+ PpapiHostMsg_MediaStreamVideoTrack_Configure,
+ ppapi::MediaStreamVideoTrackShared::Attributes /* attributes */)
+IPC_MESSAGE_CONTROL0(PpapiPluginMsg_MediaStreamVideoTrack_ConfigureReply)
// Message for init buffers. It also takes a shared memory handle which is put
// in the outer ResourceReplyMessage.
diff --git a/ppapi/proxy/ppapi_param_traits.h b/ppapi/proxy/ppapi_param_traits.h
index 3bf5fb6..96ff11c 100644
--- a/ppapi/proxy/ppapi_param_traits.h
+++ b/ppapi/proxy/ppapi_param_traits.h
@@ -16,6 +16,7 @@
#include "ppapi/proxy/ppapi_proxy_export.h"
#include "ppapi/shared_impl/file_path.h"
#include "ppapi/shared_impl/file_ref_create_info.h"
+#include "ppapi/shared_impl/media_stream_video_track_shared.h"
#include "ppapi/shared_impl/ppapi_permissions.h"
#include "ppapi/shared_impl/socket_option_data.h"
diff --git a/ppapi/shared_impl/media_stream_buffer_manager.cc b/ppapi/shared_impl/media_stream_buffer_manager.cc
index 37277fc..d6c4b0d 100644
--- a/ppapi/shared_impl/media_stream_buffer_manager.cc
+++ b/ppapi/shared_impl/media_stream_buffer_manager.cc
@@ -26,7 +26,6 @@ bool MediaStreamBufferManager::SetBuffers(int32_t number_of_buffers,
scoped_ptr<base::SharedMemory> shm,
bool enqueue_all_buffers) {
DCHECK(shm);
- DCHECK(!shm_);
DCHECK_GT(number_of_buffers, 0);
DCHECK_GT(buffer_size,
static_cast<int32_t>(sizeof(MediaStreamBuffer::Header)));
@@ -35,11 +34,13 @@ bool MediaStreamBufferManager::SetBuffers(int32_t number_of_buffers,
number_of_buffers_ = number_of_buffers;
buffer_size_ = buffer_size;
- int32_t size = number_of_buffers_ * buffer_size;
+ size_t size = number_of_buffers_ * buffer_size;
shm_ = shm.Pass();
if (!shm_->Map(size))
return false;
+ buffer_queue_.clear();
+ buffers_.clear();
uint8_t* p = reinterpret_cast<uint8_t*>(shm_->memory());
for (int32_t i = 0; i < number_of_buffers; ++i) {
if (enqueue_all_buffers)
@@ -59,15 +60,16 @@ int32_t MediaStreamBufferManager::DequeueBuffer() {
}
void MediaStreamBufferManager::EnqueueBuffer(int32_t index) {
- DCHECK_GE(index, 0);
- DCHECK_LT(index, number_of_buffers_);
+ CHECK_GE(index, 0) << "Invalid buffer index";
+ CHECK_LT(index, number_of_buffers_) << "Invalid buffer index";
buffer_queue_.push_back(index);
delegate_->OnNewBufferEnqueued();
}
-MediaStreamBuffer* MediaStreamBufferManager::GetBufferPointer(int32_t index) {
- DCHECK_GE(index, 0);
- DCHECK_LT(index, number_of_buffers_);
+MediaStreamBuffer* MediaStreamBufferManager::GetBufferPointer(
+ int32_t index) {
+ CHECK_GE(index, 0) << "Invalid buffer index";
+ CHECK_LT(index, number_of_buffers_) << "Invalid buffer index";
return buffers_[index];
}
diff --git a/ppapi/shared_impl/media_stream_video_track_shared.cc b/ppapi/shared_impl/media_stream_video_track_shared.cc
new file mode 100644
index 0000000..0ca894a
--- /dev/null
+++ b/ppapi/shared_impl/media_stream_video_track_shared.cc
@@ -0,0 +1,40 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "ppapi/shared_impl/media_stream_video_track_shared.h"
+
+#include "base/logging.h"
+
+namespace {
+
+const int32_t kMaxWidth = 4096;
+const int32_t kMaxHeight = 4096;
+
+} // namespace
+
+namespace ppapi {
+
+// static
+bool MediaStreamVideoTrackShared::VerifyAttributes(
+ const Attributes& attributes) {
+ if (attributes.buffers < 0)
+ return false;
+ if (attributes.format < PP_VIDEOFRAME_FORMAT_UNKNOWN ||
+ attributes.format > PP_VIDEOFRAME_FORMAT_LAST) {
+ return false;
+ }
+ if (attributes.width < 0 ||
+ attributes.width > kMaxWidth ||
+ attributes.width & 0x3) {
+ return false;
+ }
+ if (attributes.height < 0 ||
+ attributes.height > kMaxHeight ||
+ attributes.height & 0x3) {
+ return false;
+ }
+ return true;
+}
+
+} // namespace ppapi
diff --git a/ppapi/shared_impl/media_stream_video_track_shared.h b/ppapi/shared_impl/media_stream_video_track_shared.h
new file mode 100644
index 0000000..75e32b8
--- /dev/null
+++ b/ppapi/shared_impl/media_stream_video_track_shared.h
@@ -0,0 +1,32 @@
+// Copyright 2014 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#ifndef PPAPI_SHARED_IMPL_MEDIA_STREAM_VIDEO_TRACK_SHARED_H_
+#define PPAPI_SHARED_IMPL_MEDIA_STREAM_VIDEO_TRACK_SHARED_H_
+
+#include "ppapi/c/ppb_video_frame.h"
+#include "ppapi/shared_impl/ppapi_shared_export.h"
+
+namespace ppapi {
+
+class PPAPI_SHARED_EXPORT MediaStreamVideoTrackShared {
+ public:
+ struct Attributes {
+ Attributes()
+ : buffers(0),
+ width(0),
+ height(0),
+ format(PP_VIDEOFRAME_FORMAT_UNKNOWN) {}
+ int32_t buffers;
+ int32_t width;
+ int32_t height;
+ PP_VideoFrame_Format format;
+ };
+
+ static bool VerifyAttributes(const Attributes& attributes);
+};
+
+} // namespace ppapi
+
+#endif // PPAPI_SHARED_IMPL_MEDIA_STREAM_VIDEO_TRACK_SHARED_H_
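Taken together, VerifyAttributes() accepts only width/height values that are
multiples of 4 and at most 4096, a non-negative buffer count, and a format
inside the enum range; a default-constructed Attributes (all zeros/UNKNOWN) is
valid and means "keep the source settings". A small sketch exercising those
constraints (illustrative only; assert from <cassert>):

  // Sketch: which attribute sets pass VerifyAttributes().
  ppapi::MediaStreamVideoTrackShared::Attributes a;
  assert(ppapi::MediaStreamVideoTrackShared::VerifyAttributes(a));  // Defaults.
  a.width = 640;
  a.height = 360;  // Both multiples of 4 and <= 4096: accepted.
  assert(ppapi::MediaStreamVideoTrackShared::VerifyAttributes(a));
  a.width = 642;   // Not a multiple of 4 (width & 0x3 != 0): rejected.
  assert(!ppapi::MediaStreamVideoTrackShared::VerifyAttributes(a));
  a.width = 8192;  // Exceeds the 4096 (4K) maximum: rejected.
  assert(!ppapi::MediaStreamVideoTrackShared::VerifyAttributes(a));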