Diffstat (limited to 'webkit/glue')
-rw-r--r--webkit/glue/DEPS1
-rw-r--r--webkit/glue/media/simple_data_source.cc146
-rw-r--r--webkit/glue/media/simple_data_source.h89
-rw-r--r--webkit/glue/media/video_renderer_impl.cc275
-rw-r--r--webkit/glue/media/video_renderer_impl.h113
-rw-r--r--webkit/glue/webmediaplayer_impl.cc348
-rw-r--r--webkit/glue/webmediaplayer_impl.h229
-rw-r--r--webkit/glue/webview_delegate.h2
8 files changed, 1202 insertions, 1 deletions
diff --git a/webkit/glue/DEPS b/webkit/glue/DEPS
index beea903..1129c20 100644
--- a/webkit/glue/DEPS
+++ b/webkit/glue/DEPS
@@ -1,4 +1,5 @@
include_rules = [
+ "+media",
"+skia/ext",
"+skia/include",
"+webkit/tools/test_shell", # Needed for test shell tests.
diff --git a/webkit/glue/media/simple_data_source.cc b/webkit/glue/media/simple_data_source.cc
new file mode 100644
index 0000000..16fbd04
--- /dev/null
+++ b/webkit/glue/media/simple_data_source.cc
@@ -0,0 +1,146 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/message_loop.h"
+#include "base/process_util.h"
+#include "media/base/filter_host.h"
+#include "net/base/load_flags.h"
+#include "net/http/http_response_headers.h"
+#include "net/url_request/url_request_status.h"
+#include "webkit/glue/media/simple_data_source.h"
+#include "webkit/glue/resource_loader_bridge.h"
+#include "webkit/glue/webappcachecontext.h"
+
+namespace webkit_glue {
+
+SimpleDataSource::SimpleDataSource(MessageLoop* render_loop, int32 routing_id)
+ : routing_id_(routing_id),
+ render_loop_(render_loop),
+ size_(-1),
+ position_(0) {
+ DCHECK(render_loop);
+}
+
+SimpleDataSource::~SimpleDataSource() {
+}
+
+void SimpleDataSource::Stop() {}
+
+bool SimpleDataSource::Initialize(const std::string& url) {
+ SetURL(GURL(url));
+
+ // Validate the URL.
+ if (!url_.is_valid()) {
+ return false;
+ }
+
+ // Create our bridge and post a task to start loading the resource.
+ bridge_.reset(webkit_glue::ResourceLoaderBridge::Create(
+ "GET",
+ url_,
+ url_,
+ GURL::EmptyGURL(), // TODO(scherkus): provide referer here.
+ "null", // TODO(abarth): provide frame_origin
+ "null", // TODO(abarth): provide main_frame_origin
+ "",
+ net::LOAD_BYPASS_CACHE,
+ base::GetCurrentProcId(),
+ ResourceType::MEDIA,
+ // TODO(michaeln): delegate->mediaplayer->frame->
+ // app_cache_context()->context_id()
+ // For now don't service media resource requests from the appcache.
+ WebAppCacheContext::kNoAppCacheContextId,
+ routing_id_));
+ render_loop_->PostTask(FROM_HERE,
+ NewRunnableMethod(this, &SimpleDataSource::StartTask));
+ return true;
+}
+
+const media::MediaFormat& SimpleDataSource::media_format() {
+ return media_format_;
+}
+
+size_t SimpleDataSource::Read(uint8* data, size_t size) {
+ DCHECK_GE(size_, 0);
+ size_t copied = std::min(size, static_cast<size_t>(size_ - position_));
+ memcpy(data, data_.c_str() + position_, copied);
+ position_ += copied;
+ return copied;
+}
+
+bool SimpleDataSource::GetPosition(int64* position_out) {
+ *position_out = position_;
+ return true;
+}
+
+bool SimpleDataSource::SetPosition(int64 position) {
+ if (position < 0 || position > size_)
+ return false;
+ position_ = position;
+ return true;
+}
+
+bool SimpleDataSource::GetSize(int64* size_out) {
+ *size_out = size_;
+ return true;
+}
+
+bool SimpleDataSource::IsSeekable() {
+ return true;
+}
+
+void SimpleDataSource::OnDownloadProgress(uint64 position, uint64 size) {}
+
+void SimpleDataSource::OnUploadProgress(uint64 position, uint64 size) {}
+
+void SimpleDataSource::OnReceivedRedirect(const GURL& new_url) {
+ SetURL(new_url);
+}
+
+void SimpleDataSource::OnReceivedResponse(
+ const webkit_glue::ResourceLoaderBridge::ResponseInfo& info,
+ bool content_filtered) {
+ size_ = info.content_length;
+}
+
+void SimpleDataSource::OnReceivedData(const char* data, int len) {
+ data_.append(data, len);
+}
+
+void SimpleDataSource::OnCompletedRequest(const URLRequestStatus& status,
+ const std::string& security_info) {
+ bridge_.reset();
+ // If we don't get a content length or the request has failed, report it
+ // as a network error.
+ DCHECK(size_ == -1 || size_ == data_.length());
+ if (size_ == -1)
+ size_ = data_.length();
+ if (!status.is_success()) {
+ host_->Error(media::PIPELINE_ERROR_NETWORK);
+ }
+ host_->SetTotalBytes(size_);
+ host_->SetBufferedBytes(size_);
+ host_->InitializationComplete();
+}
+
+std::string SimpleDataSource::GetURLForDebugging() {
+ return url_.spec();
+}
+
+void SimpleDataSource::SetURL(const GURL& url) {
+ url_ = url;
+ media_format_.Clear();
+ media_format_.SetAsString(media::MediaFormat::kMimeType,
+ media::mime_type::kApplicationOctetStream);
+ media_format_.SetAsString(media::MediaFormat::kURL, url.spec());
+}
+
+void SimpleDataSource::StartTask() {
+ DCHECK(MessageLoop::current() == render_loop_);
+ bridge_->Start(this);
+}
+
+} // namespace webkit_glue
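
The class above buffers the entire resource in memory and then serves reads out of that buffer. The following standalone sketch (simplified types, not the Chromium classes) shows the same bookkeeping in isolation: OnReceivedData() maps to Append(), OnCompletedRequest() to Finish(), and Read()/SetPosition() clamp against the buffered size exactly as above.

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <string>

class InMemorySource {
 public:
  // Mirrors OnReceivedData(): accumulate the whole resource in memory.
  void Append(const char* data, size_t len) { data_.append(data, len); }

  // Mirrors OnCompletedRequest(): the final size is whatever was buffered.
  void Finish() { size_ = static_cast<int64_t>(data_.size()); }

  // Mirrors Read(): copy at most |size| bytes, clamped to what remains.
  size_t Read(uint8_t* out, size_t size) {
    size_t remaining = static_cast<size_t>(size_ - position_);
    size_t copied = std::min(size, remaining);
    std::memcpy(out, data_.data() + position_, copied);
    position_ += static_cast<int64_t>(copied);
    return copied;
  }

  // Mirrors SetPosition(): reject positions outside the buffered range.
  bool SetPosition(int64_t position) {
    if (position < 0 || position > size_)
      return false;
    position_ = position;
    return true;
  }

 private:
  std::string data_;
  int64_t size_ = 0;
  int64_t position_ = 0;
};
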
diff --git a/webkit/glue/media/simple_data_source.h b/webkit/glue/media/simple_data_source.h
new file mode 100644
index 0000000..c1e0a00
--- /dev/null
+++ b/webkit/glue/media/simple_data_source.h
@@ -0,0 +1,89 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+// An extremely simple implementation of DataSource that downloads the entire
+// media resource into memory before signaling that initialization has finished.
+// Primarily used to test <audio> and <video> with buffering/caching removed
+// from the equation.
+
+#ifndef WEBKIT_GLUE_MEDIA_SIMPLE_DATA_SOURCE_H_
+#define WEBKIT_GLUE_MEDIA_SIMPLE_DATA_SOURCE_H_
+
+#include "base/message_loop.h"
+#include "base/scoped_ptr.h"
+#include "media/base/factory.h"
+#include "media/base/filters.h"
+#include "webkit/glue/resource_loader_bridge.h"
+
+class MessageLoop;
+class WebMediaPlayerDelegateImpl;
+
+namespace webkit_glue {
+
+class SimpleDataSource : public media::DataSource,
+ public webkit_glue::ResourceLoaderBridge::Peer {
+ public:
+ static media::FilterFactory* CreateFactory(MessageLoop* message_loop,
+ int32 routing_id) {
+ return new media::FilterFactoryImpl2<SimpleDataSource,
+ MessageLoop*,
+ int32>(message_loop, routing_id);
+ }
+
+ // MediaFilter implementation.
+ virtual void Stop();
+
+ // DataSource implementation.
+ virtual bool Initialize(const std::string& url);
+ virtual const media::MediaFormat& media_format();
+ virtual size_t Read(uint8* data, size_t size);
+ virtual bool GetPosition(int64* position_out);
+ virtual bool SetPosition(int64 position);
+ virtual bool GetSize(int64* size_out);
+ virtual bool IsSeekable();
+
+ // webkit_glue::ResourceLoaderBridge::Peer implementation.
+ virtual void OnDownloadProgress(uint64 position, uint64 size);
+ virtual void OnUploadProgress(uint64 position, uint64 size);
+ virtual void OnReceivedRedirect(const GURL& new_url);
+ virtual void OnReceivedResponse(
+ const webkit_glue::ResourceLoaderBridge::ResponseInfo& info,
+ bool content_filtered);
+ virtual void OnReceivedData(const char* data, int len);
+ virtual void OnCompletedRequest(const URLRequestStatus& status,
+ const std::string& security_info);
+ virtual std::string GetURLForDebugging();
+
+ private:
+ friend class media::FilterFactoryImpl2<SimpleDataSource, MessageLoop*, int32>;
+ SimpleDataSource(MessageLoop* render_loop, int32 routing_id);
+ virtual ~SimpleDataSource();
+
+ // Updates |url_| and |media_format_| with the given URL.
+ void SetURL(const GURL& url);
+
+ // Start the resource loading on the render thread.
+ void StartTask();
+
+ // Passed in during construction, used when creating the bridge.
+ int32 routing_id_;
+
+ // Primarily used for asserting the bridge is loading on the render thread.
+ MessageLoop* render_loop_;
+
+ // Bridge used to load the media resource.
+ scoped_ptr<webkit_glue::ResourceLoaderBridge> bridge_;
+
+ media::MediaFormat media_format_;
+ GURL url_;
+ std::string data_;
+ int64 size_;
+ int64 position_;
+
+ DISALLOW_COPY_AND_ASSIGN(SimpleDataSource);
+};
+
+} // namespace webkit_glue
+
+#endif // WEBKIT_GLUE_MEDIA_SIMPLE_DATA_SOURCE_H_
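
The static CreateFactory() above is the hook the pipeline uses to construct this data source; a caller registers the factory in a media::FilterFactoryCollection alongside the other filters, as the WebMediaPlayerImpl constructor later in this diff does:

// Registration as done in WebMediaPlayerImpl's constructor later in this diff
// (the routing id of 0 there is a placeholder; see the TODO in that file).
filter_factory_->AddFactory(
    SimpleDataSource::CreateFactory(MessageLoop::current(), 0));
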
diff --git a/webkit/glue/media/video_renderer_impl.cc b/webkit/glue/media/video_renderer_impl.cc
new file mode 100644
index 0000000..f9aa998
--- /dev/null
+++ b/webkit/glue/media/video_renderer_impl.cc
@@ -0,0 +1,275 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved. Use of this
+// source code is governed by a BSD-style license that can be found in the
+// LICENSE file.
+
+#include "media/base/buffers.h"
+#include "media/base/yuv_convert.h"
+#include "webkit/glue/media/video_renderer_impl.h"
+#include "webkit/glue/webmediaplayer_impl.h"
+
+namespace webkit_glue {
+
+VideoRendererImpl::VideoRendererImpl(WebMediaPlayerImpl* delegate)
+ : delegate_(delegate),
+ last_converted_frame_(NULL) {
+ // TODO(hclam): decide whether to do the following line in this thread or
+ // in the render thread.
+ delegate_->SetVideoRenderer(this);
+}
+
+// static
+bool VideoRendererImpl::IsMediaFormatSupported(
+ const media::MediaFormat& media_format) {
+ int width = 0;
+ int height = 0;
+ return ParseMediaFormat(media_format, &width, &height);
+}
+
+void VideoRendererImpl::Stop() {
+ VideoThread::Stop();
+ delegate_->SetVideoRenderer(NULL);
+}
+
+bool VideoRendererImpl::OnInitialize(media::VideoDecoder* decoder) {
+ int width = 0;
+ int height = 0;
+ if (!ParseMediaFormat(decoder->media_format(), &width, &height))
+ return false;
+
+ video_size_.SetSize(width, height);
+ bitmap_.setConfig(SkBitmap::kARGB_8888_Config, width, height);
+ if (bitmap_.allocPixels(NULL, NULL)) {
+ bitmap_.eraseRGB(0x00, 0x00, 0x00);
+ return true;
+ }
+
+ NOTREACHED();
+ return false;
+}
+
+void VideoRendererImpl::SetRect(const gfx::Rect& rect) {
+}
+
+void VideoRendererImpl::OnFrameAvailable() {
+ delegate_->PostRepaintTask();
+}
+
+// This method is always called on the renderer's thread.
+void VideoRendererImpl::Paint(skia::PlatformCanvas* canvas,
+ const gfx::Rect& dest_rect) {
+ scoped_refptr<media::VideoFrame> video_frame;
+ GetCurrentFrame(&video_frame);
+ if (video_frame) {
+ if (CanFastPaint(canvas, dest_rect)) {
+ FastPaint(video_frame, canvas, dest_rect);
+ } else {
+ SlowPaint(video_frame, canvas, dest_rect);
+ }
+ video_frame = NULL;
+ }
+}
+
+// CanFastPaint is a helper method to determine the conditions for fast
+// painting. The conditions are:
+// 1. No skew in canvas matrix.
+// 2. Canvas has pixel format ARGB8888.
+// 3. Canvas is opaque.
+bool VideoRendererImpl::CanFastPaint(skia::PlatformCanvas* canvas,
+ const gfx::Rect& dest_rect) {
+ const SkMatrix& total_matrix = canvas->getTotalMatrix();
+ if (SkScalarNearlyZero(total_matrix.getSkewX()) &&
+ SkScalarNearlyZero(total_matrix.getSkewY())) {
+ // Get the properties of the SkDevice and the clip rect.
+ SkDevice* device = canvas->getDevice();
+
+ // Get the boundary of the device.
+ SkIRect device_rect;
+ device->getBounds(&device_rect);
+
+ // Get the pixel config of the device.
+ const SkBitmap::Config config = device->config();
+ // Get the total clip rect associated with the canvas.
+ const SkRegion& total_clip = canvas->getTotalClip();
+
+ SkIRect dest_irect;
+ TransformToSkIRect(canvas->getTotalMatrix(), dest_rect, &dest_irect);
+
+ if (config == SkBitmap::kARGB_8888_Config && device->isOpaque() &&
+ device_rect.contains(total_clip.getBounds())) {
+ return true;
+ }
+ }
+ return false;
+}
+
+void VideoRendererImpl::SlowPaint(media::VideoFrame* video_frame,
+ skia::PlatformCanvas* canvas,
+ const gfx::Rect& dest_rect) {
+ // 1. Convert YUV frame to RGB.
+ base::TimeDelta timestamp = video_frame->GetTimestamp();
+ if (video_frame != last_converted_frame_ ||
+ timestamp != last_converted_timestamp_) {
+ last_converted_frame_ = video_frame;
+ last_converted_timestamp_ = timestamp;
+ media::VideoSurface frame_in;
+ if (video_frame->Lock(&frame_in)) {
+ DCHECK(frame_in.format == media::VideoSurface::YV12 ||
+ frame_in.format == media::VideoSurface::YV16);
+ DCHECK(frame_in.strides[media::VideoSurface::kUPlane] ==
+ frame_in.strides[media::VideoSurface::kVPlane]);
+ DCHECK(frame_in.planes == media::VideoSurface::kNumYUVPlanes);
+ bitmap_.lockPixels();
+ media::YUVType yuv_type = (frame_in.format == media::VideoSurface::YV12) ?
+ media::YV12 : media::YV16;
+ media::ConvertYUVToRGB32(frame_in.data[media::VideoSurface::kYPlane],
+ frame_in.data[media::VideoSurface::kUPlane],
+ frame_in.data[media::VideoSurface::kVPlane],
+ static_cast<uint8*>(bitmap_.getPixels()),
+ frame_in.width,
+ frame_in.height,
+ frame_in.strides[media::VideoSurface::kYPlane],
+ frame_in.strides[media::VideoSurface::kUPlane],
+ bitmap_.rowBytes(),
+ yuv_type);
+ bitmap_.unlockPixels();
+ video_frame->Unlock();
+ } else {
+ NOTREACHED();
+ }
+ }
+
+ // 2. Paint the bitmap to canvas.
+ SkMatrix matrix;
+ matrix.setTranslate(static_cast<SkScalar>(dest_rect.x()),
+ static_cast<SkScalar>(dest_rect.y()));
+ if (dest_rect.width() != video_size_.width() ||
+ dest_rect.height() != video_size_.height()) {
+ matrix.preScale(SkIntToScalar(dest_rect.width()) /
+ SkIntToScalar(video_size_.width()),
+ SkIntToScalar(dest_rect.height()) /
+ SkIntToScalar(video_size_.height()));
+ }
+ canvas->drawBitmapMatrix(bitmap_, matrix, NULL);
+}
+
+void VideoRendererImpl::FastPaint(media::VideoFrame* video_frame,
+ skia::PlatformCanvas* canvas,
+ const gfx::Rect& dest_rect) {
+ media::VideoSurface frame_in;
+ if (video_frame->Lock(&frame_in)) {
+ DCHECK(frame_in.format == media::VideoSurface::YV12 ||
+ frame_in.format == media::VideoSurface::YV16);
+ DCHECK(frame_in.strides[media::VideoSurface::kUPlane] ==
+ frame_in.strides[media::VideoSurface::kVPlane]);
+ DCHECK(frame_in.planes == media::VideoSurface::kNumYUVPlanes);
+ const SkBitmap& bitmap = canvas->getDevice()->accessBitmap(true);
+ media::YUVType yuv_type = (frame_in.format == media::VideoSurface::YV12) ?
+ media::YV12 : media::YV16;
+ int y_shift = yuv_type; // 1 for YV12, 0 for YV16.
+
+ // Create a rectangle backed by SkScalar.
+ SkRect scalar_dest_rect;
+ scalar_dest_rect.iset(dest_rect.x(), dest_rect.y(),
+ dest_rect.right(), dest_rect.bottom());
+
+ // Transform the destination rectangle to local coordinates.
+ const SkMatrix& local_matrix = canvas->getTotalMatrix();
+ SkRect local_dest_rect;
+ local_matrix.mapRect(&local_dest_rect, scalar_dest_rect);
+
+ // After projecting the destination rectangle to local coordinates, round
+ // the projected rectangle to integer values; this gives us the pixel values
+ // of the rectangle.
+ SkIRect local_dest_irect, local_dest_irect_saved;
+ local_dest_rect.round(&local_dest_irect);
+ local_dest_rect.round(&local_dest_irect_saved);
+
+ // Only paint if the destination rect intersects with the clip rect.
+ if (local_dest_irect.intersect(canvas->getTotalClip().getBounds())) {
+ // At this point |local_dest_irect| contains the rect that we should draw
+ // to within the clipping rect.
+
+ // Calculate the address of the top-left corner of the destination rect in
+ // the canvas that we will draw to. The address is obtained from the base
+ // address of the canvas, shifted by the "left" and "top" of the rect.
+ uint8* dest_rect_pointer = static_cast<uint8*>(bitmap.getPixels()) +
+ local_dest_irect.fTop * bitmap.rowBytes() +
+ local_dest_irect.fLeft * 4;
+
+ // Project the clip rect to the original video frame to obtain the
+ // dimensions and the "left" and "top" of the projected clip rect.
+ // The math here is all integer math, so we won't have rounding errors and
+ // won't write outside of the canvas.
+ // We assume dest_rect.width() and dest_rect.height() are non-zero; these
+ // are valid assumptions since the intersection above rejects empty
+ // rectangles, so we just DCHECK here.
+ DCHECK_NE(0, dest_rect.width());
+ DCHECK_NE(0, dest_rect.height());
+ size_t frame_clip_width = local_dest_irect.width() *
+ frame_in.width / dest_rect.width();
+ size_t frame_clip_height = local_dest_irect.height() *
+ frame_in.height / dest_rect.height();
+
+ // Project the "left" and "top" of the final destination rect to local
+ // coordinates of the video frame, and use these values to find the offsets
+ // in the video frame at which to start reading.
+ size_t frame_clip_left = (local_dest_irect.fLeft -
+ local_dest_irect_saved.fLeft) *
+ frame_in.width / dest_rect.width();
+ size_t frame_clip_top = (local_dest_irect.fTop -
+ local_dest_irect_saved.fTop) *
+ frame_in.height / dest_rect.height();
+
+ // Use the "left" and "top" of the destination rect to locate the offset
+ // in Y, U and V planes.
+ size_t y_offset = frame_in.strides[media::VideoSurface::kYPlane] *
+ frame_clip_top + frame_clip_left;
+ // For format YV12, there is one U, V value per 2x2 block.
+ // For format YV16, there is one U, V value per 2x1 block.
+ size_t uv_offset = (frame_in.strides[media::VideoSurface::kUPlane] *
+ (frame_clip_top >> y_shift)) +
+ (frame_clip_left >> 1);
+ uint8* frame_clip_y = frame_in.data[media::VideoSurface::kYPlane] +
+ y_offset;
+ uint8* frame_clip_u = frame_in.data[media::VideoSurface::kUPlane] +
+ uv_offset;
+ uint8* frame_clip_v = frame_in.data[media::VideoSurface::kVPlane] +
+ uv_offset;
+ bitmap.lockPixels();
+
+ // TODO(hclam): do rotation and mirroring here.
+ media::ScaleYUVToRGB32(frame_clip_y,
+ frame_clip_u,
+ frame_clip_v,
+ dest_rect_pointer,
+ frame_clip_width,
+ frame_clip_height,
+ local_dest_irect.width(),
+ local_dest_irect.height(),
+ frame_in.strides[media::VideoSurface::kYPlane],
+ frame_in.strides[media::VideoSurface::kUPlane],
+ bitmap.rowBytes(),
+ yuv_type,
+ media::ROTATE_0);
+ bitmap.unlockPixels();
+ }
+ video_frame->Unlock();
+ } else {
+ NOTREACHED();
+ }
+}
+
+void VideoRendererImpl::TransformToSkIRect(const SkMatrix& matrix,
+ const gfx::Rect& src_rect,
+ SkIRect* dest_rect) {
+ // Transform destination rect to local coordinates.
+ SkRect transformed_rect;
+ SkRect skia_dest_rect;
+ skia_dest_rect.iset(src_rect.x(), src_rect.y(),
+ src_rect.right(), src_rect.bottom());
+ matrix.mapRect(&transformed_rect, skia_dest_rect);
+ transformed_rect.round(dest_rect);
+}
+
+} // namespace webkit_glue
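
The offset arithmetic in FastPaint() above is easier to follow with concrete numbers. The sketch below is a standalone illustration with made-up values (not code from this patch): it redoes the proportional mapping from the clipped destination rect back into the YV12 planes, including the >> y_shift and >> 1 chroma subsampling used above.

#include <cstdio>

int main() {
  // Made-up example values.
  const int frame_width = 320, frame_height = 240;  // source video frame
  const int y_stride = 320, uv_stride = 160;        // plane strides (bytes)
  const int dest_w = 640, dest_h = 480;             // full destination rect
  const int clip_left = 100, clip_top = 50;         // clipped rect offset
                                                    // within the dest rect
  const int clip_w = 200, clip_h = 100;             // clipped rect size
  const int y_shift = 1;                            // YV12: 2x2 chroma blocks

  // Proportional mapping from destination pixels back to frame pixels,
  // mirroring the frame_clip_* values in FastPaint().
  const int frame_clip_w = clip_w * frame_width / dest_w;        // 100
  const int frame_clip_h = clip_h * frame_height / dest_h;       // 50
  const int frame_clip_left = clip_left * frame_width / dest_w;  // 50
  const int frame_clip_top = clip_top * frame_height / dest_h;   // 25

  // Starting offsets in the planes: full-resolution Y, subsampled U/V.
  const int y_offset = y_stride * frame_clip_top + frame_clip_left;  // 8050
  const int uv_offset = uv_stride * (frame_clip_top >> y_shift) +
                        (frame_clip_left >> 1);                      // 1945
  std::printf("read %dx%d source pixels, y_offset=%d uv_offset=%d\n",
              frame_clip_w, frame_clip_h, y_offset, uv_offset);
  return 0;
}
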
diff --git a/webkit/glue/media/video_renderer_impl.h b/webkit/glue/media/video_renderer_impl.h
new file mode 100644
index 0000000..b17848b
--- /dev/null
+++ b/webkit/glue/media/video_renderer_impl.h
@@ -0,0 +1,113 @@
+// Copyright (c) 2009 The Chromium Authors. All rights reserved. Use of this
+// source code is governed by a BSD-style license that can be found in the
+// LICENSE file.
+//
+// The video renderer implementation to be used by the media pipeline. It
+// lives on the video renderer thread and also on WebKit's main thread, so we
+// need to be extra careful about members shared by the two threads,
+// especially video frame buffers.
+//
+// Methods called from WebKit's main thread:
+// Paint()
+// SetRect()
+
+#ifndef WEBKIT_GLUE_MEDIA_VIDEO_RENDERER_IMPL_H_
+#define WEBKIT_GLUE_MEDIA_VIDEO_RENDERER_IMPL_H_
+
+#include "base/gfx/platform_canvas.h"
+#include "base/gfx/rect.h"
+#include "base/gfx/size.h"
+#include "media/base/buffers.h"
+#include "media/base/factory.h"
+#include "media/base/filters.h"
+#include "media/filters/video_thread.h"
+#include "webkit/api/public/WebMediaPlayer.h"
+
+namespace webkit_glue {
+
+class WebMediaPlayerImpl;
+
+class VideoRendererImpl : public media::VideoThread {
+ public:
+ // Methods for painting, called by WebMediaPlayerImpl.
+
+ // This method is called with the same rect as the Paint method and could
+ // be used by future implementations to do improved color space conversion
+ // and scaling on a separate thread. Since we always do the stretch on the
+ // same thread as the Paint method, we just ignore the call for now.
+ virtual void SetRect(const gfx::Rect& rect);
+
+ // Paint the current front frame on the |canvas|, stretching it to fit the
+ // |dest_rect|.
+ virtual void Paint(skia::PlatformCanvas* canvas, const gfx::Rect& dest_rect);
+
+ // Static method for creating factory for this object.
+ static media::FilterFactory* CreateFactory(WebMediaPlayerImpl* delegate) {
+ return new media::FilterFactoryImpl1<VideoRendererImpl,
+ WebMediaPlayerImpl*>(delegate);
+ }
+
+ // FilterFactoryImpl1 implementation.
+ static bool IsMediaFormatSupported(const media::MediaFormat& media_format);
+
+ // Override VideoThread implementation of Stop().
+ virtual void Stop();
+
+ protected:
+ // Method called by VideoThread during initialization.
+ virtual bool OnInitialize(media::VideoDecoder* decoder);
+
+ // Method called by the VideoThread when a frame is available.
+ virtual void OnFrameAvailable();
+
+ private:
+ // Only the filter factories can create instances.
+ friend class media::FilterFactoryImpl1<VideoRendererImpl,
+ WebMediaPlayerImpl*>;
+ explicit VideoRendererImpl(WebMediaPlayerImpl* delegate);
+ virtual ~VideoRendererImpl() {}
+
+ // Determine the conditions to perform fast paint. Returns true if we can do
+ // fast paint otherwise false.
+ bool CanFastPaint(skia::PlatformCanvas* canvas, const gfx::Rect& dest_rect);
+
+ // Slow paint does a YUV => RGB, and scaled blit in two separate operations.
+ void SlowPaint(media::VideoFrame* video_frame,
+ skia::PlatformCanvas* canvas,
+ const gfx::Rect& dest_rect);
+
+ // Fast paint does YUV => RGB, scaling, blitting all in one step into the
+ // canvas. It's not always safe and appropriate to perform fast paint.
+ // CanFastPaint() is used to determine the conditions.
+ void FastPaint(media::VideoFrame* video_frame,
+ skia::PlatformCanvas* canvas,
+ const gfx::Rect& dest_rect);
+
+ void TransformToSkIRect(const SkMatrix& matrix, const gfx::Rect& src_rect,
+ SkIRect* dest_rect);
+
+ // Pointer to our parent object that is called to request repaints.
+ WebMediaPlayerImpl* delegate_;
+
+ // An RGB bitmap used to convert the video frames.
+ SkBitmap bitmap_;
+
+ // These two members are used to determine if the |bitmap_| contains
+ // an already converted image of the current frame. IMPORTANT NOTE: The
+ // value of |last_converted_frame_| must only be used for comparison purposes,
+ // and it should be assumed that the value of the pointer is INVALID unless
+ // it matches the pointer returned from GetCurrentFrame(). Even then, just
+ // to make sure, we compare the timestamp to be sure the bits in |bitmap_|
+ // are valid.
+ media::VideoFrame* last_converted_frame_;
+ base::TimeDelta last_converted_timestamp_;
+
+ // The size of the video.
+ gfx::Size video_size_;
+
+ DISALLOW_COPY_AND_ASSIGN(VideoRendererImpl);
+};
+
+} // namespace webkit_glue
+
+#endif // WEBKIT_GLUE_MEDIA_VIDEO_RENDERER_IMPL_H_
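
SlowPaint() (in the .cc above) skips the YUV-to-RGB conversion when |bitmap_| already holds the current frame, keyed on |last_converted_frame_| and |last_converted_timestamp_|. A minimal standalone sketch of that check, with a simplified assumed Frame type:

#include <cstdint>

struct Frame { int64_t timestamp_us; /* ...pixel data... */ };

class ConversionCache {
 public:
  // Returns true if |frame| still needs converting, and records it as the
  // most recently converted frame.
  bool NeedsConversion(const Frame* frame) {
    if (frame == last_frame_ && frame->timestamp_us == last_timestamp_us_)
      return false;  // the bitmap already holds this frame's pixels
    last_frame_ = frame;
    last_timestamp_us_ = frame->timestamp_us;
    return true;
  }

 private:
  const Frame* last_frame_ = nullptr;
  int64_t last_timestamp_us_ = -1;
};
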
diff --git a/webkit/glue/webmediaplayer_impl.cc b/webkit/glue/webmediaplayer_impl.cc
new file mode 100644
index 0000000..ee6f7b5f
--- /dev/null
+++ b/webkit/glue/webmediaplayer_impl.cc
@@ -0,0 +1,348 @@
+// Copyright (c) 2008-2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "base/command_line.h"
+#include "googleurl/src/gurl.h"
+#include "media/filters/ffmpeg_audio_decoder.h"
+#include "media/filters/ffmpeg_demuxer.h"
+#include "media/filters/ffmpeg_video_decoder.h"
+#include "media/filters/null_audio_renderer.h"
+#include "webkit/api/public/WebRect.h"
+#include "webkit/api/public/WebSize.h"
+#include "webkit/api/public/WebURL.h"
+#include "webkit/glue/media/simple_data_source.h"
+#include "webkit/glue/media/video_renderer_impl.h"
+#include "webkit/glue/webmediaplayer_impl.h"
+
+using WebKit::WebRect;
+using WebKit::WebSize;
+
+namespace webkit_glue {
+
+/////////////////////////////////////////////////////////////////////////////
+// Task to be posted on the main thread that fires WebMediaPlayerClient
+// methods.
+
+class NotifyWebMediaPlayerTask : public CancelableTask {
+ public:
+ NotifyWebMediaPlayerTask(WebMediaPlayerImpl* media_player,
+ WebMediaPlayerClientMethod method)
+ : media_player_(media_player),
+ method_(method) {}
+
+ virtual void Run() {
+ if (media_player_) {
+ (media_player_->client()->*(method_))();
+ media_player_->DidTask(this);
+ }
+ }
+
+ virtual void Cancel() {
+ media_player_ = NULL;
+ }
+
+ private:
+ WebMediaPlayerImpl* media_player_;
+ WebMediaPlayerClientMethod method_;
+
+ DISALLOW_COPY_AND_ASSIGN(NotifyWebMediaPlayerTask);
+};
+
+/////////////////////////////////////////////////////////////////////////////
+// WebMediaPlayerImpl implementation
+
+WebMediaPlayerImpl::WebMediaPlayerImpl(WebKit::WebMediaPlayerClient* client,
+ media::FilterFactoryCollection* factory)
+ : network_state_(WebKit::WebMediaPlayer::Empty),
+ ready_state_(WebKit::WebMediaPlayer::HaveNothing),
+ main_loop_(NULL),
+ filter_factory_(factory),
+ video_renderer_(NULL),
+ client_(client),
+ tasks_(kLastTaskIndex) {
+ // Add in the default filter factories.
+ filter_factory_->AddFactory(media::FFmpegDemuxer::CreateFilterFactory());
+ filter_factory_->AddFactory(media::FFmpegAudioDecoder::CreateFactory());
+ filter_factory_->AddFactory(media::FFmpegVideoDecoder::CreateFactory());
+ filter_factory_->AddFactory(media::NullAudioRenderer::CreateFilterFactory());
+ filter_factory_->AddFactory(VideoRendererImpl::CreateFactory(this));
+ // TODO(hclam): Provide a valid routing id to simple data source.
+ filter_factory_->AddFactory(
+ SimpleDataSource::CreateFactory(MessageLoop::current(), 0));
+
+ DCHECK(client_);
+
+ // Saves the current message loop.
+ DCHECK(!main_loop_);
+ main_loop_ = MessageLoop::current();
+
+ // We also want to be notified of |main_loop_| destruction.
+ main_loop_->AddDestructionObserver(this);
+}
+
+WebMediaPlayerImpl::~WebMediaPlayerImpl() {
+ pipeline_.Stop();
+
+ // Cancel all tasks posted on the |main_loop_|.
+ CancelAllTasks();
+
+ // Finally, tell the |main_loop_| that we no longer want to be notified of
+ // its destruction.
+ if (main_loop_) {
+ main_loop_->RemoveDestructionObserver(this);
+ }
+}
+
+void WebMediaPlayerImpl::load(const WebKit::WebURL& url) {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ // Initialize the pipeline
+ pipeline_.Start(filter_factory_.get(), url.spec(),
+ NewCallback(this, &WebMediaPlayerImpl::OnPipelineInitialize));
+}
+
+void WebMediaPlayerImpl::cancelLoad() {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ // TODO(hclam): Calls to render_view_ to stop resource load
+}
+
+void WebMediaPlayerImpl::play() {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ // TODO(hclam): We should restore the previous playback rate rather than
+ // having it at 1.0.
+ pipeline_.SetPlaybackRate(1.0f);
+}
+
+void WebMediaPlayerImpl::pause() {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ pipeline_.SetPlaybackRate(0.0f);
+}
+
+void WebMediaPlayerImpl::stop() {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ // We can fire Stop() multiple times.
+ pipeline_.Stop();
+}
+
+void WebMediaPlayerImpl::seek(float seconds) {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ // Try to preserve as much accuracy as possible.
+ float microseconds = seconds * base::Time::kMicrosecondsPerSecond;
+ if (seconds != 0)
+ pipeline_.Seek(
+ base::TimeDelta::FromMicroseconds(static_cast<int64>(microseconds)),
+ NewCallback(this, &WebMediaPlayerImpl::OnPipelineSeek));
+}
+
+void WebMediaPlayerImpl::setEndTime(float seconds) {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ // TODO(hclam): add method call when it has been implemented.
+ return;
+}
+
+void WebMediaPlayerImpl::setRate(float rate) {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ pipeline_.SetPlaybackRate(rate);
+}
+
+void WebMediaPlayerImpl::setVolume(float volume) {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ pipeline_.SetVolume(volume);
+}
+
+void WebMediaPlayerImpl::setVisible(bool visible) {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ // TODO(hclam): add appropriate method call when pipeline has it implemented.
+ return;
+}
+
+bool WebMediaPlayerImpl::setAutoBuffer(bool autoBuffer) {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ return false;
+}
+
+bool WebMediaPlayerImpl::totalBytesKnown() {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ return pipeline_.GetTotalBytes() != 0;
+}
+
+bool WebMediaPlayerImpl::hasVideo() const {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ size_t width, height;
+ pipeline_.GetVideoSize(&width, &height);
+ return width != 0 && height != 0;
+}
+
+WebKit::WebSize WebMediaPlayerImpl::naturalSize() const {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ size_t width, height;
+ pipeline_.GetVideoSize(&width, &height);
+ return WebKit::WebSize(width, height);
+}
+
+bool WebMediaPlayerImpl::paused() const {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ return pipeline_.GetPlaybackRate() == 0.0f;
+}
+
+bool WebMediaPlayerImpl::seeking() const {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ return tasks_[kTimeChangedTaskIndex] != NULL;
+}
+
+float WebMediaPlayerImpl::duration() const {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ return static_cast<float>(pipeline_.GetDuration().InSecondsF());
+}
+
+float WebMediaPlayerImpl::currentTime() const {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ return static_cast<float>(pipeline_.GetTime().InSecondsF());
+}
+
+int WebMediaPlayerImpl::dataRate() const {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ // TODO(hclam): Add this method call if pipeline has it in the interface.
+ return 0;
+}
+
+float WebMediaPlayerImpl::maxTimeBuffered() const {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ return static_cast<float>(pipeline_.GetBufferedTime().InSecondsF());
+}
+
+float WebMediaPlayerImpl::maxTimeSeekable() const {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ // TODO(scherkus): move this logic down into the pipeline.
+ if (pipeline_.GetTotalBytes() == 0) {
+ return 0.0f;
+ }
+ double total_bytes = static_cast<double>(pipeline_.GetTotalBytes());
+ double buffered_bytes = static_cast<double>(pipeline_.GetBufferedBytes());
+ double duration = static_cast<double>(pipeline_.GetDuration().InSecondsF());
+ return static_cast<float>(duration * (buffered_bytes / total_bytes));
+}
+
+unsigned long long WebMediaPlayerImpl::bytesLoaded() const {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ return pipeline_.GetBufferedBytes();
+}
+
+unsigned long long WebMediaPlayerImpl::totalBytes() const {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ return pipeline_.GetTotalBytes();
+}
+
+void WebMediaPlayerImpl::setSize(const WebSize& size) {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ if (video_renderer_) {
+ // TODO(scherkus): Change API to use SetSize().
+ video_renderer_->SetRect(gfx::Rect(0, 0, size.width, size.height));
+ }
+}
+
+// TODO(hclam): enable this for mac.
+#if WEBKIT_USING_SKIA
+void WebMediaPlayerImpl::paint(skia::PlatformCanvas* canvas,
+ const WebRect& rect) {
+ DCHECK(main_loop_ && MessageLoop::current() == main_loop_);
+
+ if (video_renderer_) {
+ video_renderer_->Paint(canvas, rect);
+ }
+}
+#endif
+
+void WebMediaPlayerImpl::WillDestroyCurrentMessageLoop() {
+ pipeline_.Stop();
+}
+
+void WebMediaPlayerImpl::OnPipelineInitialize(bool successful) {
+ if (successful) {
+ // Since we have initialized the pipeline, say we have everything.
+ // TODO(hclam): change this to report the correct status.
+ ready_state_ = WebKit::WebMediaPlayer::HaveEnoughData;
+ network_state_ = WebKit::WebMediaPlayer::Loaded;
+ } else {
+ // TODO(hclam): should use pipeline_.GetError() to determine the state
+ // properly and report the error using MediaError.
+ ready_state_ = WebKit::WebMediaPlayer::HaveNothing;
+ network_state_ = WebKit::WebMediaPlayer::NetworkError;
+ }
+
+ PostTask(kNetworkStateTaskIndex,
+ &WebKit::WebMediaPlayerClient::networkStateChanged);
+ PostTask(kReadyStateTaskIndex,
+ &WebKit::WebMediaPlayerClient::readyStateChanged);
+}
+
+void WebMediaPlayerImpl::OnPipelineSeek(bool successful) {
+ PostTask(kTimeChangedTaskIndex,
+ &WebKit::WebMediaPlayerClient::timeChanged);
+}
+
+void WebMediaPlayerImpl::SetVideoRenderer(VideoRendererImpl* video_renderer) {
+ video_renderer_ = video_renderer;
+}
+
+void WebMediaPlayerImpl::DidTask(CancelableTask* task) {
+ AutoLock auto_lock(task_lock_);
+
+ for (size_t i = 0; i < tasks_.size(); ++i) {
+ if (tasks_[i] == task) {
+ tasks_[i] = NULL;
+ return;
+ }
+ }
+ NOTREACHED();
+}
+
+void WebMediaPlayerImpl::CancelAllTasks() {
+ AutoLock auto_lock(task_lock_);
+ // Loop through the list of tasks and cancel tasks that are still alive.
+ for (size_t i = 0; i < tasks_.size(); ++i) {
+ if (tasks_[i])
+ tasks_[i]->Cancel();
+ }
+}
+
+void WebMediaPlayerImpl::PostTask(int index,
+ WebMediaPlayerClientMethod method) {
+ DCHECK(main_loop_);
+
+ AutoLock auto_lock(task_lock_);
+ if (!tasks_[index]) {
+ CancelableTask* task = new NotifyWebMediaPlayerTask(this, method);
+ tasks_[index] = task;
+ main_loop_->PostTask(FROM_HERE, task);
+ }
+}
+
+void WebMediaPlayerImpl::PostRepaintTask() {
+ PostTask(kRepaintTaskIndex, &WebKit::WebMediaPlayerClient::repaint);
+}
+
+} // namespace webkit_glue
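
maxTimeSeekable() above estimates the seekable range under a constant-bitrate assumption by scaling the duration by the buffered byte fraction. A tiny standalone example with made-up numbers:

#include <cstdio>

int main() {
  // Illustrative numbers only: 120 s duration, 3 MB buffered of 12 MB total.
  const double duration = 120.0;
  const double total_bytes = 12.0 * 1024 * 1024;
  const double buffered_bytes = 3.0 * 1024 * 1024;
  // Same constant-bitrate estimate as maxTimeSeekable().
  std::printf("%.1f s seekable\n", duration * (buffered_bytes / total_bytes));
  return 0;  // prints "30.0 s seekable"
}
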
diff --git a/webkit/glue/webmediaplayer_impl.h b/webkit/glue/webmediaplayer_impl.h
new file mode 100644
index 0000000..e670535
--- /dev/null
+++ b/webkit/glue/webmediaplayer_impl.h
@@ -0,0 +1,229 @@
+// Copyright (c) 2008-2009 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be found
+// in the LICENSE file.
+//
+// Delegates calls from WebCore::MediaPlayerPrivate to Chrome's video player.
+// It contains PipelineImpl, the actual media player pipeline, and glues the
+// pipeline to the data source, audio renderer and video renderer.
+// PipelineImpl creates multiple threads and accesses some public methods of
+// this class, so we need to be extra careful about concurrent access to
+// methods and members.
+//
+// Properties that are shared by main thread and media threads:
+// CancelableTaskList tasks_;
+// ^--- This member keeps a record of the tasks posted so that at most one
+// task of each type is pending in the main thread.
+//
+// Methods that are accessed in media threads:
+// SetAudioRenderer()
+// ^--- Called during the initialization of the pipeline, essentially from
+// the pipeline thread.
+// SetVideoRenderer()
+// ^--- Called during the initialization of the pipeline, essentially from
+// the pipeline thread.
+// PostRepaintTask()
+// ^--- Called from the video renderer thread to notify a video frame has
+// been prepared.
+// PostTask()
+// ^--- A helper for posting tasks to the main thread. It is called from the
+// main thread and the media threads and accesses |tasks_| internally, so it
+// needs internal locking to avoid concurrent access to |tasks_|.
+//
+//
+// Other issues:
+// During tear down of the whole browser or a tab, the DOM tree may not be
+// destructed nicely, and there will be some dangling media threads trying to
+// post tasks to the main thread, so this class listens for the destruction
+// event of the main thread and cleans up the media threads when the event is
+// received. Also, at destruction of this class we need to unhook it from the
+// destruction observer list of the main thread.
+
+#ifndef WEBKIT_GLUE_WEBMEDIAPLAYER_IMPL_H_
+#define WEBKIT_GLUE_WEBMEDIAPLAYER_IMPL_H_
+
+#include <vector>
+
+#include "base/gfx/platform_canvas.h"
+#include "base/lock.h"
+#include "base/message_loop.h"
+#include "media/base/filters.h"
+#include "media/base/pipeline_impl.h"
+#include "webkit/api/public/WebMediaPlayer.h"
+#include "webkit/api/public/WebMediaPlayerClient.h"
+
+class AudioRendererImpl;
+class DataSourceImpl;
+class GURL;
+class RenderView;
+class VideoRendererImpl;
+
+namespace media {
+class FilterFactoryCollection;
+}
+
+namespace webkit_glue {
+
+// This typedef is used for WebMediaPlayerImpl::PostTask() and
+// NotifyWebMediaPlayerTask in the source file.
+typedef void (WebKit::WebMediaPlayerClient::*WebMediaPlayerClientMethod)();
+
+class WebMediaPlayerImpl : public WebKit::WebMediaPlayer,
+ public MessageLoop::DestructionObserver {
+ public:
+ // Constructs a WebMediaPlayerImpl with a reference to the client and a
+ // media filter factory collection. By providing the filter factory
+ // collection, the implementor can provide more specific media filters that
+ // do resource loading and rendering. |factory| should contain filter
+ // factories for:
+ // 1. Data source
+ // 2. Audio renderer
+ // 3. Video renderer (optional)
+ //
+ // There are some default filters provided by this method:
+ // 1. FFmpeg demuxer
+ // 2. FFmpeg audio decoder
+ // 3. FFmpeg video decoder
+ // 4. Video renderer
+ // 5. Simple data source
+ // 6. Null audio renderer
+ // The video renderer provided by this class uses the graphics context
+ // provided by WebKit to perform rendering. The simple data source does
+ // resource loading by loading the whole resource object into memory. The
+ // null audio renderer is a fake audio device that plays silence. The
+ // provider of |factory| can override the default filters by adding extra
+ // filters to |factory| before calling this method.
+ WebMediaPlayerImpl(WebKit::WebMediaPlayerClient* client,
+ media::FilterFactoryCollection* factory);
+ virtual ~WebMediaPlayerImpl();
+
+ virtual void load(const WebKit::WebURL& url);
+ virtual void cancelLoad();
+
+ // Playback controls.
+ virtual void play();
+ virtual void pause();
+ virtual void stop();
+ virtual void seek(float seconds);
+ virtual void setEndTime(float seconds);
+ virtual void setRate(float rate);
+ virtual void setVolume(float volume);
+ virtual void setVisible(bool visible);
+ virtual bool setAutoBuffer(bool autoBuffer);
+ virtual bool totalBytesKnown();
+ virtual float maxTimeBuffered() const;
+ virtual float maxTimeSeekable() const;
+
+ // Methods for painting.
+ virtual void setSize(const WebKit::WebSize& size);
+
+ // TODO(hclam): enable this for mac.
+#if WEBKIT_USING_SKIA
+ virtual void paint(WebKit::WebCanvas* canvas, const WebKit::WebRect& rect);
+#endif
+
+ // True if a video is loaded.
+ virtual bool hasVideo() const;
+
+ // Dimensions of the video.
+ virtual WebKit::WebSize naturalSize() const;
+
+ // Getters of playback state.
+ virtual bool paused() const;
+ virtual bool seeking() const;
+ virtual float duration() const;
+ virtual float currentTime() const;
+
+ // Get rate of loading the resource.
+ virtual int32 dataRate() const;
+
+ // Internal states of loading and network.
+ // TODO(hclam): Ask the pipeline about the state rather than reading it
+ // from members, which could cause race conditions.
+ virtual WebKit::WebMediaPlayer::NetworkState networkState() const {
+ return network_state_;
+ }
+ virtual WebKit::WebMediaPlayer::ReadyState readyState() const {
+ return ready_state_;
+ }
+
+ virtual unsigned long long bytesLoaded() const;
+ virtual unsigned long long totalBytes() const;
+
+ // When the tab or even the browser is closed, |main_loop_| may be destroyed
+ // before this object gets destructed, so we need to know when |main_loop_|
+ // is being destroyed so that we can stop posting repaint tasks to it.
+ virtual void WillDestroyCurrentMessageLoop();
+
+ // Notification from |pipeline_| when initialization has finished.
+ void OnPipelineInitialize(bool successful);
+
+ // Notification from |pipeline_| when a seek has finished.
+ void OnPipelineSeek(bool successful);
+
+ // Called from tasks posted to |main_loop_| from this object to remove the
+ // references to them.
+ void DidTask(CancelableTask* task);
+
+ // Public methods to be called from renderers and data source so that
+ // WebMediaPlayerImpl has references to them.
+ void SetVideoRenderer(VideoRendererImpl* video_renderer);
+
+ // Called from VideoRenderer to fire a repaint task to |main_loop_|.
+ void PostRepaintTask();
+
+ // Inline getters.
+ WebKit::WebMediaPlayerClient* client() { return client_; }
+
+ private:
+ // Method for posting tasks to |main_loop_|. It may be called from the main
+ // thread or the media threads.
+ void PostTask(int index, WebMediaPlayerClientMethod method);
+
+ // Cancel all tasks currently live in |main_loop_|.
+ void CancelAllTasks();
+
+ // Indexes for tasks.
+ enum {
+ kRepaintTaskIndex = 0,
+ kReadyStateTaskIndex,
+ kNetworkStateTaskIndex,
+ kTimeChangedTaskIndex,
+ kLastTaskIndex
+ };
+
+ // TODO(hclam): get rid of these members and read from the pipeline directly.
+ WebKit::WebMediaPlayer::NetworkState network_state_;
+ WebKit::WebMediaPlayer::ReadyState ready_state_;
+
+ // Message loop of Chrome's main thread, used for posting tasks to it. Also
+ // used for DCHECKs so method calls won't execute in the wrong thread.
+ MessageLoop* main_loop_;
+
+ // A collection of factories for creating filters.
+ scoped_refptr<media::FilterFactoryCollection> filter_factory_;
+
+ // The actual pipeline. We use composition here because we expect to have
+ // the same lifetime as the pipeline.
+ media::PipelineImpl pipeline_;
+
+ // Reference to the video renderer so that paint messages from WebKit can be
+ // delegated to it.
+ scoped_refptr<VideoRendererImpl> video_renderer_;
+
+ WebKit::WebMediaPlayerClient* client_;
+
+ // List holding pointers to all tasks currently posted to |main_loop_|.
+ // |tasks_| can be accessed from the main thread or the media threads, so we
+ // need a lock to protect it.
+ Lock task_lock_;
+ typedef std::vector<CancelableTask*> CancelableTaskList;
+ CancelableTaskList tasks_;
+
+ DISALLOW_COPY_AND_ASSIGN(WebMediaPlayerImpl);
+};
+
+} // namespace webkit_glue
+
+#endif // WEBKIT_GLUE_WEBMEDIAPLAYER_IMPL_H_
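
The |tasks_| scheme documented in the header comment above (at most one pending notification per task type, cleared by DidTask() and dropped by CancelAllTasks()) can be sketched with standard-library primitives. This is a simplified stand-in, not the Chromium MessageLoop/CancelableTask machinery, and it frees slots by index rather than by task pointer for brevity:

#include <functional>
#include <mutex>
#include <vector>

// Standalone sketch of the per-index task slots: a slot holds the pending
// callback for that notification type, so a second request of the same type
// is dropped until the first one has run and freed the slot.
class TaskSlots {
 public:
  explicit TaskSlots(size_t count) : slots_(count) {}

  // PostTask(): only schedule if no task of this type is already pending.
  bool Post(size_t index, std::function<void()> task) {
    std::lock_guard<std::mutex> lock(lock_);
    if (slots_[index])
      return false;
    slots_[index] = std::move(task);
    return true;
  }

  // DidTask(): the task has run on the main loop; free its slot.
  void Done(size_t index) {
    std::lock_guard<std::mutex> lock(lock_);
    slots_[index] = nullptr;
  }

  // CancelAllTasks(): drop everything still pending (e.g. at teardown).
  void CancelAll() {
    std::lock_guard<std::mutex> lock(lock_);
    for (auto& slot : slots_)
      slot = nullptr;
  }

 private:
  std::mutex lock_;
  std::vector<std::function<void()>> slots_;
};
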
diff --git a/webkit/glue/webview_delegate.h b/webkit/glue/webview_delegate.h
index 7609eff..c75e1fc 100644
--- a/webkit/glue/webview_delegate.h
+++ b/webkit/glue/webview_delegate.h
@@ -137,7 +137,7 @@ class WebViewDelegate : virtual public WebWidgetDelegate {
return NULL;
}
- // Called when a WebMediaPlayerDelegate is needed.
+ // Called when a WebMediaPlayer is needed.
virtual WebKit::WebMediaPlayer* CreateWebMediaPlayer(
WebKit::WebMediaPlayerClient* client) {
return NULL;