author    | sandersd@chromium.org <sandersd@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-06-03 08:46:01 +0000
committer | sandersd@chromium.org <sandersd@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98> | 2014-06-03 08:46:01 +0000
commit    | 4b0cc73193fb82c7c1eca748da7964ec9c8f7aa8 (patch)
tree      | 7aea5f30cecb761ac6134f082987cf2238189f2d
parent    | 195c092d2df1eb9a677fed0fb1ce314944304d60 (diff)
Support for YUV 4:4:4 subsampling.
Plumb a new VideoFrame format (YV24) through the stack and add a conversion using libyuv to the software path.
BUG=104711
Review URL: https://codereview.chromium.org/289373011
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@274434 0039d316-1c4b-4281-b951-d872f2087c98
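The heart of the new software path is a direct three-plane conversion: because YV24 carries full-resolution U and V, the renderer can hand all three planes to libyuv's I444ToARGB in a single call (see the media/filters/skcanvas_video_renderer.cc hunk below). A minimal sketch of that conversion, assuming only libyuv's convert_argb.h is available; the helper name and buffers here are illustrative, not part of this patch:

```cpp
#include <stdint.h>
#include "libyuv/convert_argb.h"

// Convert one YV24 frame to 32-bit ARGB. All three planes are full
// resolution, so every source stride is simply |width| -- no chroma
// rounding is needed, unlike YV12/YV16.
bool Yv24ToArgb(const uint8_t* y, const uint8_t* u, const uint8_t* v,
                int width, int height, uint8_t* argb) {
  return libyuv::I444ToARGB(y, width,         // Y plane and stride.
                            u, width,         // U plane and stride.
                            v, width,         // V plane and stride.
                            argb, width * 4,  // 4 bytes per ARGB pixel.
                            width, height) == 0;
}
```

On Skia configurations where SK_R32_SHIFT is 0 (RGBA byte order), the patch follows the conversion with an in-place libyuv::ARGBToABGR pass to swizzle the result into Skia's native layout.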
-rw-r--r-- | cc/resources/video_resource_updater.cc | 7
-rw-r--r-- | content/browser/media/media_browsertest.cc | 6
-rw-r--r-- | content/browser/renderer_host/render_widget_host_view_browsertest.cc | 2
-rw-r--r-- | content/renderer/media/video_frame_compositor.cc | 1
-rw-r--r-- | media/base/video_frame.cc | 244
-rw-r--r-- | media/base/video_frame.h | 3
-rw-r--r-- | media/ffmpeg/ffmpeg_common.cc | 12
-rw-r--r-- | media/filters/ffmpeg_video_decoder.cc | 2
-rw-r--r-- | media/filters/skcanvas_video_renderer.cc | 60
9 files changed, 235 insertions, 102 deletions
diff --git a/cc/resources/video_resource_updater.cc b/cc/resources/video_resource_updater.cc
index f971ae8..1d93aeb 100644
--- a/cc/resources/video_resource_updater.cc
+++ b/cc/resources/video_resource_updater.cc
@@ -65,6 +65,7 @@ bool VideoResourceUpdater::VerifyFrame(
     case media::VideoFrame::YV12A:
     case media::VideoFrame::YV16:
     case media::VideoFrame::YV12J:
+    case media::VideoFrame::YV24:
     case media::VideoFrame::NATIVE_TEXTURE:
 #if defined(VIDEO_HOLE)
     case media::VideoFrame::HOLE:
@@ -111,12 +112,14 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
          input_frame_format == media::VideoFrame::I420 ||
          input_frame_format == media::VideoFrame::YV12A ||
          input_frame_format == media::VideoFrame::YV12J ||
-         input_frame_format == media::VideoFrame::YV16);
+         input_frame_format == media::VideoFrame::YV16 ||
+         input_frame_format == media::VideoFrame::YV24);
   if (input_frame_format != media::VideoFrame::YV12 &&
       input_frame_format != media::VideoFrame::I420 &&
       input_frame_format != media::VideoFrame::YV12A &&
       input_frame_format != media::VideoFrame::YV12J &&
-      input_frame_format != media::VideoFrame::YV16)
+      input_frame_format != media::VideoFrame::YV16 &&
+      input_frame_format != media::VideoFrame::YV24)
     return VideoFrameExternalResources();
 
   bool software_compositor = context_provider_ == NULL;
diff --git a/content/browser/media/media_browsertest.cc b/content/browser/media/media_browsertest.cc
index 7e44a31..db3bd25 100644
--- a/content/browser/media/media_browsertest.cc
+++ b/content/browser/media/media_browsertest.cc
@@ -217,8 +217,7 @@ IN_PROC_BROWSER_TEST_F(MediaTest, MAYBE(Yuv422pTheora)) {
 }
 
 IN_PROC_BROWSER_TEST_F(MediaTest, MAYBE(Yuv444pTheora)) {
-  // TODO(scherkus): Support YUV444 http://crbug.com/104711
-  RunColorFormatTest("yuv424p.ogv", "ERROR");
+  RunColorFormatTest("yuv444p.ogv", "ENDED");
 }
 
 IN_PROC_BROWSER_TEST_F(MediaTest, MAYBE(Yuv420pVp8)) {
@@ -239,8 +238,7 @@ IN_PROC_BROWSER_TEST_F(MediaTest, MAYBE(Yuv422pH264)) {
 }
 
 IN_PROC_BROWSER_TEST_F(MediaTest, MAYBE(Yuv444pH264)) {
-  // TODO(scherkus): Support YUV444 http://crbug.com/104711
-  RunColorFormatTest("yuv444p.mp4", "ERROR");
+  RunColorFormatTest("yuv444p.mp4", "ENDED");
 }
 
 #if defined(OS_CHROMEOS)
diff --git a/content/browser/renderer_host/render_widget_host_view_browsertest.cc b/content/browser/renderer_host/render_widget_host_view_browsertest.cc
index 2a54985..1c85aa7 100644
--- a/content/browser/renderer_host/render_widget_host_view_browsertest.cc
+++ b/content/browser/renderer_host/render_widget_host_view_browsertest.cc
@@ -499,7 +499,7 @@ class CompositingRenderWidgetHostViewBrowserTestTabCapture
     bitmap.allocPixels(SkImageInfo::Make(video_frame->visible_rect().width(),
                                          video_frame->visible_rect().height(),
                                          kPMColor_SkColorType,
-                                         kOpaque_SkAlphaType));
+                                         kPremul_SkAlphaType));
     bitmap.allocPixels();
     SkCanvas canvas(bitmap);
diff --git a/content/renderer/media/video_frame_compositor.cc b/content/renderer/media/video_frame_compositor.cc
index 2d3bfd4..3fa8042 100644
--- a/content/renderer/media/video_frame_compositor.cc
+++ b/content/renderer/media/video_frame_compositor.cc
@@ -15,6 +15,7 @@ static bool IsOpaque(const scoped_refptr<media::VideoFrame>& frame) {
     case media::VideoFrame::YV12J:
     case media::VideoFrame::YV16:
     case media::VideoFrame::I420:
+    case media::VideoFrame::YV24:
     case media::VideoFrame::NV12:
       return true;
diff --git a/media/base/video_frame.cc b/media/base/video_frame.cc
index 089af93..272d41d 100644
--- a/media/base/video_frame.cc
+++ b/media/base/video_frame.cc
@@ -36,6 +36,8 @@ scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
   // request does not line up on sample boundaries.
   gfx::Size new_coded_size(coded_size);
   switch (format) {
+    case VideoFrame::YV24:
+      break;
     case VideoFrame::YV12:
     case VideoFrame::YV12A:
     case VideoFrame::I420:
@@ -45,7 +47,12 @@ scoped_refptr<VideoFrame> VideoFrame::CreateFrame(
     case VideoFrame::YV16:
       new_coded_size.set_width((new_coded_size.width() + 1) / 2 * 2);
       break;
-    default:
+    case VideoFrame::UNKNOWN:
+    case VideoFrame::NV12:
+#if defined(VIDEO_HOLE)
+    case VideoFrame::HOLE:
+#endif  // defined(VIDEO_HOLE)
+    case VideoFrame::NATIVE_TEXTURE:
       LOG(FATAL) << "Only YUV formats supported: " << format;
       return NULL;
   }
@@ -85,6 +92,8 @@ std::string VideoFrame::FormatToString(VideoFrame::Format format) {
       return "YV12J";
     case VideoFrame::NV12:
       return "NV12";
+    case VideoFrame::YV24:
+      return "YV24";
   }
   NOTREACHED() << "Invalid videoframe format provided: " << format;
   return "";
@@ -112,12 +121,14 @@ bool VideoFrame::IsValidConfig(VideoFrame::Format format,
     case VideoFrame::UNKNOWN:
       return (coded_size.IsEmpty() && visible_rect.IsEmpty() &&
              natural_size.IsEmpty());
+    case VideoFrame::YV24:
+      break;
     case VideoFrame::YV12:
     case VideoFrame::YV12J:
    case VideoFrame::I420:
    case VideoFrame::YV12A:
    case VideoFrame::NV12:
-      // YUV formats have width/height requirements due to chroma subsampling.
+      // Subsampled YUV formats have width/height requirements.
       if (static_cast<size_t>(coded_size.height()) <
           RoundUp(visible_rect.bottom(), 2))
         return false;
@@ -186,7 +197,7 @@ scoped_refptr<VideoFrame> VideoFrame::WrapExternalPackedMemory(
     return NULL;
 
   switch (format) {
-    case I420: {
+    case VideoFrame::I420: {
       scoped_refptr<VideoFrame> frame(
           new VideoFrame(format,
                          coded_size,
@@ -390,6 +401,7 @@ size_t VideoFrame::NumPlanes(Format format) {
     case VideoFrame::YV16:
     case VideoFrame::I420:
     case VideoFrame::YV12J:
+    case VideoFrame::YV24:
       return 3;
     case VideoFrame::YV12A:
       return 4;
@@ -413,12 +425,25 @@ size_t VideoFrame::AllocationSize(Format format, const gfx::Size& coded_size) {
 gfx::Size VideoFrame::PlaneSize(Format format,
                                 size_t plane,
                                 const gfx::Size& coded_size) {
+  // Align to multiple-of-two size overall. This ensures that non-subsampled
+  // planes can be addressed by pixel with the same scaling as the subsampled
+  // planes.
   const int width = RoundUp(coded_size.width(), 2);
   const int height = RoundUp(coded_size.height(), 2);
   switch (format) {
+    case VideoFrame::YV24:
+      switch (plane) {
+        case VideoFrame::kYPlane:
+        case VideoFrame::kUPlane:
+        case VideoFrame::kVPlane:
+          return gfx::Size(width, height);
+        default:
+          break;
+      }
+      break;
     case VideoFrame::YV12:
     case VideoFrame::YV12J:
-    case VideoFrame::I420: {
+    case VideoFrame::I420:
       switch (plane) {
         case VideoFrame::kYPlane:
           return gfx::Size(width, height);
@@ -428,8 +453,8 @@ gfx::Size VideoFrame::PlaneSize(Format format,
         default:
           break;
       }
-    }
-    case VideoFrame::YV12A: {
+      break;
+    case VideoFrame::YV12A:
       switch (plane) {
         case VideoFrame::kYPlane:
         case VideoFrame::kAPlane:
@@ -440,8 +465,8 @@ gfx::Size VideoFrame::PlaneSize(Format format,
         default:
           break;
       }
-    }
-    case VideoFrame::YV16: {
+      break;
+    case VideoFrame::YV16:
       switch (plane) {
         case VideoFrame::kYPlane:
           return gfx::Size(width, height);
@@ -451,8 +476,8 @@ gfx::Size VideoFrame::PlaneSize(Format format,
         default:
           break;
       }
-    }
-    case VideoFrame::NV12: {
+      break;
+    case VideoFrame::NV12:
       switch (plane) {
         case VideoFrame::kYPlane:
           return gfx::Size(width, height);
@@ -461,7 +486,7 @@ gfx::Size VideoFrame::PlaneSize(Format format,
         default:
           break;
       }
-    }
+      break;
     case VideoFrame::UNKNOWN:
     case VideoFrame::NATIVE_TEXTURE:
 #if defined(VIDEO_HOLE)
@@ -484,14 +509,20 @@ size_t VideoFrame::PlaneAllocationSize(Format format,
 // static
 int VideoFrame::PlaneHorizontalBitsPerPixel(Format format, size_t plane) {
   switch (format) {
-    case VideoFrame::YV12A:
-      if (plane == kAPlane)
-        return 8;
-    // fallthrough
+    case VideoFrame::YV24:
+      switch (plane) {
+        case kYPlane:
+        case kUPlane:
+        case kVPlane:
+          return 8;
+        default:
+          break;
+      }
+      break;
     case VideoFrame::YV12:
     case VideoFrame::YV16:
     case VideoFrame::I420:
-    case VideoFrame::YV12J: {
+    case VideoFrame::YV12J:
       switch (plane) {
         case kYPlane:
           return 8;
@@ -501,9 +532,20 @@ int VideoFrame::PlaneHorizontalBitsPerPixel(Format format, size_t plane) {
         default:
           break;
       }
-    }
-
-    case VideoFrame::NV12: {
+      break;
+    case VideoFrame::YV12A:
+      switch (plane) {
+        case kYPlane:
+        case kAPlane:
+          return 8;
+        case kUPlane:
+        case kVPlane:
+          return 2;
+        default:
+          break;
+      }
+      break;
+    case VideoFrame::NV12:
       switch (plane) {
         case kYPlane:
           return 8;
@@ -512,12 +554,16 @@ int VideoFrame::PlaneHorizontalBitsPerPixel(Format format, size_t plane) {
         default:
           break;
       }
-    }
-    default:
+      break;
+    case VideoFrame::UNKNOWN:
+#if defined(VIDEO_HOLE)
+    case VideoFrame::HOLE:
+#endif  // defined(VIDEO_HOLE)
+    case VideoFrame::NATIVE_TEXTURE:
       break;
   }
-
-  NOTREACHED() << "Unsupported video frame format: " << format;
+  NOTREACHED() << "Unsupported video frame format/plane: "
+               << format << "/" << plane;
   return 0;
 }
@@ -530,7 +576,7 @@ static void ReleaseData(uint8* data) {
 void VideoFrame::AllocateYUV() {
   DCHECK(format_ == VideoFrame::YV12 || format_ == VideoFrame::YV16 ||
          format_ == VideoFrame::YV12A || format_ == VideoFrame::I420 ||
-         format_ == VideoFrame::YV12J);
+         format_ == VideoFrame::YV12J || format_ == VideoFrame::YV24);
   // Align Y rows at least at 16 byte boundaries. The stride for both
   // YV12 and YV16 is 1/2 of the stride of Y. For YV12, every row of bytes for
   // U and V applies to two rows of Y (one byte of UV for 4 bytes of Y), so in
@@ -541,11 +587,11 @@ void VideoFrame::AllocateYUV() {
   // the Y values of the final row, but assumes that the last row of U & V
   // applies to a full two rows of Y. YV12A is the same as YV12, but with an
   // additional alpha plane that has the same size and alignment as the Y plane.
-
   size_t y_stride = RoundUp(row_bytes(VideoFrame::kYPlane),
                             kFrameSizeAlignment);
   size_t uv_stride = RoundUp(row_bytes(VideoFrame::kUPlane),
                              kFrameSizeAlignment);
+
   // The *2 here is because some formats (e.g. h264) allow interlaced coding,
   // and then the size needs to be a multiple of two macroblocks (vertically).
   // See libavcodec/utils.c:avcodec_align_dimensions2().
@@ -628,32 +674,60 @@ int VideoFrame::row_bytes(size_t plane) const {
   DCHECK(IsValidPlane(plane));
   int width = coded_size_.width();
   switch (format_) {
-    // Planar, 8bpp.
-    case YV12A:
-      if (plane == kAPlane)
-        return width;
-    // Fallthrough.
-    case YV12:
-    case YV16:
-    case I420:
-    case YV12J:
-      if (plane == kYPlane)
-        return width;
-      else if (plane <= kVPlane)
-        return RoundUp(width, 2) / 2;
+    case VideoFrame::YV24:
+      switch (plane) {
+        case kYPlane:
+        case kUPlane:
+        case kVPlane:
+          return width;
+        default:
+          break;
+      }
       break;
-
-    case NV12:
-      if (plane <= kUVPlane)
-        return width;
+    case VideoFrame::YV12:
+    case VideoFrame::YV16:
+    case VideoFrame::I420:
+    case VideoFrame::YV12J:
+      switch (plane) {
+        case kYPlane:
+          return width;
+        case kUPlane:
+        case kVPlane:
+          return RoundUp(width, 2) / 2;
+        default:
+          break;
+      }
       break;
-
-    default:
+    case VideoFrame::YV12A:
+      switch (plane) {
+        case kYPlane:
+        case kAPlane:
+          return width;
+        case kUPlane:
+        case kVPlane:
+          return RoundUp(width, 2) / 2;
+        default:
+          break;
+      }
+      break;
+    case VideoFrame::NV12:
+      switch (plane) {
+        case kYPlane:
+        case kUVPlane:
+          return width;
+        default:
+          break;
+      }
+      break;
+    case VideoFrame::UNKNOWN:
+#if defined(VIDEO_HOLE)
+    case VideoFrame::HOLE:
+#endif  // defined(VIDEO_HOLE)
+    case VideoFrame::NATIVE_TEXTURE:
       break;
   }
-
-  // Intentionally leave out non-production formats.
-  NOTREACHED() << "Unsupported video frame format: " << format_;
+  NOTREACHED() << "Unsupported video frame format/plane: "
+               << format_ << "/" << plane;
   return 0;
 }
@@ -661,35 +735,61 @@ int VideoFrame::rows(size_t plane) const {
   DCHECK(IsValidPlane(plane));
   int height = coded_size_.height();
   switch (format_) {
-    case YV16:
-      return height;
-
-    case YV12A:
-      if (plane == kAPlane)
-        return height;
-    // Fallthrough.
-    case YV12:
-    case YV12J:
-    case I420:
-      if (plane == kYPlane)
-        return height;
-      else if (plane <= kVPlane)
-        return RoundUp(height, 2) / 2;
+    case VideoFrame::YV24:
+    case VideoFrame::YV16:
+      switch (plane) {
+        case kYPlane:
+        case kUPlane:
+        case kVPlane:
+          return height;
+        default:
+          break;
+      }
       break;
-
-    case NV12:
-      if (plane == kYPlane)
-        return height;
-      else if (plane == kUVPlane)
-        return RoundUp(height, 2) / 2;
+    case VideoFrame::YV12:
+    case VideoFrame::YV12J:
+    case VideoFrame::I420:
+      switch (plane) {
+        case kYPlane:
+          return height;
+        case kUPlane:
+        case kVPlane:
+          return RoundUp(height, 2) / 2;
+        default:
+          break;
+      }
       break;
-
-    default:
+    case VideoFrame::YV12A:
+      switch (plane) {
+        case kYPlane:
+        case kAPlane:
+          return height;
+        case kUPlane:
+        case kVPlane:
+          return RoundUp(height, 2) / 2;
+        default:
+          break;
+      }
+      break;
+    case VideoFrame::NV12:
+      switch (plane) {
+        case kYPlane:
+          return height;
+        case kUVPlane:
+          return RoundUp(height, 2) / 2;
+        default:
+          break;
+      }
+      break;
+    case VideoFrame::UNKNOWN:
+#if defined(VIDEO_HOLE)
+    case VideoFrame::HOLE:
+#endif  // defined(VIDEO_HOLE)
+    case VideoFrame::NATIVE_TEXTURE:
      break;
   }
-
-  // Intentionally leave out non-production formats.
-  NOTREACHED() << "Unsupported video frame format: " << format_;
+  NOTREACHED() << "Unsupported video frame format/plane: "
+               << format_ << "/" << plane;
   return 0;
 }
diff --git a/media/base/video_frame.h b/media/base/video_frame.h
index a3f296d..0696a55 100644
--- a/media/base/video_frame.h
+++ b/media/base/video_frame.h
@@ -57,7 +57,8 @@ class MEDIA_EXPORT VideoFrame : public base::RefCountedThreadSafe<VideoFrame> {
     NATIVE_TEXTURE = 6,  // Native texture.  Pixel-format agnostic.
     YV12J = 7,  // JPEG color range version of YV12
     NV12 = 8,  // 12bpp 1x1 Y plane followed by an interleaved 2x2 UV plane.
-    FORMAT_MAX = NV12,  // Must always be equal to largest entry logged.
+    YV24 = 9,  // 24bpp YUV planar, no subsampling.
+    FORMAT_MAX = YV24,  // Must always be equal to largest entry logged.
   };
 
   // Returns the name of a Format as a string.
diff --git a/media/ffmpeg/ffmpeg_common.cc b/media/ffmpeg/ffmpeg_common.cc
index ab64468..d87aa82 100644
--- a/media/ffmpeg/ffmpeg_common.cc
+++ b/media/ffmpeg/ffmpeg_common.cc
@@ -400,9 +400,11 @@ void AVStreamToVideoDecoderConfig(
   }
 
   // Pad out |coded_size| for subsampled YUV formats.
-  coded_size.set_width((coded_size.width() + 1) / 2 * 2);
-  if (format != VideoFrame::YV16)
-    coded_size.set_height((coded_size.height() + 1) / 2 * 2);
+  if (format != VideoFrame::YV24) {
+    coded_size.set_width((coded_size.width() + 1) / 2 * 2);
+    if (format != VideoFrame::YV16)
+      coded_size.set_height((coded_size.height() + 1) / 2 * 2);
+  }
 
   bool is_encrypted = false;
   AVDictionaryEntry* key =
       av_dict_get(stream->metadata, "enc_key_id", NULL, 0);
@@ -517,6 +519,8 @@ VideoFrame::Format PixelFormatToVideoFormat(PixelFormat pixel_format) {
   switch (pixel_format) {
     case PIX_FMT_YUV422P:
       return VideoFrame::YV16;
+    case PIX_FMT_YUV444P:
+      return VideoFrame::YV24;
     case PIX_FMT_YUV420P:
       return VideoFrame::YV12;
     case PIX_FMT_YUVJ420P:
@@ -539,6 +543,8 @@ PixelFormat VideoFormatToPixelFormat(VideoFrame::Format video_format) {
       return PIX_FMT_YUVJ420P;
     case VideoFrame::YV12A:
       return PIX_FMT_YUVA420P;
+    case VideoFrame::YV24:
+      return PIX_FMT_YUV444P;
     default:
       DVLOG(1) << "Unsupported VideoFrame::Format: " << video_format;
   }
diff --git a/media/filters/ffmpeg_video_decoder.cc b/media/filters/ffmpeg_video_decoder.cc
index aaa8a68..9cb3d5f 100644
--- a/media/filters/ffmpeg_video_decoder.cc
+++ b/media/filters/ffmpeg_video_decoder.cc
@@ -82,7 +82,7 @@ int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context,
   if (format == VideoFrame::UNKNOWN)
     return AVERROR(EINVAL);
   DCHECK(format == VideoFrame::YV12 || format == VideoFrame::YV16 ||
-         format == VideoFrame::YV12J);
+         format == VideoFrame::YV12J || format == VideoFrame::YV24);
 
   gfx::Size size(codec_context->width, codec_context->height);
   const int ret = av_image_check_size(size.width(), size.height(), 0, NULL);
diff --git a/media/filters/skcanvas_video_renderer.cc b/media/filters/skcanvas_video_renderer.cc
index 0658762..2d9d708 100644
--- a/media/filters/skcanvas_video_renderer.cc
+++ b/media/filters/skcanvas_video_renderer.cc
@@ -29,23 +29,24 @@ namespace media {
 
 static bool IsYUV(media::VideoFrame::Format format) {
   return format == media::VideoFrame::YV12 ||
+         format == media::VideoFrame::YV16 ||
          format == media::VideoFrame::I420 ||
+         format == media::VideoFrame::YV12A ||
+         format == media::VideoFrame::YV12J ||
+         format == media::VideoFrame::YV24;
+}
+
+static bool IsFastPaintYUV(media::VideoFrame::Format format) {
+  return format == media::VideoFrame::YV12 ||
          format == media::VideoFrame::YV16 ||
+         format == media::VideoFrame::I420 ||
          format == media::VideoFrame::YV12J;
 }
 
-static bool IsEitherYUVOrNative(media::VideoFrame::Format format) {
+static bool IsYUVOrNative(media::VideoFrame::Format format) {
   return IsYUV(format) || format == media::VideoFrame::NATIVE_TEXTURE;
 }
 
-static bool IsEitherYUVOrYUVA(media::VideoFrame::Format format) {
-  return IsYUV(format) || format == media::VideoFrame::YV12A;
-}
-
-static bool IsEitherYUVOrYUVAOrNative(media::VideoFrame::Format format) {
-  return IsEitherYUVOrNative(format) || format == media::VideoFrame::YV12A;
-}
-
 // CanFastPaint is a helper method to determine the conditions for fast
 // painting. The conditions are:
 // 1. No skew in canvas matrix.
@@ -58,7 +59,7 @@ static bool IsEitherYUVOrYUVAOrNative(media::VideoFrame::Format format) {
 // Disable the flipping and mirroring checks once we have it.
 static bool CanFastPaint(SkCanvas* canvas, uint8 alpha,
                          media::VideoFrame::Format format) {
-  if (alpha != 0xFF || !IsYUV(format))
+  if (alpha != 0xFF || !IsFastPaintYUV(format))
     return false;
 
   const SkMatrix& total_matrix = canvas->getTotalMatrix();
@@ -88,7 +89,7 @@ static void FastPaint(
     const scoped_refptr<media::VideoFrame>& video_frame,
     SkCanvas* canvas,
     const SkRect& dest_rect) {
-  DCHECK(IsYUV(video_frame->format())) << video_frame->format();
+  DCHECK(IsFastPaintYUV(video_frame->format())) << video_frame->format();
   DCHECK_EQ(video_frame->stride(media::VideoFrame::kUPlane),
             video_frame->stride(media::VideoFrame::kVPlane));
 
@@ -96,8 +97,7 @@ static void FastPaint(
   media::YUVType yuv_type = media::YV16;
   int y_shift = 0;
   if (video_frame->format() == media::VideoFrame::YV12 ||
-      video_frame->format() == media::VideoFrame::I420 ||
-      video_frame->format() == media::VideoFrame::YV12A) {
+      video_frame->format() == media::VideoFrame::I420) {
     yuv_type = media::YV12;
     y_shift = 1;
   }
@@ -251,9 +251,9 @@ static void FastPaint(
 static void ConvertVideoFrameToBitmap(
     const scoped_refptr<media::VideoFrame>& video_frame,
     SkBitmap* bitmap) {
-  DCHECK(IsEitherYUVOrYUVAOrNative(video_frame->format()))
+  DCHECK(IsYUVOrNative(video_frame->format()))
       << video_frame->format();
-  if (IsEitherYUVOrYUVA(video_frame->format())) {
+  if (IsYUV(video_frame->format())) {
     DCHECK_EQ(video_frame->stride(media::VideoFrame::kUPlane),
               video_frame->stride(media::VideoFrame::kVPlane));
   }
@@ -273,7 +273,7 @@ static void ConvertVideoFrameToBitmap(
   size_t y_offset = 0;
   size_t uv_offset = 0;
-  if (IsEitherYUVOrYUVA(video_frame->format())) {
+  if (IsYUV(video_frame->format())) {
     int y_shift = (video_frame->format() == media::VideoFrame::YV16) ? 0 : 1;
     // Use the "left" and "top" of the destination rect to locate the offset
     // in Y, U and V planes.
@@ -350,6 +350,30 @@ static void ConvertVideoFrameToBitmap(
           media::YV12);
       break;
 
+    case media::VideoFrame::YV24:
+      libyuv::I444ToARGB(
+          video_frame->data(media::VideoFrame::kYPlane) + y_offset,
+          video_frame->stride(media::VideoFrame::kYPlane),
+          video_frame->data(media::VideoFrame::kUPlane) + uv_offset,
+          video_frame->stride(media::VideoFrame::kUPlane),
+          video_frame->data(media::VideoFrame::kVPlane) + uv_offset,
+          video_frame->stride(media::VideoFrame::kVPlane),
+          static_cast<uint8*>(bitmap->getPixels()),
+          bitmap->rowBytes(),
+          video_frame->visible_rect().width(),
+          video_frame->visible_rect().height());
+#if SK_R32_SHIFT == 0 && SK_G32_SHIFT == 8 && SK_B32_SHIFT == 16 && \
+    SK_A32_SHIFT == 24
+      libyuv::ARGBToABGR(
+          static_cast<uint8*>(bitmap->getPixels()),
+          bitmap->rowBytes(),
+          static_cast<uint8*>(bitmap->getPixels()),
+          bitmap->rowBytes(),
+          video_frame->visible_rect().width(),
+          video_frame->visible_rect().height());
+#endif
+      break;
+
     case media::VideoFrame::NATIVE_TEXTURE:
       DCHECK_EQ(video_frame->format(), media::VideoFrame::NATIVE_TEXTURE);
       video_frame->ReadPixelsFromNativeTexture(*bitmap);
@@ -385,7 +409,7 @@ void SkCanvasVideoRenderer::Paint(media::VideoFrame* video_frame,
 
   // Paint black rectangle if there isn't a frame available or the
   // frame has an unexpected format.
-  if (!video_frame || !IsEitherYUVOrYUVAOrNative(video_frame->format())) {
+  if (!video_frame || !IsYUVOrNative(video_frame->format())) {
     canvas->drawRect(dest, paint);
     return;
   }
@@ -403,7 +427,7 @@ void SkCanvasVideoRenderer::Paint(media::VideoFrame* video_frame,
     last_frame_timestamp_ = video_frame->timestamp();
   }
 
-  // Do a slower paint using |last_frame_|.
+  // Paint using |last_frame_|.
   paint.setFilterLevel(SkPaint::kLow_FilterLevel);
   canvas->drawBitmapRect(last_frame_, NULL, dest, &paint);
 }
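A note on cost, implied by the new PlaneSize()/row_bytes()/rows() cases above: YV24 keeps U and V at the Y plane's resolution, so a frame carries twice the payload of YV12. A back-of-the-envelope sketch (not part of the patch; the function is illustrative and ignores the stride and interlacing padding that AllocateYUV() adds on top):

```cpp
#include <cstddef>

// Unpadded payload for an 8-bit planar frame, per the plane geometry above.
size_t UnalignedFrameBytes(bool is_yv24, size_t width, size_t height) {
  const size_t luma = width * height;  // Y plane: one byte per pixel.
  return is_yv24 ? 3 * luma            // YV24: full-size U and V (24bpp).
                 : luma + luma / 2;    // YV12: quarter-size U and V (12bpp).
}

// E.g. at 1920x1080, YV24 is roughly 6.2 MB per frame versus 3.1 MB for YV12.
```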