author     hubbe <hubbe@chromium.org>            2016-02-04 16:50:44 -0800
committer  Commit bot <commit-bot@chromium.org>  2016-02-05 00:52:41 +0000
commit     074c4287f2625860b2a9eb437b97f1f1788f8f4b (patch)
tree       7b08834a7069d24277c1d8a36614c83a653f8a63
parent     4534b3e37c7f273314371ed65fa36583cb755c37 (diff)
Add support for 9- and 10-bit h264 videos.
Videos are uploaded to the GPU using half-floats if supported; otherwise they are downshifted to regular 8-bit textures before uploading. No dithering is done, except for whatever GL_DITHER might do. (Which is probably nothing.)

BUG=445071
CQ_INCLUDE_TRYBOTS=tryserver.blink:linux_blink_rel

Review URL: https://codereview.chromium.org/1599533002

Cr-Commit-Position: refs/heads/master@{#373691}
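In outline, the upload path added here converts each high-bit plane row either to half-float bit patterns (when half-float-linear texture support is available) or to shifted-down 8-bit samples. A minimal standalone sketch of the two conversions — ConvertRow is a hypothetical helper for illustration, not part of this CL; it assumes little-endian 16-bit-packed input as produced by the decoder:

#include <cstddef>
#include <cstdint>

// Sketch of the per-row plane conversions performed on upload.
// |bits| is 9 or 10; input samples are packed little-endian uint16.
void ConvertRow(const uint16_t* src, size_t width, int bits,
                bool half_float_supported, void* dst) {
  if (half_float_supported) {
    // OR-ing with 0x3800 turns a 10-bit sample v into the half-float
    // 0.5 + v / 2048; the shader undoes this with offset 0.5 and
    // multiplier 2048.0 / ((1 << bits) - 1).
    uint16_t* out = static_cast<uint16_t*>(dst);
    for (size_t x = 0; x < width; ++x)
      out[x] = src[x] | 0x3800;
  } else {
    // No half-float support: shift down to 8 bits (no dithering).
    uint8_t* out = static_cast<uint8_t*>(dst);
    const int shift = bits - 8;
    for (size_t x = 0; x < width; ++x)
      out[x] = static_cast<uint8_t>(src[x] >> shift);
  }
}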
-rw-r--r--  cc/layers/video_layer_impl.cc  6
-rw-r--r--  cc/layers/video_layer_impl.h  3
-rw-r--r--  cc/output/gl_renderer.cc  14
-rw-r--r--  cc/output/renderer_pixeltest.cc  112
-rw-r--r--  cc/quads/draw_quad_unittest.cc  52
-rw-r--r--  cc/quads/yuv_video_draw_quad.cc  12
-rw-r--r--  cc/quads/yuv_video_draw_quad.h  10
-rw-r--r--  cc/raster/tile_task_worker_pool.cc  1
-rw-r--r--  cc/resources/platform_color_unittest.cc  1
-rw-r--r--  cc/resources/resource_format.cc  9
-rw-r--r--  cc/resources/resource_format.h  3
-rw-r--r--  cc/resources/resource_provider.cc  22
-rw-r--r--  cc/resources/resource_provider.h  3
-rw-r--r--  cc/resources/video_resource_updater.cc  147
-rw-r--r--  cc/resources/video_resource_updater.h  5
-rw-r--r--  cc/resources/video_resource_updater_unittest.cc  97
-rw-r--r--  cc/test/render_pass_test_utils.cc  3
-rw-r--r--  cc/test/test_web_graphics_context_3d.h  3
-rw-r--r--  components/mus/public/interfaces/quads.mojom  2
-rw-r--r--  content/browser/media/media_browsertest.cc  8
-rw-r--r--  content/browser/renderer_host/media/video_capture_device_client_unittest.cc  10
-rw-r--r--  content/common/cc_messages.h  2
-rw-r--r--  content/common/cc_messages_unittest.cc  2
-rw-r--r--  gpu/command_buffer/common/capabilities.cc  1
-rw-r--r--  gpu/command_buffer/common/capabilities.h  1
-rw-r--r--  gpu/command_buffer/service/gles2_cmd_decoder.cc  2
-rw-r--r--  gpu/ipc/gpu_command_buffer_traits_multi.h  1
-rw-r--r--  media/base/video_frame.cc  20
-rw-r--r--  media/base/video_types.cc  24
-rw-r--r--  media/base/video_types.h  10
-rw-r--r--  media/ffmpeg/ffmpeg_common.cc  33
-rw-r--r--  media/filters/ffmpeg_video_decoder.cc  5
-rw-r--r--  media/mojo/interfaces/media_types.mojom  8
-rw-r--r--  media/mojo/services/media_type_converters.cc  18
-rw-r--r--  media/renderers/skcanvas_video_renderer.cc  73
-rw-r--r--  media/renderers/skcanvas_video_renderer_unittest.cc  38
-rw-r--r--  media/test/data/README  3
-rw-r--r--  media/test/data/bear-320x180-hi10p.mp4  bin 0 -> 157833 bytes
-rw-r--r--  media/test/data/blackwhite_yuv420p_hi10p.mp4  bin 0 -> 2142 bytes
-rw-r--r--  media/test/pipeline_integration_test.cc  10
-rw-r--r--  media/video/gpu_memory_buffer_video_frame_pool.cc  6
-rw-r--r--  mojo/converters/surfaces/surfaces_type_converters.cc  6
-rw-r--r--  tools/metrics/histograms/histograms.xml  6
43 files changed, 714 insertions(+), 78 deletions(-)
diff --git a/cc/layers/video_layer_impl.cc b/cc/layers/video_layer_impl.cc
index 923767e..1eb4ef0 100644
--- a/cc/layers/video_layer_impl.cc
+++ b/cc/layers/video_layer_impl.cc
@@ -119,6 +119,8 @@ bool VideoLayerImpl::WillDraw(DrawMode draw_mode,
external_resources.software_release_callback;
return true;
}
+ frame_resource_offset_ = external_resources.offset;
+ frame_resource_multiplier_ = external_resources.multiplier;
DCHECK_EQ(external_resources.mailboxes.size(),
external_resources.release_callbacks.size());
@@ -277,8 +279,8 @@ void VideoLayerImpl::AppendQuads(RenderPass* render_pass,
ya_tex_coord_rect, uv_tex_coord_rect, ya_tex_size, uv_tex_size,
frame_resources_[0].id, frame_resources_[1].id,
frame_resources_[2].id,
- frame_resources_.size() > 3 ? frame_resources_[3].id : 0,
- color_space);
+ frame_resources_.size() > 3 ? frame_resources_[3].id : 0, color_space,
+ frame_resource_offset_, frame_resource_multiplier_);
ValidateQuadResources(yuv_video_quad);
break;
}
diff --git a/cc/layers/video_layer_impl.h b/cc/layers/video_layer_impl.h
index c1688e8..a0ba484 100644
--- a/cc/layers/video_layer_impl.h
+++ b/cc/layers/video_layer_impl.h
@@ -63,6 +63,9 @@ class CC_EXPORT VideoLayerImpl : public LayerImpl {
scoped_ptr<VideoResourceUpdater> updater_;
VideoFrameExternalResources::ResourceType frame_resource_type_;
+ float frame_resource_offset_;
+ float frame_resource_multiplier_;
+
struct FrameResource {
FrameResource(ResourceId id,
gfx::Size size_in_pixels,
diff --git a/cc/output/gl_renderer.cc b/cc/output/gl_renderer.cc
index 67382c0..f76d3e6 100644
--- a/cc/output/gl_renderer.cc
+++ b/cc/output/gl_renderer.cc
@@ -2122,13 +2122,23 @@ void GLRenderer::DrawYUVVideoQuad(const DrawingFrame* frame,
break;
}
+ float yuv_to_rgb_multiplied[9];
+ float yuv_adjust_with_offset[3];
+
+ for (int i = 0; i < 9; ++i)
+ yuv_to_rgb_multiplied[i] = yuv_to_rgb[i] * quad->resource_multiplier;
+
+ for (int i = 0; i < 3; ++i)
+ yuv_adjust_with_offset[i] =
+ yuv_adjust[i] / quad->resource_multiplier - quad->resource_offset;
+
// The transform and vertex data are used to figure out the extents that the
// un-antialiased quad should have and which vertex this is and the float
// quad passed in via uniform is the actual geometry that gets used to draw
// it. This is why this centered rect is used and not the original quad_rect.
auto tile_rect = gfx::RectF(quad->rect);
- gl_->UniformMatrix3fv(yuv_matrix_location, 1, 0, yuv_to_rgb);
- gl_->Uniform3fv(yuv_adj_location, 1, yuv_adjust);
+ gl_->UniformMatrix3fv(yuv_matrix_location, 1, 0, yuv_to_rgb_multiplied);
+ gl_->Uniform3fv(yuv_adj_location, 1, yuv_adjust_with_offset);
SetShaderOpacity(quad->shared_quad_state->opacity, alpha_location);
if (!clip_region) {
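Folding the offset and multiplier into the shader constants above is a pure regrouping. With t the texel value, o = resource_offset, m = resource_multiplier, M = yuv_to_rgb and a = yuv_adjust, the intended pipeline is rgb = M(m(t - o) + a), which rewrites as

    \mathrm{rgb} = M\bigl(m\,(t - o) + a\bigr) = (m\,M)\,\Bigl(t + \frac{a}{m} - o\Bigr)

so the shader keeps its existing matrix * (texel + adjust) form: yuv_to_rgb_multiplied is mM and yuv_adjust_with_offset is a/m - o, exactly the two loops above.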
diff --git a/cc/output/renderer_pixeltest.cc b/cc/output/renderer_pixeltest.cc
index 083bac1..7f5bbfd 100644
--- a/cc/output/renderer_pixeltest.cc
+++ b/cc/output/renderer_pixeltest.cc
@@ -267,13 +267,61 @@ void CreateTestYUVVideoDrawQuad_FromVideoFrame(
yuv_quad->SetNew(shared_state, rect, opaque_rect, visible_rect,
ya_tex_coord_rect, uv_tex_coord_rect, ya_tex_size,
uv_tex_size, y_resource, u_resource, v_resource, a_resource,
- color_space);
+ color_space, 0.0f, 1.0f);
+}
+
+// Upshift video frame to 10 bit.
+scoped_refptr<media::VideoFrame> CreateHighbitVideoFrame(
+ const scoped_refptr<media::VideoFrame>& video_frame) {
+ media::VideoPixelFormat format;
+ switch (video_frame->format()) {
+ case media::PIXEL_FORMAT_I420:
+ case media::PIXEL_FORMAT_YV12:
+ format = media::PIXEL_FORMAT_YUV420P10;
+ break;
+ case media::PIXEL_FORMAT_YV16:
+ format = media::PIXEL_FORMAT_YUV422P10;
+ break;
+ case media::PIXEL_FORMAT_YV24:
+ format = media::PIXEL_FORMAT_YUV444P10;
+ break;
+
+ default:
+ NOTREACHED();
+ return nullptr;
+ }
+ scoped_refptr<media::VideoFrame> ret = media::VideoFrame::CreateFrame(
+ format, video_frame->coded_size(), video_frame->visible_rect(),
+ video_frame->natural_size(), video_frame->timestamp());
+
+ // Copy all metadata.
+ base::DictionaryValue tmp;
+ video_frame->metadata()->MergeInternalValuesInto(&tmp);
+ ret->metadata()->MergeInternalValuesFrom(tmp);
+
+ for (int plane = media::VideoFrame::kYPlane;
+ plane <= media::VideoFrame::kVPlane; ++plane) {
+ int width = video_frame->row_bytes(plane);
+ const uint8_t* src = video_frame->data(plane);
+ uint16_t* dst = reinterpret_cast<uint16_t*>(ret->data(plane));
+ for (int row = 0; row < video_frame->rows(plane); row++) {
+ for (int x = 0; x < width; x++) {
+ // Replicate the top bits into the lower bits, this way
+ // 0xFF becomes 0x3FF.
+ dst[x] = (src[x] << 2) | (src[x] >> 6);
+ }
+ src += video_frame->stride(plane);
+ dst += ret->stride(plane) / 2;
+ }
+ }
+ return ret;
}
void CreateTestYUVVideoDrawQuad_Striped(
const SharedQuadState* shared_state,
media::VideoPixelFormat format,
bool is_transparent,
+ bool highbit,
const gfx::RectF& tex_coord_rect,
RenderPass* render_pass,
VideoResourceUpdater* video_resource_updater,
@@ -308,6 +356,10 @@ void CreateTestYUVVideoDrawQuad_Striped(
}
}
uint8_t alpha_value = is_transparent ? 0 : 128;
+
+ if (highbit)
+ video_frame = CreateHighbitVideoFrame(video_frame);
+
CreateTestYUVVideoDrawQuad_FromVideoFrame(
shared_state, video_frame, alpha_value, tex_coord_rect, render_pass,
video_resource_updater, rect, visible_rect, resource_provider);
@@ -980,7 +1032,11 @@ class VideoGLRendererPixelTest : public GLRendererPixelTest {
scoped_ptr<VideoResourceUpdater> video_resource_updater_;
};
-TEST_F(VideoGLRendererPixelTest, SimpleYUVRect) {
+class VideoGLRendererPixelHiLoTest
+ : public VideoGLRendererPixelTest,
+ public ::testing::WithParamInterface<bool> {};
+
+TEST_P(VideoGLRendererPixelHiLoTest, SimpleYUVRect) {
gfx::Rect rect(this->device_viewport_size_);
RenderPassId id(1, 1);
@@ -989,10 +1045,11 @@ TEST_F(VideoGLRendererPixelTest, SimpleYUVRect) {
SharedQuadState* shared_state =
CreateTestSharedQuadState(gfx::Transform(), rect, pass.get());
- CreateTestYUVVideoDrawQuad_Striped(shared_state, media::PIXEL_FORMAT_YV12,
- false, gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f),
- pass.get(), video_resource_updater_.get(),
- rect, rect, resource_provider_.get());
+ bool highbit = GetParam();
+ CreateTestYUVVideoDrawQuad_Striped(
+ shared_state, media::PIXEL_FORMAT_YV12, false, highbit,
+ gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f), pass.get(),
+ video_resource_updater_.get(), rect, rect, resource_provider_.get());
RenderPassList pass_list;
pass_list.push_back(std::move(pass));
@@ -1003,7 +1060,7 @@ TEST_F(VideoGLRendererPixelTest, SimpleYUVRect) {
FuzzyPixelOffByOneComparator(true)));
}
-TEST_F(VideoGLRendererPixelTest, ClippedYUVRect) {
+TEST_P(VideoGLRendererPixelHiLoTest, ClippedYUVRect) {
gfx::Rect viewport(this->device_viewport_size_);
gfx::Rect draw_rect(this->device_viewport_size_.width() * 1.5,
this->device_viewport_size_.height() * 1.5);
@@ -1014,11 +1071,12 @@ TEST_F(VideoGLRendererPixelTest, ClippedYUVRect) {
SharedQuadState* shared_state =
CreateTestSharedQuadState(gfx::Transform(), viewport, pass.get());
- CreateTestYUVVideoDrawQuad_Striped(shared_state, media::PIXEL_FORMAT_YV12,
- false, gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f),
- pass.get(), video_resource_updater_.get(),
- draw_rect, viewport,
- resource_provider_.get());
+ bool highbit = GetParam();
+ CreateTestYUVVideoDrawQuad_Striped(
+ shared_state, media::PIXEL_FORMAT_YV12, false, highbit,
+ gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f), pass.get(),
+ video_resource_updater_.get(), draw_rect, viewport,
+ resource_provider_.get());
RenderPassList pass_list;
pass_list.push_back(std::move(pass));
@@ -1027,7 +1085,7 @@ TEST_F(VideoGLRendererPixelTest, ClippedYUVRect) {
FuzzyPixelOffByOneComparator(true)));
}
-TEST_F(VideoGLRendererPixelTest, OffsetYUVRect) {
+TEST_F(VideoGLRendererPixelHiLoTest, OffsetYUVRect) {
gfx::Rect rect(this->device_viewport_size_);
RenderPassId id(1, 1);
@@ -1038,7 +1096,7 @@ TEST_F(VideoGLRendererPixelTest, OffsetYUVRect) {
// Intentionally sets frame format to I420 for testing coverage.
CreateTestYUVVideoDrawQuad_Striped(
- shared_state, media::PIXEL_FORMAT_I420, false,
+ shared_state, media::PIXEL_FORMAT_I420, false, false,
gfx::RectF(0.125f, 0.25f, 0.75f, 0.5f), pass.get(),
video_resource_updater_.get(), rect, rect, resource_provider_.get());
@@ -1046,9 +1104,8 @@ TEST_F(VideoGLRendererPixelTest, OffsetYUVRect) {
pass_list.push_back(std::move(pass));
EXPECT_TRUE(this->RunPixelTest(
- &pass_list,
- base::FilePath(FILE_PATH_LITERAL("yuv_stripes_offset.png")),
- FuzzyPixelOffByOneComparator(true)));
+ &pass_list, base::FilePath(FILE_PATH_LITERAL("yuv_stripes_offset.png")),
+ FuzzyPixelComparator(true, 100.0f, 1.0f, 1.0f, 1, 0)));
}
TEST_F(VideoGLRendererPixelTest, SimpleYUVRectBlack) {
@@ -1076,6 +1133,9 @@ TEST_F(VideoGLRendererPixelTest, SimpleYUVRectBlack) {
FuzzyPixelOffByOneComparator(true)));
}
+// First argument (test case prefix) is intentionally left empty.
+INSTANTIATE_TEST_CASE_P(, VideoGLRendererPixelHiLoTest, ::testing::Bool());
+
TEST_F(VideoGLRendererPixelTest, SimpleYUVJRect) {
gfx::Rect rect(this->device_viewport_size_);
@@ -1143,7 +1203,7 @@ TEST_F(VideoGLRendererPixelTest, SimpleYUVJRectGrey) {
FuzzyPixelOffByOneComparator(true)));
}
-TEST_F(VideoGLRendererPixelTest, SimpleYUVARect) {
+TEST_F(VideoGLRendererPixelHiLoTest, SimpleYUVARect) {
gfx::Rect rect(this->device_viewport_size_);
RenderPassId id(1, 1);
@@ -1152,10 +1212,10 @@ TEST_F(VideoGLRendererPixelTest, SimpleYUVARect) {
SharedQuadState* shared_state =
CreateTestSharedQuadState(gfx::Transform(), rect, pass.get());
- CreateTestYUVVideoDrawQuad_Striped(shared_state, media::PIXEL_FORMAT_YV12A,
- false, gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f),
- pass.get(), video_resource_updater_.get(),
- rect, rect, resource_provider_.get());
+ CreateTestYUVVideoDrawQuad_Striped(
+ shared_state, media::PIXEL_FORMAT_YV12A, false, false,
+ gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f), pass.get(),
+ video_resource_updater_.get(), rect, rect, resource_provider_.get());
SolidColorDrawQuad* color_quad =
pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
@@ -1179,10 +1239,10 @@ TEST_F(VideoGLRendererPixelTest, FullyTransparentYUVARect) {
SharedQuadState* shared_state =
CreateTestSharedQuadState(gfx::Transform(), rect, pass.get());
- CreateTestYUVVideoDrawQuad_Striped(shared_state, media::PIXEL_FORMAT_YV12A,
- true, gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f),
- pass.get(), video_resource_updater_.get(),
- rect, rect, resource_provider_.get());
+ CreateTestYUVVideoDrawQuad_Striped(
+ shared_state, media::PIXEL_FORMAT_YV12A, true, false,
+ gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f), pass.get(),
+ video_resource_updater_.get(), rect, rect, resource_provider_.get());
SolidColorDrawQuad* color_quad =
pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
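CreateHighbitVideoFrame above upshifts by replicating the top two bits into the low bits, which maps 8-bit black and white to exact 10-bit black and white and stays within one code of the ideal rescale everywhere else. A standalone sanity check of that expansion (a sketch, not part of the CL):

#include <cassert>

int main() {
  for (int v = 0; v <= 0xFF; ++v) {
    int up = (v << 2) | (v >> 6);          // The CL's bit replication.
    int ideal = (v * 1023 + 127) / 255;    // Nearest-integer rescale.
    assert(up >= ideal - 1 && up <= ideal + 1);
  }
  // Endpoints are exact: 0x00 -> 0x000 and 0xFF -> 0x3FF.
  assert(((0xFF << 2) | (0xFF >> 6)) == 0x3FF);
  return 0;
}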
diff --git a/cc/quads/draw_quad_unittest.cc b/cc/quads/draw_quad_unittest.cc
index cfc20a1..8bde8c0 100644
--- a/cc/quads/draw_quad_unittest.cc
+++ b/cc/quads/draw_quad_unittest.cc
@@ -336,6 +336,31 @@ void CompareDrawQuad(DrawQuad* quad,
} \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
+#define CREATE_QUAD_11_ALL(Type, a, b, c, d, e, f, g, h, i, j, k) \
+ Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
+ { \
+ QUAD_DATA quad_all->SetAll(shared_state, quad_rect, quad_opaque_rect, \
+ quad_visible_rect, needs_blending, a, b, c, d, \
+ e, f, g, h, i, j, k); \
+ } \
+ SETUP_AND_COPY_QUAD_ALL(Type, quad_all);
+
+#define CREATE_QUAD_12_NEW(Type, a, b, c, d, e, f, g, h, i, j, k, l) \
+ Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
+ { \
+ QUAD_DATA quad_new->SetNew(shared_state, quad_rect, a, b, c, d, e, f, g, \
+ h, i, j, k, l); \
+ } \
+ SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
+
+#define CREATE_QUAD_13_NEW(Type, a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
+ { \
+ QUAD_DATA quad_new->SetNew(shared_state, quad_rect, a, b, c, d, e, f, g, \
+ h, i, j, k, l, m); \
+ } \
+ SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
+
#define CREATE_QUAD_ALL_RP(Type, a, b, c, d, e, f, g, copy_a) \
Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
@@ -623,13 +648,16 @@ TEST(DrawQuadTest, CopyYUVVideoDrawQuad) {
ResourceId u_plane_resource_id = 532;
ResourceId v_plane_resource_id = 4;
ResourceId a_plane_resource_id = 63;
+ float resource_offset = 0.5f;
+ float resource_multiplier = 2.001f;
YUVVideoDrawQuad::ColorSpace color_space = YUVVideoDrawQuad::JPEG;
CREATE_SHARED_STATE();
- CREATE_QUAD_11_NEW(YUVVideoDrawQuad, opaque_rect, visible_rect,
+ CREATE_QUAD_13_NEW(YUVVideoDrawQuad, opaque_rect, visible_rect,
ya_tex_coord_rect, uv_tex_coord_rect, ya_tex_size,
uv_tex_size, y_plane_resource_id, u_plane_resource_id,
- v_plane_resource_id, a_plane_resource_id, color_space);
+ v_plane_resource_id, a_plane_resource_id, color_space,
+ resource_offset, resource_multiplier);
EXPECT_EQ(DrawQuad::YUV_VIDEO_CONTENT, copy_quad->material);
EXPECT_EQ(opaque_rect, copy_quad->opaque_rect);
EXPECT_EQ(visible_rect, copy_quad->visible_rect);
@@ -642,11 +670,14 @@ TEST(DrawQuadTest, CopyYUVVideoDrawQuad) {
EXPECT_EQ(v_plane_resource_id, copy_quad->v_plane_resource_id());
EXPECT_EQ(a_plane_resource_id, copy_quad->a_plane_resource_id());
EXPECT_EQ(color_space, copy_quad->color_space);
-
- CREATE_QUAD_9_ALL(YUVVideoDrawQuad, ya_tex_coord_rect, uv_tex_coord_rect,
- ya_tex_size, uv_tex_size, y_plane_resource_id,
- u_plane_resource_id, v_plane_resource_id,
- a_plane_resource_id, color_space);
+ EXPECT_EQ(resource_offset, copy_quad->resource_offset);
+ EXPECT_EQ(resource_multiplier, copy_quad->resource_multiplier);
+
+ CREATE_QUAD_11_ALL(YUVVideoDrawQuad, ya_tex_coord_rect, uv_tex_coord_rect,
+ ya_tex_size, uv_tex_size, y_plane_resource_id,
+ u_plane_resource_id, v_plane_resource_id,
+ a_plane_resource_id, color_space, resource_offset,
+ resource_multiplier);
EXPECT_EQ(DrawQuad::YUV_VIDEO_CONTENT, copy_quad->material);
EXPECT_EQ(ya_tex_coord_rect, copy_quad->ya_tex_coord_rect);
EXPECT_EQ(uv_tex_coord_rect, copy_quad->uv_tex_coord_rect);
@@ -657,6 +688,8 @@ TEST(DrawQuadTest, CopyYUVVideoDrawQuad) {
EXPECT_EQ(v_plane_resource_id, copy_quad->v_plane_resource_id());
EXPECT_EQ(a_plane_resource_id, copy_quad->a_plane_resource_id());
EXPECT_EQ(color_space, copy_quad->color_space);
+ EXPECT_EQ(resource_offset, copy_quad->resource_offset);
+ EXPECT_EQ(resource_multiplier, copy_quad->resource_multiplier);
}
TEST(DrawQuadTest, CopyPictureDrawQuad) {
@@ -879,10 +912,11 @@ TEST_F(DrawQuadIteratorTest, YUVVideoDrawQuad) {
YUVVideoDrawQuad::ColorSpace color_space = YUVVideoDrawQuad::JPEG;
CREATE_SHARED_STATE();
- CREATE_QUAD_11_NEW(YUVVideoDrawQuad, opaque_rect, visible_rect,
+ CREATE_QUAD_13_NEW(YUVVideoDrawQuad, opaque_rect, visible_rect,
ya_tex_coord_rect, uv_tex_coord_rect, ya_tex_size,
uv_tex_size, y_plane_resource_id, u_plane_resource_id,
- v_plane_resource_id, a_plane_resource_id, color_space);
+ v_plane_resource_id, a_plane_resource_id, color_space, 0.0,
+ 1.0);
EXPECT_EQ(DrawQuad::YUV_VIDEO_CONTENT, copy_quad->material);
EXPECT_EQ(y_plane_resource_id, quad_new->y_plane_resource_id());
EXPECT_EQ(u_plane_resource_id, quad_new->u_plane_resource_id());
diff --git a/cc/quads/yuv_video_draw_quad.cc b/cc/quads/yuv_video_draw_quad.cc
index f9a878a..93a0c91 100644
--- a/cc/quads/yuv_video_draw_quad.cc
+++ b/cc/quads/yuv_video_draw_quad.cc
@@ -27,7 +27,9 @@ void YUVVideoDrawQuad::SetNew(const SharedQuadState* shared_quad_state,
unsigned u_plane_resource_id,
unsigned v_plane_resource_id,
unsigned a_plane_resource_id,
- ColorSpace color_space) {
+ ColorSpace color_space,
+ float offset,
+ float multiplier) {
bool needs_blending = false;
DrawQuad::SetAll(shared_quad_state, DrawQuad::YUV_VIDEO_CONTENT, rect,
opaque_rect, visible_rect, needs_blending);
@@ -41,6 +43,8 @@ void YUVVideoDrawQuad::SetNew(const SharedQuadState* shared_quad_state,
resources.ids[kAPlaneResourceIdIndex] = a_plane_resource_id;
resources.count = a_plane_resource_id ? 4 : 3;
this->color_space = color_space;
+ this->resource_offset = offset;
+ this->resource_multiplier = multiplier;
}
void YUVVideoDrawQuad::SetAll(const SharedQuadState* shared_quad_state,
@@ -56,7 +60,9 @@ void YUVVideoDrawQuad::SetAll(const SharedQuadState* shared_quad_state,
unsigned u_plane_resource_id,
unsigned v_plane_resource_id,
unsigned a_plane_resource_id,
- ColorSpace color_space) {
+ ColorSpace color_space,
+ float offset,
+ float multiplier) {
DrawQuad::SetAll(shared_quad_state, DrawQuad::YUV_VIDEO_CONTENT, rect,
opaque_rect, visible_rect, needs_blending);
this->ya_tex_coord_rect = ya_tex_coord_rect;
@@ -69,6 +75,8 @@ void YUVVideoDrawQuad::SetAll(const SharedQuadState* shared_quad_state,
resources.ids[kAPlaneResourceIdIndex] = a_plane_resource_id;
resources.count = resources.ids[kAPlaneResourceIdIndex] ? 4 : 3;
this->color_space = color_space;
+ this->resource_offset = offset;
+ this->resource_multiplier = multiplier;
}
const YUVVideoDrawQuad* YUVVideoDrawQuad::MaterialCast(
diff --git a/cc/quads/yuv_video_draw_quad.h b/cc/quads/yuv_video_draw_quad.h
index 1a72078..015d5b7 100644
--- a/cc/quads/yuv_video_draw_quad.h
+++ b/cc/quads/yuv_video_draw_quad.h
@@ -43,7 +43,9 @@ class CC_EXPORT YUVVideoDrawQuad : public DrawQuad {
unsigned u_plane_resource_id,
unsigned v_plane_resource_id,
unsigned a_plane_resource_id,
- ColorSpace color_space);
+ ColorSpace color_space,
+ float offset,
+ float multiplier);
void SetAll(const SharedQuadState* shared_quad_state,
const gfx::Rect& rect,
@@ -61,13 +63,17 @@ class CC_EXPORT YUVVideoDrawQuad : public DrawQuad {
unsigned u_plane_resource_id,
unsigned v_plane_resource_id,
unsigned a_plane_resource_id,
- ColorSpace color_space);
+ ColorSpace color_space,
+ float offset,
+ float multiplier);
gfx::RectF ya_tex_coord_rect;
gfx::RectF uv_tex_coord_rect;
gfx::Size ya_tex_size;
gfx::Size uv_tex_size;
ColorSpace color_space;
+ float resource_offset = 0.0f;
+ float resource_multiplier = 1.0f;
static const YUVVideoDrawQuad* MaterialCast(const DrawQuad*);
diff --git a/cc/raster/tile_task_worker_pool.cc b/cc/raster/tile_task_worker_pool.cc
index 668aa66..d4549c7 100644
--- a/cc/raster/tile_task_worker_pool.cc
+++ b/cc/raster/tile_task_worker_pool.cc
@@ -50,6 +50,7 @@ bool IsSupportedPlaybackToMemoryFormat(ResourceFormat format) {
case RGB_565:
case ETC1:
case RED_8:
+ case LUMINANCE_F16:
return false;
}
NOTREACHED();
diff --git a/cc/resources/platform_color_unittest.cc b/cc/resources/platform_color_unittest.cc
index bbc4c06..49c8353 100644
--- a/cc/resources/platform_color_unittest.cc
+++ b/cc/resources/platform_color_unittest.cc
@@ -34,6 +34,7 @@ TEST(PlatformColorTest, SameComponentOrder) {
case RGB_565:
case ETC1:
case RED_8:
+ case LUMINANCE_F16:
EXPECT_FALSE(PlatformColor::SameComponentOrder(format));
break;
}
diff --git a/cc/resources/resource_format.cc b/cc/resources/resource_format.cc
index e11c097..ba4e65d 100644
--- a/cc/resources/resource_format.cc
+++ b/cc/resources/resource_format.cc
@@ -21,6 +21,7 @@ SkColorType ResourceFormatToSkColorType(ResourceFormat format) {
case LUMINANCE_8:
case RGB_565:
case RED_8:
+ case LUMINANCE_F16:
NOTREACHED();
break;
}
@@ -35,6 +36,7 @@ int BitsPerPixel(ResourceFormat format) {
return 32;
case RGBA_4444:
case RGB_565:
+ case LUMINANCE_F16:
return 16;
case ALPHA_8:
case LUMINANCE_8:
@@ -57,7 +59,8 @@ GLenum GLDataType(ResourceFormat format) {
GL_UNSIGNED_BYTE, // LUMINANCE_8
GL_UNSIGNED_SHORT_5_6_5, // RGB_565,
GL_UNSIGNED_BYTE, // ETC1
- GL_UNSIGNED_BYTE // RED_8
+ GL_UNSIGNED_BYTE, // RED_8
+ GL_HALF_FLOAT_OES, // LUMINANCE_F16
};
static_assert(arraysize(format_gl_data_type) == (RESOURCE_FORMAT_MAX + 1),
"format_gl_data_type does not handle all cases.");
@@ -75,7 +78,8 @@ GLenum GLDataFormat(ResourceFormat format) {
GL_LUMINANCE, // LUMINANCE_8
GL_RGB, // RGB_565
GL_ETC1_RGB8_OES, // ETC1
- GL_RED_EXT // RED_8
+ GL_RED_EXT, // RED_8
+ GL_LUMINANCE, // LUMINANCE_F16
};
static_assert(arraysize(format_gl_data_format) == (RESOURCE_FORMAT_MAX + 1),
"format_gl_data_format does not handle all cases.");
@@ -101,6 +105,7 @@ gfx::BufferFormat BufferFormat(ResourceFormat format) {
case LUMINANCE_8:
case RGB_565:
case ETC1:
+ case LUMINANCE_F16:
break;
}
NOTREACHED();
diff --git a/cc/resources/resource_format.h b/cc/resources/resource_format.h
index 79b815f..9a900d4 100644
--- a/cc/resources/resource_format.h
+++ b/cc/resources/resource_format.h
@@ -27,7 +27,8 @@ enum ResourceFormat {
RGB_565,
ETC1,
RED_8,
- RESOURCE_FORMAT_MAX = RED_8,
+ LUMINANCE_F16,
+ RESOURCE_FORMAT_MAX = LUMINANCE_F16,
};
SkColorType ResourceFormatToSkColorType(ResourceFormat format);
diff --git a/cc/resources/resource_provider.cc b/cc/resources/resource_provider.cc
index f2d7992..24796c2 100644
--- a/cc/resources/resource_provider.cc
+++ b/cc/resources/resource_provider.cc
@@ -87,6 +87,7 @@ GLenum TextureToStorageFormat(ResourceFormat format) {
case RGB_565:
case ETC1:
case RED_8:
+ case LUMINANCE_F16:
NOTREACHED();
break;
}
@@ -106,6 +107,7 @@ bool IsFormatSupportedForStorage(ResourceFormat format, bool use_bgra) {
case RGB_565:
case ETC1:
case RED_8:
+ case LUMINANCE_F16:
return false;
}
return false;
@@ -391,14 +393,29 @@ void ResourceProvider::LoseResourceForTesting(ResourceId id) {
resource->lost = true;
}
+ResourceFormat ResourceProvider::YuvResourceFormat(int bits) const {
+ if (bits > 8) {
+ return yuv_highbit_resource_format_;
+ } else {
+ return yuv_resource_format_;
+ }
+}
+
ResourceId ResourceProvider::CreateResource(const gfx::Size& size,
TextureHint hint,
ResourceFormat format) {
DCHECK(!size.IsEmpty());
switch (default_resource_type_) {
case RESOURCE_TYPE_GPU_MEMORY_BUFFER:
+ // GPU memory buffers don't support LUMINANCE_F16.
+ if (format != LUMINANCE_F16) {
+ return CreateGLTexture(size, hint, RESOURCE_TYPE_GPU_MEMORY_BUFFER,
+ format);
+ }
+ // Fall through and use a regular texture.
case RESOURCE_TYPE_GL_TEXTURE:
- return CreateGLTexture(size, hint, default_resource_type_, format);
+ return CreateGLTexture(size, hint, RESOURCE_TYPE_GL_TEXTURE, format);
+
case RESOURCE_TYPE_BITMAP:
DCHECK_EQ(RGBA_8888, format);
return CreateBitmap(size);
@@ -1077,6 +1094,9 @@ void ResourceProvider::Initialize() {
use_texture_usage_hint_ = caps.gpu.texture_usage;
use_compressed_texture_etc1_ = caps.gpu.texture_format_etc1;
yuv_resource_format_ = caps.gpu.texture_rg ? RED_8 : LUMINANCE_8;
+ yuv_highbit_resource_format_ = yuv_resource_format_;
+ if (caps.gpu.texture_half_float_linear)
+ yuv_highbit_resource_format_ = LUMINANCE_F16;
use_sync_query_ = caps.gpu.sync_query;
max_texture_size_ = 0; // Context expects cleared value.
diff --git a/cc/resources/resource_provider.h b/cc/resources/resource_provider.h
index 1913ef0..d6835a0 100644
--- a/cc/resources/resource_provider.h
+++ b/cc/resources/resource_provider.h
@@ -103,7 +103,7 @@ class CC_EXPORT ResourceProvider
ResourceFormat best_render_buffer_format() const {
return best_render_buffer_format_;
}
- ResourceFormat yuv_resource_format() const { return yuv_resource_format_; }
+ ResourceFormat YuvResourceFormat(int bits) const;
bool use_sync_query() const { return use_sync_query_; }
gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager() {
return gpu_memory_buffer_manager_;
@@ -581,6 +581,7 @@ class CC_EXPORT ResourceProvider
bool use_texture_usage_hint_;
bool use_compressed_texture_etc1_;
ResourceFormat yuv_resource_format_;
+ ResourceFormat yuv_highbit_resource_format_;
int max_texture_size_;
ResourceFormat best_texture_format_;
ResourceFormat best_render_buffer_format_;
diff --git a/cc/resources/video_resource_updater.cc b/cc/resources/video_resource_updater.cc
index 216e48f..6f8bb4b 100644
--- a/cc/resources/video_resource_updater.cc
+++ b/cc/resources/video_resource_updater.cc
@@ -10,6 +10,7 @@
#include <algorithm>
#include "base/bind.h"
+#include "base/bit_cast.h"
#include "base/trace_event/trace_event.h"
#include "cc/base/math_util.h"
#include "cc/output/gl_renderer.h"
@@ -69,6 +70,12 @@ VideoFrameExternalResources::ResourceType ResourceTypeForVideoFrame(
case media::PIXEL_FORMAT_RGB32:
case media::PIXEL_FORMAT_MJPEG:
case media::PIXEL_FORMAT_MT21:
+ case media::PIXEL_FORMAT_YUV420P9:
+ case media::PIXEL_FORMAT_YUV422P9:
+ case media::PIXEL_FORMAT_YUV444P9:
+ case media::PIXEL_FORMAT_YUV420P10:
+ case media::PIXEL_FORMAT_YUV422P10:
+ case media::PIXEL_FORMAT_YUV444P10:
case media::PIXEL_FORMAT_UNKNOWN:
break;
}
@@ -140,8 +147,10 @@ void VideoResourceUpdater::SetPlaneResourceUniqueId(
}
VideoFrameExternalResources::VideoFrameExternalResources()
- : type(NONE), read_lock_fences_enabled(false) {
-}
+ : type(NONE),
+ read_lock_fences_enabled(false),
+ offset(0.0f),
+ multiplier(1.0f) {}
VideoFrameExternalResources::~VideoFrameExternalResources() {}
@@ -219,11 +228,15 @@ static gfx::Size SoftwarePlaneDimension(
const scoped_refptr<media::VideoFrame>& input_frame,
bool software_compositor,
size_t plane_index) {
- if (!software_compositor) {
- return media::VideoFrame::PlaneSize(
- input_frame->format(), plane_index, input_frame->coded_size());
- }
- return input_frame->coded_size();
+ gfx::Size coded_size = input_frame->coded_size();
+ if (software_compositor)
+ return coded_size;
+
+ int plane_width = media::VideoFrame::Columns(
+ plane_index, input_frame->format(), coded_size.width());
+ int plane_height = media::VideoFrame::Rows(plane_index, input_frame->format(),
+ coded_size.height());
+ return gfx::Size(plane_width, plane_height);
}
VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
@@ -231,6 +244,41 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForSoftwarePlanes");
const media::VideoPixelFormat input_frame_format = video_frame->format();
+ // TODO(hubbe): Make this a video frame method.
+ int bits_per_channel = 0;
+ switch (input_frame_format) {
+ case media::PIXEL_FORMAT_UNKNOWN:
+ NOTREACHED();
+ // Fall through!
+ case media::PIXEL_FORMAT_I420:
+ case media::PIXEL_FORMAT_YV12:
+ case media::PIXEL_FORMAT_YV16:
+ case media::PIXEL_FORMAT_YV12A:
+ case media::PIXEL_FORMAT_YV24:
+ case media::PIXEL_FORMAT_NV12:
+ case media::PIXEL_FORMAT_NV21:
+ case media::PIXEL_FORMAT_UYVY:
+ case media::PIXEL_FORMAT_YUY2:
+ case media::PIXEL_FORMAT_ARGB:
+ case media::PIXEL_FORMAT_XRGB:
+ case media::PIXEL_FORMAT_RGB24:
+ case media::PIXEL_FORMAT_RGB32:
+ case media::PIXEL_FORMAT_MJPEG:
+ case media::PIXEL_FORMAT_MT21:
+ bits_per_channel = 8;
+ break;
+ case media::PIXEL_FORMAT_YUV420P9:
+ case media::PIXEL_FORMAT_YUV422P9:
+ case media::PIXEL_FORMAT_YUV444P9:
+ bits_per_channel = 9;
+ break;
+ case media::PIXEL_FORMAT_YUV420P10:
+ case media::PIXEL_FORMAT_YUV422P10:
+ case media::PIXEL_FORMAT_YUV444P10:
+ bits_per_channel = 10;
+ break;
+ }
+
// Only YUV software video frames are supported.
if (!media::IsYuvPlanar(input_frame_format)) {
NOTREACHED() << media::VideoPixelFormatToString(input_frame_format);
@@ -240,7 +288,8 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
const bool software_compositor = context_provider_ == NULL;
ResourceFormat output_resource_format =
- resource_provider_->yuv_resource_format();
+ resource_provider_->YuvResourceFormat(bits_per_channel);
+
size_t output_plane_count = media::VideoFrame::NumPlanes(input_frame_format);
// TODO(skaslev): If we're in software compositing mode, we do the YUV -> RGB
@@ -352,7 +401,7 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
PlaneResource& plane_resource = *plane_resources[i];
// Update each plane's resource id with its content.
DCHECK_EQ(plane_resource.resource_format,
- resource_provider_->yuv_resource_format());
+ resource_provider_->YuvResourceFormat(bits_per_channel));
if (!PlaneResourceMatchesUniqueID(plane_resource, video_frame.get(), i)) {
// We need to transfer data from |video_frame| to the plane resource.
@@ -361,9 +410,9 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
// The |resource_size_pixels| is the size of the resource we want to
// upload to.
gfx::Size resource_size_pixels = plane_resource.resource_size;
- // The |video_stride_pixels| is the width of the video frame we are
+ // The |video_stride_bytes| is the width of the video frame we are
// uploading (including non-frame data to fill in the stride).
- int video_stride_pixels = video_frame->stride(i);
+ int video_stride_bytes = video_frame->stride(i);
size_t bytes_per_row = ResourceUtil::UncheckedWidthInBytes<size_t>(
resource_size_pixels.width(), plane_resource.resource_format);
@@ -372,10 +421,24 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
size_t upload_image_stride =
MathUtil::UncheckedRoundUp<size_t>(bytes_per_row, 4u);
+ bool needs_conversion = false;
+ int shift = 0;
+
+ // LUMINANCE_F16 uses half-floats, so we always need a conversion step.
+ if (plane_resource.resource_format == LUMINANCE_F16) {
+ needs_conversion = true;
+ // Note that the current method of converting integers to half-floats
+ // stops working if you have more than 10 bits of data.
+ DCHECK_LE(bits_per_channel, 10);
+ } else if (bits_per_channel > 8) {
+ // If bits_per_channel > 8 and we can't use LUMINANCE_F16, we need to
+ // shift the data down and create an 8-bit texture.
+ needs_conversion = true;
+ shift = bits_per_channel - 8;
+ }
const uint8_t* pixels;
- size_t video_bytes_per_row = ResourceUtil::UncheckedWidthInBytes<size_t>(
- video_stride_pixels, plane_resource.resource_format);
- if (upload_image_stride == video_bytes_per_row) {
+ if (static_cast<int>(upload_image_stride) == video_stride_bytes &&
+ !needs_conversion) {
pixels = video_frame->data(i);
} else {
// Avoid malloc for each frame/plane if possible.
@@ -383,11 +446,36 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
upload_image_stride * resource_size_pixels.height();
if (upload_pixels_.size() < needed_size)
upload_pixels_.resize(needed_size);
+
for (int row = 0; row < resource_size_pixels.height(); ++row) {
- uint8_t* dst = &upload_pixels_[upload_image_stride * row];
- const uint8_t* src =
- video_frame->data(i) + (video_bytes_per_row * row);
- memcpy(dst, src, bytes_per_row);
+ if (plane_resource.resource_format == LUMINANCE_F16) {
+ uint16_t* dst = reinterpret_cast<uint16_t*>(
+ &upload_pixels_[upload_image_stride * row]);
+ const uint16_t* src = reinterpret_cast<uint16_t*>(
+ video_frame->data(i) + (video_stride_bytes * row));
+ // Micro-benchmarking indicates that the compiler does
+ // a good enough job of optimizing this loop that trying
+ // to manually operate on one uint64 at a time is not
+ // actually helpful.
+ // Note to future optimizers: Benchmark your optimizations!
+ for (size_t i = 0; i < bytes_per_row / 2; i++)
+ dst[i] = src[i] | 0x3800;
+ } else if (shift != 0) {
+ // We have more-than-8-bit input which we need to shift
+ // down to fit it into an 8-bit texture.
+ uint8_t* dst = &upload_pixels_[upload_image_stride * row];
+ const uint16_t* src = reinterpret_cast<uint16_t*>(
+ video_frame->data(i) + (video_stride_bytes * row));
+ for (size_t i = 0; i < bytes_per_row; i++)
+ dst[i] = src[i] >> shift;
+ } else {
+ // Input and output are the same size and format, but
+ // differ in stride, copy one row at a time.
+ uint8_t* dst = &upload_pixels_[upload_image_stride * row];
+ const uint8_t* src =
+ video_frame->data(i) + (video_stride_bytes * row);
+ memcpy(dst, src, bytes_per_row);
+ }
}
pixels = &upload_pixels_[0];
}
@@ -397,6 +485,29 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
SetPlaneResourceUniqueId(video_frame.get(), i, &plane_resource);
}
+ if (plane_resource.resource_format == LUMINANCE_F16) {
+ // By OR-ing with 0x3800, 10-bit numbers become half-floats in the
+ // range [0.5..1) and 9-bit numbers get the range [0.5..0.75).
+ //
+ // Half-floats are evaluated as:
+ // float value = pow(2.0, exponent - 25) * (0x400 + fraction);
+ //
+ // In our case the exponent is 14 (since we or with 0x3800) and
+ // pow(2.0, 14-25) * 0x400 evaluates to 0.5 (our offset) and
+ // pow(2.0, 14-25) * fraction is [0..0.49951171875] for 10-bit and
+ // [0..0.24951171875] for 9-bit.
+ //
+ // (https://en.wikipedia.org/wiki/Half-precision_floating-point_format)
+ //
+ // PLEASE NOTE: This doesn't work if bits_per_channel is > 10.
+ // PLEASE NOTE: All planes are assumed to use the same multiplier/offset.
+ external_resources.offset = 0.5f;
+ // Max value from input data.
+ int max_input_value = (1 << bits_per_channel) - 1;
+ // 1 << 11 = 2048 would be 1.0 with our exponent.
+ external_resources.multiplier = 2048.0 / max_input_value;
+ }
+
external_resources.mailboxes.push_back(
TextureMailbox(plane_resource.mailbox, gpu::SyncToken(),
resource_provider_->GetResourceTextureTarget(
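The block comment in the hunk above packs the whole half-float trick into prose. A small self-contained check (a sketch under the stated assumptions; HalfToFloat is a hypothetical helper, not Chromium code) confirms the arithmetic:

#include <cassert>
#include <cmath>
#include <cstdint>

// Evaluates a half-float bit pattern with a normal (non-zero, non-max)
// exponent, using the same formula as the comment in the CL.
float HalfToFloat(uint16_t h) {
  int exponent = (h >> 10) & 0x1F;
  int fraction = h & 0x3FF;
  return std::pow(2.0, exponent - 25) * (0x400 + fraction);
}

int main() {
  // 0x3800 sets the exponent field to 14, so v | 0x3800 decodes to
  // exactly 0.5 + v / 2048 for every 10-bit v.
  for (int v = 0; v < 1024; ++v)
    assert(HalfToFloat(static_cast<uint16_t>(v | 0x3800)) ==
           0.5f + v / 2048.0f);

  // The shader recovers v / max via offset 0.5 and multiplier 2048 / max.
  const int max_input_value = (1 << 10) - 1;
  const float multiplier = 2048.0f / max_input_value;
  float recovered =
      (HalfToFloat(0x3800 | max_input_value) - 0.5f) * multiplier;
  assert(std::fabs(recovered - 1.0f) < 1e-6f);
  return 0;
}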
diff --git a/cc/resources/video_resource_updater.h b/cc/resources/video_resource_updater.h
index e80a849..4e11110 100644
--- a/cc/resources/video_resource_updater.h
+++ b/cc/resources/video_resource_updater.h
@@ -63,6 +63,11 @@ class CC_EXPORT VideoFrameExternalResources {
std::vector<unsigned> software_resources;
ReleaseCallbackImpl software_release_callback;
+ // Used by hardware textures which do not return values in the 0-1 range.
+ // After a lookup, subtract offset and multiply by multiplier.
+ float offset;
+ float multiplier;
+
VideoFrameExternalResources();
~VideoFrameExternalResources();
};
diff --git a/cc/resources/video_resource_updater_unittest.cc b/cc/resources/video_resource_updater_unittest.cc
index 3063182..2595d5a 100644
--- a/cc/resources/video_resource_updater_unittest.cc
+++ b/cc/resources/video_resource_updater_unittest.cc
@@ -94,6 +94,10 @@ class VideoResourceUpdaterTest : public testing::Test {
output_surface3d_ = FakeOutputSurface::Create3d(std::move(context3d));
CHECK(output_surface3d_->BindToClient(&client_));
+ }
+
+ void SetUp() override {
+ testing::Test::SetUp();
output_surface_software_ = FakeOutputSurface::CreateSoftware(
make_scoped_ptr(new SoftwareOutputDevice));
@@ -131,6 +135,43 @@ class VideoResourceUpdaterTest : public testing::Test {
return video_frame;
}
+ scoped_refptr<media::VideoFrame> CreateWonkyTestYUVVideoFrame() {
+ const int kDimension = 10;
+ const int kYWidth = kDimension + 5;
+ const int kUWidth = (kYWidth + 1) / 2 + 200;
+ const int kVWidth = (kYWidth + 1) / 2 + 1;
+ static uint8_t y_data[kYWidth * kDimension] = {0};
+ static uint8_t u_data[kUWidth * kDimension] = {0};
+ static uint8_t v_data[kVWidth * kDimension] = {0};
+
+ scoped_refptr<media::VideoFrame> video_frame =
+ media::VideoFrame::WrapExternalYuvData(
+ media::PIXEL_FORMAT_YV16, // format
+ gfx::Size(kYWidth, kDimension), // coded_size
+ gfx::Rect(2, 0, kDimension, kDimension), // visible_rect
+ gfx::Size(kDimension, kDimension), // natural_size
+ -kYWidth, // y_stride (negative)
+ kUWidth, // u_stride
+ kVWidth, // v_stride
+ y_data + kYWidth * (kDimension - 1), // y_data
+ u_data, // u_data
+ v_data, // v_data
+ base::TimeDelta()); // timestamp
+ EXPECT_TRUE(video_frame);
+ return video_frame;
+ }
+
+ scoped_refptr<media::VideoFrame> CreateTestHighBitFrame() {
+ const int kDimension = 10;
+ gfx::Size size(kDimension, kDimension);
+
+ scoped_refptr<media::VideoFrame> video_frame(media::VideoFrame::CreateFrame(
+ media::PIXEL_FORMAT_YUV420P10, size, gfx::Rect(size), size,
+ base::TimeDelta()));
+ EXPECT_TRUE(video_frame);
+ return video_frame;
+ }
+
static void ReleaseMailboxCB(const gpu::SyncToken& sync_token) {}
scoped_refptr<media::VideoFrame> CreateTestHardwareVideoFrame(
@@ -217,6 +258,61 @@ TEST_F(VideoResourceUpdaterTest, SoftwareFrame) {
EXPECT_EQ(VideoFrameExternalResources::YUV_RESOURCE, resources.type);
}
+TEST_F(VideoResourceUpdaterTest, HighBitFrameNoF16) {
+ VideoResourceUpdater updater(output_surface3d_->context_provider(),
+ resource_provider3d_.get());
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+
+ VideoFrameExternalResources resources =
+ updater.CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameExternalResources::YUV_RESOURCE, resources.type);
+}
+
+class VideoResourceUpdaterTestWithF16 : public VideoResourceUpdaterTest {
+ public:
+ VideoResourceUpdaterTestWithF16() : VideoResourceUpdaterTest() {
+ context3d_->set_support_texture_half_float_linear(true);
+ }
+};
+
+TEST_F(VideoResourceUpdaterTestWithF16, HighBitFrame) {
+ VideoResourceUpdater updater(output_surface3d_->context_provider(),
+ resource_provider3d_.get());
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+
+ VideoFrameExternalResources resources =
+ updater.CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameExternalResources::YUV_RESOURCE, resources.type);
+}
+
+TEST_F(VideoResourceUpdaterTest, HighBitFrameSoftwareCompositor) {
+ VideoResourceUpdater updater(nullptr, resource_provider_software_.get());
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+
+ VideoFrameExternalResources resources =
+ updater.CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameExternalResources::SOFTWARE_RESOURCE, resources.type);
+}
+
+TEST_F(VideoResourceUpdaterTest, WonkySoftwareFrame) {
+ VideoResourceUpdater updater(output_surface3d_->context_provider(),
+ resource_provider3d_.get());
+ scoped_refptr<media::VideoFrame> video_frame = CreateWonkyTestYUVVideoFrame();
+
+ VideoFrameExternalResources resources =
+ updater.CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameExternalResources::YUV_RESOURCE, resources.type);
+}
+
+TEST_F(VideoResourceUpdaterTest, WonkySoftwareFrameSoftwareCompositor) {
+ VideoResourceUpdater updater(nullptr, resource_provider_software_.get());
+ scoped_refptr<media::VideoFrame> video_frame = CreateWonkyTestYUVVideoFrame();
+
+ VideoFrameExternalResources resources =
+ updater.CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameExternalResources::SOFTWARE_RESOURCE, resources.type);
+}
+
TEST_F(VideoResourceUpdaterTest, ReuseResource) {
VideoResourceUpdater updater(output_surface3d_->context_provider(),
resource_provider3d_.get());
@@ -405,5 +501,6 @@ TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes_StreamTexture) {
// that extension is supported.
EXPECT_FALSE(context3d_->WasImmutableTextureCreated());
}
+
} // namespace
} // namespace cc
diff --git a/cc/test/render_pass_test_utils.cc b/cc/test/render_pass_test_utils.cc
index 186bfca..b746ea6 100644
--- a/cc/test/render_pass_test_utils.cc
+++ b/cc/test/render_pass_test_utils.cc
@@ -297,7 +297,8 @@ void AddOneOfEveryQuadType(RenderPass* to_pass,
gfx::RectF(.0f, .0f, 100.0f, 100.0f),
gfx::RectF(.0f, .0f, 50.0f, 50.0f), gfx::Size(100, 100),
gfx::Size(50, 50), plane_resources[0], plane_resources[1],
- plane_resources[2], plane_resources[3], color_space);
+ plane_resources[2], plane_resources[3], color_space, 0.0,
+ 1.0);
}
} // namespace cc
diff --git a/cc/test/test_web_graphics_context_3d.h b/cc/test/test_web_graphics_context_3d.h
index 03ddf3f..f497ac4 100644
--- a/cc/test/test_web_graphics_context_3d.h
+++ b/cc/test/test_web_graphics_context_3d.h
@@ -347,6 +347,9 @@ class TestWebGraphicsContext3D {
void set_support_texture_rectangle(bool support) {
test_capabilities_.gpu.texture_rectangle = support;
}
+ void set_support_texture_half_float_linear(bool support) {
+ test_capabilities_.gpu.texture_half_float_linear = support;
+ }
// When this context is lost, all contexts in its share group are also lost.
void add_share_group_context(TestWebGraphicsContext3D* context3d) {
diff --git a/components/mus/public/interfaces/quads.mojom b/components/mus/public/interfaces/quads.mojom
index dcc4168..1b0ada8 100644
--- a/components/mus/public/interfaces/quads.mojom
+++ b/components/mus/public/interfaces/quads.mojom
@@ -106,6 +106,8 @@ struct YUVVideoQuadState {
uint32 v_plane_resource_id;
uint32 a_plane_resource_id;
YUVColorSpace color_space;
+ float resource_offset;
+ float resource_multiplier;
};
enum Material {
diff --git a/content/browser/media/media_browsertest.cc b/content/browser/media/media_browsertest.cc
index 43db9ad..c90be2e 100644
--- a/content/browser/media/media_browsertest.cc
+++ b/content/browser/media/media_browsertest.cc
@@ -131,6 +131,10 @@ IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearMp4) {
PlayVideo("bear.mp4", GetParam());
}
+IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearHighBitDepthMp4) {
+ PlayVideo("bear-320x180-hi10p.mp4", GetParam());
+}
+
IN_PROC_BROWSER_TEST_P(MediaTest, VideoBearSilentMp4) {
PlayVideo("bear_silent.mp4", GetParam());
}
@@ -259,6 +263,10 @@ IN_PROC_BROWSER_TEST_F(MediaTest, Yuv420pRec709H264) {
RunColorFormatTest("yuv420p_rec709.mp4", kEnded);
}
+IN_PROC_BROWSER_TEST_F(MediaTest, Yuv420pHighBitDepth) {
+ RunColorFormatTest("yuv420p_hi10p.mp4", kEnded);
+}
+
IN_PROC_BROWSER_TEST_F(MediaTest, Yuvj420pH264) {
RunColorFormatTest("yuvj420p.mp4", kEnded);
}
diff --git a/content/browser/renderer_host/media/video_capture_device_client_unittest.cc b/content/browser/renderer_host/media/video_capture_device_client_unittest.cc
index 56c0a12..1463b26 100644
--- a/content/browser/renderer_host/media/video_capture_device_client_unittest.cc
+++ b/content/browser/renderer_host/media/video_capture_device_client_unittest.cc
@@ -154,11 +154,17 @@ TEST_F(VideoCaptureDeviceClientTest, DataCaptureInEachVideoFormatInSequence) {
format == media::PIXEL_FORMAT_ARGB ||
format == media::PIXEL_FORMAT_XRGB ||
format == media::PIXEL_FORMAT_MJPEG ||
- format == media::PIXEL_FORMAT_MT21) {
+ format == media::PIXEL_FORMAT_MT21 ||
+ format == media::PIXEL_FORMAT_YUV420P9 ||
+ format == media::PIXEL_FORMAT_YUV420P10 ||
+ format == media::PIXEL_FORMAT_YUV422P9 ||
+ format == media::PIXEL_FORMAT_YUV422P10 ||
+ format == media::PIXEL_FORMAT_YUV444P9 ||
+ format == media::PIXEL_FORMAT_YUV444P10) {
continue;
}
#if !defined(OS_LINUX) && !defined(OS_WIN)
- if (format == media::PIXEL_FORMAT_RGB24){
+ if (format == media::PIXEL_FORMAT_RGB24) {
continue;
}
#endif
diff --git a/content/common/cc_messages.h b/content/common/cc_messages.h
index e7358e6..8870a1b 100644
--- a/content/common/cc_messages.h
+++ b/content/common/cc_messages.h
@@ -268,6 +268,8 @@ IPC_STRUCT_TRAITS_BEGIN(cc::YUVVideoDrawQuad)
IPC_STRUCT_TRAITS_MEMBER(ya_tex_size)
IPC_STRUCT_TRAITS_MEMBER(uv_tex_size)
IPC_STRUCT_TRAITS_MEMBER(color_space)
+ IPC_STRUCT_TRAITS_MEMBER(resource_offset)
+ IPC_STRUCT_TRAITS_MEMBER(resource_multiplier)
IPC_STRUCT_TRAITS_END()
IPC_STRUCT_TRAITS_BEGIN(cc::SharedQuadState)
diff --git a/content/common/cc_messages_unittest.cc b/content/common/cc_messages_unittest.cc
index 288e369..35218e7 100644
--- a/content/common/cc_messages_unittest.cc
+++ b/content/common/cc_messages_unittest.cc
@@ -433,7 +433,7 @@ TEST_F(CCMessagesTest, AllQuads) {
arbitrary_rect1_inside_rect1, arbitrary_bool1, arbitrary_rectf1,
arbitrary_rectf2, arbitrary_size1, arbitrary_size2, arbitrary_resourceid1,
arbitrary_resourceid2, arbitrary_resourceid3, arbitrary_resourceid4,
- arbitrary_color_space);
+ arbitrary_color_space, arbitrary_float1, arbitrary_float2);
pass_cmp->CopyFromAndAppendDrawQuad(yuvvideo_in,
yuvvideo_in->shared_quad_state);
diff --git a/gpu/command_buffer/common/capabilities.cc b/gpu/command_buffer/common/capabilities.cc
index 7fb21de..51c1590 100644
--- a/gpu/command_buffer/common/capabilities.cc
+++ b/gpu/command_buffer/common/capabilities.cc
@@ -75,6 +75,7 @@ Capabilities::Capabilities()
blend_equation_advanced(false),
blend_equation_advanced_coherent(false),
texture_rg(false),
+ texture_half_float_linear(false),
image_ycbcr_422(false),
image_ycbcr_420v(false),
render_buffer_format_bgra8888(false),
diff --git a/gpu/command_buffer/common/capabilities.h b/gpu/command_buffer/common/capabilities.h
index b7a3af2..5605cc8 100644
--- a/gpu/command_buffer/common/capabilities.h
+++ b/gpu/command_buffer/common/capabilities.h
@@ -134,6 +134,7 @@ struct GPU_EXPORT Capabilities {
bool blend_equation_advanced;
bool blend_equation_advanced_coherent;
bool texture_rg;
+ bool texture_half_float_linear;
bool image_ycbcr_422;
bool image_ycbcr_420v;
bool render_buffer_format_bgra8888;
diff --git a/gpu/command_buffer/service/gles2_cmd_decoder.cc b/gpu/command_buffer/service/gles2_cmd_decoder.cc
index ed487d0..fe87cca 100644
--- a/gpu/command_buffer/service/gles2_cmd_decoder.cc
+++ b/gpu/command_buffer/service/gles2_cmd_decoder.cc
@@ -3145,6 +3145,8 @@ Capabilities GLES2DecoderImpl::GetCapabilities() {
caps.blend_equation_advanced_coherent =
feature_info_->feature_flags().blend_equation_advanced_coherent;
caps.texture_rg = feature_info_->feature_flags().ext_texture_rg;
+ caps.texture_half_float_linear =
+ feature_info_->feature_flags().enable_texture_half_float_linear;
caps.image_ycbcr_422 =
feature_info_->feature_flags().chromium_image_ycbcr_422;
caps.image_ycbcr_420v =
diff --git a/gpu/ipc/gpu_command_buffer_traits_multi.h b/gpu/ipc/gpu_command_buffer_traits_multi.h
index ae62b67..09a3fb3 100644
--- a/gpu/ipc/gpu_command_buffer_traits_multi.h
+++ b/gpu/ipc/gpu_command_buffer_traits_multi.h
@@ -102,6 +102,7 @@ IPC_STRUCT_TRAITS_BEGIN(gpu::Capabilities)
IPC_STRUCT_TRAITS_MEMBER(blend_equation_advanced)
IPC_STRUCT_TRAITS_MEMBER(blend_equation_advanced_coherent)
IPC_STRUCT_TRAITS_MEMBER(texture_rg)
+ IPC_STRUCT_TRAITS_MEMBER(texture_half_float_linear)
IPC_STRUCT_TRAITS_MEMBER(image_ycbcr_422)
IPC_STRUCT_TRAITS_MEMBER(image_ycbcr_420v)
IPC_STRUCT_TRAITS_MEMBER(render_buffer_format_bgra8888)
diff --git a/media/base/video_frame.cc b/media/base/video_frame.cc
index 0ad4ac0..4dedd82 100644
--- a/media/base/video_frame.cc
+++ b/media/base/video_frame.cc
@@ -112,9 +112,13 @@ static gfx::Size SampleSize(VideoPixelFormat format, size_t plane) {
case VideoFrame::kVPlane:
switch (format) {
case PIXEL_FORMAT_YV24:
+ case PIXEL_FORMAT_YUV444P9:
+ case PIXEL_FORMAT_YUV444P10:
return gfx::Size(1, 1);
case PIXEL_FORMAT_YV16:
+ case PIXEL_FORMAT_YUV422P9:
+ case PIXEL_FORMAT_YUV422P10:
return gfx::Size(2, 1);
case PIXEL_FORMAT_YV12:
@@ -123,6 +127,8 @@ static gfx::Size SampleSize(VideoPixelFormat format, size_t plane) {
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
case PIXEL_FORMAT_MT21:
+ case PIXEL_FORMAT_YUV420P9:
+ case PIXEL_FORMAT_YUV420P10:
return gfx::Size(2, 2);
case PIXEL_FORMAT_UNKNOWN:
@@ -165,6 +171,12 @@ static int BytesPerElement(VideoPixelFormat format, size_t plane) {
return 3;
case PIXEL_FORMAT_UYVY:
case PIXEL_FORMAT_YUY2:
+ case PIXEL_FORMAT_YUV420P9:
+ case PIXEL_FORMAT_YUV422P9:
+ case PIXEL_FORMAT_YUV444P9:
+ case PIXEL_FORMAT_YUV420P10:
+ case PIXEL_FORMAT_YUV422P10:
+ case PIXEL_FORMAT_YUV444P10:
return 2;
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
@@ -214,7 +226,7 @@ bool VideoFrame::IsValidConfig(VideoPixelFormat format,
return true;
// Make sure new formats are properly accounted for in the method.
- static_assert(PIXEL_FORMAT_MAX == 15,
+ static_assert(PIXEL_FORMAT_MAX == 21,
"Added pixel format, please review IsValidConfig()");
if (format == PIXEL_FORMAT_UNKNOWN) {
@@ -617,6 +629,12 @@ size_t VideoFrame::NumPlanes(VideoPixelFormat format) {
case PIXEL_FORMAT_YV12:
case PIXEL_FORMAT_YV16:
case PIXEL_FORMAT_YV24:
+ case PIXEL_FORMAT_YUV420P9:
+ case PIXEL_FORMAT_YUV422P9:
+ case PIXEL_FORMAT_YUV444P9:
+ case PIXEL_FORMAT_YUV420P10:
+ case PIXEL_FORMAT_YUV422P10:
+ case PIXEL_FORMAT_YUV444P10:
return 3;
case PIXEL_FORMAT_YV12A:
return 4;
diff --git a/media/base/video_types.cc b/media/base/video_types.cc
index e2f137d..f8eeb3e 100644
--- a/media/base/video_types.cc
+++ b/media/base/video_types.cc
@@ -42,6 +42,18 @@ std::string VideoPixelFormatToString(VideoPixelFormat format) {
return "PIXEL_FORMAT_MJPEG";
case PIXEL_FORMAT_MT21:
return "PIXEL_FORMAT_MT21";
+ case PIXEL_FORMAT_YUV420P9:
+ return "PIXEL_FORMAT_YUV420P9";
+ case PIXEL_FORMAT_YUV420P10:
+ return "PIXEL_FORMAT_YUV420P10";
+ case PIXEL_FORMAT_YUV422P9:
+ return "PIXEL_FORMAT_YUV422P9";
+ case PIXEL_FORMAT_YUV422P10:
+ return "PIXEL_FORMAT_YUV422P10";
+ case PIXEL_FORMAT_YUV444P9:
+ return "PIXEL_FORMAT_YUV444P9";
+ case PIXEL_FORMAT_YUV444P10:
+ return "PIXEL_FORMAT_YUV444P10";
}
NOTREACHED() << "Invalid VideoPixelFormat provided: " << format;
return "";
@@ -57,6 +69,12 @@ bool IsYuvPlanar(VideoPixelFormat format) {
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
case PIXEL_FORMAT_MT21:
+ case PIXEL_FORMAT_YUV420P9:
+ case PIXEL_FORMAT_YUV420P10:
+ case PIXEL_FORMAT_YUV422P9:
+ case PIXEL_FORMAT_YUV422P10:
+ case PIXEL_FORMAT_YUV444P9:
+ case PIXEL_FORMAT_YUV444P10:
return true;
case PIXEL_FORMAT_UNKNOWN:
@@ -87,6 +105,12 @@ bool IsOpaque(VideoPixelFormat format) {
case PIXEL_FORMAT_RGB24:
case PIXEL_FORMAT_MJPEG:
case PIXEL_FORMAT_MT21:
+ case PIXEL_FORMAT_YUV420P9:
+ case PIXEL_FORMAT_YUV420P10:
+ case PIXEL_FORMAT_YUV422P9:
+ case PIXEL_FORMAT_YUV422P10:
+ case PIXEL_FORMAT_YUV444P9:
+ case PIXEL_FORMAT_YUV444P10:
return true;
case PIXEL_FORMAT_YV12A:
case PIXEL_FORMAT_ARGB:
diff --git a/media/base/video_types.h b/media/base/video_types.h
index 7cf196e..7590b1b 100644
--- a/media/base/video_types.h
+++ b/media/base/video_types.h
@@ -46,9 +46,17 @@ enum VideoPixelFormat {
// Row pitch = ((width+15)/16) * 16.
// Plane size = Row pitch * (((height+31)/32)*32)
PIXEL_FORMAT_MT21 = 15,
+
+ PIXEL_FORMAT_YUV420P9 = 16,
+ PIXEL_FORMAT_YUV420P10 = 17,
+ PIXEL_FORMAT_YUV422P9 = 18,
+ PIXEL_FORMAT_YUV422P10 = 19,
+ PIXEL_FORMAT_YUV444P9 = 20,
+ PIXEL_FORMAT_YUV444P10 = 21,
+
// Please update UMA histogram enumeration when adding new formats here.
PIXEL_FORMAT_MAX =
- PIXEL_FORMAT_MT21, // Must always be equal to largest entry logged.
+ PIXEL_FORMAT_YUV444P10, // Must always be equal to largest entry logged.
};
// Color space or color range used for the pixels.
diff --git a/media/ffmpeg/ffmpeg_common.cc b/media/ffmpeg/ffmpeg_common.cc
index 9261b1f..1f0cc92 100644
--- a/media/ffmpeg/ffmpeg_common.cc
+++ b/media/ffmpeg/ffmpeg_common.cc
@@ -629,6 +629,10 @@ ChannelLayout ChannelLayoutToChromeChannelLayout(int64_t layout, int channels) {
}
}
+#if !defined(ARCH_CPU_LITTLE_ENDIAN)
+#error The code below assumes little-endianness.
+#endif
+
VideoPixelFormat AVPixelFormatToVideoPixelFormat(AVPixelFormat pixel_format) {
// The YUVJ alternatives are FFmpeg's (deprecated, but still in use) way to
// specify a pixel format and full range color combination.
@@ -644,6 +648,22 @@ VideoPixelFormat AVPixelFormatToVideoPixelFormat(AVPixelFormat pixel_format) {
return PIXEL_FORMAT_YV12;
case AV_PIX_FMT_YUVA420P:
return PIXEL_FORMAT_YV12A;
+
+ case AV_PIX_FMT_YUV420P9LE:
+ return PIXEL_FORMAT_YUV420P9;
+ case AV_PIX_FMT_YUV420P10LE:
+ return PIXEL_FORMAT_YUV420P10;
+
+ case AV_PIX_FMT_YUV422P9LE:
+ return PIXEL_FORMAT_YUV422P9;
+ case AV_PIX_FMT_YUV422P10LE:
+ return PIXEL_FORMAT_YUV422P10;
+
+ case AV_PIX_FMT_YUV444P9LE:
+ return PIXEL_FORMAT_YUV444P9;
+ case AV_PIX_FMT_YUV444P10LE:
+ return PIXEL_FORMAT_YUV444P10;
+
default:
DVLOG(1) << "Unsupported AVPixelFormat: " << pixel_format;
}
@@ -660,6 +680,19 @@ AVPixelFormat VideoPixelFormatToAVPixelFormat(VideoPixelFormat video_format) {
return AV_PIX_FMT_YUVA420P;
case PIXEL_FORMAT_YV24:
return AV_PIX_FMT_YUV444P;
+ case PIXEL_FORMAT_YUV420P9:
+ return AV_PIX_FMT_YUV420P9LE;
+ case PIXEL_FORMAT_YUV420P10:
+ return AV_PIX_FMT_YUV420P10LE;
+ case PIXEL_FORMAT_YUV422P9:
+ return AV_PIX_FMT_YUV422P9LE;
+ case PIXEL_FORMAT_YUV422P10:
+ return AV_PIX_FMT_YUV422P10LE;
+ case PIXEL_FORMAT_YUV444P9:
+ return AV_PIX_FMT_YUV444P9LE;
+ case PIXEL_FORMAT_YUV444P10:
+ return AV_PIX_FMT_YUV444P10LE;
+
default:
DVLOG(1) << "Unsupported Format: " << video_format;
}
diff --git a/media/filters/ffmpeg_video_decoder.cc b/media/filters/ffmpeg_video_decoder.cc
index 38d4405..79e5396 100644
--- a/media/filters/ffmpeg_video_decoder.cc
+++ b/media/filters/ffmpeg_video_decoder.cc
@@ -94,7 +94,10 @@ int FFmpegVideoDecoder::GetVideoBuffer(struct AVCodecContext* codec_context,
if (format == PIXEL_FORMAT_UNKNOWN)
return AVERROR(EINVAL);
DCHECK(format == PIXEL_FORMAT_YV12 || format == PIXEL_FORMAT_YV16 ||
- format == PIXEL_FORMAT_YV24);
+ format == PIXEL_FORMAT_YV24 || format == PIXEL_FORMAT_YUV420P9 ||
+ format == PIXEL_FORMAT_YUV420P10 || format == PIXEL_FORMAT_YUV422P9 ||
+ format == PIXEL_FORMAT_YUV422P10 || format == PIXEL_FORMAT_YUV444P9 ||
+ format == PIXEL_FORMAT_YUV444P10);
gfx::Size size(codec_context->width, codec_context->height);
const int ret = av_image_check_size(size.width(), size.height(), 0, NULL);
diff --git a/media/mojo/interfaces/media_types.mojom b/media/mojo/interfaces/media_types.mojom
index 91f1619..085256e 100644
--- a/media/mojo/interfaces/media_types.mojom
+++ b/media/mojo/interfaces/media_types.mojom
@@ -108,7 +108,13 @@ enum VideoFormat {
RGB32,
MJPEG,
MT21,
- FORMAT_MAX = MT21,
+ YUV420P9,
+ YUV420P10,
+ YUV422P9,
+ YUV422P10,
+ YUV444P9,
+ YUV444P10,
+ FORMAT_MAX = YUV444P10,
};
// Kept in sync with media::ColorSpace via static_asserts.
diff --git a/media/mojo/services/media_type_converters.cc b/media/mojo/services/media_type_converters.cc
index a498710..ac49e11 100644
--- a/media/mojo/services/media_type_converters.cc
+++ b/media/mojo/services/media_type_converters.cc
@@ -154,6 +154,24 @@ ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_RGB24, VideoFormat::RGB24);
ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_RGB32, VideoFormat::RGB32);
ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_MJPEG, VideoFormat::MJPEG);
ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_MT21, VideoFormat::MT21);
+ASSERT_ENUM_EQ_RAW(VideoPixelFormat,
+ PIXEL_FORMAT_YUV420P9,
+ VideoFormat::YUV420P9);
+ASSERT_ENUM_EQ_RAW(VideoPixelFormat,
+ PIXEL_FORMAT_YUV422P9,
+ VideoFormat::YUV422P9);
+ASSERT_ENUM_EQ_RAW(VideoPixelFormat,
+ PIXEL_FORMAT_YUV444P9,
+ VideoFormat::YUV444P9);
+ASSERT_ENUM_EQ_RAW(VideoPixelFormat,
+ PIXEL_FORMAT_YUV420P10,
+ VideoFormat::YUV420P10);
+ASSERT_ENUM_EQ_RAW(VideoPixelFormat,
+ PIXEL_FORMAT_YUV422P10,
+ VideoFormat::YUV422P10);
+ASSERT_ENUM_EQ_RAW(VideoPixelFormat,
+ PIXEL_FORMAT_YUV444P10,
+ VideoFormat::YUV444P10);
ASSERT_ENUM_EQ_RAW(VideoPixelFormat, PIXEL_FORMAT_MAX, VideoFormat::FORMAT_MAX);
// ColorSpace.
diff --git a/media/renderers/skcanvas_video_renderer.cc b/media/renderers/skcanvas_video_renderer.cc
index 0740f47..483953e 100644
--- a/media/renderers/skcanvas_video_renderer.cc
+++ b/media/renderers/skcanvas_video_renderer.cc
@@ -448,6 +448,62 @@ void SkCanvasVideoRenderer::Copy(const scoped_refptr<VideoFrame>& video_frame,
SkXfermode::kSrc_Mode, media::VIDEO_ROTATION_0, context_3d);
}
+namespace {
+
+// libyuv doesn't support 9- and 10-bit video frames yet. This function
+// creates a regular 8-bit video frame which we can give to libyuv.
+scoped_refptr<VideoFrame> DownShiftHighbitVideoFrame(
+ const VideoFrame* video_frame) {
+ VideoPixelFormat format;
+ int shift = 1;
+ switch (video_frame->format()) {
+ case PIXEL_FORMAT_YUV420P10:
+ shift = 2;
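+      // Fall through.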
+ case PIXEL_FORMAT_YUV420P9:
+ format = PIXEL_FORMAT_I420;
+ break;
+ case PIXEL_FORMAT_YUV422P10:
+ shift = 2;
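+      // Fall through.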
+ case PIXEL_FORMAT_YUV422P9:
+ format = PIXEL_FORMAT_YV16;
+ break;
+ case PIXEL_FORMAT_YUV444P10:
+ shift = 2;
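+      // Fall through.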
+ case PIXEL_FORMAT_YUV444P9:
+ format = PIXEL_FORMAT_YV24;
+ break;
+
+ default:
+ NOTREACHED();
+ return nullptr;
+ }
+ scoped_refptr<VideoFrame> ret = VideoFrame::CreateFrame(
+ format, video_frame->coded_size(), video_frame->visible_rect(),
+ video_frame->natural_size(), video_frame->timestamp());
+
+  // Copy all metadata.
+  // (Copying just the color space might be sufficient.)
+ base::DictionaryValue tmp;
+ video_frame->metadata()->MergeInternalValuesInto(&tmp);
+ ret->metadata()->MergeInternalValuesFrom(tmp);
+
+ for (int plane = VideoFrame::kYPlane; plane <= VideoFrame::kVPlane; ++plane) {
+ int width = ret->row_bytes(plane);
+ const uint16_t* src =
+ reinterpret_cast<const uint16_t*>(video_frame->data(plane));
+ uint8_t* dst = ret->data(plane);
+ for (int row = 0; row < video_frame->rows(plane); row++) {
+ for (int x = 0; x < width; x++) {
+ dst[x] = src[x] >> shift;
+ }
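+      // stride() is in bytes; the source plane holds uint16_t samples.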
+ src += video_frame->stride(plane) / 2;
+ dst += ret->stride(plane);
+ }
+ }
+ return ret;
+}
+}  // namespace
+
// static
void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
const VideoFrame* video_frame,
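
DownShiftHighbitVideoFrame maps the wide samples onto 8 bits with a plain
right shift, so values truncate rather than round: with shift == 2 a 10-bit
sample of 1023 becomes 255 and 512 becomes 128; shift == 1 does the same for
the 9-bit range. A one-sample sketch of the operation performed by the loops
above (DownShiftSample is illustrative, not part of the patch):

uint8_t DownShiftSample(uint16_t sample, int shift) {
  // Drop the low |shift| bits: e.g. 1023 >> 2 == 255.
  return static_cast<uint8_t>(sample >> shift);
}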
@@ -461,8 +517,6 @@ void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
NOTREACHED() << "Non YUV formats are not supported";
return;
}
- DCHECK_EQ(video_frame->stride(VideoFrame::kUPlane),
- video_frame->stride(VideoFrame::kVPlane));
switch (video_frame->format()) {
case PIXEL_FORMAT_YV12:
@@ -538,6 +592,20 @@ void SkCanvasVideoRenderer::ConvertVideoFrameToRGBPixels(
video_frame->visible_rect().width(),
video_frame->visible_rect().height());
break;
+
+ case PIXEL_FORMAT_YUV420P9:
+ case PIXEL_FORMAT_YUV422P9:
+ case PIXEL_FORMAT_YUV444P9:
+ case PIXEL_FORMAT_YUV420P10:
+ case PIXEL_FORMAT_YUV422P10:
+ case PIXEL_FORMAT_YUV444P10: {
+ scoped_refptr<VideoFrame> temporary_frame =
+ DownShiftHighbitVideoFrame(video_frame);
+ ConvertVideoFrameToRGBPixels(temporary_frame.get(), rgb_pixels,
+ row_bytes);
+ break;
+ }
+
case PIXEL_FORMAT_NV12:
case PIXEL_FORMAT_NV21:
case PIXEL_FORMAT_UYVY:
@@ -584,7 +652,6 @@ void SkCanvasVideoRenderer::CopyVideoFrameSingleTextureToGLTexture(
// "flip_y == false" means to keep the intrinsic orientation.
gl->CopyTextureCHROMIUM(source_texture, texture, internal_format, type,
flip_y, premultiply_alpha, false);
-
gl->DeleteTextures(1, &source_texture);
gl->Flush();
diff --git a/media/renderers/skcanvas_video_renderer_unittest.cc b/media/renderers/skcanvas_video_renderer_unittest.cc
index b77650d..df82f27 100644
--- a/media/renderers/skcanvas_video_renderer_unittest.cc
+++ b/media/renderers/skcanvas_video_renderer_unittest.cc
@@ -467,6 +467,44 @@ TEST_F(SkCanvasVideoRendererTest, Video_Translate_Rotation_270) {
EXPECT_EQ(SK_ColorBLACK, GetColorAt(&canvas, kWidth / 2, kHeight - 1));
}
+TEST_F(SkCanvasVideoRendererTest, HighBits) {
+ // Copy cropped_frame into a highbit frame.
+ scoped_refptr<VideoFrame> frame(VideoFrame::CreateFrame(
+ PIXEL_FORMAT_YUV420P10, cropped_frame()->coded_size(),
+ cropped_frame()->visible_rect(), cropped_frame()->natural_size(),
+ cropped_frame()->timestamp()));
+ for (int plane = VideoFrame::kYPlane; plane <= VideoFrame::kVPlane; ++plane) {
+ int width = cropped_frame()->row_bytes(plane);
+ uint16_t* dst = reinterpret_cast<uint16_t*>(frame->data(plane));
+ uint8_t* src = cropped_frame()->data(plane);
+ for (int row = 0; row < cropped_frame()->rows(plane); row++) {
+ for (int col = 0; col < width; col++) {
+ dst[col] = src[col] << 2;
+ }
+ src += cropped_frame()->stride(plane);
+ dst += frame->stride(plane) / 2;
+ }
+ }
+
+ Paint(frame, target_canvas(), kNone);
+ // Check the corners.
+ EXPECT_EQ(SK_ColorBLACK, GetColorAt(target_canvas(), 0, 0));
+ EXPECT_EQ(SK_ColorRED, GetColorAt(target_canvas(), kWidth - 1, 0));
+ EXPECT_EQ(SK_ColorGREEN, GetColorAt(target_canvas(), 0, kHeight - 1));
+ EXPECT_EQ(SK_ColorBLUE, GetColorAt(target_canvas(), kWidth - 1, kHeight - 1));
+ // Check the interior along the border between color regions. Note that we're
+ // bilinearly upscaling, so we'll need to take care to pick sample points that
+ // are just outside the "zone of resampling".
+ EXPECT_EQ(SK_ColorBLACK, GetColorAt(target_canvas(), kWidth * 1 / 8 - 1,
+ kHeight * 1 / 6 - 1));
+ EXPECT_EQ(SK_ColorRED,
+ GetColorAt(target_canvas(), kWidth * 3 / 8, kHeight * 1 / 6 - 1));
+ EXPECT_EQ(SK_ColorGREEN,
+ GetColorAt(target_canvas(), kWidth * 1 / 8 - 1, kHeight * 3 / 6));
+ EXPECT_EQ(SK_ColorBLUE,
+ GetColorAt(target_canvas(), kWidth * 3 / 8, kHeight * 3 / 6));
+}
+
namespace {
class TestGLES2Interface : public gpu::gles2::GLES2InterfaceStub {
public:
diff --git a/media/test/data/README b/media/test/data/README
index 8ab8273..4b85fe39 100644
--- a/media/test/data/README
+++ b/media/test/data/README
@@ -43,6 +43,9 @@ vorbis-packet-1 - timestamp: 0ms, duration: 0ms
vorbis-packet-2 - timestamp: 0ms, duration: 0ms
vorbis-packet-3 - timestamp: 2902ms, duration: 0ms
+// 10-bit test files
+bear-320x180-hi10p.mp4 - 320x180 video encoded with H.264 High 10 Profile (10-bit).
+blackwhite_yuv420p_hi10p.mp4 - 10-bit (Hi10P) variant of the blackwhite yuv420p test clip.
+
// Encrypted Files
bear-1280x720-a_frag-cenc.mp4 - A fragmented MP4 version of the audio track of bear-1280x720.mp4 encrypted (ISO CENC) using key ID [1] and key [2].
bear-1280x720-a_frag-cenc-key_rotation.mp4 - A fragmented MP4 version of the audio track of bear-1280x720.mp4 encrypted (ISO CENC) using key ID [1] and key [2] with key rotation [3].
diff --git a/media/test/data/bear-320x180-hi10p.mp4 b/media/test/data/bear-320x180-hi10p.mp4
new file mode 100644
index 0000000..725b4ac3
--- /dev/null
+++ b/media/test/data/bear-320x180-hi10p.mp4
Binary files differ
diff --git a/media/test/data/blackwhite_yuv420p_hi10p.mp4 b/media/test/data/blackwhite_yuv420p_hi10p.mp4
new file mode 100644
index 0000000..c23aabb
--- /dev/null
+++ b/media/test/data/blackwhite_yuv420p_hi10p.mp4
Binary files differ
diff --git a/media/test/pipeline_integration_test.cc b/media/test/pipeline_integration_test.cc
index c6a7ad0..e23203e 100644
--- a/media/test/pipeline_integration_test.cc
+++ b/media/test/pipeline_integration_test.cc
@@ -1259,6 +1259,15 @@ TEST_F(PipelineIntegrationTest,
}
#if defined(USE_PROPRIETARY_CODECS)
+
+TEST_F(PipelineIntegrationTest, BasicPlaybackHi10P) {
+ ASSERT_EQ(PIPELINE_OK, Start("bear-320x180-hi10p.mp4", kClockless));
+
+ Play();
+
+ ASSERT_TRUE(WaitUntilOnEnded());
+}
+
TEST_F(PipelineIntegrationTest, MediaSource_ADTS) {
MockMediaSource source("sfx.adts", kADTS, kAppendWholeFile);
StartPipelineWithMediaSource(&source);
@@ -1768,6 +1777,7 @@ TEST_F(PipelineIntegrationTest, BasicPlayback_MediaSource_VideoOnly_MP4_AVC3) {
source.Shutdown();
Stop();
}
+
#endif // defined(USE_PROPRIETARY_CODECS)
TEST_F(PipelineIntegrationTest, SeekWhilePaused) {
diff --git a/media/video/gpu_memory_buffer_video_frame_pool.cc b/media/video/gpu_memory_buffer_video_frame_pool.cc
index d6ed4d5..7ac0cd7 100644
--- a/media/video/gpu_memory_buffer_video_frame_pool.cc
+++ b/media/video/gpu_memory_buffer_video_frame_pool.cc
@@ -385,6 +385,12 @@ void GpuMemoryBufferVideoFramePool::PoolImpl::CreateHardwareFrame(
case PIXEL_FORMAT_RGB32:
case PIXEL_FORMAT_MJPEG:
case PIXEL_FORMAT_MT21:
+ case PIXEL_FORMAT_YUV420P9:
+ case PIXEL_FORMAT_YUV422P9:
+ case PIXEL_FORMAT_YUV444P9:
+ case PIXEL_FORMAT_YUV420P10:
+ case PIXEL_FORMAT_YUV422P10:
+ case PIXEL_FORMAT_YUV444P10:
case PIXEL_FORMAT_UNKNOWN:
frame_ready_cb.Run(video_frame);
return;
diff --git a/mojo/converters/surfaces/surfaces_type_converters.cc b/mojo/converters/surfaces/surfaces_type_converters.cc
index bff1f70..a5da881 100644
--- a/mojo/converters/surfaces/surfaces_type_converters.cc
+++ b/mojo/converters/surfaces/surfaces_type_converters.cc
@@ -245,7 +245,9 @@ bool ConvertDrawQuad(const QuadPtr& input,
yuv_state->y_plane_resource_id, yuv_state->u_plane_resource_id,
yuv_state->v_plane_resource_id, yuv_state->a_plane_resource_id,
static_cast<cc::YUVVideoDrawQuad::ColorSpace>(
- yuv_state->color_space));
+ yuv_state->color_space),
+ yuv_state->resource_offset,
+ yuv_state->resource_multiplier);
break;
}
default:
@@ -411,6 +413,8 @@ QuadPtr TypeConverter<QuadPtr, cc::DrawQuad>::Convert(
yuv_state->a_plane_resource_id = yuv_quad->a_plane_resource_id();
yuv_state->color_space =
static_cast<YUVColorSpace>(yuv_quad->color_space);
+ yuv_state->resource_offset = yuv_quad->resource_offset;
+ yuv_state->resource_multiplier = yuv_quad->resource_multiplier;
quad->yuv_video_quad_state = std::move(yuv_state);
break;
}
diff --git a/tools/metrics/histograms/histograms.xml b/tools/metrics/histograms/histograms.xml
index 6b9b776..9a38228 100644
--- a/tools/metrics/histograms/histograms.xml
+++ b/tools/metrics/histograms/histograms.xml
@@ -81155,6 +81155,12 @@ To add a new entry, add it with any value and run test to compute valid value.
<int value="13" label="RGB32"/>
<int value="14" label="MJPEG"/>
<int value="15" label="MT21"/>
+ <int value="16" label="YUV420P9"/>
+ <int value="17" label="YUV420P10"/>
+ <int value="18" label="YUV422P9"/>
+ <int value="19" label="YUV422P10"/>
+ <int value="20" label="YUV444P9"/>
+ <int value="21" label="YUV444P10"/>
</enum>
<enum name="VideoPlayerCastAPIExtensionStatus" type="int">