summaryrefslogtreecommitdiffstats
path: root/cc
diff options
context:
space:
mode:
authorhubbe <hubbe@chromium.org>2016-02-08 16:01:14 -0800
committerCommit bot <commit-bot@chromium.org>2016-02-09 00:02:31 +0000
commit82ba0a1a9f120c5376a7978f1c6ba995b7c819f6 (patch)
tree213cc743a3737a3b474a21c92844513c5d72d6e1 /cc
parent486e98700f552d7a5c4fbc1b3a9b5e9f2a6cf934 (diff)
downloadchromium_src-82ba0a1a9f120c5376a7978f1c6ba995b7c819f6.zip
chromium_src-82ba0a1a9f120c5376a7978f1c6ba995b7c819f6.tar.gz
chromium_src-82ba0a1a9f120c5376a7978f1c6ba995b7c819f6.tar.bz2
Add support for 9- and 10-bit h264 videos.
Videos are uploaded to GPU using half-floats if supported, otherwise they are downshifted to regular 8-bit textures before uploading. No dithering is done, except for whatever GL_DITHER might do. (Which is probably nothing.) BUG=445071 CQ_INCLUDE_TRYBOTS=tryserver.blink:linux_blink_rel Committed: https://crrev.com/074c4287f2625860b2a9eb437b97f1f1788f8f4b Cr-Commit-Position: refs/heads/master@{#373691} Review URL: https://codereview.chromium.org/1599533002 Cr-Commit-Position: refs/heads/master@{#374221}
Diffstat (limited to 'cc')
-rw-r--r--cc/layers/video_layer_impl.cc6
-rw-r--r--cc/layers/video_layer_impl.h3
-rw-r--r--cc/output/gl_renderer.cc14
-rw-r--r--cc/output/renderer_pixeltest.cc112
-rw-r--r--cc/quads/draw_quad_unittest.cc52
-rw-r--r--cc/quads/yuv_video_draw_quad.cc12
-rw-r--r--cc/quads/yuv_video_draw_quad.h10
-rw-r--r--cc/raster/tile_task_worker_pool.cc1
-rw-r--r--cc/resources/platform_color_unittest.cc1
-rw-r--r--cc/resources/resource_format.cc9
-rw-r--r--cc/resources/resource_format.h3
-rw-r--r--cc/resources/resource_provider.cc22
-rw-r--r--cc/resources/resource_provider.h3
-rw-r--r--cc/resources/video_resource_updater.cc147
-rw-r--r--cc/resources/video_resource_updater.h5
-rw-r--r--cc/resources/video_resource_updater_unittest.cc97
-rw-r--r--cc/test/render_pass_test_utils.cc3
-rw-r--r--cc/test/test_web_graphics_context_3d.h3
18 files changed, 436 insertions, 67 deletions
diff --git a/cc/layers/video_layer_impl.cc b/cc/layers/video_layer_impl.cc
index 923767e..1eb4ef0 100644
--- a/cc/layers/video_layer_impl.cc
+++ b/cc/layers/video_layer_impl.cc
@@ -119,6 +119,8 @@ bool VideoLayerImpl::WillDraw(DrawMode draw_mode,
external_resources.software_release_callback;
return true;
}
+ frame_resource_offset_ = external_resources.offset;
+ frame_resource_multiplier_ = external_resources.multiplier;
DCHECK_EQ(external_resources.mailboxes.size(),
external_resources.release_callbacks.size());
@@ -277,8 +279,8 @@ void VideoLayerImpl::AppendQuads(RenderPass* render_pass,
ya_tex_coord_rect, uv_tex_coord_rect, ya_tex_size, uv_tex_size,
frame_resources_[0].id, frame_resources_[1].id,
frame_resources_[2].id,
- frame_resources_.size() > 3 ? frame_resources_[3].id : 0,
- color_space);
+ frame_resources_.size() > 3 ? frame_resources_[3].id : 0, color_space,
+ frame_resource_offset_, frame_resource_multiplier_);
ValidateQuadResources(yuv_video_quad);
break;
}
diff --git a/cc/layers/video_layer_impl.h b/cc/layers/video_layer_impl.h
index c1688e8..a0ba484 100644
--- a/cc/layers/video_layer_impl.h
+++ b/cc/layers/video_layer_impl.h
@@ -63,6 +63,9 @@ class CC_EXPORT VideoLayerImpl : public LayerImpl {
scoped_ptr<VideoResourceUpdater> updater_;
VideoFrameExternalResources::ResourceType frame_resource_type_;
+ float frame_resource_offset_;
+ float frame_resource_multiplier_;
+
struct FrameResource {
FrameResource(ResourceId id,
gfx::Size size_in_pixels,
diff --git a/cc/output/gl_renderer.cc b/cc/output/gl_renderer.cc
index 215f58a..1af26c1 100644
--- a/cc/output/gl_renderer.cc
+++ b/cc/output/gl_renderer.cc
@@ -2136,13 +2136,23 @@ void GLRenderer::DrawYUVVideoQuad(const DrawingFrame* frame,
break;
}
+ float yuv_to_rgb_multiplied[9];
+ float yuv_adjust_with_offset[3];
+
+ for (int i = 0; i < 9; ++i)
+ yuv_to_rgb_multiplied[i] = yuv_to_rgb[i] * quad->resource_multiplier;
+
+ for (int i = 0; i < 3; ++i)
+ yuv_adjust_with_offset[i] =
+ yuv_adjust[i] / quad->resource_multiplier - quad->resource_offset;
+
// The transform and vertex data are used to figure out the extents that the
// un-antialiased quad should have and which vertex this is and the float
// quad passed in via uniform is the actual geometry that gets used to draw
// it. This is why this centered rect is used and not the original quad_rect.
auto tile_rect = gfx::RectF(quad->rect);
- gl_->UniformMatrix3fv(yuv_matrix_location, 1, 0, yuv_to_rgb);
- gl_->Uniform3fv(yuv_adj_location, 1, yuv_adjust);
+ gl_->UniformMatrix3fv(yuv_matrix_location, 1, 0, yuv_to_rgb_multiplied);
+ gl_->Uniform3fv(yuv_adj_location, 1, yuv_adjust_with_offset);
SetShaderOpacity(quad->shared_quad_state->opacity, alpha_location);
if (!clip_region) {
diff --git a/cc/output/renderer_pixeltest.cc b/cc/output/renderer_pixeltest.cc
index 083bac1..7f5bbfd 100644
--- a/cc/output/renderer_pixeltest.cc
+++ b/cc/output/renderer_pixeltest.cc
@@ -267,13 +267,61 @@ void CreateTestYUVVideoDrawQuad_FromVideoFrame(
yuv_quad->SetNew(shared_state, rect, opaque_rect, visible_rect,
ya_tex_coord_rect, uv_tex_coord_rect, ya_tex_size,
uv_tex_size, y_resource, u_resource, v_resource, a_resource,
- color_space);
+ color_space, 0.0f, 1.0f);
+}
+
+// Upshift video frame to 10 bit.
+scoped_refptr<media::VideoFrame> CreateHighbitVideoFrame(
+ const scoped_refptr<media::VideoFrame>& video_frame) {
+ media::VideoPixelFormat format;
+ switch (video_frame->format()) {
+ case media::PIXEL_FORMAT_I420:
+ case media::PIXEL_FORMAT_YV12:
+ format = media::PIXEL_FORMAT_YUV420P10;
+ break;
+ case media::PIXEL_FORMAT_YV16:
+ format = media::PIXEL_FORMAT_YUV422P10;
+ break;
+ case media::PIXEL_FORMAT_YV24:
+ format = media::PIXEL_FORMAT_YUV444P10;
+ break;
+
+ default:
+ NOTREACHED();
+ return nullptr;
+ }
+ scoped_refptr<media::VideoFrame> ret = media::VideoFrame::CreateFrame(
+ format, video_frame->coded_size(), video_frame->visible_rect(),
+ video_frame->natural_size(), video_frame->timestamp());
+
+ // Copy all metadata.
+ base::DictionaryValue tmp;
+ video_frame->metadata()->MergeInternalValuesInto(&tmp);
+ ret->metadata()->MergeInternalValuesFrom(tmp);
+
+ for (int plane = media::VideoFrame::kYPlane;
+ plane <= media::VideoFrame::kVPlane; ++plane) {
+ int width = video_frame->row_bytes(plane);
+ const uint8_t* src = video_frame->data(plane);
+ uint16_t* dst = reinterpret_cast<uint16_t*>(ret->data(plane));
+ for (int row = 0; row < video_frame->rows(plane); row++) {
+ for (int x = 0; x < width; x++) {
+ // Replicate the top bits into the lower bits, this way
+ // 0xFF becomes 0x3FF.
+ dst[x] = (src[x] << 2) | (src[x] >> 6);
+ }
+ src += video_frame->stride(plane);
+ dst += ret->stride(plane) / 2;
+ }
+ }
+ return ret;
}
void CreateTestYUVVideoDrawQuad_Striped(
const SharedQuadState* shared_state,
media::VideoPixelFormat format,
bool is_transparent,
+ bool highbit,
const gfx::RectF& tex_coord_rect,
RenderPass* render_pass,
VideoResourceUpdater* video_resource_updater,
@@ -308,6 +356,10 @@ void CreateTestYUVVideoDrawQuad_Striped(
}
}
uint8_t alpha_value = is_transparent ? 0 : 128;
+
+ if (highbit)
+ video_frame = CreateHighbitVideoFrame(video_frame);
+
CreateTestYUVVideoDrawQuad_FromVideoFrame(
shared_state, video_frame, alpha_value, tex_coord_rect, render_pass,
video_resource_updater, rect, visible_rect, resource_provider);
@@ -980,7 +1032,11 @@ class VideoGLRendererPixelTest : public GLRendererPixelTest {
scoped_ptr<VideoResourceUpdater> video_resource_updater_;
};
-TEST_F(VideoGLRendererPixelTest, SimpleYUVRect) {
+class VideoGLRendererPixelHiLoTest
+ : public VideoGLRendererPixelTest,
+ public ::testing::WithParamInterface<bool> {};
+
+TEST_P(VideoGLRendererPixelHiLoTest, SimpleYUVRect) {
gfx::Rect rect(this->device_viewport_size_);
RenderPassId id(1, 1);
@@ -989,10 +1045,11 @@ TEST_F(VideoGLRendererPixelTest, SimpleYUVRect) {
SharedQuadState* shared_state =
CreateTestSharedQuadState(gfx::Transform(), rect, pass.get());
- CreateTestYUVVideoDrawQuad_Striped(shared_state, media::PIXEL_FORMAT_YV12,
- false, gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f),
- pass.get(), video_resource_updater_.get(),
- rect, rect, resource_provider_.get());
+ bool highbit = GetParam();
+ CreateTestYUVVideoDrawQuad_Striped(
+ shared_state, media::PIXEL_FORMAT_YV12, false, highbit,
+ gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f), pass.get(),
+ video_resource_updater_.get(), rect, rect, resource_provider_.get());
RenderPassList pass_list;
pass_list.push_back(std::move(pass));
@@ -1003,7 +1060,7 @@ TEST_F(VideoGLRendererPixelTest, SimpleYUVRect) {
FuzzyPixelOffByOneComparator(true)));
}
-TEST_F(VideoGLRendererPixelTest, ClippedYUVRect) {
+TEST_P(VideoGLRendererPixelHiLoTest, ClippedYUVRect) {
gfx::Rect viewport(this->device_viewport_size_);
gfx::Rect draw_rect(this->device_viewport_size_.width() * 1.5,
this->device_viewport_size_.height() * 1.5);
@@ -1014,11 +1071,12 @@ TEST_F(VideoGLRendererPixelTest, ClippedYUVRect) {
SharedQuadState* shared_state =
CreateTestSharedQuadState(gfx::Transform(), viewport, pass.get());
- CreateTestYUVVideoDrawQuad_Striped(shared_state, media::PIXEL_FORMAT_YV12,
- false, gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f),
- pass.get(), video_resource_updater_.get(),
- draw_rect, viewport,
- resource_provider_.get());
+ bool highbit = GetParam();
+ CreateTestYUVVideoDrawQuad_Striped(
+ shared_state, media::PIXEL_FORMAT_YV12, false, highbit,
+ gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f), pass.get(),
+ video_resource_updater_.get(), draw_rect, viewport,
+ resource_provider_.get());
RenderPassList pass_list;
pass_list.push_back(std::move(pass));
@@ -1027,7 +1085,7 @@ TEST_F(VideoGLRendererPixelTest, ClippedYUVRect) {
FuzzyPixelOffByOneComparator(true)));
}
-TEST_F(VideoGLRendererPixelTest, OffsetYUVRect) {
+TEST_F(VideoGLRendererPixelHiLoTest, OffsetYUVRect) {
gfx::Rect rect(this->device_viewport_size_);
RenderPassId id(1, 1);
@@ -1038,7 +1096,7 @@ TEST_F(VideoGLRendererPixelTest, OffsetYUVRect) {
// Intentionally sets frame format to I420 for testing coverage.
CreateTestYUVVideoDrawQuad_Striped(
- shared_state, media::PIXEL_FORMAT_I420, false,
+ shared_state, media::PIXEL_FORMAT_I420, false, false,
gfx::RectF(0.125f, 0.25f, 0.75f, 0.5f), pass.get(),
video_resource_updater_.get(), rect, rect, resource_provider_.get());
@@ -1046,9 +1104,8 @@ TEST_F(VideoGLRendererPixelTest, OffsetYUVRect) {
pass_list.push_back(std::move(pass));
EXPECT_TRUE(this->RunPixelTest(
- &pass_list,
- base::FilePath(FILE_PATH_LITERAL("yuv_stripes_offset.png")),
- FuzzyPixelOffByOneComparator(true)));
+ &pass_list, base::FilePath(FILE_PATH_LITERAL("yuv_stripes_offset.png")),
+ FuzzyPixelComparator(true, 100.0f, 1.0f, 1.0f, 1, 0)));
}
TEST_F(VideoGLRendererPixelTest, SimpleYUVRectBlack) {
@@ -1076,6 +1133,9 @@ TEST_F(VideoGLRendererPixelTest, SimpleYUVRectBlack) {
FuzzyPixelOffByOneComparator(true)));
}
+// First argument (test case prefix) is intentionally left empty.
+INSTANTIATE_TEST_CASE_P(, VideoGLRendererPixelHiLoTest, ::testing::Bool());
+
TEST_F(VideoGLRendererPixelTest, SimpleYUVJRect) {
gfx::Rect rect(this->device_viewport_size_);
@@ -1143,7 +1203,7 @@ TEST_F(VideoGLRendererPixelTest, SimpleYUVJRectGrey) {
FuzzyPixelOffByOneComparator(true)));
}
-TEST_F(VideoGLRendererPixelTest, SimpleYUVARect) {
+TEST_F(VideoGLRendererPixelHiLoTest, SimpleYUVARect) {
gfx::Rect rect(this->device_viewport_size_);
RenderPassId id(1, 1);
@@ -1152,10 +1212,10 @@ TEST_F(VideoGLRendererPixelTest, SimpleYUVARect) {
SharedQuadState* shared_state =
CreateTestSharedQuadState(gfx::Transform(), rect, pass.get());
- CreateTestYUVVideoDrawQuad_Striped(shared_state, media::PIXEL_FORMAT_YV12A,
- false, gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f),
- pass.get(), video_resource_updater_.get(),
- rect, rect, resource_provider_.get());
+ CreateTestYUVVideoDrawQuad_Striped(
+ shared_state, media::PIXEL_FORMAT_YV12A, false, false,
+ gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f), pass.get(),
+ video_resource_updater_.get(), rect, rect, resource_provider_.get());
SolidColorDrawQuad* color_quad =
pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
@@ -1179,10 +1239,10 @@ TEST_F(VideoGLRendererPixelTest, FullyTransparentYUVARect) {
SharedQuadState* shared_state =
CreateTestSharedQuadState(gfx::Transform(), rect, pass.get());
- CreateTestYUVVideoDrawQuad_Striped(shared_state, media::PIXEL_FORMAT_YV12A,
- true, gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f),
- pass.get(), video_resource_updater_.get(),
- rect, rect, resource_provider_.get());
+ CreateTestYUVVideoDrawQuad_Striped(
+ shared_state, media::PIXEL_FORMAT_YV12A, true, false,
+ gfx::RectF(0.0f, 0.0f, 1.0f, 1.0f), pass.get(),
+ video_resource_updater_.get(), rect, rect, resource_provider_.get());
SolidColorDrawQuad* color_quad =
pass->CreateAndAppendDrawQuad<SolidColorDrawQuad>();
diff --git a/cc/quads/draw_quad_unittest.cc b/cc/quads/draw_quad_unittest.cc
index cfc20a1..8bde8c0 100644
--- a/cc/quads/draw_quad_unittest.cc
+++ b/cc/quads/draw_quad_unittest.cc
@@ -336,6 +336,31 @@ void CompareDrawQuad(DrawQuad* quad,
} \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
+#define CREATE_QUAD_11_ALL(Type, a, b, c, d, e, f, g, h, i, j, k) \
+ Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
+ { \
+ QUAD_DATA quad_all->SetAll(shared_state, quad_rect, quad_opaque_rect, \
+ quad_visible_rect, needs_blending, a, b, c, d, \
+ e, f, g, h, i, j, k); \
+ } \
+ SETUP_AND_COPY_QUAD_ALL(Type, quad_all);
+
+#define CREATE_QUAD_12_NEW(Type, a, b, c, d, e, f, g, h, i, j, k, l) \
+ Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
+ { \
+ QUAD_DATA quad_new->SetNew(shared_state, quad_rect, a, b, c, d, e, f, g, \
+ h, i, j, k, l); \
+ } \
+ SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
+
+#define CREATE_QUAD_13_NEW(Type, a, b, c, d, e, f, g, h, i, j, k, l, m) \
+ Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
+ { \
+ QUAD_DATA quad_new->SetNew(shared_state, quad_rect, a, b, c, d, e, f, g, \
+ h, i, j, k, l, m); \
+ } \
+ SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
+
#define CREATE_QUAD_ALL_RP(Type, a, b, c, d, e, f, g, copy_a) \
Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
@@ -623,13 +648,16 @@ TEST(DrawQuadTest, CopyYUVVideoDrawQuad) {
ResourceId u_plane_resource_id = 532;
ResourceId v_plane_resource_id = 4;
ResourceId a_plane_resource_id = 63;
+ float resource_offset = 0.5f;
+ float resource_multiplier = 2.001f;
YUVVideoDrawQuad::ColorSpace color_space = YUVVideoDrawQuad::JPEG;
CREATE_SHARED_STATE();
- CREATE_QUAD_11_NEW(YUVVideoDrawQuad, opaque_rect, visible_rect,
+ CREATE_QUAD_13_NEW(YUVVideoDrawQuad, opaque_rect, visible_rect,
ya_tex_coord_rect, uv_tex_coord_rect, ya_tex_size,
uv_tex_size, y_plane_resource_id, u_plane_resource_id,
- v_plane_resource_id, a_plane_resource_id, color_space);
+ v_plane_resource_id, a_plane_resource_id, color_space,
+ resource_offset, resource_multiplier);
EXPECT_EQ(DrawQuad::YUV_VIDEO_CONTENT, copy_quad->material);
EXPECT_EQ(opaque_rect, copy_quad->opaque_rect);
EXPECT_EQ(visible_rect, copy_quad->visible_rect);
@@ -642,11 +670,14 @@ TEST(DrawQuadTest, CopyYUVVideoDrawQuad) {
EXPECT_EQ(v_plane_resource_id, copy_quad->v_plane_resource_id());
EXPECT_EQ(a_plane_resource_id, copy_quad->a_plane_resource_id());
EXPECT_EQ(color_space, copy_quad->color_space);
-
- CREATE_QUAD_9_ALL(YUVVideoDrawQuad, ya_tex_coord_rect, uv_tex_coord_rect,
- ya_tex_size, uv_tex_size, y_plane_resource_id,
- u_plane_resource_id, v_plane_resource_id,
- a_plane_resource_id, color_space);
+ EXPECT_EQ(resource_offset, copy_quad->resource_offset);
+ EXPECT_EQ(resource_multiplier, copy_quad->resource_multiplier);
+
+ CREATE_QUAD_11_ALL(YUVVideoDrawQuad, ya_tex_coord_rect, uv_tex_coord_rect,
+ ya_tex_size, uv_tex_size, y_plane_resource_id,
+ u_plane_resource_id, v_plane_resource_id,
+ a_plane_resource_id, color_space, resource_offset,
+ resource_multiplier);
EXPECT_EQ(DrawQuad::YUV_VIDEO_CONTENT, copy_quad->material);
EXPECT_EQ(ya_tex_coord_rect, copy_quad->ya_tex_coord_rect);
EXPECT_EQ(uv_tex_coord_rect, copy_quad->uv_tex_coord_rect);
@@ -657,6 +688,8 @@ TEST(DrawQuadTest, CopyYUVVideoDrawQuad) {
EXPECT_EQ(v_plane_resource_id, copy_quad->v_plane_resource_id());
EXPECT_EQ(a_plane_resource_id, copy_quad->a_plane_resource_id());
EXPECT_EQ(color_space, copy_quad->color_space);
+ EXPECT_EQ(resource_offset, copy_quad->resource_offset);
+ EXPECT_EQ(resource_multiplier, copy_quad->resource_multiplier);
}
TEST(DrawQuadTest, CopyPictureDrawQuad) {
@@ -879,10 +912,11 @@ TEST_F(DrawQuadIteratorTest, YUVVideoDrawQuad) {
YUVVideoDrawQuad::ColorSpace color_space = YUVVideoDrawQuad::JPEG;
CREATE_SHARED_STATE();
- CREATE_QUAD_11_NEW(YUVVideoDrawQuad, opaque_rect, visible_rect,
+ CREATE_QUAD_13_NEW(YUVVideoDrawQuad, opaque_rect, visible_rect,
ya_tex_coord_rect, uv_tex_coord_rect, ya_tex_size,
uv_tex_size, y_plane_resource_id, u_plane_resource_id,
- v_plane_resource_id, a_plane_resource_id, color_space);
+ v_plane_resource_id, a_plane_resource_id, color_space, 0.0,
+ 1.0);
EXPECT_EQ(DrawQuad::YUV_VIDEO_CONTENT, copy_quad->material);
EXPECT_EQ(y_plane_resource_id, quad_new->y_plane_resource_id());
EXPECT_EQ(u_plane_resource_id, quad_new->u_plane_resource_id());
diff --git a/cc/quads/yuv_video_draw_quad.cc b/cc/quads/yuv_video_draw_quad.cc
index f9a878a..93a0c91 100644
--- a/cc/quads/yuv_video_draw_quad.cc
+++ b/cc/quads/yuv_video_draw_quad.cc
@@ -27,7 +27,9 @@ void YUVVideoDrawQuad::SetNew(const SharedQuadState* shared_quad_state,
unsigned u_plane_resource_id,
unsigned v_plane_resource_id,
unsigned a_plane_resource_id,
- ColorSpace color_space) {
+ ColorSpace color_space,
+ float offset,
+ float multiplier) {
bool needs_blending = false;
DrawQuad::SetAll(shared_quad_state, DrawQuad::YUV_VIDEO_CONTENT, rect,
opaque_rect, visible_rect, needs_blending);
@@ -41,6 +43,8 @@ void YUVVideoDrawQuad::SetNew(const SharedQuadState* shared_quad_state,
resources.ids[kAPlaneResourceIdIndex] = a_plane_resource_id;
resources.count = a_plane_resource_id ? 4 : 3;
this->color_space = color_space;
+ this->resource_offset = offset;
+ this->resource_multiplier = multiplier;
}
void YUVVideoDrawQuad::SetAll(const SharedQuadState* shared_quad_state,
@@ -56,7 +60,9 @@ void YUVVideoDrawQuad::SetAll(const SharedQuadState* shared_quad_state,
unsigned u_plane_resource_id,
unsigned v_plane_resource_id,
unsigned a_plane_resource_id,
- ColorSpace color_space) {
+ ColorSpace color_space,
+ float offset,
+ float multiplier) {
DrawQuad::SetAll(shared_quad_state, DrawQuad::YUV_VIDEO_CONTENT, rect,
opaque_rect, visible_rect, needs_blending);
this->ya_tex_coord_rect = ya_tex_coord_rect;
@@ -69,6 +75,8 @@ void YUVVideoDrawQuad::SetAll(const SharedQuadState* shared_quad_state,
resources.ids[kAPlaneResourceIdIndex] = a_plane_resource_id;
resources.count = resources.ids[kAPlaneResourceIdIndex] ? 4 : 3;
this->color_space = color_space;
+ this->resource_offset = offset;
+ this->resource_multiplier = multiplier;
}
const YUVVideoDrawQuad* YUVVideoDrawQuad::MaterialCast(
diff --git a/cc/quads/yuv_video_draw_quad.h b/cc/quads/yuv_video_draw_quad.h
index 1a72078..015d5b7 100644
--- a/cc/quads/yuv_video_draw_quad.h
+++ b/cc/quads/yuv_video_draw_quad.h
@@ -43,7 +43,9 @@ class CC_EXPORT YUVVideoDrawQuad : public DrawQuad {
unsigned u_plane_resource_id,
unsigned v_plane_resource_id,
unsigned a_plane_resource_id,
- ColorSpace color_space);
+ ColorSpace color_space,
+ float offset,
+ float multiplier);
void SetAll(const SharedQuadState* shared_quad_state,
const gfx::Rect& rect,
@@ -61,13 +63,17 @@ class CC_EXPORT YUVVideoDrawQuad : public DrawQuad {
unsigned u_plane_resource_id,
unsigned v_plane_resource_id,
unsigned a_plane_resource_id,
- ColorSpace color_space);
+ ColorSpace color_space,
+ float offset,
+ float multiplier);
gfx::RectF ya_tex_coord_rect;
gfx::RectF uv_tex_coord_rect;
gfx::Size ya_tex_size;
gfx::Size uv_tex_size;
ColorSpace color_space;
+ float resource_offset = 0.0f;
+ float resource_multiplier = 1.0f;
static const YUVVideoDrawQuad* MaterialCast(const DrawQuad*);
diff --git a/cc/raster/tile_task_worker_pool.cc b/cc/raster/tile_task_worker_pool.cc
index 668aa66..d4549c7 100644
--- a/cc/raster/tile_task_worker_pool.cc
+++ b/cc/raster/tile_task_worker_pool.cc
@@ -50,6 +50,7 @@ bool IsSupportedPlaybackToMemoryFormat(ResourceFormat format) {
case RGB_565:
case ETC1:
case RED_8:
+ case LUMINANCE_F16:
return false;
}
NOTREACHED();
diff --git a/cc/resources/platform_color_unittest.cc b/cc/resources/platform_color_unittest.cc
index bbc4c06..49c8353 100644
--- a/cc/resources/platform_color_unittest.cc
+++ b/cc/resources/platform_color_unittest.cc
@@ -34,6 +34,7 @@ TEST(PlatformColorTest, SameComponentOrder) {
case RGB_565:
case ETC1:
case RED_8:
+ case LUMINANCE_F16:
EXPECT_FALSE(PlatformColor::SameComponentOrder(format));
break;
}
diff --git a/cc/resources/resource_format.cc b/cc/resources/resource_format.cc
index e11c097..ba4e65d 100644
--- a/cc/resources/resource_format.cc
+++ b/cc/resources/resource_format.cc
@@ -21,6 +21,7 @@ SkColorType ResourceFormatToSkColorType(ResourceFormat format) {
case LUMINANCE_8:
case RGB_565:
case RED_8:
+ case LUMINANCE_F16:
NOTREACHED();
break;
}
@@ -35,6 +36,7 @@ int BitsPerPixel(ResourceFormat format) {
return 32;
case RGBA_4444:
case RGB_565:
+ case LUMINANCE_F16:
return 16;
case ALPHA_8:
case LUMINANCE_8:
@@ -57,7 +59,8 @@ GLenum GLDataType(ResourceFormat format) {
GL_UNSIGNED_BYTE, // LUMINANCE_8
GL_UNSIGNED_SHORT_5_6_5, // RGB_565,
GL_UNSIGNED_BYTE, // ETC1
- GL_UNSIGNED_BYTE // RED_8
+ GL_UNSIGNED_BYTE, // RED_8
+ GL_HALF_FLOAT_OES, // LUMINANCE_F16
};
static_assert(arraysize(format_gl_data_type) == (RESOURCE_FORMAT_MAX + 1),
"format_gl_data_type does not handle all cases.");
@@ -75,7 +78,8 @@ GLenum GLDataFormat(ResourceFormat format) {
GL_LUMINANCE, // LUMINANCE_8
GL_RGB, // RGB_565
GL_ETC1_RGB8_OES, // ETC1
- GL_RED_EXT // RED_8
+ GL_RED_EXT, // RED_8
+ GL_LUMINANCE, // LUMINANCE_F16
};
static_assert(arraysize(format_gl_data_format) == (RESOURCE_FORMAT_MAX + 1),
"format_gl_data_format does not handle all cases.");
@@ -101,6 +105,7 @@ gfx::BufferFormat BufferFormat(ResourceFormat format) {
case LUMINANCE_8:
case RGB_565:
case ETC1:
+ case LUMINANCE_F16:
break;
}
NOTREACHED();
diff --git a/cc/resources/resource_format.h b/cc/resources/resource_format.h
index 79b815f..9a900d4 100644
--- a/cc/resources/resource_format.h
+++ b/cc/resources/resource_format.h
@@ -27,7 +27,8 @@ enum ResourceFormat {
RGB_565,
ETC1,
RED_8,
- RESOURCE_FORMAT_MAX = RED_8,
+ LUMINANCE_F16,
+ RESOURCE_FORMAT_MAX = LUMINANCE_F16,
};
SkColorType ResourceFormatToSkColorType(ResourceFormat format);
diff --git a/cc/resources/resource_provider.cc b/cc/resources/resource_provider.cc
index f2d7992..24796c2 100644
--- a/cc/resources/resource_provider.cc
+++ b/cc/resources/resource_provider.cc
@@ -87,6 +87,7 @@ GLenum TextureToStorageFormat(ResourceFormat format) {
case RGB_565:
case ETC1:
case RED_8:
+ case LUMINANCE_F16:
NOTREACHED();
break;
}
@@ -106,6 +107,7 @@ bool IsFormatSupportedForStorage(ResourceFormat format, bool use_bgra) {
case RGB_565:
case ETC1:
case RED_8:
+ case LUMINANCE_F16:
return false;
}
return false;
@@ -391,14 +393,29 @@ void ResourceProvider::LoseResourceForTesting(ResourceId id) {
resource->lost = true;
}
+ResourceFormat ResourceProvider::YuvResourceFormat(int bits) const {
+ if (bits > 8) {
+ return yuv_highbit_resource_format_;
+ } else {
+ return yuv_resource_format_;
+ }
+}
+
ResourceId ResourceProvider::CreateResource(const gfx::Size& size,
TextureHint hint,
ResourceFormat format) {
DCHECK(!size.IsEmpty());
switch (default_resource_type_) {
case RESOURCE_TYPE_GPU_MEMORY_BUFFER:
+ // GPU memory buffers don't support LUMINANCE_F16.
+ if (format != LUMINANCE_F16) {
+ return CreateGLTexture(size, hint, RESOURCE_TYPE_GPU_MEMORY_BUFFER,
+ format);
+ }
+ // Fall through and use a regular texture.
case RESOURCE_TYPE_GL_TEXTURE:
- return CreateGLTexture(size, hint, default_resource_type_, format);
+ return CreateGLTexture(size, hint, RESOURCE_TYPE_GL_TEXTURE, format);
+
case RESOURCE_TYPE_BITMAP:
DCHECK_EQ(RGBA_8888, format);
return CreateBitmap(size);
@@ -1077,6 +1094,9 @@ void ResourceProvider::Initialize() {
use_texture_usage_hint_ = caps.gpu.texture_usage;
use_compressed_texture_etc1_ = caps.gpu.texture_format_etc1;
yuv_resource_format_ = caps.gpu.texture_rg ? RED_8 : LUMINANCE_8;
+ yuv_highbit_resource_format_ = yuv_resource_format_;
+ if (caps.gpu.texture_half_float_linear)
+ yuv_highbit_resource_format_ = LUMINANCE_F16;
use_sync_query_ = caps.gpu.sync_query;
max_texture_size_ = 0; // Context expects cleared value.
diff --git a/cc/resources/resource_provider.h b/cc/resources/resource_provider.h
index 578fd59..de65f21 100644
--- a/cc/resources/resource_provider.h
+++ b/cc/resources/resource_provider.h
@@ -103,7 +103,7 @@ class CC_EXPORT ResourceProvider
ResourceFormat best_render_buffer_format() const {
return best_render_buffer_format_;
}
- ResourceFormat yuv_resource_format() const { return yuv_resource_format_; }
+ ResourceFormat YuvResourceFormat(int bits) const;
bool use_sync_query() const { return use_sync_query_; }
gpu::GpuMemoryBufferManager* gpu_memory_buffer_manager() {
return gpu_memory_buffer_manager_;
@@ -590,6 +590,7 @@ class CC_EXPORT ResourceProvider
bool use_texture_usage_hint_;
bool use_compressed_texture_etc1_;
ResourceFormat yuv_resource_format_;
+ ResourceFormat yuv_highbit_resource_format_;
int max_texture_size_;
ResourceFormat best_texture_format_;
ResourceFormat best_render_buffer_format_;
diff --git a/cc/resources/video_resource_updater.cc b/cc/resources/video_resource_updater.cc
index 216e48f..6f8bb4b 100644
--- a/cc/resources/video_resource_updater.cc
+++ b/cc/resources/video_resource_updater.cc
@@ -10,6 +10,7 @@
#include <algorithm>
#include "base/bind.h"
+#include "base/bit_cast.h"
#include "base/trace_event/trace_event.h"
#include "cc/base/math_util.h"
#include "cc/output/gl_renderer.h"
@@ -69,6 +70,12 @@ VideoFrameExternalResources::ResourceType ResourceTypeForVideoFrame(
case media::PIXEL_FORMAT_RGB32:
case media::PIXEL_FORMAT_MJPEG:
case media::PIXEL_FORMAT_MT21:
+ case media::PIXEL_FORMAT_YUV420P9:
+ case media::PIXEL_FORMAT_YUV422P9:
+ case media::PIXEL_FORMAT_YUV444P9:
+ case media::PIXEL_FORMAT_YUV420P10:
+ case media::PIXEL_FORMAT_YUV422P10:
+ case media::PIXEL_FORMAT_YUV444P10:
case media::PIXEL_FORMAT_UNKNOWN:
break;
}
@@ -140,8 +147,10 @@ void VideoResourceUpdater::SetPlaneResourceUniqueId(
}
VideoFrameExternalResources::VideoFrameExternalResources()
- : type(NONE), read_lock_fences_enabled(false) {
-}
+ : type(NONE),
+ read_lock_fences_enabled(false),
+ offset(0.0f),
+ multiplier(1.0f) {}
VideoFrameExternalResources::~VideoFrameExternalResources() {}
@@ -219,11 +228,15 @@ static gfx::Size SoftwarePlaneDimension(
const scoped_refptr<media::VideoFrame>& input_frame,
bool software_compositor,
size_t plane_index) {
- if (!software_compositor) {
- return media::VideoFrame::PlaneSize(
- input_frame->format(), plane_index, input_frame->coded_size());
- }
- return input_frame->coded_size();
+ gfx::Size coded_size = input_frame->coded_size();
+ if (software_compositor)
+ return coded_size;
+
+ int plane_width = media::VideoFrame::Columns(
+ plane_index, input_frame->format(), coded_size.width());
+ int plane_height = media::VideoFrame::Rows(plane_index, input_frame->format(),
+ coded_size.height());
+ return gfx::Size(plane_width, plane_height);
}
VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
@@ -231,6 +244,41 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
TRACE_EVENT0("cc", "VideoResourceUpdater::CreateForSoftwarePlanes");
const media::VideoPixelFormat input_frame_format = video_frame->format();
+ // TODO(hubbe): Make this a video frame method.
+ int bits_per_channel = 0;
+ switch (input_frame_format) {
+ case media::PIXEL_FORMAT_UNKNOWN:
+ NOTREACHED();
+ // Fall through!
+ case media::PIXEL_FORMAT_I420:
+ case media::PIXEL_FORMAT_YV12:
+ case media::PIXEL_FORMAT_YV16:
+ case media::PIXEL_FORMAT_YV12A:
+ case media::PIXEL_FORMAT_YV24:
+ case media::PIXEL_FORMAT_NV12:
+ case media::PIXEL_FORMAT_NV21:
+ case media::PIXEL_FORMAT_UYVY:
+ case media::PIXEL_FORMAT_YUY2:
+ case media::PIXEL_FORMAT_ARGB:
+ case media::PIXEL_FORMAT_XRGB:
+ case media::PIXEL_FORMAT_RGB24:
+ case media::PIXEL_FORMAT_RGB32:
+ case media::PIXEL_FORMAT_MJPEG:
+ case media::PIXEL_FORMAT_MT21:
+ bits_per_channel = 8;
+ break;
+ case media::PIXEL_FORMAT_YUV420P9:
+ case media::PIXEL_FORMAT_YUV422P9:
+ case media::PIXEL_FORMAT_YUV444P9:
+ bits_per_channel = 9;
+ break;
+ case media::PIXEL_FORMAT_YUV420P10:
+ case media::PIXEL_FORMAT_YUV422P10:
+ case media::PIXEL_FORMAT_YUV444P10:
+ bits_per_channel = 10;
+ break;
+ }
+
// Only YUV software video frames are supported.
if (!media::IsYuvPlanar(input_frame_format)) {
NOTREACHED() << media::VideoPixelFormatToString(input_frame_format);
@@ -240,7 +288,8 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
const bool software_compositor = context_provider_ == NULL;
ResourceFormat output_resource_format =
- resource_provider_->yuv_resource_format();
+ resource_provider_->YuvResourceFormat(bits_per_channel);
+
size_t output_plane_count = media::VideoFrame::NumPlanes(input_frame_format);
// TODO(skaslev): If we're in software compositing mode, we do the YUV -> RGB
@@ -352,7 +401,7 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
PlaneResource& plane_resource = *plane_resources[i];
// Update each plane's resource id with its content.
DCHECK_EQ(plane_resource.resource_format,
- resource_provider_->yuv_resource_format());
+ resource_provider_->YuvResourceFormat(bits_per_channel));
if (!PlaneResourceMatchesUniqueID(plane_resource, video_frame.get(), i)) {
// We need to transfer data from |video_frame| to the plane resource.
@@ -361,9 +410,9 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
// The |resource_size_pixels| is the size of the resource we want to
// upload to.
gfx::Size resource_size_pixels = plane_resource.resource_size;
- // The |video_stride_pixels| is the width of the video frame we are
+ // The |video_stride_bytes| is the width of the video frame we are
// uploading (including non-frame data to fill in the stride).
- int video_stride_pixels = video_frame->stride(i);
+ int video_stride_bytes = video_frame->stride(i);
size_t bytes_per_row = ResourceUtil::UncheckedWidthInBytes<size_t>(
resource_size_pixels.width(), plane_resource.resource_format);
@@ -372,10 +421,24 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
size_t upload_image_stride =
MathUtil::UncheckedRoundUp<size_t>(bytes_per_row, 4u);
+ bool needs_conversion = false;
+ int shift = 0;
+
+ // LUMINANCE_F16 uses half-floats, so we always need a conversion step.
+ if (plane_resource.resource_format == LUMINANCE_F16) {
+ needs_conversion = true;
+ // Note that the current method of converting integers to half-floats
+ // stops working if you have more than 10 bits of data.
+ DCHECK_LE(bits_per_channel, 10);
+ } else if (bits_per_channel > 8) {
+ // If bits_per_channel > 8 and we can't use LUMINANCE_F16, we need to
+ // shift the data down and create an 8-bit texture.
+ needs_conversion = true;
+ shift = bits_per_channel - 8;
+ }
const uint8_t* pixels;
- size_t video_bytes_per_row = ResourceUtil::UncheckedWidthInBytes<size_t>(
- video_stride_pixels, plane_resource.resource_format);
- if (upload_image_stride == video_bytes_per_row) {
+ if (static_cast<int>(upload_image_stride) == video_stride_bytes &&
+ !needs_conversion) {
pixels = video_frame->data(i);
} else {
// Avoid malloc for each frame/plane if possible.
@@ -383,11 +446,36 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
upload_image_stride * resource_size_pixels.height();
if (upload_pixels_.size() < needed_size)
upload_pixels_.resize(needed_size);
+
for (int row = 0; row < resource_size_pixels.height(); ++row) {
- uint8_t* dst = &upload_pixels_[upload_image_stride * row];
- const uint8_t* src =
- video_frame->data(i) + (video_bytes_per_row * row);
- memcpy(dst, src, bytes_per_row);
+ if (plane_resource.resource_format == LUMINANCE_F16) {
+ uint16_t* dst = reinterpret_cast<uint16_t*>(
+ &upload_pixels_[upload_image_stride * row]);
+ const uint16_t* src = reinterpret_cast<uint16_t*>(
+ video_frame->data(i) + (video_stride_bytes * row));
+ // Micro-benchmarking indicates that the compiler does
+ // a good enough job of optimizing this loop that trying
+ // to manually operate on one uint64 at a time is not
+ // actually helpful.
+ // Note to future optimizers: Benchmark your optimizations!
+ for (size_t i = 0; i < bytes_per_row / 2; i++)
+ dst[i] = src[i] | 0x3800;
+ } else if (shift != 0) {
+ // We have more-than-8-bit input which we need to shift
+ // down to fit it into an 8-bit texture.
+ uint8_t* dst = &upload_pixels_[upload_image_stride * row];
+ const uint16_t* src = reinterpret_cast<uint16_t*>(
+ video_frame->data(i) + (video_stride_bytes * row));
+ for (size_t i = 0; i < bytes_per_row; i++)
+ dst[i] = src[i] >> shift;
+ } else {
+ // Input and output are the same size and format, but
+ // differ in stride, copy one row at a time.
+ uint8_t* dst = &upload_pixels_[upload_image_stride * row];
+ const uint8_t* src =
+ video_frame->data(i) + (video_stride_bytes * row);
+ memcpy(dst, src, bytes_per_row);
+ }
}
pixels = &upload_pixels_[0];
}
@@ -397,6 +485,29 @@ VideoFrameExternalResources VideoResourceUpdater::CreateForSoftwarePlanes(
SetPlaneResourceUniqueId(video_frame.get(), i, &plane_resource);
}
+ if (plane_resource.resource_format == LUMINANCE_F16) {
+ // By OR-ing with 0x3800, 10-bit numbers become half-floats in the
+ // range [0.5..1) and 9-bit numbers get the range [0.5..0.75).
+ //
+ // Half-floats are evaluated as:
+ // float value = pow(2.0, exponent - 25) * (0x400 + fraction);
+ //
+ // In our case the exponent is 14 (since we or with 0x3800) and
+ // pow(2.0, 14-25) * 0x400 evaluates to 0.5 (our offset) and
+ // pow(2.0, 14-25) * fraction is [0..0.49951171875] for 10-bit and
+ // [0..0.24951171875] for 9-bit.
+ //
+ // (https://en.wikipedia.org/wiki/Half-precision_floating-point_format)
+ //
+ // PLEASE NOTE: This doesn't work if bits_per_channel is > 10.
+ // PLEASE NOTE: All planes are assumed to use the same multiplier/offset.
+ external_resources.offset = 0.5f;
+ // Max value from input data.
+ int max_input_value = (1 << bits_per_channel) - 1;
+      // 1 << 11 = 2048 would be 1.0 with our exponent.
+ external_resources.multiplier = 2048.0 / max_input_value;
+ }
+
external_resources.mailboxes.push_back(
TextureMailbox(plane_resource.mailbox, gpu::SyncToken(),
resource_provider_->GetResourceTextureTarget(
diff --git a/cc/resources/video_resource_updater.h b/cc/resources/video_resource_updater.h
index e80a849..4e11110 100644
--- a/cc/resources/video_resource_updater.h
+++ b/cc/resources/video_resource_updater.h
@@ -63,6 +63,11 @@ class CC_EXPORT VideoFrameExternalResources {
std::vector<unsigned> software_resources;
ReleaseCallbackImpl software_release_callback;
+  // Used by textures whose sampled values are not already in the 0-1 range
+  // (e.g. half-float planes): after a lookup, subtract |offset| from the
+  // sampled value and multiply the result by |multiplier|.
+ float offset;
+ float multiplier;
+
VideoFrameExternalResources();
~VideoFrameExternalResources();
};
diff --git a/cc/resources/video_resource_updater_unittest.cc b/cc/resources/video_resource_updater_unittest.cc
index 3063182..2595d5a 100644
--- a/cc/resources/video_resource_updater_unittest.cc
+++ b/cc/resources/video_resource_updater_unittest.cc
@@ -94,6 +94,10 @@ class VideoResourceUpdaterTest : public testing::Test {
output_surface3d_ = FakeOutputSurface::Create3d(std::move(context3d));
CHECK(output_surface3d_->BindToClient(&client_));
+ }
+
+ void SetUp() override {
+ testing::Test::SetUp();
output_surface_software_ = FakeOutputSurface::CreateSoftware(
make_scoped_ptr(new SoftwareOutputDevice));
@@ -131,6 +135,43 @@ class VideoResourceUpdaterTest : public testing::Test {
return video_frame;
}
+ scoped_refptr<media::VideoFrame> CreateWonkyTestYUVVideoFrame() {
+ const int kDimension = 10;
+ const int kYWidth = kDimension + 5;
+ const int kUWidth = (kYWidth + 1) / 2 + 200;
+ const int kVWidth = (kYWidth + 1) / 2 + 1;
+ static uint8_t y_data[kYWidth * kDimension] = {0};
+ static uint8_t u_data[kUWidth * kDimension] = {0};
+ static uint8_t v_data[kVWidth * kDimension] = {0};
+
+ scoped_refptr<media::VideoFrame> video_frame =
+ media::VideoFrame::WrapExternalYuvData(
+ media::PIXEL_FORMAT_YV16, // format
+ gfx::Size(kYWidth, kDimension), // coded_size
+ gfx::Rect(2, 0, kDimension, kDimension), // visible_rect
+ gfx::Size(kDimension, kDimension), // natural_size
+ -kYWidth, // y_stride (negative)
+ kUWidth, // u_stride
+ kVWidth, // v_stride
+ y_data + kYWidth * (kDimension - 1), // y_data
+ u_data, // u_data
+ v_data, // v_data
+ base::TimeDelta()); // timestamp
+ EXPECT_TRUE(video_frame);
+ return video_frame;
+ }
+
+ scoped_refptr<media::VideoFrame> CreateTestHighBitFrame() {
+ const int kDimension = 10;
+ gfx::Size size(kDimension, kDimension);
+
+ scoped_refptr<media::VideoFrame> video_frame(media::VideoFrame::CreateFrame(
+ media::PIXEL_FORMAT_YUV420P10, size, gfx::Rect(size), size,
+ base::TimeDelta()));
+ EXPECT_TRUE(video_frame);
+ return video_frame;
+ }
+
static void ReleaseMailboxCB(const gpu::SyncToken& sync_token) {}
scoped_refptr<media::VideoFrame> CreateTestHardwareVideoFrame(
@@ -217,6 +258,61 @@ TEST_F(VideoResourceUpdaterTest, SoftwareFrame) {
EXPECT_EQ(VideoFrameExternalResources::YUV_RESOURCE, resources.type);
}
+TEST_F(VideoResourceUpdaterTest, HighBitFrameNoF16) {
+ VideoResourceUpdater updater(output_surface3d_->context_provider(),
+ resource_provider3d_.get());
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+
+ VideoFrameExternalResources resources =
+ updater.CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameExternalResources::YUV_RESOURCE, resources.type);
+}
+
+class VideoResourceUpdaterTestWithF16 : public VideoResourceUpdaterTest {
+ public:
+ VideoResourceUpdaterTestWithF16() : VideoResourceUpdaterTest() {
+ context3d_->set_support_texture_half_float_linear(true);
+ }
+};
+
+TEST_F(VideoResourceUpdaterTestWithF16, HighBitFrame) {
+ VideoResourceUpdater updater(output_surface3d_->context_provider(),
+ resource_provider3d_.get());
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+
+ VideoFrameExternalResources resources =
+ updater.CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameExternalResources::YUV_RESOURCE, resources.type);
+}
+
+TEST_F(VideoResourceUpdaterTest, HighBitFrameSoftwareCompositor) {
+ VideoResourceUpdater updater(nullptr, resource_provider_software_.get());
+ scoped_refptr<media::VideoFrame> video_frame = CreateTestHighBitFrame();
+
+ VideoFrameExternalResources resources =
+ updater.CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameExternalResources::SOFTWARE_RESOURCE, resources.type);
+}
+
+TEST_F(VideoResourceUpdaterTest, WonkySoftwareFrame) {
+ VideoResourceUpdater updater(output_surface3d_->context_provider(),
+ resource_provider3d_.get());
+ scoped_refptr<media::VideoFrame> video_frame = CreateWonkyTestYUVVideoFrame();
+
+ VideoFrameExternalResources resources =
+ updater.CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameExternalResources::YUV_RESOURCE, resources.type);
+}
+
+TEST_F(VideoResourceUpdaterTest, WonkySoftwareFrameSoftwareCompositor) {
+ VideoResourceUpdater updater(nullptr, resource_provider_software_.get());
+ scoped_refptr<media::VideoFrame> video_frame = CreateWonkyTestYUVVideoFrame();
+
+ VideoFrameExternalResources resources =
+ updater.CreateExternalResourcesFromVideoFrame(video_frame);
+ EXPECT_EQ(VideoFrameExternalResources::SOFTWARE_RESOURCE, resources.type);
+}
+
TEST_F(VideoResourceUpdaterTest, ReuseResource) {
VideoResourceUpdater updater(output_surface3d_->context_provider(),
resource_provider3d_.get());
@@ -405,5 +501,6 @@ TEST_F(VideoResourceUpdaterTest, CreateForHardwarePlanes_StreamTexture) {
// that extension is supported.
EXPECT_FALSE(context3d_->WasImmutableTextureCreated());
}
+
} // namespace
} // namespace cc
diff --git a/cc/test/render_pass_test_utils.cc b/cc/test/render_pass_test_utils.cc
index 186bfca..b746ea6 100644
--- a/cc/test/render_pass_test_utils.cc
+++ b/cc/test/render_pass_test_utils.cc
@@ -297,7 +297,8 @@ void AddOneOfEveryQuadType(RenderPass* to_pass,
gfx::RectF(.0f, .0f, 100.0f, 100.0f),
gfx::RectF(.0f, .0f, 50.0f, 50.0f), gfx::Size(100, 100),
gfx::Size(50, 50), plane_resources[0], plane_resources[1],
- plane_resources[2], plane_resources[3], color_space);
+ plane_resources[2], plane_resources[3], color_space, 0.0,
+ 1.0);
}
} // namespace cc
diff --git a/cc/test/test_web_graphics_context_3d.h b/cc/test/test_web_graphics_context_3d.h
index 03ddf3f..f497ac4 100644
--- a/cc/test/test_web_graphics_context_3d.h
+++ b/cc/test/test_web_graphics_context_3d.h
@@ -347,6 +347,9 @@ class TestWebGraphicsContext3D {
void set_support_texture_rectangle(bool support) {
test_capabilities_.gpu.texture_rectangle = support;
}
+ void set_support_texture_half_float_linear(bool support) {
+ test_capabilities_.gpu.texture_half_float_linear = support;
+ }
// When this context is lost, all contexts in its share group are also lost.
void add_share_group_context(TestWebGraphicsContext3D* context3d) {