summary refs log tree commit diff stats
path: root/chrome/gpu
diff options
context:
space:
mode:
authorscherkus@chromium.org <scherkus@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2010-06-08 01:01:58 +0000
committerscherkus@chromium.org <scherkus@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>2010-06-08 01:01:58 +0000
commitb04cfb5ac972b460323dba7e3e79105ee44b491a (patch)
treef64fa6d8dc686524c14b07d4e7fad8d4713e7cb8 /chrome/gpu
parent7d60fb28dd7f2b2f46b2f5cd55f721245965faf1 (diff)
downloadchromium_src-b04cfb5ac972b460323dba7e3e79105ee44b491a.zip
chromium_src-b04cfb5ac972b460323dba7e3e79105ee44b491a.tar.gz
chromium_src-b04cfb5ac972b460323dba7e3e79105ee44b491a.tar.bz2
Added check for negative height/width and ensure that buffer size has not changed before reading buffers.
Patch by cdn@chromium.org: http://codereview.chromium.org/2479002/show BUG=43322 TEST=Run with --enable-video-layering and --enable-gpu-rendering and use <video> tag git-svn-id: svn://svn.chromium.org/chrome/trunk/src@49132 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'chrome/gpu')
-rw-r--r--chrome/gpu/gpu_video_layer_glx.cc31
1 file changed, 21 insertions, 10 deletions
diff --git a/chrome/gpu/gpu_video_layer_glx.cc b/chrome/gpu/gpu_video_layer_glx.cc
index 7416237..21e1bc1 100644
--- a/chrome/gpu/gpu_video_layer_glx.cc
+++ b/chrome/gpu/gpu_video_layer_glx.cc
@@ -80,6 +80,14 @@ static const char kFragmentShader[] =
#endif
"}\n";
+
+// Assume that somewhere along the line, someone will do width * height * 4
+// with signed numbers. If the maximum value is 2**31, then 2**31 / 4 =
+// 2**29 and floor(sqrt(2**29)) = 23170.
+
+// Max height and width for layers
+static const int kMaxVideoLayerSize = 23170;
+
GpuVideoLayerGLX::GpuVideoLayerGLX(GpuViewX* view,
GpuThread* gpu_thread,
int32 routing_id,
@@ -246,16 +254,14 @@ void GpuVideoLayerGLX::OnChannelError() {
void GpuVideoLayerGLX::OnPaintToVideoLayer(base::ProcessId source_process_id,
TransportDIB::Id id,
const gfx::Rect& bitmap_rect) {
- // Assume that somewhere along the line, someone will do width * height * 4
- // with signed numbers. If the maximum value is 2**31, then 2**31 / 4 =
- // 2**29 and floor(sqrt(2**29)) = 23170.
- //
// TODO(scherkus): |native_size_| is set in constructor, so perhaps this check
// should be a DCHECK().
const int width = native_size_.width();
const int height = native_size_.height();
const int stride = width;
- if (width > 23170 || height > 23170)
+
+ if (width <= 0 || width > kMaxVideoLayerSize ||
+ height <= 0 || height > kMaxVideoLayerSize)
return;
TransportDIB* dib = TransportDIB::Map(id);
@@ -279,11 +285,16 @@ void GpuVideoLayerGLX::OnPaintToVideoLayer(base::ProcessId source_process_id,
int plane_height = (i == kYPlane ? height : height / 2);
int plane_stride = (i == kYPlane ? stride : stride / 2);
- glActiveTexture(GL_TEXTURE0 + i);
- glBindTexture(GL_TEXTURE_2D, textures_[i]);
- glPixelStorei(GL_UNPACK_ROW_LENGTH, plane_stride);
- glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, plane_width, plane_height, 0,
- GL_LUMINANCE, GL_UNSIGNED_BYTE, planes[i]);
+ // Ensure that we will not read outside the shared mem region.
+ if (planes[i] >= planes[kYPlane] &&
+ (dib->size() - (planes[kYPlane] - planes[i])) >=
+ static_cast<unsigned int>(plane_width * plane_height)) {
+ glActiveTexture(GL_TEXTURE0 + i);
+ glBindTexture(GL_TEXTURE_2D, textures_[i]);
+ glPixelStorei(GL_UNPACK_ROW_LENGTH, plane_stride);
+ glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, plane_width, plane_height, 0,
+ GL_LUMINANCE, GL_UNSIGNED_BYTE, planes[i]);
+ }
}
// Reset back to original state.