author     oetuaho@nvidia.com <oetuaho@nvidia.com@0039d316-1c4b-4281-b951-d872f2087c98>  2014-02-20 16:56:25 +0000
committer  oetuaho@nvidia.com <oetuaho@nvidia.com@0039d316-1c4b-4281-b951-d872f2087c98>  2014-02-20 16:56:25 +0000
commit     d8e6c924b1bb23056f02d9c6e6363cfc43c38c7d (patch)
tree       fb995c004adc2bb8e20cd96f5e603f7bfd3b29d7 /gpu/command_buffer/service/gles2_cmd_decoder.h
parent     8357da40eb30d9c734b4b8c18d4633760d94c1e5 (diff)
Do not rely on a texture's internalformat matching format
After the recently implemented support for rendering to 32-bit floating-point textures on ES3, a texture's internal format and format no longer necessarily match. Fix the command decoder code that relied on them matching.

BUG=329605
TEST=gpu_unittests, WebGL conformance tests

Review URL: https://codereview.chromium.org/168983005

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@252268 0039d316-1c4b-4281-b951-d872f2087c98
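For context, the mismatch the commit message describes arises because ES3 float textures are allocated with a sized internal format while the accompanying format/type pair stays unsized. A minimal sketch of that allocation pattern (plain GL ES3 calls, not the command-buffer interface; width and height are placeholders):

    // Allocating a 32-bit floating-point texture on ES3.
    // The internalformat argument (GL_RGBA32F) differs from the
    // format argument (GL_RGBA), so decoder code can no longer
    // assume the two values match.
    GLuint tex = 0;
    glGenTextures(1, &tex);
    glBindTexture(GL_TEXTURE_2D, tex);
    glTexImage2D(GL_TEXTURE_2D,
                 0,            // level
                 GL_RGBA32F,   // internalformat: sized float format
                 width, height,
                 0,            // border
                 GL_RGBA,      // format of the (absent) source data
                 GL_FLOAT,     // type of the (absent) source data
                 nullptr);     // no initial data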
Diffstat (limited to 'gpu/command_buffer/service/gles2_cmd_decoder.h')
-rw-r--r--  gpu/command_buffer/service/gles2_cmd_decoder.h  1
1 file changed, 1 insertion, 0 deletions
diff --git a/gpu/command_buffer/service/gles2_cmd_decoder.h b/gpu/command_buffer/service/gles2_cmd_decoder.h
index 2d16a5a..409ae7b 100644
--- a/gpu/command_buffer/service/gles2_cmd_decoder.h
+++ b/gpu/command_buffer/service/gles2_cmd_decoder.h
@@ -192,6 +192,7 @@ class GPU_EXPORT GLES2Decoder : public base::SupportsWeakPtr<GLES2Decoder>,
unsigned bind_target,
unsigned target,
int level,
+ unsigned internal_format,
unsigned format,
unsigned type,
int width,
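With this change the decoder's texture-clearing entry point receives the internal format explicitly instead of deriving it from the format argument. A sketch of how the full declaration might read after the hunk above; the method name, leading parameter, and trailing parameters are not visible in the shown context, so the names used for them here are assumptions:

    // Hypothetical reconstruction of the surrounding declaration.
    virtual bool ClearTextureLevel(   // name not shown in the hunk; assumed
        unsigned service_id,          // assumed leading parameter
        unsigned bind_target,
        unsigned target,
        int level,
        unsigned internal_format,     // new: e.g. GL_RGBA32F on ES3
        unsigned format,              // may differ from internal_format
        unsigned type,
        int width,
        int height) = 0;              // trailing parameters assumed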