author    kbr@chromium.org <kbr@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-03-14 22:21:44 +0000
committer kbr@chromium.org <kbr@chromium.org@0039d316-1c4b-4281-b951-d872f2087c98>  2013-03-14 22:21:44 +0000
commit    501b5740bca518414fa38567d10c17312924c28b (patch)
tree      abe1a7cc0bc7856a974e4da08ee3eba82693bde1
parent    c66cae4f0fc034b034ac480a41f473f666003db2 (diff)
Don't actually call glGetShaderPrecisionFormat unless running either on a GLES2 implementation or in unit tests. Calling it on some Mac OS OpenGL drivers causes a GL_INVALID_OPERATION error, which breaks either the WebGL implementation or applications later.

This should have been caught by the GPU bots. Tested with WebGL conformance tests, the shiny-teapot demo, and gpu_unittests.

BUG=192533
Review URL: https://codereview.chromium.org/12544023

git-svn-id: svn://svn.chromium.org/chrome/trunk/src@188216 0039d316-1c4b-4281-b951-d872f2087c98
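The patch guards the driver call behind an explicit implementation check. Below is a minimal standalone sketch of that guarded-call pattern, assuming a hypothetical helper IsGLES2OrMockImplementation() in place of Chromium's gfx::GetGLImplementation() checks; the default values mirror those in the decoder's switch statement and the unit test, not an independent source.

    #include <GLES2/gl2.h>

    // Hypothetical stand-in for Chromium's gfx::GetGLImplementation()
    // checks; the real code tests for kGLImplementationEGLGLES2 or the
    // mock GL used by unit tests.
    static bool IsGLES2OrMockImplementation() {
      return false;
    }

    static void GetShaderPrecisionFormatSafe(GLenum shader_type,
                                             GLenum precision_type,
                                             GLint* range, GLint* precision) {
      // Fill in minimum defaults first: the driver entry point may exist
      // but be a stub that leaves the out-parameters untouched.
      switch (precision_type) {
        case GL_LOW_INT:
        case GL_MEDIUM_INT:
        case GL_HIGH_INT:
          range[0] = 31;
          range[1] = 31;
          *precision = 0;
          break;
        case GL_LOW_FLOAT:
        case GL_MEDIUM_FLOAT:
        case GL_HIGH_FLOAT:
          range[0] = 62;  // Matches the values asserted in the unit test.
          range[1] = 62;
          *precision = 16;
          break;
        default:
          return;
      }
      // Only query the real driver on GLES2 (or the mock used in tests);
      // some Mac OS desktop GL drivers raise GL_INVALID_OPERATION here.
      if (IsGLES2OrMockImplementation()) {
        glGetShaderPrecisionFormat(shader_type, precision_type,
                                   range, precision);
      }
    }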
-rw-r--r--  gpu/command_buffer/service/gles2_cmd_decoder.cc           | 10
-rw-r--r--  gpu/command_buffer/service/gles2_cmd_decoder_unittest.cc  |  1
2 files changed, 9 insertions(+), 2 deletions(-)
diff --git a/gpu/command_buffer/service/gles2_cmd_decoder.cc b/gpu/command_buffer/service/gles2_cmd_decoder.cc
index c9fd9ff..0d6ba07 100644
--- a/gpu/command_buffer/service/gles2_cmd_decoder.cc
+++ b/gpu/command_buffer/service/gles2_cmd_decoder.cc
@@ -107,10 +107,18 @@ static void GetShaderPrecisionFormatImpl(GLenum shader_type,
       break;
   }
-  if (gfx::g_driver_gl.fn.glGetShaderPrecisionFormatFn) {
+  // TODO(kbr): fix this to not require testing for the mock. Tests
+  // should be able to change what GetGLImplementation returns in
+  // order to test all code paths.
+  if (gfx::GetGLImplementation() == gfx::kGLImplementationMockGL ||
+      (gfx::GetGLImplementation() == gfx::kGLImplementationEGLGLES2 &&
+       gfx::g_driver_gl.fn.glGetShaderPrecisionFormatFn)) {
     // This function is sometimes defined even though it's really just
     // a stub, so we need to set range and precision as if it weren't
     // defined before calling it.
+    // On Mac OS with some GPUs, calling this generates a
+    // GL_INVALID_OPERATION error. Avoid calling it on non-GLES2
+    // platforms.
     glGetShaderPrecisionFormat(shader_type, precision_type,
                                range, precision);
   }
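The TODO in the hunk above suggests letting tests override the reported GL implementation rather than special-casing the mock at the call site. One hedged sketch of that direction follows; the enum, the global, and both function names are hypothetical, not Chromium API.

    // Hypothetical direction for the TODO: make the implementation
    // injectable so tests can exercise every branch of the guard.
    enum class GLImpl { kDesktopGL, kEGLGLES2, kMockGL };

    static GLImpl g_gl_impl = GLImpl::kDesktopGL;

    // Tests would call this to choose which code path the guard takes.
    void SetGLImplementationForTesting(GLImpl impl) { g_gl_impl = impl; }

    // Mirrors the condition added in the patch; with injection, the mock
    // case no longer has to be hard-wired against the production getter.
    bool ShouldCallGetShaderPrecisionFormat(bool driver_exports_fn) {
      return g_gl_impl == GLImpl::kMockGL ||
             (g_gl_impl == GLImpl::kEGLGLES2 && driver_exports_fn);
    }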
diff --git a/gpu/command_buffer/service/gles2_cmd_decoder_unittest.cc b/gpu/command_buffer/service/gles2_cmd_decoder_unittest.cc
index 0745c91..8d86800 100644
--- a/gpu/command_buffer/service/gles2_cmd_decoder_unittest.cc
+++ b/gpu/command_buffer/service/gles2_cmd_decoder_unittest.cc
@@ -1372,7 +1372,6 @@ TEST_F(GLES2DecoderWithShaderTest, GetShaderPrecisionFormatSucceeds) {
   typedef GetShaderPrecisionFormat::Result Result;
   Result* result = static_cast<Result*>(shared_memory_address_);
   result->success = 0;
-  // NOTE: GL might not be called. There is no Desktop OpenGL equivalent.
   const GLint range[2] = { 62, 62 };
   const GLint precision = 16;
   EXPECT_CALL(*gl_, GetShaderPrecisionFormat(_, _, _, _))
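The hunk is truncated after the EXPECT_CALL line. For context, a complete gmock expectation for a mocked call with out-parameters of this shape typically looks like the sketch below, using the standard gmock actions DoAll, SetArrayArgument, and SetArgPointee; this is not the verbatim continuation of the Chromium test.

    #include "testing/gmock/include/gmock/gmock.h"

    using ::testing::_;
    using ::testing::DoAll;
    using ::testing::SetArgPointee;
    using ::testing::SetArrayArgument;

    // Sketch only: binds the test's range/precision values to the mocked
    // call's out-parameters. The original hunk ends before this point.
    EXPECT_CALL(*gl_, GetShaderPrecisionFormat(_, _, _, _))
        .WillOnce(DoAll(SetArrayArgument<2>(range, range + 2),
                        SetArgPointee<3>(precision)))
        .RetiresOnSaturation();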