author    | zmo@google.com <zmo@google.com@0039d316-1c4b-4281-b951-d872f2087c98> | 2012-10-01 23:06:04 +0000
committer | zmo@google.com <zmo@google.com@0039d316-1c4b-4281-b951-d872f2087c98> | 2012-10-01 23:06:04 +0000
commit    | 48fe7e4cbaf045f280a90c6e5f93867d49082ce6 (patch)
tree      | 33c1511de1ee714d1eff15f18f9726ca939fcd45 /ui/gl/gl_context_cgl.cc
parent    | 19be7a6da266c546a7a52a7170df789f1ab8b710 (diff)
Move force_discrete GPU on older MacBookPro models to GpuBlacklist
Previously, the forcing was hardwired in the code.
Also, refactor the SupportsDualGpus() code so the check runs only in the browser process, mainly the part that probes the availability of online/offline renderers.
We suspect that running the renderer-detection code simultaneously in the browser and GPU processes may cause GPU driver crashes on certain Mac systems (mostly 10.8, judging from crash reports). This refactoring will hopefully fix the issue.
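For illustration, a minimal sketch of the caching pattern this refactoring implies: run the fragile renderer detection at most once per process and hand every caller the cached answer. Only the GetInstance() and SupportsDualGpus() names come from the diff below; the class body and the DetectDualGpusSketch() stub are hypothetical (the real GpuSwitchingManager in ui/gl is implemented differently).

// Hypothetical, simplified stand-in for ui/gl's GpuSwitchingManager.
class GpuSwitchingManagerSketch {
 public:
  // One instance per process (Meyers singleton).
  static GpuSwitchingManagerSketch* GetInstance() {
    static GpuSwitchingManagerSketch instance;
    return &instance;
  }

  // Returns the cached answer; the potentially crashy online/offline
  // renderer enumeration runs at most once per process.
  bool SupportsDualGpus() {
    if (!checked_) {
      supports_dual_gpus_ = DetectDualGpusSketch();
      checked_ = true;
    }
    return supports_dual_gpus_;
  }

 private:
  GpuSwitchingManagerSketch() : checked_(false), supports_dual_gpus_(false) {}

  // Placeholder for the Mac-specific online/offline renderer probe.
  bool DetectDualGpusSketch() { return false; }

  bool checked_;
  bool supports_dual_gpus_;
};

With this shape, call sites such as the ones in the diff below go through GpuSwitchingManager::GetInstance()->SupportsDualGpus() instead of a bare SupportsDualGpus(), so the detection itself no longer has to run in the GPU process.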
BUG=151741,131276
TEST=tree, manual on dual GPU systems (10.7 and 10.8)
Review URL: https://codereview.chromium.org/10995002
git-svn-id: svn://svn.chromium.org/chrome/trunk/src@159579 0039d316-1c4b-4281-b951-d872f2087c98
Diffstat (limited to 'ui/gl/gl_context_cgl.cc')
-rw-r--r-- | ui/gl/gl_context_cgl.cc | 17
1 file changed, 3 insertions(+), 14 deletions(-)
diff --git a/ui/gl/gl_context_cgl.cc b/ui/gl/gl_context_cgl.cc
index a2cf560..614ed20 100644
--- a/ui/gl/gl_context_cgl.cc
+++ b/ui/gl/gl_context_cgl.cc
@@ -37,7 +37,7 @@ bool GLContextCGL::Initialize(GLSurface* compatible_surface,
   std::vector<CGLPixelFormatAttribute> attribs;
   // If the system supports dual gpus then allow offline renderers for every
   // context, so that they can all be in the same share group.
-  if (SupportsDualGpus())
+  if (GpuSwitchingManager::GetInstance()->SupportsDualGpus())
     attribs.push_back(kCGLPFAAllowOfflineRenderers);
   if (GetGLImplementation() == kGLImplementationAppleGL) {
     attribs.push_back(kCGLPFARendererID);
@@ -61,7 +61,8 @@ bool GLContextCGL::Initialize(GLSurface* compatible_surface,
 
   // If using the discrete gpu, create a pixel format requiring it before we
   // create the context.
-  if (!SupportsDualGpus() || gpu_preference == PreferDiscreteGpu) {
+  if (!GpuSwitchingManager::GetInstance()->SupportsDualGpus() ||
+      gpu_preference == PreferDiscreteGpu) {
     std::vector<CGLPixelFormatAttribute> discrete_attribs;
     discrete_attribs.push_back((CGLPixelFormatAttribute) 0);
     GLint num_pixel_formats;
@@ -73,7 +74,6 @@ bool GLContextCGL::Initialize(GLSurface* compatible_surface,
     }
   }
 
-
   CGLError res = CGLCreateContext(
       format,
       share_context ?
@@ -224,15 +224,4 @@ void ScopedCGLDestroyRendererInfo::operator()(CGLRendererInfoObj x) const {
   CGLDestroyRendererInfo(x);
 }
 
-void GLContextCGL::ForceUseOfDiscreteGPU() {
-  static CGLPixelFormatObj format = NULL;
-  if (format)
-    return;
-  CGLPixelFormatAttribute attribs[1];
-  attribs[0] = static_cast<CGLPixelFormatAttribute>(0);
-  GLint num_pixel_formats = 0;
-  CGLChoosePixelFormat(attribs, &format, &num_pixel_formats);
-  // format is deliberately leaked.
-}
-
 }  // namespace gfx
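For context on the deleted helper: on dual-GPU MacBook Pro models, merely keeping alive a CGL pixel format that does not allow offline renderers forces a switch to, and pins, the discrete GPU. A self-contained sketch of that trick, reconstructed from the removed ForceUseOfDiscreteGPU() above (the wrapper name here is hypothetical; the CGL calls are the ones the deleted code used):

#include <OpenGL/OpenGL.h>

// Sketch of the technique behind the removed ForceUseOfDiscreteGPU():
// an empty attribute list omits kCGLPFAAllowOfflineRenderers, so on
// dual-GPU machines choosing this pixel format activates the discrete
// GPU, and holding on to the format keeps it active.
static void ForceDiscreteGpuSketch() {
  static CGLPixelFormatObj format = NULL;
  if (format)
    return;  // Already holding a format; the discrete GPU stays active.
  CGLPixelFormatAttribute attribs[1];
  attribs[0] = static_cast<CGLPixelFormatAttribute>(0);  // Terminator only.
  GLint num_pixel_formats = 0;
  CGLChoosePixelFormat(attribs, &format, &num_pixel_formats);
  // |format| is deliberately leaked so the effect lasts for the process.
}

After this change, the same effect is driven by GpuBlacklist entries for the affected MacBook Pro models rather than being hardwired in GLContextCGL.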