From David Fries, "Fix remote X11 crash querying GL_NUM_EXTENSIONS

In osg::isGLExtensionOrVersionSupported in src/osg/GLExtensions.cpp, when
using indirect X11 rendering,
glGetIntegerv( GL_NUM_EXTENSIONS, &numExt );
leaves numExt uninitialized, causing the subsequent glGetStringi to
return NULL when the extension index isn't present.  Passing NULL to
std::string() then crashes.  This is with the following NVIDIA driver:
OpenGL version string: 3.3.0 NVIDIA 256.35

I went ahead and initialized some of the other variables before
glGetIntegerv in other files as well.  I don't know for sure which ones
can fail, so I don't know which initializations are strictly required.
"
Robert Osfield
2010-11-03 09:28:28 +00:00
parent 079b1c293e
commit 2d28026654
9 changed files with 11 additions and 6 deletions

@@ -99,7 +99,7 @@ public:
 #if defined(OSG_GLES1_AVAILABLE) || defined(OSG_GLES2_AVAILABLE)
         if (pixelFormat == GL_RGB)
         {
-            GLint value;
+            GLint value = 0;
 #ifndef GL_IMPLEMENTATION_COLOR_READ_FORMAT
 #define GL_IMPLEMENTATION_COLOR_READ_FORMAT 0x8B9B
 #endif

@@ -150,7 +150,7 @@ void FragmentProgram::apply(State& state) const
                             _fragmentProgram.length(), _fragmentProgram.c_str());
 
             // Check for errors
-            GLint errorposition;
+            GLint errorposition = 0;
             glGetIntegerv(GL_PROGRAM_ERROR_POSITION_ARB, &errorposition);
             if (errorposition != -1)
             {

@@ -129,7 +129,7 @@ bool osg::isGLExtensionOrVersionSupported(unsigned int contextID, const char *ex
 # ifndef GL_NUM_EXTENSIONS
 # define GL_NUM_EXTENSIONS 0x821D
 # endif
-        GLint numExt;
+        GLint numExt = 0;
         glGetIntegerv( GL_NUM_EXTENSIONS, &numExt );
         int idx;
         for( idx=0; idx<numExt; idx++ )

@@ -503,7 +503,7 @@ void Shader::PerContextShader::compileShader(osg::State& state)
 #if defined(OSG_GLES2_AVAILABLE)
     if (_shader->getShaderBinary())
     {
-        GLint numFormats;
+        GLint numFormats = 0;
         glGetIntegerv(GL_NUM_SHADER_BINARY_FORMATS, &numFormats);
         if (numFormats>0)
         {

@@ -899,7 +899,7 @@ void State::initializeExtensionProcs()
         osg::isGLExtensionSupported(_contextID,"GL_EXT_multitexture") ||
         OSG_GLES1_FEATURES)
     {
-        GLint maxTextureUnits;
+        GLint maxTextureUnits = 0;
         glGetIntegerv(GL_MAX_TEXTURE_UNITS,&maxTextureUnits);
         _glMaxTextureUnits = maxTextureUnits;
         _glMaxTextureCoords = maxTextureUnits;

@@ -2258,6 +2258,7 @@ Texture::Extensions::Extensions(unsigned int contextID)
         OSG_INFO<<"Disabling _isNonPowerOfTwoTextureMipMappedSupported for GeForce FX hardware."<<std::endl;
     }
 
+    _maxTextureSize=0;
     glGetIntegerv(GL_MAX_TEXTURE_SIZE,&_maxTextureSize);
 
     char *ptr;
@@ -2274,6 +2275,7 @@ Texture::Extensions::Extensions(unsigned int contextID)
 
     if( _isMultiTexturingSupported )
     {
+        _numTextureUnits = 0;
 #if defined(OSG_GLES2_AVAILABLE) || defined(OSG_GL3_AVAILABLE)
         glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS,&_numTextureUnits);
 #else

@@ -711,7 +711,9 @@ void Texture2DArray::Extensions::setupGLExtensions(unsigned int contextID)
     _isTexture3DSupported = OSG_GL3_FEATURES || isGLExtensionSupported(contextID,"GL_EXT_texture3D");
     _isTexture2DArraySupported = OSG_GL3_FEATURES || isGLExtensionSupported(contextID,"GL_EXT_texture_array");
 
+    _max2DSize = 0;
     glGetIntegerv(GL_MAX_TEXTURE_SIZE, &_max2DSize);
+    _maxLayerCount = 0;
     glGetIntegerv(GL_MAX_ARRAY_TEXTURE_LAYERS_EXT, &_maxLayerCount);
 
     setGLExtensionFuncPtr(_glTexImage3D, "glTexImage3D","glTexImage3DEXT");

@@ -606,6 +606,7 @@ void Texture3D::Extensions::setupGLExtensions(unsigned int contextID)
     if (_isTexture3DFast) _isTexture3DSupported = true;
     else _isTexture3DSupported = strncmp((const char*)glGetString(GL_VERSION),"1.2",3)>=0;
 
+    _maxTexture3DSize = 0;
     glGetIntegerv(GL_MAX_3D_TEXTURE_SIZE, &_maxTexture3DSize);
 
     setGLExtensionFuncPtr(glTexImage3D,"glTexImage3D","glTexImage3DEXT");

@@ -150,7 +150,7 @@ void VertexProgram::apply(State& state) const
                             _vertexProgram.length(), _vertexProgram.c_str());
 
             // Check for errors
-            GLint errorposition;
+            GLint errorposition = 0;
             glGetIntegerv(GL_PROGRAM_ERROR_POSITION_ARB, &errorposition);
             if (errorposition != -1)
             {