Added support for reading the new OSG_MAX_TEXTURE_SIZE environment variable

inside the osg::Image::ensureValidSizeForTexturing() method. The
smallest of GL_MAX_TEXTURE_SIZE and OSG_MAX_TEXTURE_SIZE is used for the
final max texture size.

This new env. var. allows users to deliberately cap the size of their textures
so they can test whether their models are limited by texture memory.  If
reducing the max texture size increases performance then you are limited
by texture memory!
This commit is contained in:
Robert Osfield
2002-06-19 18:45:05 +00:00
parent 77e1fb7f80
commit 4f4f68e961

View File

@@ -431,9 +431,24 @@ void Image::ensureValidSizeForTexturing()
{
init = false;
glGetIntegerv(GL_MAX_TEXTURE_SIZE,&max_size);
notify(INFO) << "Max texture size "<<max_size<<std::endl;
notify(INFO) << "GL_MAX_TEXTURE_SIZE "<<max_size<<std::endl;
char *ptr;
if( (ptr = getenv("OSG_MAX_TEXTURE_SIZE")) != 0)
{
GLint osg_max_size = atoi(ptr);
notify(INFO) << "OSG_MAX_TEXTURE_SIZE "<<osg_max_size<<std::endl;
if (osg_max_size<max_size)
{
max_size = osg_max_size;
}
}
notify(INFO) << "Selected max texture size "<<max_size<<std::endl;
}
//max_size = 64;
if (new_s>max_size) new_s = max_size;