Refactor maximum texture dimensions.
OpenGL has separate implementation-defined texture size limits for
3D textures and for array textures. For now just give them the same value.
Bug chromium:835299
Change-Id: Ifaf537511f016e21992388f56598d5ec12a393b8
Reviewed-on: https://swiftshader-review.googlesource.com/18788
Tested-by: Nicolas Capens <nicolascapens@google.com>
Reviewed-by: Alexis Hétu <sugoi@google.com>
diff --git a/src/OpenGL/libGLESv2/libGLESv3.cpp b/src/OpenGL/libGLESv2/libGLESv3.cpp
index 381a3aa..5d35d06 100644
--- a/src/OpenGL/libGLESv2/libGLESv3.cpp
+++ b/src/OpenGL/libGLESv2/libGLESv3.cpp
@@ -353,7 +353,7 @@
return error(GL_INVALID_VALUE);
}
- const GLsizei maxSize3D = es2::IMPLEMENTATION_MAX_TEXTURE_SIZE >> level;
+ const GLsizei maxSize3D = es2::IMPLEMENTATION_MAX_3D_TEXTURE_SIZE >> level;
if((width < 0) || (height < 0) || (depth < 0) || (width > maxSize3D) || (height > maxSize3D) || (depth > maxSize3D))
{
return error(GL_INVALID_VALUE);
@@ -523,7 +523,7 @@
return error(GL_INVALID_VALUE);
}
- const GLsizei maxSize3D = es2::IMPLEMENTATION_MAX_TEXTURE_SIZE >> level;
+ const GLsizei maxSize3D = es2::IMPLEMENTATION_MAX_3D_TEXTURE_SIZE >> level;
if((width < 0) || (height < 0) || (depth < 0) || (width > maxSize3D) || (height > maxSize3D) || (depth > maxSize3D) || (border != 0) || (imageSize < 0))
{
return error(GL_INVALID_VALUE);
@@ -1235,12 +1235,22 @@
return error(GL_INVALID_OPERATION);
}
+ if(level >= es2::IMPLEMENTATION_MAX_TEXTURE_LEVELS)
+ {
+ return error(GL_INVALID_VALUE);
+ }
+
textarget = textureObject->getTarget();
switch(textarget)
{
case GL_TEXTURE_3D:
+ if(layer >= es2::IMPLEMENTATION_MAX_3D_TEXTURE_SIZE)
+ {
+ return error(GL_INVALID_VALUE);
+ }
+ break;
case GL_TEXTURE_2D_ARRAY:
- if(layer >= es2::IMPLEMENTATION_MAX_TEXTURE_SIZE || (level >= es2::IMPLEMENTATION_MAX_TEXTURE_LEVELS))
+ if(layer >= es2::IMPLEMENTATION_MAX_ARRAY_TEXTURE_LAYERS)
{
return error(GL_INVALID_VALUE);
}