opengl: Drop unnecessary 16 bit texture support checks

16-bit textures are supported universally in desktop OpenGL, and both of
the removed checks sit inside !context->isOpenGLES() branches, so the
supports16BitTextures() test could never fail there.
Author: Vlad Zahorodnii
Date:   2024-08-19 18:27:25 +03:00
Parent: f3406a033c
Commit: 315fff68e9

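For context: the dropped guard only ever applied to table entries whose
upload type is GL_UNSIGNED_SHORT, i.e. the 16-bit-per-channel QImage
formats. A minimal sketch of such an entry follows; the field names
(internalFormat, format, type) come from the diff itself, but the concrete
mapping is an assumption for illustration, not a copy of kwin's table.

```cpp
// Illustrative sketch, not kwin's actual table. The struct fields match
// those referenced in the diff; the RGBA64 row is an assumed example of
// an entry that the removed GL_UNSIGNED_SHORT check used to gate.
// (GLenum comes from the GL header, e.g. epoxy/gl.h.)
struct FormatEntry {
    GLenum internalFormat; // 0 marks "no direct GL upload path"
    GLenum format;
    GLenum type;
};

static const FormatEntry formatTable[] = {
    // ...
    {GL_RGBA16, GL_RGBA, GL_UNSIGNED_SHORT}, // e.g. QImage::Format_RGBA64
    // ...
};
```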

@@ -161,8 +161,7 @@ void GLTexture::update(const QImage &image, const QRegion &region, const QPoint
     if (!context->isOpenGLES()) {
         const QImage::Format index = image.format();
-        if (index < sizeof(formatTable) / sizeof(formatTable[0]) && formatTable[index].internalFormat
-            && !(formatTable[index].type == GL_UNSIGNED_SHORT && !context->supports16BitTextures())) {
+        if (index < sizeof(formatTable) / sizeof(formatTable[0]) && formatTable[index].internalFormat) {
             glFormat = formatTable[index].format;
             type = formatTable[index].type;
             uploadFormat = index;
@@ -555,8 +554,7 @@ std::unique_ptr<GLTexture> GLTexture::upload(const QImage &image)
     QImage::Format uploadFormat;
     if (!context->isOpenGLES()) {
         const QImage::Format index = image.format();
-        if (index < sizeof(formatTable) / sizeof(formatTable[0]) && formatTable[index].internalFormat
-            && !(formatTable[index].type == GL_UNSIGNED_SHORT && !context->supports16BitTextures())) {
+        if (index < sizeof(formatTable) / sizeof(formatTable[0]) && formatTable[index].internalFormat) {
             internalFormat = formatTable[index].internalFormat;
             format = formatTable[index].format;
             type = formatTable[index].type;
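After the change, both call sites reduce to the same two-part test: the
QImage format must index into the table, and the entry must have a non-zero
internalFormat. Below is a self-contained sketch of that logic, reusing the
FormatEntry table sketched above; resolveUploadFormat is a hypothetical
helper (kwin keeps this logic inline, as both hunks show), and epoxy/gl.h
is an assumed GL header.

```cpp
#include <cstddef>
#include <QImage>
#include <epoxy/gl.h> // assumption: kwin links against libepoxy

// Hypothetical wrapper around the inline lookup from both hunks above.
// Returns false when there is no direct GL equivalent, in which case the
// caller would pick some fallback before uploading.
bool resolveUploadFormat(QImage::Format index,
                         GLenum &internalFormat, GLenum &format, GLenum &type)
{
    if (static_cast<size_t>(index) < sizeof(formatTable) / sizeof(formatTable[0])
        && formatTable[index].internalFormat) {
        internalFormat = formatTable[index].internalFormat;
        format = formatTable[index].format;
        type = formatTable[index].type;
        return true;
    }
    return false;
}
```

When the lookup fails, the callers presumably fall back to converting the
QImage to a supported format before upload; that path is outside this diff
and unchanged by it.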