opengl: Make GL_EXT_unpack_subimage mandatory

It's widely supported, and other Wayland compositors already require it,
so the chances of breaking things should be minimal, although some embedded
GPUs might be affected, in which case kwin will fall back to software
rendering. With unpack support always available, we can simplify and
also fix bugs in texture uploading.
This commit is contained in:
Vlad Zahorodnii 2024-08-15 14:38:22 +03:00
parent 7ab825cba1
commit 98953b0218
3 changed files with 3 additions and 10 deletions

View file

@@ -182,7 +182,7 @@ void GLTexture::update(const QImage &image, const QPoint &offset, const QRect &s
uploadFormat = QImage::Format_RGBA8888_Premultiplied;
}
}
bool useUnpack = context->supportsTextureUnpack() && image.format() == uploadFormat && !src.isNull();
bool useUnpack = image.format() == uploadFormat && !src.isNull();
QImage im;
if (useUnpack) {

View file

@@ -87,7 +87,6 @@ OpenGlContext::OpenGlContext(bool EGL)
, m_supportsTextureStorage(checkTextureStorageSupport(this))
, m_supportsTextureSwizzle(checkTextureSwizzleSupport(this))
, m_supportsARGB32Textures(!m_isOpenglES || hasOpenglExtension(QByteArrayLiteral("GL_EXT_texture_format_BGRA8888")))
, m_supportsTextureUnpack(!m_isOpenglES || hasOpenglExtension(QByteArrayLiteral("GL_EXT_unpack_subimage")))
, m_supportsRGTextures(hasVersion(Version(3, 0)) || hasOpenglExtension(QByteArrayLiteral("GL_ARB_texture_rg")) || hasOpenglExtension(QByteArrayLiteral("GL_EXT_texture_rg")))
, m_supports16BitTextures(!m_isOpenglES || hasOpenglExtension(QByteArrayLiteral("GL_EXT_texture_norm16")))
, m_supportsBlits(!m_isOpenglES || hasVersion(Version(3, 0)))
@@ -195,11 +194,6 @@ bool OpenGlContext::supportsARGB32Textures() const
return m_supportsARGB32Textures;
}
bool OpenGlContext::supportsTextureUnpack() const
{
return m_supportsTextureUnpack;
}
bool OpenGlContext::supportsRGTextures() const
{
return m_supportsRGTextures;
@@ -266,8 +260,9 @@ bool OpenGlContext::checkSupported() const
const bool supportsNonPowerOfTwoTextures = m_isOpenglES || hasOpenglExtension("GL_ARB_texture_non_power_of_two");
const bool supports3DTextures = !m_isOpenglES || hasVersion(Version(3, 0)) || hasOpenglExtension("GL_OES_texture_3D");
const bool supportsFBOs = m_isOpenglES || hasVersion(Version(3, 0)) || hasOpenglExtension("GL_ARB_framebuffer_object") || hasOpenglExtension(QByteArrayLiteral("GL_EXT_framebuffer_object"));
const bool supportsUnpack = !m_isOpenglES || hasOpenglExtension(QByteArrayLiteral("GL_EXT_unpack_subimage"));
if (!supportsGLSL || !supportsNonPowerOfTwoTextures || !supports3DTextures || !supportsFBOs) {
if (!supportsGLSL || !supportsNonPowerOfTwoTextures || !supports3DTextures || !supportsFBOs || !supportsUnpack) {
return false;
}
// some old hardware only supports very limited shaders. To prevent the shaders KWin uses later on from not working,

View file

@@ -59,7 +59,6 @@ public:
bool supportsTextureSwizzle() const;
bool supportsTextureStorage() const;
bool supportsARGB32Textures() const;
bool supportsTextureUnpack() const;
bool supportsRGTextures() const;
bool supports16BitTextures() const;
bool supportsBlits() const;
@@ -113,7 +112,6 @@ protected:
const bool m_supportsTextureStorage;
const bool m_supportsTextureSwizzle;
const bool m_supportsARGB32Textures;
const bool m_supportsTextureUnpack;
const bool m_supportsRGTextures;
const bool m_supports16BitTextures;
const bool m_supportsBlits;