kwin/effects/backgroundcontrast/contrast.cpp


/*
SPDX-FileCopyrightText: 2010 Fredrik Höglund <fredrik@kde.org>
SPDX-FileCopyrightText: 2011 Philipp Knechtges <philipp-dev@knechtges.com>
SPDX-FileCopyrightText: 2014 Marco Martin <mart@kde.org>
SPDX-License-Identifier: GPL-2.0-or-later
*/
#include "contrast.h"
#include "contrastshader.h"
// KConfigSkeleton
#include <QMatrix4x4>
#include <QWindow>
#include <KWaylandServer/surface_interface.h>
#include <KWaylandServer/contrast_interface.h>
#include <KWaylandServer/display.h>
namespace KWin
{
static const QByteArray s_contrastAtomName = QByteArrayLiteral("_KDE_NET_WM_BACKGROUND_CONTRAST_REGION");
ContrastEffect::ContrastEffect()
{
shader = ContrastShader::create();
reconfigure(ReconfigureAll);
// ### Hackish way to announce support.
// Should be included in _NET_SUPPORTED instead.
if (shader && shader->isValid()) {
net_wm_contrast_region = effects->announceSupportProperty(s_contrastAtomName, this);
KWaylandServer::Display *display = effects->waylandDisplay();
if (display) {
m_contrastManager = new KWaylandServer::ContrastManagerInterface(display, this);
}
} else {
net_wm_contrast_region = 0;
}
connect(effects, &EffectsHandler::windowAdded, this, &ContrastEffect::slotWindowAdded);
connect(effects, &EffectsHandler::windowDeleted, this, &ContrastEffect::slotWindowDeleted);
connect(effects, &EffectsHandler::propertyNotify, this, &ContrastEffect::slotPropertyNotify);
connect(effects, &EffectsHandler::screenGeometryChanged, this, &ContrastEffect::slotScreenGeometryChanged);
connect(effects, &EffectsHandler::xcbConnectionChanged, this,
[this] {
if (shader && shader->isValid()) {
net_wm_contrast_region = effects->announceSupportProperty(s_contrastAtomName, this);
}
}
);
// Fetch the contrast regions for all windows
for (EffectWindow *window: effects->stackingOrder()) {
updateContrastRegion(window);
}
}
ContrastEffect::~ContrastEffect()
{
delete shader;
}
void ContrastEffect::slotScreenGeometryChanged()
{
effects->makeOpenGLContextCurrent();
if (!supported()) {
effects->reloadEffect(this);
return;
}
for (EffectWindow *window: effects->stackingOrder()) {
updateContrastRegion(window);
}
}
void ContrastEffect::reconfigure(ReconfigureFlags flags)
{
Q_UNUSED(flags)
if (shader)
shader->init();
if (!shader || !shader->isValid()) {
effects->removeSupportProperty(s_contrastAtomName, this);
delete m_contrastManager;
m_contrastManager = nullptr;
}
}
void ContrastEffect::updateContrastRegion(EffectWindow *w)
{
QRegion region;
float colorTransform[16];
QByteArray value;
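// The X11 property payload is a list of rectangles, each encoded as four
// 32-bit cardinals (x, y, width, height), followed by 16 floats that form a
// row-major 4x4 color transformation matrix; the size check below enforces
// exactly that layout. A client would typically set it with
// xcb_change_property(), using this atom as both the property and its type.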
if (net_wm_contrast_region != XCB_ATOM_NONE) {
value = w->readProperty(net_wm_contrast_region, net_wm_contrast_region, 32);
if (value.size() > 0 && !((value.size() - (16 * sizeof(uint32_t))) % ((4 * sizeof(uint32_t))))) {
const uint32_t *cardinals = reinterpret_cast<const uint32_t*>(value.constData());
const float *floatCardinals = reinterpret_cast<const float*>(value.constData());
unsigned int i = 0;
for (; i < ((value.size() - (16 * sizeof(uint32_t)))) / sizeof(uint32_t);) {
int x = cardinals[i++];
int y = cardinals[i++];
int w = cardinals[i++];
int h = cardinals[i++];
region += QRect(x, y, w, h);
}
for (unsigned int j = 0; j < 16; ++j) {
colorTransform[j] = floatCardinals[i + j];
}
QMatrix4x4 colorMatrix(colorTransform);
m_colorMatrices[w] = colorMatrix;
}
}
KWaylandServer::SurfaceInterface *surf = w->surface();
if (surf && surf->contrast()) {
region = surf->contrast()->region();
m_colorMatrices[w] = colorMatrix(surf->contrast()->contrast(), surf->contrast()->intensity(), surf->contrast()->saturation());
}
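// On Wayland the region and the contrast/intensity/saturation values arrive
// through the org_kde_kwin_contrast protocol exposed by ContrastInterface;
// clients typically request this via KWindowEffects::enableBackgroundContrast()
// from KWindowSystem (mentioned for context only, nothing here depends on it).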
if (auto internal = w->internalWindow()) {
const auto property = internal->property("kwin_background_region");
if (property.isValid()) {
region = property.value<QRegion>();
bool ok = false;
qreal contrast = internal->property("kwin_background_contrast").toReal(&ok);
if (!ok) {
contrast = 1.0;
}
qreal intensity = internal->property("kwin_background_intensity").toReal(&ok);
if (!ok) {
intensity = 1.0;
}
qreal saturation = internal->property("kwin_background_saturation").toReal(&ok);
if (!ok) {
saturation = 1.0;
}
m_colorMatrices[w] = colorMatrix(contrast, intensity, saturation);
}
}
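// For reference, an internal window opts in by setting these dynamic
// properties on its QWindow, roughly like this (illustrative values):
//     window->setProperty("kwin_background_region", QVariant::fromValue(QRegion(0, 0, 200, 100)));
//     window->setProperty("kwin_background_contrast", 0.4);
//     window->setProperty("kwin_background_intensity", 1.0);
//     window->setProperty("kwin_background_saturation", 1.7);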
// An empty region combined with a non-null value (X11 case) or a valid
// surf->contrast() (Wayland case) means the whole window.
if (region.isEmpty() && (!value.isNull() || (surf && surf->contrast()))) {
// Set the data to a dummy value.
// This is needed to be able to distinguish between the value not
// being set, and being set to an empty region.
w->setData(WindowBackgroundContrastRole, 1);
} else {
w->setData(WindowBackgroundContrastRole, region);
}
}
void ContrastEffect::slotWindowAdded(EffectWindow *w)
{
KWaylandServer::SurfaceInterface *surf = w->surface();
if (surf) {
m_contrastChangedConnections[w] = connect(surf, &KWaylandServer::SurfaceInterface::contrastChanged, this, [this, w] () {
if (w) {
updateContrastRegion(w);
}
});
}
if (auto internal = w->internalWindow()) {
internal->installEventFilter(this);
}
updateContrastRegion(w);
}
bool ContrastEffect::eventFilter(QObject *watched, QEvent *event)
{
auto internal = qobject_cast<QWindow*>(watched);
if (internal && event->type() == QEvent::DynamicPropertyChange) {
QDynamicPropertyChangeEvent *pe = static_cast<QDynamicPropertyChangeEvent*>(event);
if (pe->propertyName() == "kwin_background_region" ||
pe->propertyName() == "kwin_background_contrast" ||
pe->propertyName() == "kwin_background_intensity" ||
pe->propertyName() == "kwin_background_saturation") {
if (auto w = effects->findWindow(internal)) {
updateContrastRegion(w);
}
}
}
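// never consume the event; returning false lets it reach the window's
// normal event handling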
return false;
}
void ContrastEffect::slotWindowDeleted(EffectWindow *w)
{
if (m_contrastChangedConnections.contains(w)) {
disconnect(m_contrastChangedConnections[w]);
m_contrastChangedConnections.remove(w);
m_colorMatrices.remove(w);
}
}
void ContrastEffect::slotPropertyNotify(EffectWindow *w, long atom)
{
if (w && atom == net_wm_contrast_region && net_wm_contrast_region != XCB_ATOM_NONE) {
updateContrastRegion(w);
}
}
QMatrix4x4 ContrastEffect::colorMatrix(qreal contrast, qreal intensity, qreal saturation)
{
QMatrix4x4 satMatrix; //saturation
QMatrix4x4 intMatrix; //intensity
QMatrix4x4 contMatrix; //contrast
//Saturation matrix
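// The weights below are the Rec. 709 luma coefficients, so desaturating
// blends each channel toward the pixel's perceived luminance.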
if (!qFuzzyCompare(saturation, 1.0)) {
const qreal rval = (1.0 - saturation) * .2126;
const qreal gval = (1.0 - saturation) * .7152;
const qreal bval = (1.0 - saturation) * .0722;
satMatrix = QMatrix4x4(rval + saturation, rval, rval, 0.0,
gval, gval + saturation, gval, 0.0,
bval, bval, bval + saturation, 0.0,
0, 0, 0, 1.0);
}
//IntensityMatrix
if (!qFuzzyCompare(intensity, 1.0)) {
intMatrix.scale(intensity, intensity, intensity);
}
//Contrast Matrix
if (!qFuzzyCompare(contrast, 1.0)) {
const float transl = (1.0 - contrast) / 2.0;
contMatrix = QMatrix4x4(contrast, 0, 0, 0.0,
0, contrast, 0, 0.0,
0, 0, contrast, 0.0,
transl, transl, transl, 1.0);
}
QMatrix4x4 colorMatrix = contMatrix * satMatrix * intMatrix;
//colorMatrix = colorMatrix.transposed();
return colorMatrix;
}
bool ContrastEffect::enabledByDefault()
{
GLPlatform *gl = GLPlatform::instance();
if (gl->isIntel() && gl->chipClass() < SandyBridge)
return false;
if (gl->isSoftwareEmulation()) {
return false;
}
return true;
}
bool ContrastEffect::supported()
{
bool supported = effects->isOpenGLCompositing() && GLRenderTarget::supported();
if (supported) {
int maxTexSize;
glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTexSize);
const QSize screenSize = effects->virtualScreenSize();
if (screenSize.width() > maxTexSize || screenSize.height() > maxTexSize)
supported = false;
}
return supported;
}
QRegion ContrastEffect::contrastRegion(const EffectWindow *w) const
{
QRegion region;
const QVariant value = w->data(WindowBackgroundContrastRole);
if (value.isValid()) {
const QRegion appRegion = qvariant_cast<QRegion>(value);
if (!appRegion.isEmpty()) {
region |= appRegion.translated(w->contentsRect().topLeft()) &
w->decorationInnerRect();
} else {
// An empty region means that the background contrast effect should be
// enabled for the whole window.
region = w->decorationInnerRect();
}
}
return region;
}
void ContrastEffect::uploadRegion(QVector2D *&map, const QRegion &region)
{
for (const QRect &r : region) {
const QVector2D topLeft(r.x(), r.y());
const QVector2D topRight(r.x() + r.width(), r.y());
const QVector2D bottomLeft(r.x(), r.y() + r.height());
const QVector2D bottomRight(r.x() + r.width(), r.y() + r.height());
// First triangle
*(map++) = topRight;
*(map++) = topLeft;
*(map++) = bottomLeft;
// Second triangle
*(map++) = bottomLeft;
*(map++) = bottomRight;
*(map++) = topRight;
}
}
void ContrastEffect::uploadGeometry(GLVertexBuffer *vbo, const QRegion &region)
{
const int vertexCount = region.rectCount() * 6;
if (!vertexCount)
return;
QVector2D *map = (QVector2D *) vbo->map(vertexCount * sizeof(QVector2D));
uploadRegion(map, region);
vbo->unmap();
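// Both attributes reference the same interleaved data: the vertex positions
// double as texture coordinates, and the shader's texture matrix later maps
// them from screen space into the scratch texture's space.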
const GLVertexAttrib layout[] = {
{ VA_Position, 2, GL_FLOAT, 0 },
{ VA_TexCoord, 2, GL_FLOAT, 0 }
};
vbo->setAttribLayout(layout, 2, sizeof(QVector2D));
}
void ContrastEffect::prePaintScreen(ScreenPrePaintData &data, std::chrono::milliseconds presentTime)
{
m_paintedArea = QRegion();
m_currentContrast = QRegion();
effects->prePaintScreen(data, presentTime);
}
void ContrastEffect::prePaintWindow(EffectWindow* w, WindowPrePaintData& data, std::chrono::milliseconds presentTime)
{
// this effect relies on prePaintWindow being called in the bottom to top order
effects->prePaintWindow(w, data, presentTime);
if (!w->isPaintingEnabled()) {
return;
}
if (!shader || !shader->isValid()) {
return;
}
const QRegion oldPaint = data.paint;
// we don't have to apply the contrast to a region we don't see
m_currentContrast -= data.clip;
// if we have to paint a non-opaque part of this window that intersects with the
// current contrast region (which is not cached) we have to redraw the whole region
if ((data.paint-data.clip).intersects(m_currentContrast)) {
data.paint |= m_currentContrast;
}
// in case this window has regions to be blurred
const QRect screen = effects->virtualScreenGeometry();
const QRegion contrastArea = contrastRegion(w).translated(w->pos()) & screen;
// we are not caching the window
// if this window or a window underneath the modified area is painted again we have to
// do everything
if (m_paintedArea.intersects(contrastArea) || data.paint.intersects(contrastArea)) {
data.paint |= contrastArea;
// we have to check again that we do not damage a contrast area
// of a window we do not cache
if (contrastArea.intersects(m_currentContrast)) {
data.paint |= m_currentContrast;
}
}
m_currentContrast |= contrastArea;
// m_paintedArea keeps track of all repainted areas
m_paintedArea -= data.clip;
m_paintedArea |= data.paint;
}
bool ContrastEffect::shouldContrast(const EffectWindow *w, int mask, const WindowPaintData &data) const
{
if (!shader || !shader->isValid())
return false;
if (effects->activeFullScreenEffect() && !w->data(WindowForceBackgroundContrastRole).toBool())
return false;
if (w->isDesktop())
return false;
bool scaled = !qFuzzyCompare(data.xScale(), 1.0) && !qFuzzyCompare(data.yScale(), 1.0);
bool translated = data.xTranslation() || data.yTranslation();
if ((scaled || (translated || (mask & PAINT_WINDOW_TRANSFORMED))) && !w->data(WindowForceBackgroundContrastRole).toBool())
return false;
if (!w->hasAlpha())
return false;
return true;
}
void ContrastEffect::drawWindow(EffectWindow *w, int mask, const QRegion &region, WindowPaintData &data)
{
const QRect screen = GLRenderTarget::virtualScreenGeometry();
if (shouldContrast(w, mask, data)) {
QRegion shape = region & contrastRegion(w).translated(w->pos()) & screen;
// let's do the evil parts - someone wants the contrast applied behind a transformed window
const bool translated = data.xTranslation() || data.yTranslation();
const bool scaled = data.xScale() != 1 || data.yScale() != 1;
if (scaled) {
QPoint pt = shape.boundingRect().topLeft();
QRegion scaledShape;
for (QRect r : shape) {
r.moveTo(pt.x() + (r.x() - pt.x()) * data.xScale() + data.xTranslation(),
pt.y() + (r.y() - pt.y()) * data.yScale() + data.yTranslation());
r.setWidth(r.width() * data.xScale());
r.setHeight(r.height() * data.yScale());
scaledShape |= r;
}
shape = scaledShape & region;
//Only translated, not scaled
} else if (translated) {
shape = shape.translated(data.xTranslation(), data.yTranslation());
shape = shape & region;
}
if (!shape.isEmpty()) {
doContrast(w, shape, screen, data.opacity(), data.screenProjectionMatrix());
}
}
// Draw the window over the contrast area
effects->drawWindow(w, mask, region, data);
}
void ContrastEffect::paintEffectFrame(EffectFrame *frame, const QRegion &region, double opacity, double frameOpacity)
{
//FIXME: this is a no-op for now, it should figure out the right contrast, intensity, saturation
effects->paintEffectFrame(frame, region, opacity, frameOpacity);
}
void ContrastEffect::doContrast(EffectWindow *w, const QRegion& shape, const QRect& screen, const float opacity, const QMatrix4x4 &screenProjection)
{
const QRegion actualShape = shape & screen;
const QRect r = actualShape.boundingRect();
qreal scale = GLRenderTarget::virtualScreenScale();
// Upload the geometry of the affected region
GLVertexBuffer *vbo = GLVertexBuffer::streamingBuffer();
vbo->reset();
uploadGeometry(vbo, actualShape);
vbo->bindArrays();
// Create a scratch texture and copy the area of the back buffer that we're
// going to modify into it
GLTexture scratch(GL_RGBA8, r.width() * scale, r.height() * scale);
scratch.setFilter(GL_LINEAR);
scratch.setWrapMode(GL_CLAMP_TO_EDGE);
scratch.bind();
const QRect sg = GLRenderTarget::virtualScreenGeometry();
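// The source y coordinate is flipped because the back buffer's origin is the
// bottom-left corner, while the screen geometry uses a top-left origin.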
glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, (r.x() - sg.x()) * scale, (sg.height() - (r.y() - sg.y() + r.height())) * scale,
scratch.width(), scratch.height());
// Draw the scratch texture back onto the screen with the color matrix applied
shader->setColorMatrix(m_colorMatrices.value(w));
shader->bind();
shader->setOpacity(opacity);
// Set up the texture matrix to transform from screen coordinates
// to texture coordinates.
QMatrix4x4 textureMatrix;
textureMatrix.scale(1.0 / r.width(), -1.0 / r.height(), 1);
textureMatrix.translate(-r.x(), -r.height() - r.y(), 0);
shader->setTextureMatrix(textureMatrix);
shader->setModelViewProjectionMatrix(screenProjection);
// Blend with the existing content when the window itself is translucent
// (paired with the glDisable(GL_BLEND) below)
if (opacity < 1.0) {
glEnable(GL_BLEND);
glBlendColor(0, 0, 0, opacity);
glBlendFunc(GL_CONSTANT_ALPHA, GL_ONE_MINUS_CONSTANT_ALPHA);
}
vbo->draw(GL_TRIANGLES, 0, actualShape.rectCount() * 6);
scratch.unbind();
scratch.discard();
vbo->unbindArrays();
if (opacity < 1.0) {
glDisable(GL_BLEND);
}
shader->unbind();
}
bool ContrastEffect::isActive() const
{
return !effects->isScreenLocked();
}
bool ContrastEffect::blocksDirectScanout() const
{
return false;
}
} // namespace KWin