// kwin/effects/snaphelper/snaphelper.cpp
/*
    KWin - the KDE window manager
    This file is part of the KDE project.

    SPDX-FileCopyrightText: 2009 Lucas Murray <lmurray@undefinedfire.com>
    SPDX-FileCopyrightText: 2018 Vlad Zahorodnii <vlad.zahorodnii@kde.org>

    SPDX-License-Identifier: GPL-2.0-or-later
*/
#include "snaphelper.h"

#include <kwinglutils.h>

#ifdef KWIN_HAVE_XRENDER_COMPOSITING
#include <kwinxrenderutils.h>
#include <xcb/render.h>
#endif

#include <QPainter>

namespace KWin
{

// Thickness, in pixels, of the guide lines (crosshair and window outline).
static const int s_lineWidth = 4;
// Half-transparent gray used for all guide lines.
static const QColor s_lineColor = QColor(128, 128, 128, 128);
// Returns the region that must be repainted when the guide for a window
// with the given frame rect is shown, moved, or hidden. The region covers
// the center crosshair of every screen plus the centered window outline,
// each padded by one pixel for anti-aliasing slack.
static QRegion computeDirtyRegion(const QRect &windowRect)
{
    // Half the line width on every side: outline strokes are drawn centered
    // on the window edges, so they extend this far beyond/inside them.
    const QMargins outlineMargins(s_lineWidth / 2, s_lineWidth / 2,
                                  s_lineWidth / 2, s_lineWidth / 2);

    QRegion dirtyRegion;
    for (int screen = 0; screen < effects->numScreens(); ++screen) {
        const QRect screenRect = effects->clientArea(ScreenArea, screen, 0);

        QRect centeredWindowRect = windowRect;
        centeredWindowRect.moveCenter(screenRect.center());

        // Crosshair: vertical center line of the screen.
        QRect verticalBarRect(0, 0, s_lineWidth, screenRect.height());
        verticalBarRect.moveCenter(screenRect.center());
        verticalBarRect.adjust(-1, -1, 1, 1);
        dirtyRegion += verticalBarRect;

        // Crosshair: horizontal center line of the screen.
        QRect horizontalBarRect(0, 0, screenRect.width(), s_lineWidth);
        horizontalBarRect.moveCenter(screenRect.center());
        horizontalBarRect.adjust(-1, -1, 1, 1);
        dirtyRegion += horizontalBarRect;

        // Window outline: the ring between the outer and the inner rect.
        const QRect outlineOuterRect =
            centeredWindowRect.marginsAdded(outlineMargins).adjusted(-1, -1, 1, 1);
        const QRect outlineInnerRect =
            centeredWindowRect.marginsRemoved(outlineMargins).adjusted(1, 1, -1, -1);
        dirtyRegion += QRegion(outlineOuterRect) - QRegion(outlineInnerRect);
    }

    return dirtyRegion;
}
// Sets up the effect: loads the configured animation duration and tracks
// window move/resize lifecycle events to show and hide the guide.
SnapHelperEffect::SnapHelperEffect()
{
    reconfigure(ReconfigureAll);

    connect(effects, &EffectsHandler::windowClosed, this, &SnapHelperEffect::slotWindowClosed);
    connect(effects, &EffectsHandler::windowStartUserMovedResized, this, &SnapHelperEffect::slotWindowStartUserMovedResized);
    connect(effects, &EffectsHandler::windowFinishUserMovedResized, this, &SnapHelperEffect::slotWindowFinishUserMovedResized);
    connect(effects, &EffectsHandler::windowFrameGeometryChanged, this, &SnapHelperEffect::slotWindowFrameGeometryChanged);
}
// Nothing to clean up; connections are severed automatically by QObject.
SnapHelperEffect::~SnapHelperEffect() = default;
// Re-reads configuration. Only the animation duration is configurable;
// animationTime() scales the 250 ms default by the global animation speed.
void SnapHelperEffect::reconfigure(ReconfigureFlags flags)
{
    Q_UNUSED(flags)

    const int duration = static_cast<int>(animationTime(250));
    m_animation.timeLine.setDuration(std::chrono::milliseconds(duration));
}
// NOTE: Effects are handed the expected presentation time of the frame
// rather than a frame-to-frame interval; each effect derives the delta
// itself (see the lastPresentTime bookkeeping in prePaintScreen()).
void SnapHelperEffect::prePaintScreen(ScreenPrePaintData &data, std::chrono::milliseconds presentTime)
2011-01-30 14:34:42 +00:00
{
Provide expected presentation time to effects Effects are given the interval between two consecutive frames. The main flaw of this approach is that if the Compositor transitions from the idle state to "active" state, i.e. when there is something to repaint, effects may see a very large interval between the last painted frame and the current. In order to address this issue, the Scene invalidates the timer that is used to measure time between consecutive frames before the Compositor is about to become idle. While this works perfectly fine with Xinerama-style rendering, with per screen rendering, determining whether the compositor is about to idle is rather a tedious task mostly because a single output can't be used for the test. Furthermore, since the Compositor schedules pointless repaints just to ensure that it's idle, it might take several attempts to figure out whether the scene timer must be invalidated if you use (true) per screen rendering. Ideally, all effects should use a timeline helper that is aware of the underlying render loop and its timings. However, this option is off the table because it will involve a lot of work to implement it. Alternative and much simpler option is to pass the expected presentation time to effects rather than time between consecutive frames. This means that effects are responsible for determining how much animation timelines have to be advanced. Typically, an effect would have to store the presentation timestamp provided in either prePaint{Screen,Window} and use it in the subsequent prePaint{Screen,Window} call to estimate the amount of time passed between the next and the last frames. Unfortunately, this is an API incompatible change. However, it shouldn't take a lot of work to port third-party binary effects, which don't use the AnimationEffect class, to the new API. On the bright side, we no longer need to be concerned about the Compositor getting idle. 
We do still try to determine whether the Compositor is about to idle, primarily, because the OpenGL render backend swaps buffers on present, but that will change with the ongoing compositing timing rework.
2020-11-20 15:44:04 +00:00
std::chrono::milliseconds delta = std::chrono::milliseconds::zero();
if (m_animation.lastPresentTime.count()) {
delta = (presentTime - m_animation.lastPresentTime);
}
m_animation.lastPresentTime = presentTime;
if (m_animation.active) {
Provide expected presentation time to effects Effects are given the interval between two consecutive frames. The main flaw of this approach is that if the Compositor transitions from the idle state to "active" state, i.e. when there is something to repaint, effects may see a very large interval between the last painted frame and the current. In order to address this issue, the Scene invalidates the timer that is used to measure time between consecutive frames before the Compositor is about to become idle. While this works perfectly fine with Xinerama-style rendering, with per screen rendering, determining whether the compositor is about to idle is rather a tedious task mostly because a single output can't be used for the test. Furthermore, since the Compositor schedules pointless repaints just to ensure that it's idle, it might take several attempts to figure out whether the scene timer must be invalidated if you use (true) per screen rendering. Ideally, all effects should use a timeline helper that is aware of the underlying render loop and its timings. However, this option is off the table because it will involve a lot of work to implement it. Alternative and much simpler option is to pass the expected presentation time to effects rather than time between consecutive frames. This means that effects are responsible for determining how much animation timelines have to be advanced. Typically, an effect would have to store the presentation timestamp provided in either prePaint{Screen,Window} and use it in the subsequent prePaint{Screen,Window} call to estimate the amount of time passed between the next and the last frames. Unfortunately, this is an API incompatible change. However, it shouldn't take a lot of work to port third-party binary effects, which don't use the AnimationEffect class, to the new API. On the bright side, we no longer need to be concerned about the Compositor getting idle. 
We do still try to determine whether the Compositor is about to idle, primarily, because the OpenGL render backend swaps buffers on present, but that will change with the ongoing compositing timing rework.
2020-11-20 15:44:04 +00:00
m_animation.timeLine.update(delta);
}
Provide expected presentation time to effects Effects are given the interval between two consecutive frames. The main flaw of this approach is that if the Compositor transitions from the idle state to "active" state, i.e. when there is something to repaint, effects may see a very large interval between the last painted frame and the current. In order to address this issue, the Scene invalidates the timer that is used to measure time between consecutive frames before the Compositor is about to become idle. While this works perfectly fine with Xinerama-style rendering, with per screen rendering, determining whether the compositor is about to idle is rather a tedious task mostly because a single output can't be used for the test. Furthermore, since the Compositor schedules pointless repaints just to ensure that it's idle, it might take several attempts to figure out whether the scene timer must be invalidated if you use (true) per screen rendering. Ideally, all effects should use a timeline helper that is aware of the underlying render loop and its timings. However, this option is off the table because it will involve a lot of work to implement it. Alternative and much simpler option is to pass the expected presentation time to effects rather than time between consecutive frames. This means that effects are responsible for determining how much animation timelines have to be advanced. Typically, an effect would have to store the presentation timestamp provided in either prePaint{Screen,Window} and use it in the subsequent prePaint{Screen,Window} call to estimate the amount of time passed between the next and the last frames. Unfortunately, this is an API incompatible change. However, it shouldn't take a lot of work to port third-party binary effects, which don't use the AnimationEffect class, to the new API. On the bright side, we no longer need to be concerned about the Compositor getting idle. 
We do still try to determine whether the Compositor is about to idle, primarily, because the OpenGL render backend swaps buffers on present, but that will change with the ongoing compositing timing rework.
2020-11-20 15:44:04 +00:00
effects->prePaintScreen(data, presentTime);
2011-01-30 14:34:42 +00:00
}
void SnapHelperEffect::paintScreen(int mask, const QRegion &region, ScreenPaintData &data)
2011-01-30 14:34:42 +00:00
{
effects->paintScreen(mask, region, data);
const qreal opacityFactor = m_animation.active
? m_animation.timeLine.value()
: 1.0;
// Display the guide
if (effects->isOpenGLCompositing()) {
GLVertexBuffer *vbo = GLVertexBuffer::streamingBuffer();
vbo->reset();
vbo->setUseColor(true);
ShaderBinder binder(ShaderTrait::UniformColor);
binder.shader()->setUniform(GLShader::ModelViewProjectionMatrix, data.projectionMatrix());
glEnable(GL_BLEND);
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
QColor color = s_lineColor;
color.setAlphaF(color.alphaF() * opacityFactor);
vbo->setColor(color);
glLineWidth(s_lineWidth);
QVector<float> verts;
verts.reserve(effects->numScreens() * 24);
for (int i = 0; i < effects->numScreens(); ++i) {
const QRect rect = effects->clientArea(ScreenArea, i, 0);
const int midX = rect.x() + rect.width() / 2;
const int midY = rect.y() + rect.height() / 2 ;
const int halfWidth = m_geometry.width() / 2;
const int halfHeight = m_geometry.height() / 2;
// Center vertical line.
verts << rect.x() + rect.width() / 2 << rect.y();
verts << rect.x() + rect.width() / 2 << rect.y() + rect.height();
// Center horizontal line.
verts << rect.x() << rect.y() + rect.height() / 2;
verts << rect.x() + rect.width() << rect.y() + rect.height() / 2;
// Top edge of the window outline.
verts << midX - halfWidth - s_lineWidth / 2 << midY - halfHeight;
verts << midX + halfWidth + s_lineWidth / 2 << midY - halfHeight;
// Right edge of the window outline.
verts << midX + halfWidth << midY - halfHeight + s_lineWidth / 2;
verts << midX + halfWidth << midY + halfHeight - s_lineWidth / 2;
// Bottom edge of the window outline.
verts << midX + halfWidth + s_lineWidth / 2 << midY + halfHeight;
verts << midX - halfWidth - s_lineWidth / 2 << midY + halfHeight;
// Left edge of the window outline.
verts << midX - halfWidth << midY + halfHeight - s_lineWidth / 2;
verts << midX - halfWidth << midY - halfHeight + s_lineWidth / 2;
2011-01-30 14:34:42 +00:00
}
vbo->setData(verts.count() / 2, 2, verts.data(), nullptr);
vbo->render(GL_LINES);
glDisable(GL_BLEND);
glLineWidth(1.0);
}
if (effects->compositingType() == XRenderCompositing) {
#ifdef KWIN_HAVE_XRENDER_COMPOSITING
for (int i = 0; i < effects->numScreens(); ++i) {
const QRect rect = effects->clientArea(ScreenArea, i, 0);
const int midX = rect.x() + rect.width() / 2;
const int midY = rect.y() + rect.height() / 2 ;
const int halfWidth = m_geometry.width() / 2;
const int halfHeight = m_geometry.height() / 2;
xcb_rectangle_t rects[6];
// Center vertical line.
rects[0].x = rect.x() + rect.width() / 2 - s_lineWidth / 2;
rects[0].y = rect.y();
rects[0].width = s_lineWidth;
rects[0].height = rect.height();
// Center horizontal line.
rects[1].x = rect.x();
rects[1].y = rect.y() + rect.height() / 2 - s_lineWidth / 2;
rects[1].width = rect.width();
rects[1].height = s_lineWidth;
// Top edge of the window outline.
rects[2].x = midX - halfWidth - s_lineWidth / 2;
rects[2].y = midY - halfHeight - s_lineWidth / 2;
rects[2].width = 2 * halfWidth + s_lineWidth;
rects[2].height = s_lineWidth;
// Right edge of the window outline.
rects[3].x = midX + halfWidth - s_lineWidth / 2;
rects[3].y = midY - halfHeight + s_lineWidth / 2;
rects[3].width = s_lineWidth;
rects[3].height = 2 * halfHeight - s_lineWidth;
// Bottom edge of the window outline.
rects[4].x = midX - halfWidth - s_lineWidth / 2;
rects[4].y = midY + halfHeight - s_lineWidth / 2;
rects[4].width = 2 * halfWidth + s_lineWidth;
rects[4].height = s_lineWidth;
// Left edge of the window outline.
rects[5].x = midX - halfWidth - s_lineWidth / 2;
rects[5].y = midY - halfHeight + s_lineWidth / 2;
rects[5].width = s_lineWidth;
rects[5].height = 2 * halfHeight - s_lineWidth;
QColor color = s_lineColor;
color.setAlphaF(color.alphaF() * opacityFactor);
xcb_render_fill_rectangles(xcbConnection(), XCB_RENDER_PICT_OP_OVER, effects->xrenderBufferPicture(),
preMultiply(color), 6, rects);
}
#endif
}
if (effects->compositingType() == QPainterCompositing) {
QPainter *painter = effects->scenePainter();
painter->save();
QColor color = s_lineColor;
color.setAlphaF(color.alphaF() * opacityFactor);
QPen pen(color);
pen.setWidth(s_lineWidth);
painter->setPen(pen);
painter->setBrush(Qt::NoBrush);
for (int i = 0; i < effects->numScreens(); ++i) {
const QRect rect = effects->clientArea(ScreenArea, i, 0);
// Center lines.
painter->drawLine(rect.center().x(), rect.y(), rect.center().x(), rect.y() + rect.height());
painter->drawLine(rect.x(), rect.center().y(), rect.x() + rect.width(), rect.center().y());
// Window outline.
QRect outlineRect(0, 0, m_geometry.width(), m_geometry.height());
outlineRect.moveCenter(rect.center());
painter->drawRect(outlineRect);
}
painter->restore();
}
2011-01-30 14:34:42 +00:00
}
void SnapHelperEffect::postPaintScreen()
2011-01-30 14:34:42 +00:00
{
if (m_animation.active) {
effects->addRepaint(computeDirtyRegion(m_geometry));
}
if (m_animation.timeLine.done()) {
m_animation.active = false;
Provide expected presentation time to effects Effects are given the interval between two consecutive frames. The main flaw of this approach is that if the Compositor transitions from the idle state to "active" state, i.e. when there is something to repaint, effects may see a very large interval between the last painted frame and the current. In order to address this issue, the Scene invalidates the timer that is used to measure time between consecutive frames before the Compositor is about to become idle. While this works perfectly fine with Xinerama-style rendering, with per screen rendering, determining whether the compositor is about to idle is rather a tedious task mostly because a single output can't be used for the test. Furthermore, since the Compositor schedules pointless repaints just to ensure that it's idle, it might take several attempts to figure out whether the scene timer must be invalidated if you use (true) per screen rendering. Ideally, all effects should use a timeline helper that is aware of the underlying render loop and its timings. However, this option is off the table because it will involve a lot of work to implement it. Alternative and much simpler option is to pass the expected presentation time to effects rather than time between consecutive frames. This means that effects are responsible for determining how much animation timelines have to be advanced. Typically, an effect would have to store the presentation timestamp provided in either prePaint{Screen,Window} and use it in the subsequent prePaint{Screen,Window} call to estimate the amount of time passed between the next and the last frames. Unfortunately, this is an API incompatible change. However, it shouldn't take a lot of work to port third-party binary effects, which don't use the AnimationEffect class, to the new API. On the bright side, we no longer need to be concerned about the Compositor getting idle. 
We do still try to determine whether the Compositor is about to idle, primarily, because the OpenGL render backend swaps buffers on present, but that will change with the ongoing compositing timing rework.
2020-11-20 15:44:04 +00:00
m_animation.lastPresentTime = std::chrono::milliseconds::zero();
}
effects->postPaintScreen();
}
// Starts fading the guide out when the tracked window is closed mid-move.
void SnapHelperEffect::slotWindowClosed(EffectWindow *w)
{
    if (w != m_window) {
        return;
    }
    m_window = nullptr;

    m_animation.active = true;
    m_animation.timeLine.setDirection(TimeLine::Backward);
    // Restart the timeline only if the previous run has completed; otherwise
    // the fade-out continues from the current fade-in position.
    if (m_animation.timeLine.done()) {
        m_animation.timeLine.reset();
    }

    effects->addRepaint(computeDirtyRegion(m_geometry));
}
// Shows the guide (fade in) when the user starts moving a movable window.
void SnapHelperEffect::slotWindowStartUserMovedResized(EffectWindow *w)
{
    if (!w->isMovable()) {
        return;
    }
    m_window = w;
    m_geometry = w->geometry();

    m_animation.active = true;
    m_animation.timeLine.setDirection(TimeLine::Forward);
    // Restart only from a finished state; an interrupted fade-out simply
    // reverses from its current position.
    if (m_animation.timeLine.done()) {
        m_animation.timeLine.reset();
    }

    effects->addRepaint(computeDirtyRegion(m_geometry));
}
// Hides the guide (fade out) when the user finishes moving the window.
void SnapHelperEffect::slotWindowFinishUserMovedResized(EffectWindow *w)
{
    if (w != m_window) {
        return;
    }
    m_window = nullptr;
    m_geometry = w->geometry();

    m_animation.active = true;
    m_animation.timeLine.setDirection(TimeLine::Backward);
    // Reverse from the current position if the fade-in is still running.
    if (m_animation.timeLine.done()) {
        m_animation.timeLine.reset();
    }

    effects->addRepaint(computeDirtyRegion(m_geometry));
}
// Tracks geometry changes of the moved window. The region of the old
// outline is repainted explicitly; the new one is covered by the repaints
// scheduled in postPaintScreen() while the animation is active.
void SnapHelperEffect::slotWindowFrameGeometryChanged(EffectWindow *w, const QRect &old)
{
    if (w != m_window) {
        return;
    }

    m_geometry = w->geometry();
    effects->addRepaint(computeDirtyRegion(old));
}
// The effect participates in painting while a window is being moved or
// while the show/hide animation is still running.
bool SnapHelperEffect::isActive() const
{
    if (m_window) {
        return true;
    }
    return m_animation.active;
}
} // namespace KWin