// kwin/effects/lookingglass/lookingglass.cpp

/*
    KWin - the KDE window manager
    This file is part of the KDE project.

    SPDX-FileCopyrightText: 2007 Rivo Laks <rivolaks@hot.ee>
    SPDX-FileCopyrightText: 2007 Christian Nitschkowski <christian.nitschkowski@kdemail.net>

    SPDX-License-Identifier: GPL-2.0-or-later
*/
#include "lookingglass.h"
// KConfigSkeleton
#include "lookingglassconfig.h"
#include <QAction>
#include <kwinglutils.h>
#include <kwinglplatform.h>
#include <KStandardAction>
#include <KGlobalAccel>
#include <KLocalizedString>
#include <QVector2D>
#include <QFile>
#include <kmessagebox.h>
#include <cmath>
namespace KWin
{
LookingGlassEffect::LookingGlassEffect()
    : zoom(1.0f)
    , target_zoom(1.0f)
    , polling(false)
    , m_texture(nullptr)
    , m_fbo(nullptr)
    , m_vbo(nullptr)
    , m_shader(nullptr)
    , m_lastPresentTime(std::chrono::milliseconds::zero())
    , m_enabled(false)
    , m_valid(false)
{
    initConfig<LookingGlassConfig>();
    QAction* a;
    a = KStandardAction::zoomIn(this, SLOT(zoomIn()), this);
    KGlobalAccel::self()->setDefaultShortcut(a, QList<QKeySequence>() << Qt::META + Qt::Key_Equal);
    KGlobalAccel::self()->setShortcut(a, QList<QKeySequence>() << Qt::META + Qt::Key_Equal);
    effects->registerGlobalShortcut(Qt::META + Qt::Key_Equal, a);

    a = KStandardAction::zoomOut(this, SLOT(zoomOut()), this);
    KGlobalAccel::self()->setDefaultShortcut(a, QList<QKeySequence>() << Qt::META + Qt::Key_Minus);
    KGlobalAccel::self()->setShortcut(a, QList<QKeySequence>() << Qt::META + Qt::Key_Minus);
    effects->registerGlobalShortcut(Qt::META + Qt::Key_Minus, a);

    a = KStandardAction::actualSize(this, SLOT(toggle()), this);
    KGlobalAccel::self()->setDefaultShortcut(a, QList<QKeySequence>() << Qt::META + Qt::Key_0);
    KGlobalAccel::self()->setShortcut(a, QList<QKeySequence>() << Qt::META + Qt::Key_0);
    effects->registerGlobalShortcut(Qt::META + Qt::Key_0, a);

    connect(effects, &EffectsHandler::mouseChanged, this, &LookingGlassEffect::slotMouseChanged);
    connect(effects, &EffectsHandler::windowDamaged, this, &LookingGlassEffect::slotWindowDamaged);

    reconfigure(ReconfigureAll);
}
LookingGlassEffect::~LookingGlassEffect()
{
    delete m_texture;
    delete m_fbo;
    delete m_shader;
    delete m_vbo;
}
bool LookingGlassEffect::supported()
{
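    // Requires the OpenGL 2 backend and unrestricted non-power-of-two texture
    // support, since the screen-sized offscreen texture below is mipmapped.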
    return effects->compositingType() == OpenGL2Compositing && !GLPlatform::instance()->supports(LimitedNPOT);
}
void LookingGlassEffect::reconfigure(ReconfigureFlags)
{
    LookingGlassConfig::self()->read();
    initialradius = LookingGlassConfig::radius();
    radius = initialradius;
    qCDebug(KWINEFFECTS) << "Radius from config:" << radius;
    m_valid = loadData();
}
bool LookingGlassEffect::loadData()
{
    const QSize screenSize = effects->virtualScreenSize();
    int texw = screenSize.width();
    int texh = screenSize.height();

    // Create texture and render target
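    // Allocate a full mipmap chain for the smaller screen dimension; the
    // texture is sampled with GL_LINEAR_MIPMAP_LINEAR, so every level must exist.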
    const int levels = std::log2(qMin(texw, texh)) + 1;
    m_texture = new GLTexture(GL_RGBA8, texw, texh, levels);
    m_texture->setFilter(GL_LINEAR_MIPMAP_LINEAR);
    m_texture->setWrapMode(GL_CLAMP_TO_EDGE);

    m_fbo = new GLRenderTarget(*m_texture);
    if (!m_fbo->valid()) {
        return false;
    }
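    // The fragment shader magnifies the area around the cursor; its uniforms
    // (u_zoom, u_radius, u_cursor) are updated every frame in paintScreen().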
    m_shader = ShaderManager::instance()->generateShaderFromResources(ShaderTrait::MapTexture, QString(), QStringLiteral("lookingglass.frag"));
    if (m_shader->isValid()) {
        ShaderBinder binder(m_shader);
        m_shader->setUniform("u_textureSize", QVector2D(screenSize.width(), screenSize.height()));
    } else {
        qCCritical(KWINEFFECTS) << "The shader failed to load!";
        return false;
    }

    m_vbo = new GLVertexBuffer(GLVertexBuffer::Static);
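    // Full-screen quad built from two triangles; the texture coordinates match
    // the vertex positions in screen (pixel) space.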
    QVector<float> verts;
    QVector<float> texcoords;
    texcoords << screenSize.width() << 0.0;
    verts << screenSize.width() << 0.0;
    texcoords << 0.0 << 0.0;
    verts << 0.0 << 0.0;
    texcoords << 0.0 << screenSize.height();
    verts << 0.0 << screenSize.height();
    texcoords << 0.0 << screenSize.height();
    verts << 0.0 << screenSize.height();
    texcoords << screenSize.width() << screenSize.height();
    verts << screenSize.width() << screenSize.height();
    texcoords << screenSize.width() << 0.0;
    verts << screenSize.width() << 0.0;
    m_vbo->setData(6, 2, verts.constData(), texcoords.constData());
    return true;
}
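
// Bound to the "actual size" standard shortcut: toggles between 1x and 2x
// magnification and starts/stops mouse polling so the lens can follow the cursor.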
void LookingGlassEffect::toggle()
{
    if (target_zoom == 1.0f) {
        target_zoom = 2.0f;
        if (!polling) {
            polling = true;
            effects->startMousePolling();
        }
        m_enabled = true;
    } else {
        target_zoom = 1.0f;
        if (polling) {
            polling = false;
            effects->stopMousePolling();
        }
        if (zoom == target_zoom) {
            m_enabled = false;
        }
    }
    effects->addRepaint(cursorPos().x() - radius, cursorPos().y() - radius, 2 * radius, 2 * radius);
}
void LookingGlassEffect::zoomIn()
{
    target_zoom = qMin(7.0, target_zoom + 0.5);
    m_enabled = true;
    if (!polling) {
        polling = true;
        effects->startMousePolling();
    }
    effects->addRepaint(magnifierArea());
}
void LookingGlassEffect::zoomOut()
{
    target_zoom -= 0.5;
    if (target_zoom < 1) {
        target_zoom = 1;
        if (polling) {
            polling = false;
            effects->stopMousePolling();
        }
        if (zoom == target_zoom) {
            m_enabled = false;
        }
    }
    effects->addRepaint(magnifierArea());
}
QRect LookingGlassEffect::magnifierArea() const
{
    return QRect(cursorPos().x() - radius, cursorPos().y() - radius, 2 * radius, 2 * radius);
}
void LookingGlassEffect::prePaintScreen(ScreenPrePaintData& data, std::chrono::milliseconds presentTime)
{
    const int time = m_lastPresentTime.count() ? (presentTime - m_lastPresentTime).count() : 0;
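    // Advance the zoom animation by the time elapsed since the last presented
    // frame (zero on the first frame of an animation).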
    if (zoom != target_zoom) {
        double diff = time / animationTime(500.0);
        if (target_zoom > zoom)
            zoom = qMin(zoom * qMax(1.0 + diff, 1.2), target_zoom);
        else
            zoom = qMax(zoom * qMin(1.0 - diff, 0.8), target_zoom);
        qCDebug(KWINEFFECTS) << "zoom is now " << zoom;
        radius = qBound((double)initialradius, initialradius * zoom, 3.5 * initialradius);
        if (zoom <= 1.0f) {
            m_enabled = false;
        }
        effects->addRepaint(cursorPos().x() - radius, cursorPos().y() - radius, 2 * radius, 2 * radius);
    }
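    // Remember the presentation timestamp only while the animation is running;
    // otherwise reset it so the next animation does not see a huge time delta.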
    if (zoom != target_zoom) {
        m_lastPresentTime = presentTime;
    } else {
        m_lastPresentTime = std::chrono::milliseconds::zero();
    }

    if (m_valid && m_enabled) {
        data.mask |= PAINT_SCREEN_WITH_TRANSFORMED_WINDOWS;
        // Start rendering to texture
        GLRenderTarget::pushRenderTarget(m_fbo);
    }
    effects->prePaintScreen(data, presentTime);
}
void LookingGlassEffect::slotMouseChanged(const QPoint& pos, const QPoint& old, Qt::MouseButtons,
                                          Qt::MouseButtons, Qt::KeyboardModifiers, Qt::KeyboardModifiers)
{
    if (pos != old && m_enabled) {
        effects->addRepaint(pos.x() - radius, pos.y() - radius, 2 * radius, 2 * radius);
        effects->addRepaint(old.x() - radius, old.y() - radius, 2 * radius, 2 * radius);
    }
}
void LookingGlassEffect::slotWindowDamaged()
{
    if (isActive()) {
        effects->addRepaint(magnifierArea());
    }
}
void LookingGlassEffect::paintScreen(int mask, const QRegion &region, ScreenPaintData &data)
{
    // Call the next effect.
    effects->paintScreen(mask, region, data);
    if (m_valid && m_enabled) {
        // Disable render texture
        GLRenderTarget* target = GLRenderTarget::popRenderTarget();
        Q_ASSERT(target == m_fbo);
        Q_UNUSED(target);
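        // The scene was just rendered into m_texture through the FBO;
        // rebuild its mipmap chain before the shader samples it.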
        m_texture->bind();
        m_texture->generateMipmaps();

        // Use the shader
        ShaderBinder binder(m_shader);
        m_shader->setUniform("u_zoom", (float)zoom);
        m_shader->setUniform("u_radius", (float)radius);
        m_shader->setUniform("u_cursor", QVector2D(cursorPos().x(), cursorPos().y()));
        m_shader->setUniform(GLShader::ModelViewProjectionMatrix, data.projectionMatrix());
        m_vbo->render(GL_TRIANGLES);
        m_texture->unbind();
    }
}
bool LookingGlassEffect::isActive() const
{
    return m_valid && m_enabled;
}
} // namespace