/*
    KWin - the KDE window manager
    This file is part of the KDE project.

    SPDX-FileCopyrightText: 2012 Filip Wieladek <wattos@gmail.com>
    SPDX-FileCopyrightText: 2016 Martin Gräßlin <mgraesslin@kde.org>

    SPDX-License-Identifier: GPL-2.0-or-later
*/

#include "touchpoints.h"
|
|
|
|
|
|
|
|
#include <QAction>
|
|
|
|
#include <kwinglutils.h>
|
|
|
|
|
|
|
|
#ifdef KWIN_HAVE_XRENDER_COMPOSITING
|
|
|
|
#include <kwinxrenderutils.h>
|
|
|
|
#include <xcb/xcb.h>
|
|
|
|
#include <xcb/render.h>
|
|
|
|
#endif
|
|
|
|
|
|
|
|
#include <KConfigGroup>
|
|
|
|
#include <KGlobalAccel>
|
|
|
|
|
|
|
|
#include <QPainter>
|
|
|
|
|
2019-07-09 19:19:26 +00:00
|
|
|
#include <cmath>
namespace KWin
{

TouchPointsEffect::TouchPointsEffect()
    : Effect()
{
}

TouchPointsEffect::~TouchPointsEffect() = default;

static const Qt::GlobalColor s_colors[] = {
    Qt::blue,
    Qt::red,
    Qt::green,
    Qt::cyan,
    Qt::magenta,
    Qt::yellow,
    Qt::gray,
    Qt::darkBlue,
    Qt::darkRed,
    Qt::darkGreen
};

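// Assigns each new touch id the next entry of s_colors in round-robin
// order; the modulo below is tied to the size of the table above
// (currently 10 entries).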
Qt::GlobalColor TouchPointsEffect::colorForId(quint32 id)
{
    auto it = m_colors.constFind(id);
    if (it != m_colors.constEnd()) {
        return it.value();
    }
    static int s_colorIndex = -1;
    s_colorIndex = (s_colorIndex + 1) % 10;
    m_colors.insert(id, s_colors[s_colorIndex]);
    return s_colors[s_colorIndex];
}

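// The touch event handlers below record one TouchPoint per event and
// remember the latest position of every active touch id so that
// touchUp() knows where to place the release animation. All handlers
// return false, so the events are merely observed and are not filtered
// away from their normal processing.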
bool TouchPointsEffect::touchDown(qint32 id, const QPointF &pos, quint32 time)
{
    Q_UNUSED(time)
    TouchPoint point;
    point.pos = pos;
    point.press = true;
    point.color = colorForId(id);
    m_points << point;
    m_latestPositions.insert(id, pos);
    repaint();
    return false;
}

bool TouchPointsEffect::touchMotion(qint32 id, const QPointF &pos, quint32 time)
{
    Q_UNUSED(time)
    TouchPoint point;
    point.pos = pos;
    point.press = true;
    point.color = colorForId(id);
    m_points << point;
    m_latestPositions.insert(id, pos);
    repaint();
    return false;
}

bool TouchPointsEffect::touchUp(qint32 id, quint32 time)
{
    Q_UNUSED(time)
    auto it = m_latestPositions.constFind(id);
    if (it != m_latestPositions.constEnd()) {
        TouchPoint point;
        point.pos = it.value();
        point.press = false;
        point.color = colorForId(id);
        m_points << point;
    }
    return false;
}

// Effects are no longer told the interval between two consecutive frames;
// they receive the expected presentation time of the frame being painted
// and derive the elapsed time themselves (here via m_lastPresentTime).
// This avoids a single huge interval when the compositor wakes up from
// being idle.
void TouchPointsEffect::prePaintScreen(ScreenPrePaintData &data, std::chrono::milliseconds presentTime)
{
    int time = 0;
    if (m_lastPresentTime.count()) {
        time = (presentTime - m_lastPresentTime).count();
    }

    // Advance every point's age and drop points whose rings have fully
    // faded out.
    auto it = m_points.begin();
    while (it != m_points.end()) {
        it->time += time;
        if (it->time > m_ringLife) {
            it = m_points.erase(it);
        } else {
            it++;
        }
    }

    if (m_points.isEmpty()) {
        m_lastPresentTime = std::chrono::milliseconds::zero();
    } else {
        m_lastPresentTime = presentTime;
    }

    effects->prePaintScreen(data, presentTime);
}

void TouchPointsEffect::paintScreen(int mask, const QRegion &region, ScreenPaintData &data)
{
    effects->paintScreen(mask, region, data);

    paintScreenSetup(mask, region, data);
    for (auto it = m_points.constBegin(), end = m_points.constEnd(); it != end; ++it) {
        for (int i = 0; i < m_ringCount; ++i) {
            float alpha = computeAlpha(it->time, i);
            float size = computeRadius(it->time, it->press, i);
            if (size > 0 && alpha > 0) {
                QColor color = it->color;
                color.setAlphaF(alpha);
                drawCircle(color, it->pos.x(), it->pos.y(), size);
            }
        }
    }
    paintScreenFinish(mask, region, data);
}

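// Request the next repaint while there are still points animating;
// together with the aging in prePaintScreen() this keeps frames coming
// until every ring has faded out.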
void TouchPointsEffect::postPaintScreen()
{
    effects->postPaintScreen();
    repaint();
}

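// Ring animation math: every touch point spawns m_ringCount rings whose
// start times are staggered by ringDistance = m_ringLife / (m_ringCount * 3).
// While the finger is down the rings grow outwards with time; on release
// they shrink back inwards. The result is scaled so a full-life ring
// reaches m_ringMaxSize.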
float TouchPointsEffect::computeRadius(int time, bool press, int ring)
{
    float ringDistance = m_ringLife / (m_ringCount * 3);
    if (press) {
        return ((time - ringDistance * ring) / m_ringLife) * m_ringMaxSize;
    }
    return ((m_ringLife - time - ringDistance * ring) / m_ringLife) * m_ringMaxSize;
}

float TouchPointsEffect::computeAlpha(int time, int ring)
{
    float ringDistance = m_ringLife / (m_ringCount * 3);
    return (m_ringLife - (float)time - ringDistance * ring) / m_ringLife;
}

void TouchPointsEffect::repaint()
{
    if (!m_points.isEmpty()) {
        QRegion dirtyRegion;
        const int radius = m_ringMaxSize + m_lineWidth;
        for (auto it = m_points.constBegin(), end = m_points.constEnd(); it != end; ++it) {
            dirtyRegion |= QRect(it->pos.x() - radius, it->pos.y() - radius, 2 * radius, 2 * radius);
        }
        effects->addRepaint(dirtyRegion);
    }
}

bool TouchPointsEffect::isActive() const
{
    return !m_points.isEmpty();
}

void TouchPointsEffect::drawCircle(const QColor &color, float cx, float cy, float r)
{
    if (effects->isOpenGLCompositing())
        drawCircleGl(color, cx, cy, r);
    if (effects->compositingType() == XRenderCompositing)
        drawCircleXr(color, cx, cy, r);
    if (effects->compositingType() == QPainterCompositing)
        drawCircleQPainter(color, cx, cy, r);
}

void TouchPointsEffect::paintScreenSetup(int mask, QRegion region, ScreenPaintData &data)
{
    if (effects->isOpenGLCompositing())
        paintScreenSetupGl(mask, region, data);
}

void TouchPointsEffect::paintScreenFinish(int mask, QRegion region, ScreenPaintData &data)
{
    if (effects->isOpenGLCompositing())
        paintScreenFinishGl(mask, region, data);
}

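// Draws the circle as a GL_LINE_LOOP with a fixed number of segments.
// Rather than evaluating sin/cos for every vertex, the loop starts at
// (r, 0) and rotates the previous vertex by the per-segment angle:
// x' = c*x - s*y, y' = s*x + c*y.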
void TouchPointsEffect::drawCircleGl(const QColor &color, float cx, float cy, float r)
{
    static const int num_segments = 80;
    static const float theta = 2 * 3.1415926 / float(num_segments);
    static const float c = cosf(theta); // precalculate the sine and cosine
    static const float s = sinf(theta);
    float t;

    float x = r; // we start at angle = 0
    float y = 0;

    GLVertexBuffer *vbo = GLVertexBuffer::streamingBuffer();
    vbo->reset();
    vbo->setUseColor(true);
    vbo->setColor(color);
    QVector<float> verts;
    verts.reserve(num_segments * 2);

    for (int ii = 0; ii < num_segments; ++ii) {
        verts << x + cx << y + cy; // output vertex
        // apply the rotation matrix
        t = x;
        x = c * x - s * y;
        y = s * t + c * y;
    }
    vbo->setData(verts.size() / 2, 2, verts.data(), nullptr);
    vbo->render(GL_LINE_LOOP);
}

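// XRender has no stroked-circle primitive, so the ring is built as a
// filled triangle strip that zig-zags between an outer circle of radius
// r and an inner circle of radius r - m_lineWidth. DOUBLE_TO_FIXED
// converts to XRender's 16.16 fixed-point coordinates.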
void TouchPointsEffect::drawCircleXr(const QColor &color, float cx, float cy, float r)
{
#ifdef KWIN_HAVE_XRENDER_COMPOSITING
    if (r <= m_lineWidth)
        return;

    int num_segments = r + 8;
    float theta = 2.0 * 3.1415926 / num_segments;
    float cos = cosf(theta); // precalculate the sine and cosine
    float sin = sinf(theta);
    float x[2] = {r, r - m_lineWidth};
    float y[2] = {0, 0};

#define DOUBLE_TO_FIXED(d) ((xcb_render_fixed_t) ((d) * 65536))
    QVector<xcb_render_pointfix_t> strip;
    strip.reserve(2 * num_segments + 2);

    xcb_render_pointfix_t point;
    point.x = DOUBLE_TO_FIXED(x[1] + cx);
    point.y = DOUBLE_TO_FIXED(y[1] + cy);
    strip << point;

    for (int i = 0; i < num_segments; ++i) {
        // apply the rotation matrix
        const float h[2] = {x[0], x[1]};
        x[0] = cos * x[0] - sin * y[0];
        x[1] = cos * x[1] - sin * y[1];
        y[0] = sin * h[0] + cos * y[0];
        y[1] = sin * h[1] + cos * y[1];

        point.x = DOUBLE_TO_FIXED(x[0] + cx);
        point.y = DOUBLE_TO_FIXED(y[0] + cy);
        strip << point;

        point.x = DOUBLE_TO_FIXED(x[1] + cx);
        point.y = DOUBLE_TO_FIXED(y[1] + cy);
        strip << point;
    }

    const float h = x[0];
    x[0] = cos * x[0] - sin * y[0];
    y[0] = sin * h + cos * y[0];

    point.x = DOUBLE_TO_FIXED(x[0] + cx);
    point.y = DOUBLE_TO_FIXED(y[0] + cy);
    strip << point;

    XRenderPicture fill = xRenderFill(color);
    xcb_render_tri_strip(xcbConnection(), XCB_RENDER_PICT_OP_OVER,
                         fill, effects->xrenderBufferPicture(), 0,
                         0, 0, strip.count(), strip.constData());
#undef DOUBLE_TO_FIXED
#else
    Q_UNUSED(color)
    Q_UNUSED(cx)
    Q_UNUSED(cy)
    Q_UNUSED(r)
#endif
}

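// QPainter measures arc angles in 1/16th of a degree, so a span angle of
// 5760 (= 16 * 360) draws the complete circle.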
void TouchPointsEffect::drawCircleQPainter(const QColor &color, float cx, float cy, float r)
{
    QPainter *painter = effects->scenePainter();
    painter->save();
    painter->setPen(color);
    painter->drawArc(cx - r, cy - r, r * 2, r * 2, 0, 5760);
    painter->restore();
}

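// Pushes a solid-color shader and enables standard alpha blending so the
// rings can fade out; paintScreenFinishGl() undoes both.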
void TouchPointsEffect::paintScreenSetupGl(int, QRegion, ScreenPaintData &data)
{
    GLShader *shader = ShaderManager::instance()->pushShader(ShaderTrait::UniformColor);
    shader->setUniform(GLShader::ModelViewProjectionMatrix, data.projectionMatrix());

    glLineWidth(m_lineWidth);
    glEnable(GL_BLEND);
    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
}

void TouchPointsEffect::paintScreenFinishGl(int, QRegion, ScreenPaintData &)
{
    glDisable(GL_BLEND);

    ShaderManager::instance()->popShader();
}

} // namespace