re-detect triple-buffering after compositor resume

Also hint the proper blocking condition (when turning off swap control).
The detection variables are global, but the swap-control requirement is
not preserved across a suspend/resume cycle, so the detection must run
again. Alternatively (and better), the swap-control setting could be
saved and re-applied, but that causes trouble during the initial
detection.

CCBUG: 343184
REVIEW: 125659
parent 4924e9479b
commit 0788890233
2 changed files with 15 additions and 6 deletions
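Both backends apply the same pattern, visible in the diffs below: the detection flags are file-scope statics, so they outlive any individual backend instance and are now explicitly reset in the destructor. A minimal sketch of that lifecycle, assuming a simplified stand-in Backend class rather than KWin's actual API:

// Minimal sketch of the flag lifecycle this commit establishes; "Backend"
// and its methods are illustrative stand-ins, not KWin's actual API.
static bool gs_tripleBufferUndetected = true;      // no detection result yet
static bool gs_tripleBufferNeedsDetection = false; // timing probe in progress

class Backend
{
public:
    ~Backend()
    {
        // The swap-control state chosen after detection is not preserved
        // across a compositor suspend/resume, so reset the file-scope flags
        // and make the next backend instance run the detection again.
        gs_tripleBufferUndetected = true;
        gs_tripleBufferNeedsDetection = false;
    }

    void present()
    {
        if (gs_tripleBufferUndetected) {
            gs_tripleBufferNeedsDetection = true; // start timing synced swaps
        }
        // ... swap buffers, sample how long the swap blocks, and clear both
        // flags once triple buffering is confirmed or ruled out ...
    }
};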
eglonxbackend.cpp

@@ -68,12 +68,19 @@ EglOnXBackend::EglOnXBackend(xcb_connection_t *connection, Display *display, xcb
     setIsDirectRendering(true);
 }
 
+static bool gs_tripleBufferUndetected = true;
+static bool gs_tripleBufferNeedsDetection = false;
+
 EglOnXBackend::~EglOnXBackend()
 {
     if (isFailed() && m_overlayWindow) {
         m_overlayWindow->destroy();
     }
     cleanup();
+
+    gs_tripleBufferUndetected = true;
+    gs_tripleBufferNeedsDetection = false;
+
     if (m_overlayWindow) {
         if (overlayWindow()->window()) {
             overlayWindow()->destroy();
@@ -82,9 +89,6 @@ EglOnXBackend::~EglOnXBackend()
     }
 }
 
-static bool gs_tripleBufferUndetected = true;
-static bool gs_tripleBufferNeedsDetection = false;
-
 void EglOnXBackend::init()
 {
     if (!initRenderingContext()) {
@@ -343,6 +347,7 @@ void EglOnXBackend::present()
         if (qstrcmp(qgetenv("__GL_YIELD"), "USLEEP")) {
             options->setGlPreferBufferSwap(0);
             eglSwapInterval(eglDisplay(), 0);
+            result = 0; // hint proper behavior
             qCWarning(KWIN_CORE) << "\nIt seems you are using the nvidia driver without triple buffering\n"
                                     "You must export __GL_YIELD=\"USLEEP\" to prevent large CPU overhead on synced swaps\n"
                                     "Preferably, enable the TripleBuffer Option in the xorg.conf Device\n"
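One subtlety in EglOnXBackend::present() above, repeated in GlxBackend::present() below: once swap control is turned off, buffer swaps no longer block, so the measured swap time is forced to zero ("result = 0") to keep the downstream blocking heuristic consistent. A hedged illustration of that idea; swapDurationNs and its parameters are hypothetical helpers, not KWin code:

#include <chrono>

// Hypothetical helper mirroring the hunk above: with swap control disabled,
// a swap returns immediately, so report a zero duration rather than a
// measurement that could be mistaken for a blocking, synced swap.
long long swapDurationNs(bool swapControlDisabled,
                         std::chrono::steady_clock::time_point swapStart)
{
    if (swapControlDisabled) {
        return 0; // hint proper behavior: unsynced swaps do not block
    }
    const auto end = std::chrono::steady_clock::now();
    return std::chrono::duration_cast<std::chrono::nanoseconds>(end - swapStart).count();
}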
glxbackend.cpp

@@ -117,6 +117,9 @@ GlxBackend::GlxBackend()
     init();
 }
 
+static bool gs_tripleBufferUndetected = true;
+static bool gs_tripleBufferNeedsDetection = false;
+
 GlxBackend::~GlxBackend()
 {
     if (isFailed()) {
@@ -127,6 +130,9 @@ GlxBackend::~GlxBackend()
     cleanupGL();
     doneCurrent();
 
+    gs_tripleBufferUndetected = true;
+    gs_tripleBufferNeedsDetection = false;
+
     if (ctx)
         glXDestroyContext(display(), ctx);
 
@@ -140,9 +146,6 @@ GlxBackend::~GlxBackend()
     delete m_overlayWindow;
 }
 
-static bool gs_tripleBufferUndetected = true;
-static bool gs_tripleBufferNeedsDetection = false;
-
 void GlxBackend::init()
 {
     initGLX();
@@ -636,6 +639,7 @@ void GlxBackend::present()
         if (qstrcmp(qgetenv("__GL_YIELD"), "USLEEP")) {
             options->setGlPreferBufferSwap(0);
             setSwapInterval(0);
+            result = 0; // hint proper behavior
             qCWarning(KWIN_CORE) << "\nIt seems you are using the nvidia driver without triple buffering\n"
                                     "You must export __GL_YIELD=\"USLEEP\" to prevent large CPU overhead on synced swaps\n"
                                     "Preferably, enable the TripleBuffer Option in the xorg.conf Device\n"
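Why must the detection re-occur at all? Triple buffering cannot be queried directly; it is inferred from how long synced swaps block, and that inference is only valid for the swap-control state in effect when it was made, which is exactly what a suspend/resume cycle discards. A rough sketch of such a timing probe, where probeTripleBuffering, swapBuffers and refresh are illustrative assumptions rather than KWin's actual detection code:

#include <chrono>
#include <functional>

bool probeTripleBuffering(const std::function<void()> &swapBuffers,
                          std::chrono::nanoseconds refresh)
{
    using clock = std::chrono::steady_clock;
    clock::duration total{0};
    const int samples = 30;
    for (int i = 0; i < samples; ++i) {
        const auto t0 = clock::now();
        swapBuffers(); // synced swap: blocks a full retrace unless frames queue
        total += clock::now() - t0;
    }
    // If the average synced swap returns in well under one refresh cycle,
    // the driver is queueing frames, i.e. triple buffering is active.
    return total / samples < refresh / 2;
}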