2015-07-21 16:43:54 +01:00
|
|
|
/*
|
|
|
|
* Copyright © 2013 Keith Packard
|
|
|
|
* Copyright © 2015 Boyan Ding
|
|
|
|
*
|
|
|
|
* Permission to use, copy, modify, distribute, and sell this software and its
|
|
|
|
* documentation for any purpose is hereby granted without fee, provided that
|
|
|
|
* the above copyright notice appear in all copies and that both that copyright
|
|
|
|
* notice and this permission notice appear in supporting documentation, and
|
|
|
|
* that the name of the copyright holders not be used in advertising or
|
|
|
|
* publicity pertaining to distribution of the software without specific,
|
|
|
|
* written prior permission. The copyright holders make no representations
|
|
|
|
* about the suitability of this software for any purpose. It is provided "as
|
|
|
|
* is" without express or implied warranty.
|
|
|
|
*
|
|
|
|
* THE COPYRIGHT HOLDERS DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
|
|
|
|
* INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
|
|
|
|
* EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY SPECIAL, INDIRECT OR
|
|
|
|
* CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
|
|
|
|
* DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
|
|
|
* TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
|
|
|
|
* OF THIS SOFTWARE.
|
|
|
|
*/
|
|
|
|
|
|
|
|
#include <fcntl.h>
|
|
|
|
#include <stdlib.h>
|
|
|
|
#include <unistd.h>
|
2017-07-07 07:54:26 +01:00
|
|
|
#include <string.h>
|
2015-07-21 16:43:54 +01:00
|
|
|
|
|
|
|
#include <X11/xshmfence.h>
|
|
|
|
#include <xcb/xcb.h>
|
|
|
|
#include <xcb/dri3.h>
|
|
|
|
#include <xcb/present.h>
|
2020-05-23 00:45:42 +01:00
|
|
|
#include <xcb/xfixes.h>
|
2015-07-21 16:43:54 +01:00
|
|
|
|
|
|
|
#include <X11/Xlib-xcb.h>
|
|
|
|
|
2020-12-15 13:52:28 +00:00
|
|
|
#include "loader_dri_helper.h"
|
2015-07-21 16:43:54 +01:00
|
|
|
#include "loader_dri3_helper.h"
|
2018-02-21 11:39:09 +00:00
|
|
|
#include "util/macros.h"
|
2019-02-12 18:18:03 +00:00
|
|
|
#include "drm-uapi/drm_fourcc.h"
|
2015-07-21 16:43:54 +01:00
|
|
|
|
2017-08-10 14:35:39 +01:00
|
|
|
/**
 * A cached blit context.
 */
struct loader_dri3_blit_context {
   mtx_t mtx;                      /* Guards all fields below across threads */
   __DRIcontext *ctx;              /* Lazily-created context, NULL until first use */
   __DRIscreen *cur_screen;        /* Screen ctx was created for (cache key) */
   const __DRIcoreExtension *core; /* Core extension used to destroy ctx */
};

/* For simplicity we maintain the cache only for a single screen at a time */
static struct loader_dri3_blit_context blit_context = {
   _MTX_INITIALIZER_NP, NULL
};

/* Forward declarations for helpers defined later in this file. */
static void
dri3_flush_present_events(struct loader_dri3_drawable *draw);

static struct loader_dri3_buffer *
dri3_find_back_alloc(struct loader_dri3_drawable *draw);
|
|
|
|
|
loader_dri3: Handle mismatched depth 30 formats for Prime renderoffload.
Detect if the display (X-Server) gpu and Prime renderoffload gpu prefer
different channel ordering for color depth 30 formats ([X/A]BGR2101010
vs. [X/A]RGB2101010) and perform format conversion during the blitImage()
detiling op from tiled backbuffer -> linear buffer.
For this we need to find the visual (= red channel mask) for the
X-Drawable used to display on the server gpu. We use the same proven
logic for finding that visual as in commit "egl/x11: Handle both depth
30 formats for eglCreateImage()".
This is mostly to allow "NVidia Optimus" at depth 30, as Intel/AMD
gpu's prefer xRGB2101010 ordering, whereas NVidia gpu's prefer
xBGR2101010 ordering, so we can offload to nouveau without getting
funky colors.
Tested on Intel single gpu, NVidia single gpu, Intel + NVidia prime
offload with DRI3/Present.
Note: An unintended but pleasant surprise of this patch is that it also
seems to make the modesetting-ddx of server 1.20.0 work at depth 30
on nouveau, at least with unredirected "classic" X rendering, and
with redirected desktop compositing under XRender accel, and with OpenGL
compositing under GLX. Only X11 compositing via OpenGL + EGL still gives
funky colors. modesetting-ddx + glamor are not yet ready to deal with
nouveau's ABGR2101010 format, and treat it as ARGB2101010, also exposing
X-visuals with ARGB2101010 style channel masks. Seems somehow this triggers
the logic in this patch on modesetting-ddx + depth 30 + DRI3 buffer sharing
and does the "wrong" channel swizzling that then cancels out the "wrong"
swizzling of glamor and we end up with the proper pixel formatting in
the scanout buffer :). This so far tested on a NVA5 Tesla card under KDE5
Plasma as shipping with Ubuntu 16.04.4 LTS.
Signed-off-by: Mario Kleiner <mario.kleiner.de@gmail.com>
Cc: Ilia Mirkin <imirkin@alum.mit.edu>
Reviewed-by: Eric Engestrom <eric.engestrom@intel.com>
2018-06-14 05:04:24 +01:00
|
|
|
static xcb_screen_t *
|
|
|
|
get_screen_for_root(xcb_connection_t *conn, xcb_window_t root)
|
|
|
|
{
|
|
|
|
xcb_screen_iterator_t screen_iter =
|
|
|
|
xcb_setup_roots_iterator(xcb_get_setup(conn));
|
|
|
|
|
|
|
|
for (; screen_iter.rem; xcb_screen_next (&screen_iter)) {
|
|
|
|
if (screen_iter.data->root == root)
|
|
|
|
return screen_iter.data;
|
|
|
|
}
|
|
|
|
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
static xcb_visualtype_t *
|
|
|
|
get_xcb_visualtype_for_depth(struct loader_dri3_drawable *draw, int depth)
|
|
|
|
{
|
|
|
|
xcb_visualtype_iterator_t visual_iter;
|
|
|
|
xcb_screen_t *screen = draw->screen;
|
|
|
|
xcb_depth_iterator_t depth_iter;
|
|
|
|
|
|
|
|
if (!screen)
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
depth_iter = xcb_screen_allowed_depths_iterator(screen);
|
|
|
|
for (; depth_iter.rem; xcb_depth_next(&depth_iter)) {
|
|
|
|
if (depth_iter.data->depth != depth)
|
|
|
|
continue;
|
|
|
|
|
|
|
|
visual_iter = xcb_depth_visuals_iterator(depth_iter.data);
|
|
|
|
if (visual_iter.rem)
|
|
|
|
return visual_iter.data;
|
|
|
|
}
|
|
|
|
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
2018-10-23 16:38:51 +01:00
|
|
|
/* Sets the adaptive sync window property state. */
|
|
|
|
static void
|
|
|
|
set_adaptive_sync_property(xcb_connection_t *conn, xcb_drawable_t drawable,
|
|
|
|
uint32_t state)
|
|
|
|
{
|
|
|
|
static char const name[] = "_VARIABLE_REFRESH";
|
|
|
|
xcb_intern_atom_cookie_t cookie;
|
|
|
|
xcb_intern_atom_reply_t* reply;
|
|
|
|
xcb_void_cookie_t check;
|
|
|
|
|
2019-02-04 17:53:52 +00:00
|
|
|
cookie = xcb_intern_atom(conn, 0, strlen(name), name);
|
2018-10-23 16:38:51 +01:00
|
|
|
reply = xcb_intern_atom_reply(conn, cookie, NULL);
|
|
|
|
if (reply == NULL)
|
|
|
|
return;
|
|
|
|
|
|
|
|
if (state)
|
|
|
|
check = xcb_change_property_checked(conn, XCB_PROP_MODE_REPLACE,
|
|
|
|
drawable, reply->atom,
|
|
|
|
XCB_ATOM_CARDINAL, 32, 1, &state);
|
|
|
|
else
|
|
|
|
check = xcb_delete_property_checked(conn, drawable, reply->atom);
|
|
|
|
|
|
|
|
xcb_discard_reply(conn, check.sequence);
|
|
|
|
free(reply);
|
|
|
|
}
|
|
|
|
|
loader_dri3: Handle mismatched depth 30 formats for Prime renderoffload.
Detect if the display (X-Server) gpu and Prime renderoffload gpu prefer
different channel ordering for color depth 30 formats ([X/A]BGR2101010
vs. [X/A]RGB2101010) and perform format conversion during the blitImage()
detiling op from tiled backbuffer -> linear buffer.
For this we need to find the visual (= red channel mask) for the
X-Drawable used to display on the server gpu. We use the same proven
logic for finding that visual as in commit "egl/x11: Handle both depth
30 formats for eglCreateImage()".
This is mostly to allow "NVidia Optimus" at depth 30, as Intel/AMD
gpu's prefer xRGB2101010 ordering, whereas NVidia gpu's prefer
xBGR2101010 ordering, so we can offload to nouveau without getting
funky colors.
Tested on Intel single gpu, NVidia single gpu, Intel + NVidia prime
offload with DRI3/Present.
Note: An unintended but pleasant surprise of this patch is that it also
seems to make the modesetting-ddx of server 1.20.0 work at depth 30
on nouveau, at least with unredirected "classic" X rendering, and
with redirected desktop compositing under XRender accel, and with OpenGL
compositing under GLX. Only X11 compositing via OpenGL + EGL still gives
funky colors. modesetting-ddx + glamor are not yet ready to deal with
nouveau's ABGR2101010 format, and treat it as ARGB2101010, also exposing
X-visuals with ARGB2101010 style channel masks. Seems somehow this triggers
the logic in this patch on modesetting-ddx + depth 30 + DRI3 buffer sharing
and does the "wrong" channel swizzling that then cancels out the "wrong"
swizzling of glamor and we end up with the proper pixel formatting in
the scanout buffer :). This so far tested on a NVA5 Tesla card under KDE5
Plasma as shipping with Ubuntu 16.04.4 LTS.
Signed-off-by: Mario Kleiner <mario.kleiner.de@gmail.com>
Cc: Ilia Mirkin <imirkin@alum.mit.edu>
Reviewed-by: Eric Engestrom <eric.engestrom@intel.com>
2018-06-14 05:04:24 +01:00
|
|
|
/* Get red channel mask for given drawable at given depth. */
|
|
|
|
static unsigned int
|
|
|
|
dri3_get_red_mask_for_depth(struct loader_dri3_drawable *draw, int depth)
|
|
|
|
{
|
|
|
|
xcb_visualtype_t *visual = get_xcb_visualtype_for_depth(draw, depth);
|
|
|
|
|
|
|
|
if (visual)
|
|
|
|
return visual->red_mask;
|
|
|
|
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
/**
 * Do we have blit functionality in the image blit extension?
 *
 * \param draw[in]  The drawable intended to blit from / to.
 * \return  true if we have blit functionality. false otherwise.
 */
static bool loader_dri3_have_image_blit(const struct loader_dri3_drawable *draw)
{
   /* blitImage appeared in version 9 of the image extension; also check the
    * driver actually filled in the hook.
    */
   return draw->ext->image->base.version >= 9 &&
          draw->ext->image->blitImage != NULL;
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Get and lock (for use with the current thread) a dri context associated
|
|
|
|
* with the drawable's dri screen. The context is intended to be used with
|
|
|
|
* the dri image extension's blitImage method.
|
|
|
|
*
|
|
|
|
* \param draw[in] Pointer to the drawable whose dri screen we want a
|
|
|
|
* dri context for.
|
|
|
|
* \return A dri context or NULL if context creation failed.
|
|
|
|
*
|
|
|
|
* When the caller is done with the context (even if the context returned was
|
|
|
|
* NULL), the caller must call loader_dri3_blit_context_put.
|
|
|
|
*/
|
|
|
|
static __DRIcontext *
|
|
|
|
loader_dri3_blit_context_get(struct loader_dri3_drawable *draw)
|
|
|
|
{
|
|
|
|
mtx_lock(&blit_context.mtx);
|
|
|
|
|
|
|
|
if (blit_context.ctx && blit_context.cur_screen != draw->dri_screen) {
|
|
|
|
blit_context.core->destroyContext(blit_context.ctx);
|
|
|
|
blit_context.ctx = NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!blit_context.ctx) {
|
|
|
|
blit_context.ctx = draw->ext->core->createNewContext(draw->dri_screen,
|
|
|
|
NULL, NULL, NULL);
|
|
|
|
blit_context.cur_screen = draw->dri_screen;
|
|
|
|
blit_context.core = draw->ext->core;
|
|
|
|
}
|
|
|
|
|
|
|
|
return blit_context.ctx;
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Release (for use with other threads) a dri context previously obtained using
|
|
|
|
* loader_dri3_blit_context_get.
|
|
|
|
*/
|
|
|
|
static void
|
|
|
|
loader_dri3_blit_context_put(void)
|
|
|
|
{
|
|
|
|
mtx_unlock(&blit_context.mtx);
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Blit (parts of) the contents of a DRI image to another dri image
|
|
|
|
*
|
|
|
|
* \param draw[in] The drawable which owns the images.
|
|
|
|
* \param dst[in] The destination image.
|
|
|
|
* \param src[in] The source image.
|
|
|
|
* \param dstx0[in] Start destination coordinate.
|
|
|
|
* \param dsty0[in] Start destination coordinate.
|
|
|
|
* \param width[in] Blit width.
|
|
|
|
* \param height[in] Blit height.
|
|
|
|
* \param srcx0[in] Start source coordinate.
|
|
|
|
* \param srcy0[in] Start source coordinate.
|
|
|
|
* \param flush_flag[in] Image blit flush flag.
|
|
|
|
* \return true iff successful.
|
|
|
|
*/
|
|
|
|
static bool
|
|
|
|
loader_dri3_blit_image(struct loader_dri3_drawable *draw,
|
|
|
|
__DRIimage *dst, __DRIimage *src,
|
|
|
|
int dstx0, int dsty0, int width, int height,
|
|
|
|
int srcx0, int srcy0, int flush_flag)
|
|
|
|
{
|
|
|
|
__DRIcontext *dri_context;
|
|
|
|
bool use_blit_context = false;
|
|
|
|
|
|
|
|
if (!loader_dri3_have_image_blit(draw))
|
|
|
|
return false;
|
|
|
|
|
|
|
|
dri_context = draw->vtable->get_dri_context(draw);
|
|
|
|
|
|
|
|
if (!dri_context || !draw->vtable->in_current_context(draw)) {
|
|
|
|
dri_context = loader_dri3_blit_context_get(draw);
|
|
|
|
use_blit_context = true;
|
|
|
|
flush_flag |= __BLIT_FLAG_FLUSH;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (dri_context)
|
|
|
|
draw->ext->image->blitImage(dri_context, dst, src, dstx0, dsty0,
|
|
|
|
width, height, srcx0, srcy0,
|
|
|
|
width, height, flush_flag);
|
|
|
|
|
|
|
|
if (use_blit_context)
|
|
|
|
loader_dri3_blit_context_put();
|
|
|
|
|
|
|
|
return dri_context != NULL;
|
|
|
|
}
|
|
|
|
|
/* Reset the buffer's shared-memory fence to the untriggered state. */
static inline void
dri3_fence_reset(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
{
   xshmfence_reset(buffer->shm_fence);
}
|
|
|
|
|
|
|
|
static inline void
|
|
|
|
dri3_fence_set(struct loader_dri3_buffer *buffer)
|
|
|
|
{
|
|
|
|
xshmfence_trigger(buffer->shm_fence);
|
|
|
|
}
|
|
|
|
|
|
|
|
static inline void
|
|
|
|
dri3_fence_trigger(xcb_connection_t *c, struct loader_dri3_buffer *buffer)
|
|
|
|
{
|
|
|
|
xcb_sync_trigger_fence(c, buffer->sync_fence);
|
|
|
|
}
|
|
|
|
|
|
|
|
static inline void
|
2017-09-05 08:58:08 +01:00
|
|
|
dri3_fence_await(xcb_connection_t *c, struct loader_dri3_drawable *draw,
|
|
|
|
struct loader_dri3_buffer *buffer)
|
2015-07-21 16:43:54 +01:00
|
|
|
{
|
|
|
|
xcb_flush(c);
|
|
|
|
xshmfence_await(buffer->shm_fence);
|
2017-09-19 18:41:22 +01:00
|
|
|
if (draw) {
|
|
|
|
mtx_lock(&draw->mtx);
|
2017-09-05 08:58:08 +01:00
|
|
|
dri3_flush_present_events(draw);
|
2017-09-19 18:41:22 +01:00
|
|
|
mtx_unlock(&draw->mtx);
|
|
|
|
}
|
2015-07-21 16:43:54 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
static void
|
2020-10-02 14:20:17 +01:00
|
|
|
dri3_update_max_num_back(struct loader_dri3_drawable *draw)
|
2015-07-21 16:43:54 +01:00
|
|
|
{
|
2020-10-02 14:20:17 +01:00
|
|
|
switch (draw->last_present_mode) {
|
2020-10-02 14:55:05 +01:00
|
|
|
case XCB_PRESENT_COMPLETE_MODE_FLIP: {
|
|
|
|
int new_max;
|
|
|
|
|
|
|
|
if (draw->swap_interval == 0)
|
|
|
|
new_max = 4;
|
|
|
|
else
|
|
|
|
new_max = 3;
|
|
|
|
|
|
|
|
assert(new_max <= LOADER_DRI3_MAX_BACK);
|
|
|
|
|
|
|
|
if (new_max != draw->max_num_back) {
|
|
|
|
/* On transition from swap interval == 0 to != 0, start with two
|
|
|
|
* buffers again. Otherwise keep the current number of buffers. Either
|
|
|
|
* way, more will be allocated if needed.
|
|
|
|
*/
|
|
|
|
if (new_max < draw->max_num_back)
|
|
|
|
draw->cur_num_back = 2;
|
|
|
|
|
|
|
|
draw->max_num_back = new_max;
|
|
|
|
}
|
|
|
|
|
2020-10-02 14:20:17 +01:00
|
|
|
break;
|
2020-10-02 14:55:05 +01:00
|
|
|
}
|
2020-10-02 14:20:17 +01:00
|
|
|
|
2020-10-06 16:52:08 +01:00
|
|
|
case XCB_PRESENT_COMPLETE_MODE_SKIP:
|
|
|
|
break;
|
|
|
|
|
2020-10-02 14:20:17 +01:00
|
|
|
default:
|
|
|
|
/* On transition from flips to copies, start with a single buffer again,
|
|
|
|
* a second one will be allocated if needed
|
|
|
|
*/
|
|
|
|
if (draw->max_num_back != 2)
|
|
|
|
draw->cur_num_back = 1;
|
|
|
|
|
|
|
|
draw->max_num_back = 2;
|
|
|
|
}
|
2015-07-21 16:43:54 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
loader_dri3_set_swap_interval(struct loader_dri3_drawable *draw, int interval)
|
|
|
|
{
|
2021-09-24 08:47:50 +01:00
|
|
|
/* Wait all previous swap done before changing swap interval.
|
|
|
|
*
|
|
|
|
* This is for preventing swap out of order in the following cases:
|
|
|
|
* 1. Change from sync swap mode (>0) to async mode (=0), so async swap occurs
|
|
|
|
* before previous pending sync swap.
|
|
|
|
* 2. Change from value A to B and A > B, so the target_msc for the previous
|
|
|
|
* pending swap may be bigger than newer swap.
|
|
|
|
*
|
|
|
|
* PS. changing from value A to B and A < B won't cause swap out of order but
|
|
|
|
* may still gets wrong target_msc value at the beginning.
|
|
|
|
*/
|
|
|
|
if (draw->swap_interval != interval)
|
|
|
|
loader_dri3_swapbuffer_barrier(draw);
|
|
|
|
|
2017-08-02 00:59:26 +01:00
|
|
|
draw->swap_interval = interval;
|
2015-07-21 16:43:54 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/** dri3_free_render_buffer
|
|
|
|
*
|
|
|
|
* Free everything associated with one render buffer including pixmap, fence
|
|
|
|
* stuff and the driver image
|
|
|
|
*/
|
|
|
|
static void
|
|
|
|
dri3_free_render_buffer(struct loader_dri3_drawable *draw,
|
|
|
|
struct loader_dri3_buffer *buffer)
|
|
|
|
{
|
|
|
|
if (buffer->own_pixmap)
|
|
|
|
xcb_free_pixmap(draw->conn, buffer->pixmap);
|
|
|
|
xcb_sync_destroy_fence(draw->conn, buffer->sync_fence);
|
|
|
|
xshmfence_unmap_shm(buffer->shm_fence);
|
2015-11-25 05:27:04 +00:00
|
|
|
draw->ext->image->destroyImage(buffer->image);
|
2015-07-21 16:43:54 +01:00
|
|
|
if (buffer->linear_buffer)
|
2015-11-25 05:27:04 +00:00
|
|
|
draw->ext->image->destroyImage(buffer->linear_buffer);
|
2015-07-21 16:43:54 +01:00
|
|
|
free(buffer);
|
|
|
|
}
|
|
|
|
|
|
|
|
void
|
|
|
|
loader_dri3_drawable_fini(struct loader_dri3_drawable *draw)
|
|
|
|
{
|
|
|
|
int i;
|
|
|
|
|
2015-11-25 05:27:04 +00:00
|
|
|
draw->ext->core->destroyDrawable(draw->dri_drawable);
|
2015-07-21 16:43:54 +01:00
|
|
|
|
2018-02-21 11:39:09 +00:00
|
|
|
for (i = 0; i < ARRAY_SIZE(draw->buffers); i++) {
|
2015-07-21 16:43:54 +01:00
|
|
|
if (draw->buffers[i])
|
|
|
|
dri3_free_render_buffer(draw, draw->buffers[i]);
|
|
|
|
}
|
|
|
|
|
2016-07-28 09:44:49 +01:00
|
|
|
if (draw->special_event) {
|
|
|
|
xcb_void_cookie_t cookie =
|
|
|
|
xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
|
|
|
|
XCB_PRESENT_EVENT_MASK_NO_EVENT);
|
|
|
|
|
|
|
|
xcb_discard_reply(draw->conn, cookie.sequence);
|
2015-07-21 16:43:54 +01:00
|
|
|
xcb_unregister_for_special_event(draw->conn, draw->special_event);
|
2016-07-28 09:44:49 +01:00
|
|
|
}
|
2017-09-19 18:41:22 +01:00
|
|
|
|
2021-06-23 16:05:44 +01:00
|
|
|
if (draw->region)
|
|
|
|
xcb_xfixes_destroy_region(draw->conn, draw->region);
|
|
|
|
|
2017-09-19 18:41:22 +01:00
|
|
|
cnd_destroy(&draw->event_cnd);
|
|
|
|
mtx_destroy(&draw->mtx);
|
2015-07-21 16:43:54 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
int
|
|
|
|
loader_dri3_drawable_init(xcb_connection_t *conn,
|
|
|
|
xcb_drawable_t drawable,
|
2021-11-12 02:25:11 +00:00
|
|
|
enum loader_dri3_drawable_type type,
|
2015-07-21 16:43:54 +01:00
|
|
|
__DRIscreen *dri_screen,
|
|
|
|
bool is_different_gpu,
|
2017-07-07 07:54:26 +01:00
|
|
|
bool multiplanes_available,
|
2021-09-14 09:57:39 +01:00
|
|
|
bool prefer_back_buffer_reuse,
|
2015-07-21 16:43:54 +01:00
|
|
|
const __DRIconfig *dri_config,
|
|
|
|
struct loader_dri3_extensions *ext,
|
2016-08-25 11:08:30 +01:00
|
|
|
const struct loader_dri3_vtable *vtable,
|
2015-07-21 16:43:54 +01:00
|
|
|
struct loader_dri3_drawable *draw)
|
|
|
|
{
|
|
|
|
xcb_get_geometry_cookie_t cookie;
|
|
|
|
xcb_get_geometry_reply_t *reply;
|
|
|
|
xcb_generic_error_t *error;
|
|
|
|
|
|
|
|
draw->conn = conn;
|
|
|
|
draw->ext = ext;
|
|
|
|
draw->vtable = vtable;
|
|
|
|
draw->drawable = drawable;
|
2021-11-12 02:25:11 +00:00
|
|
|
draw->type = type;
|
2021-06-23 16:05:44 +01:00
|
|
|
draw->region = 0;
|
2015-07-21 16:43:54 +01:00
|
|
|
draw->dri_screen = dri_screen;
|
|
|
|
draw->is_different_gpu = is_different_gpu;
|
2017-07-07 07:54:26 +01:00
|
|
|
draw->multiplanes_available = multiplanes_available;
|
2021-09-14 09:57:39 +01:00
|
|
|
draw->prefer_back_buffer_reuse = prefer_back_buffer_reuse;
|
2015-07-21 16:43:54 +01:00
|
|
|
|
|
|
|
draw->have_back = 0;
|
|
|
|
draw->have_fake_front = 0;
|
|
|
|
draw->first_init = true;
|
2018-10-23 16:38:51 +01:00
|
|
|
draw->adaptive_sync = false;
|
|
|
|
draw->adaptive_sync_active = false;
|
2015-07-21 16:43:54 +01:00
|
|
|
|
2017-08-10 15:14:23 +01:00
|
|
|
draw->cur_blit_source = -1;
|
2017-08-11 08:49:54 +01:00
|
|
|
draw->back_format = __DRI_IMAGE_FORMAT_NONE;
|
2017-09-19 18:41:22 +01:00
|
|
|
mtx_init(&draw->mtx, mtx_plain);
|
|
|
|
cnd_init(&draw->event_cnd);
|
2017-08-10 15:14:23 +01:00
|
|
|
|
2018-10-23 16:38:51 +01:00
|
|
|
if (draw->ext->config) {
|
2019-01-07 08:52:56 +00:00
|
|
|
unsigned char adaptive_sync = 0;
|
2018-10-23 16:38:51 +01:00
|
|
|
|
|
|
|
draw->ext->config->configQueryb(draw->dri_screen,
|
|
|
|
"adaptive_sync",
|
|
|
|
&adaptive_sync);
|
|
|
|
|
|
|
|
draw->adaptive_sync = adaptive_sync;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!draw->adaptive_sync)
|
|
|
|
set_adaptive_sync_property(conn, draw->drawable, false);
|
|
|
|
|
2022-07-11 20:01:27 +01:00
|
|
|
draw->swap_interval = dri_get_initial_swap_interval(draw->dri_screen,
|
|
|
|
draw->ext->config);
|
2015-07-21 16:43:54 +01:00
|
|
|
|
2020-10-02 14:20:17 +01:00
|
|
|
dri3_update_max_num_back(draw);
|
2015-07-21 16:43:54 +01:00
|
|
|
|
|
|
|
/* Create a new drawable */
|
|
|
|
draw->dri_drawable =
|
2015-11-25 05:27:04 +00:00
|
|
|
draw->ext->image_driver->createNewDrawable(dri_screen,
|
|
|
|
dri_config,
|
|
|
|
draw);
|
2015-07-21 16:43:54 +01:00
|
|
|
|
|
|
|
if (!draw->dri_drawable)
|
|
|
|
return 1;
|
|
|
|
|
|
|
|
cookie = xcb_get_geometry(draw->conn, draw->drawable);
|
|
|
|
reply = xcb_get_geometry_reply(draw->conn, cookie, &error);
|
|
|
|
if (reply == NULL || error != NULL) {
|
|
|
|
draw->ext->core->destroyDrawable(draw->dri_drawable);
|
|
|
|
return 1;
|
|
|
|
}
|
|
|
|
|
loader_dri3: Handle mismatched depth 30 formats for Prime renderoffload.
Detect if the display (X-Server) gpu and Prime renderoffload gpu prefer
different channel ordering for color depth 30 formats ([X/A]BGR2101010
vs. [X/A]RGB2101010) and perform format conversion during the blitImage()
detiling op from tiled backbuffer -> linear buffer.
For this we need to find the visual (= red channel mask) for the
X-Drawable used to display on the server gpu. We use the same proven
logic for finding that visual as in commit "egl/x11: Handle both depth
30 formats for eglCreateImage()".
This is mostly to allow "NVidia Optimus" at depth 30, as Intel/AMD
gpu's prefer xRGB2101010 ordering, whereas NVidia gpu's prefer
xBGR2101010 ordering, so we can offload to nouveau without getting
funky colors.
Tested on Intel single gpu, NVidia single gpu, Intel + NVidia prime
offload with DRI3/Present.
Note: An unintended but pleasant surprise of this patch is that it also
seems to make the modesetting-ddx of server 1.20.0 work at depth 30
on nouveau, at least with unredirected "classic" X rendering, and
with redirected desktop compositing under XRender accel, and with OpenGL
compositing under GLX. Only X11 compositing via OpenGL + EGL still gives
funky colors. modesetting-ddx + glamor are not yet ready to deal with
nouveau's ABGR2101010 format, and treat it as ARGB2101010, also exposing
X-visuals with ARGB2101010 style channel masks. Seems somehow this triggers
the logic in this patch on modesetting-ddx + depth 30 + DRI3 buffer sharing
and does the "wrong" channel swizzling that then cancels out the "wrong"
swizzling of glamor and we end up with the proper pixel formatting in
the scanout buffer :). This so far tested on a NVA5 Tesla card under KDE5
Plasma as shipping with Ubuntu 16.04.4 LTS.
Signed-off-by: Mario Kleiner <mario.kleiner.de@gmail.com>
Cc: Ilia Mirkin <imirkin@alum.mit.edu>
Reviewed-by: Eric Engestrom <eric.engestrom@intel.com>
2018-06-14 05:04:24 +01:00
|
|
|
draw->screen = get_screen_for_root(draw->conn, reply->root);
|
2015-07-21 16:43:54 +01:00
|
|
|
draw->width = reply->width;
|
|
|
|
draw->height = reply->height;
|
|
|
|
draw->depth = reply->depth;
|
|
|
|
draw->vtable->set_drawable_size(draw, draw->width, draw->height);
|
|
|
|
free(reply);
|
|
|
|
|
2017-08-10 16:10:47 +01:00
|
|
|
draw->swap_method = __DRI_ATTRIB_SWAP_UNDEFINED;
|
|
|
|
if (draw->ext->core->base.version >= 2) {
|
|
|
|
(void )draw->ext->core->getConfigAttrib(dri_config,
|
|
|
|
__DRI_ATTRIB_SWAP_METHOD,
|
|
|
|
&draw->swap_method);
|
|
|
|
}
|
|
|
|
|
2015-07-21 16:43:54 +01:00
|
|
|
/*
|
|
|
|
* Make sure server has the same swap interval we do for the new
|
|
|
|
* drawable.
|
|
|
|
*/
|
2022-07-11 20:01:27 +01:00
|
|
|
loader_dri3_set_swap_interval(draw, draw->swap_interval);
|
2015-07-21 16:43:54 +01:00
|
|
|
|
|
|
|
return 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
/*
|
|
|
|
* Process one Present event
|
|
|
|
*/
|
|
|
|
static void
|
|
|
|
dri3_handle_present_event(struct loader_dri3_drawable *draw,
|
|
|
|
xcb_present_generic_event_t *ge)
|
|
|
|
{
|
|
|
|
switch (ge->evtype) {
|
|
|
|
case XCB_PRESENT_CONFIGURE_NOTIFY: {
|
|
|
|
xcb_present_configure_notify_event_t *ce = (void *) ge;
|
|
|
|
|
|
|
|
draw->width = ce->width;
|
|
|
|
draw->height = ce->height;
|
|
|
|
draw->vtable->set_drawable_size(draw, draw->width, draw->height);
|
2017-09-05 09:07:13 +01:00
|
|
|
draw->ext->flush->invalidate(draw->dri_drawable);
|
2015-07-21 16:43:54 +01:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
case XCB_PRESENT_COMPLETE_NOTIFY: {
|
|
|
|
xcb_present_complete_notify_event_t *ce = (void *) ge;
|
|
|
|
|
|
|
|
/* Compute the processed SBC number from the received 32-bit serial number
|
|
|
|
* merged with the upper 32-bits of the sent 64-bit serial number while
|
|
|
|
* checking for wrap.
|
|
|
|
*/
|
|
|
|
if (ce->kind == XCB_PRESENT_COMPLETE_KIND_PIXMAP) {
|
2018-05-08 10:51:09 +01:00
|
|
|
uint64_t recv_sbc = (draw->send_sbc & 0xffffffff00000000LL) | ce->serial;
|
|
|
|
|
|
|
|
/* Only assume wraparound if that results in exactly the previous
|
|
|
|
* SBC + 1, otherwise ignore received SBC > sent SBC (those are
|
|
|
|
* probably from a previous loader_dri3_drawable instance) to avoid
|
|
|
|
* calculating bogus target MSC values in loader_dri3_swap_buffers_msc
|
|
|
|
*/
|
|
|
|
if (recv_sbc <= draw->send_sbc)
|
|
|
|
draw->recv_sbc = recv_sbc;
|
|
|
|
else if (recv_sbc == (draw->recv_sbc + 0x100000001ULL))
|
|
|
|
draw->recv_sbc = recv_sbc - 0x100000000ULL;
|
2018-02-21 10:39:34 +00:00
|
|
|
|
2017-10-06 06:26:51 +01:00
|
|
|
/* When moving from flip to copy, we assume that we can allocate in
|
|
|
|
* a more optimal way if we don't need to cater for the display
|
|
|
|
* controller.
|
|
|
|
*/
|
|
|
|
if (ce->mode == XCB_PRESENT_COMPLETE_MODE_COPY &&
|
|
|
|
draw->last_present_mode == XCB_PRESENT_COMPLETE_MODE_FLIP) {
|
|
|
|
for (int b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
|
|
|
|
if (draw->buffers[b])
|
|
|
|
draw->buffers[b]->reallocate = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-08-02 11:49:40 +01:00
|
|
|
/* If the server tells us that our allocation is suboptimal, we
|
2017-10-06 06:26:51 +01:00
|
|
|
* reallocate once.
|
|
|
|
*/
|
2018-03-13 20:06:00 +00:00
|
|
|
#ifdef HAVE_DRI3_MODIFIERS
|
2017-10-06 06:26:51 +01:00
|
|
|
if (ce->mode == XCB_PRESENT_COMPLETE_MODE_SUBOPTIMAL_COPY &&
|
|
|
|
draw->last_present_mode != ce->mode) {
|
|
|
|
for (int b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
|
|
|
|
if (draw->buffers[b])
|
|
|
|
draw->buffers[b]->reallocate = true;
|
|
|
|
}
|
|
|
|
}
|
2018-03-13 20:06:00 +00:00
|
|
|
#endif
|
2018-02-21 10:39:34 +00:00
|
|
|
draw->last_present_mode = ce->mode;
|
2015-07-21 16:43:54 +01:00
|
|
|
|
|
|
|
if (draw->vtable->show_fps)
|
|
|
|
draw->vtable->show_fps(draw, ce->ust);
|
|
|
|
|
|
|
|
draw->ust = ce->ust;
|
|
|
|
draw->msc = ce->msc;
|
2018-01-03 11:21:56 +00:00
|
|
|
} else if (ce->serial == draw->eid) {
|
2015-07-21 16:43:54 +01:00
|
|
|
draw->notify_ust = ce->ust;
|
|
|
|
draw->notify_msc = ce->msc;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case XCB_PRESENT_EVENT_IDLE_NOTIFY: {
|
|
|
|
xcb_present_idle_notify_event_t *ie = (void *) ge;
|
|
|
|
int b;
|
|
|
|
|
2018-02-21 11:39:09 +00:00
|
|
|
for (b = 0; b < ARRAY_SIZE(draw->buffers); b++) {
|
2015-07-21 16:43:54 +01:00
|
|
|
struct loader_dri3_buffer *buf = draw->buffers[b];
|
|
|
|
|
2017-09-14 12:09:05 +01:00
|
|
|
if (buf && buf->pixmap == ie->pixmap)
|
2015-07-21 16:43:54 +01:00
|
|
|
buf->busy = 0;
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
free(ge);
|
|
|
|
}
|
|
|
|
|
|
|
|
static bool
|
2020-06-05 17:12:33 +01:00
|
|
|
dri3_wait_for_event_locked(struct loader_dri3_drawable *draw,
|
|
|
|
unsigned *full_sequence)
|
2015-07-21 16:43:54 +01:00
|
|
|
{
|
|
|
|
xcb_generic_event_t *ev;
|
|
|
|
xcb_present_generic_event_t *ge;
|
|
|
|
|
|
|
|
xcb_flush(draw->conn);
|
2017-09-19 18:41:22 +01:00
|
|
|
|
|
|
|
/* Only have one thread waiting for events at a time */
|
|
|
|
if (draw->has_event_waiter) {
|
|
|
|
cnd_wait(&draw->event_cnd, &draw->mtx);
|
2020-06-05 17:12:33 +01:00
|
|
|
if (full_sequence)
|
|
|
|
*full_sequence = draw->last_special_event_sequence;
|
2017-09-19 18:41:22 +01:00
|
|
|
/* Another thread has updated the protected info, so retest. */
|
|
|
|
return true;
|
|
|
|
} else {
|
|
|
|
draw->has_event_waiter = true;
|
|
|
|
/* Allow other threads access to the drawable while we're waiting. */
|
|
|
|
mtx_unlock(&draw->mtx);
|
2020-07-02 17:49:26 +01:00
|
|
|
ev = xcb_wait_for_special_event(draw->conn, draw->special_event);
|
2017-09-19 18:41:22 +01:00
|
|
|
mtx_lock(&draw->mtx);
|
|
|
|
draw->has_event_waiter = false;
|
|
|
|
cnd_broadcast(&draw->event_cnd);
|
|
|
|
}
|
2015-07-21 16:43:54 +01:00
|
|
|
if (!ev)
|
|
|
|
return false;
|
2020-06-05 17:12:33 +01:00
|
|
|
draw->last_special_event_sequence = ev->full_sequence;
|
|
|
|
if (full_sequence)
|
|
|
|
*full_sequence = ev->full_sequence;
|
2015-07-21 16:43:54 +01:00
|
|
|
ge = (void *) ev;
|
|
|
|
dri3_handle_present_event(draw, ge);
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
/** loader_dri3_wait_for_msc
|
|
|
|
*
|
|
|
|
* Get the X server to send an event when the target msc/divisor/remainder is
|
|
|
|
* reached.
|
|
|
|
*/
|
|
|
|
bool
|
|
|
|
loader_dri3_wait_for_msc(struct loader_dri3_drawable *draw,
|
|
|
|
int64_t target_msc,
|
|
|
|
int64_t divisor, int64_t remainder,
|
|
|
|
int64_t *ust, int64_t *msc, int64_t *sbc)
|
|
|
|
{
|
2018-01-03 11:21:56 +00:00
|
|
|
xcb_void_cookie_t cookie = xcb_present_notify_msc(draw->conn,
|
|
|
|
draw->drawable,
|
|
|
|
draw->eid,
|
|
|
|
target_msc,
|
|
|
|
divisor,
|
|
|
|
remainder);
|
|
|
|
unsigned full_sequence;
|
2015-07-21 16:43:54 +01:00
|
|
|
|
2017-09-19 18:41:22 +01:00
|
|
|
mtx_lock(&draw->mtx);
|
2015-07-21 16:43:54 +01:00
|
|
|
|
|
|
|
/* Wait for the event */
|
2018-01-03 11:21:56 +00:00
|
|
|
do {
|
2020-06-05 17:22:31 +01:00
|
|
|
if (!dri3_wait_for_event_locked(draw, &full_sequence)) {
|
2018-01-03 11:21:56 +00:00
|
|
|
mtx_unlock(&draw->mtx);
|
|
|
|
return false;
|
2015-07-21 16:43:54 +01:00
|
|
|
}
|
2018-01-03 11:21:56 +00:00
|
|
|
} while (full_sequence != cookie.sequence || draw->notify_msc < target_msc);
|
2015-07-21 16:43:54 +01:00
|
|
|
|
|
|
|
*ust = draw->notify_ust;
|
|
|
|
*msc = draw->notify_msc;
|
|
|
|
*sbc = draw->recv_sbc;
|
2017-09-19 18:41:22 +01:00
|
|
|
mtx_unlock(&draw->mtx);
|
2015-07-21 16:43:54 +01:00
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
/** loader_dri3_wait_for_sbc
|
|
|
|
*
|
|
|
|
* Wait for the completed swap buffer count to reach the specified
|
|
|
|
* target. Presumably the application knows that this will be reached with
|
|
|
|
* outstanding complete events, or we're going to be here awhile.
|
|
|
|
*/
|
|
|
|
int
|
|
|
|
loader_dri3_wait_for_sbc(struct loader_dri3_drawable *draw,
|
|
|
|
int64_t target_sbc, int64_t *ust,
|
|
|
|
int64_t *msc, int64_t *sbc)
|
|
|
|
{
|
|
|
|
/* From the GLX_OML_sync_control spec:
|
|
|
|
*
|
|
|
|
* "If <target_sbc> = 0, the function will block until all previous
|
|
|
|
* swaps requested with glXSwapBuffersMscOML for that window have
|
|
|
|
* completed."
|
|
|
|
*/
|
2017-09-19 18:41:22 +01:00
|
|
|
mtx_lock(&draw->mtx);
|
2015-07-21 16:43:54 +01:00
|
|
|
if (!target_sbc)
|
|
|
|
target_sbc = draw->send_sbc;
|
|
|
|
|
|
|
|
while (draw->recv_sbc < target_sbc) {
|
2020-06-05 17:12:33 +01:00
|
|
|
if (!dri3_wait_for_event_locked(draw, NULL)) {
|
2017-09-19 18:41:22 +01:00
|
|
|
mtx_unlock(&draw->mtx);
|
2015-07-21 16:43:54 +01:00
|
|
|
return 0;
|
2017-09-19 18:41:22 +01:00
|
|
|
}
|
2015-07-21 16:43:54 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
*ust = draw->ust;
|
|
|
|
*msc = draw->msc;
|
|
|
|
*sbc = draw->recv_sbc;
|
2017-09-19 18:41:22 +01:00
|
|
|
mtx_unlock(&draw->mtx);
|
2015-07-21 16:43:54 +01:00
|
|
|
return 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
/** loader_dri3_find_back
 *
 * Find an idle back buffer. If there isn't one, then
 * wait for a present idle notify event from the X server
 *
 * \param draw               drawable to pick a back buffer for
 * \param prefer_a_different when true, prefer an idle buffer that is NOT
 *                           the most recently used one (see DRI_PRIME note
 *                           below)
 * \return the chosen buffer id (index into draw->buffers), or -1 if waiting
 *         for an idle-notify event failed
 */
static int
dri3_find_back(struct loader_dri3_drawable *draw, bool prefer_a_different)
{
   int b;
   int num_to_consider;
   int max_num;

   mtx_lock(&draw->mtx);
   /* Increase the likelyhood of reusing current buffer */
   dri3_flush_present_events(draw);

   /* Check whether we need to reuse the current back buffer as new back.
    * In that case, wait until it's not busy anymore.
    */
   if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1) {
      /* No local blit capability: restrict the search to the single current
       * slot so the pending server-side copy source stays valid.
       */
      num_to_consider = 1;
      max_num = 1;
      draw->cur_blit_source = -1;
   } else {
      num_to_consider = draw->cur_num_back;
      max_num = draw->max_num_back;
   }

   /* In a DRI_PRIME situation, if prefer_a_different is true, we first try
    * to find an idle buffer that is not the last used one.
    * This is useful if we receive a XCB_PRESENT_EVENT_IDLE_NOTIFY event
    * for a pixmap but it's not actually idle (eg: the DRI_PRIME blit is
    * still in progress).
    * Unigine Superposition hits this and this allows to use 2 back buffers
    * instead of reusing the same one all the time, causing the next frame
    * to wait for the copy to finish.
    */
   int current_back_id = draw->cur_back;
   for (;;) {
      /* Scan the candidate slots, starting at the current back buffer. */
      for (b = 0; b < num_to_consider; b++) {
         int id = LOADER_DRI3_BACK_ID((b + draw->cur_back) % draw->cur_num_back);
         struct loader_dri3_buffer *buffer = draw->buffers[id];

         /* An unallocated slot is always usable; an allocated one must be
          * idle and (when requested) different from the last-used buffer.
          */
         if (!buffer || (!buffer->busy &&
                         (!prefer_a_different || id != current_back_id))) {
            draw->cur_back = id;
            mtx_unlock(&draw->mtx);
            return id;
         }
      }

      /* Nothing idle: first grow the back-buffer set, then relax the
       * "different buffer" preference, and only then block for an event.
       */
      if (num_to_consider < max_num) {
         num_to_consider = ++draw->cur_num_back;
      } else if (prefer_a_different) {
         prefer_a_different = false;
      } else if (!dri3_wait_for_event_locked(draw, NULL)) {
         mtx_unlock(&draw->mtx);
         return -1;
      }
   }
}
|
|
|
|
|
|
|
|
static xcb_gcontext_t
|
|
|
|
dri3_drawable_gc(struct loader_dri3_drawable *draw)
|
|
|
|
{
|
|
|
|
if (!draw->gc) {
|
|
|
|
uint32_t v = 0;
|
|
|
|
xcb_create_gc(draw->conn,
|
|
|
|
(draw->gc = xcb_generate_id(draw->conn)),
|
|
|
|
draw->drawable,
|
|
|
|
XCB_GC_GRAPHICS_EXPOSURES,
|
|
|
|
&v);
|
|
|
|
}
|
|
|
|
return draw->gc;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
static struct loader_dri3_buffer *
|
|
|
|
dri3_back_buffer(struct loader_dri3_drawable *draw)
|
|
|
|
{
|
|
|
|
return draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)];
|
|
|
|
}
|
|
|
|
|
|
|
|
static struct loader_dri3_buffer *
|
2021-11-10 11:06:14 +00:00
|
|
|
dri3_front_buffer(struct loader_dri3_drawable *draw)
|
2015-07-21 16:43:54 +01:00
|
|
|
{
|
|
|
|
return draw->buffers[LOADER_DRI3_FRONT_ID];
|
|
|
|
}
|
|
|
|
|
|
|
|
static void
|
|
|
|
dri3_copy_area(xcb_connection_t *c,
|
|
|
|
xcb_drawable_t src_drawable,
|
|
|
|
xcb_drawable_t dst_drawable,
|
|
|
|
xcb_gcontext_t gc,
|
|
|
|
int16_t src_x,
|
|
|
|
int16_t src_y,
|
|
|
|
int16_t dst_x,
|
|
|
|
int16_t dst_y,
|
|
|
|
uint16_t width,
|
|
|
|
uint16_t height)
|
|
|
|
{
|
|
|
|
xcb_void_cookie_t cookie;
|
|
|
|
|
|
|
|
cookie = xcb_copy_area_checked(c,
|
|
|
|
src_drawable,
|
|
|
|
dst_drawable,
|
|
|
|
gc,
|
|
|
|
src_x,
|
|
|
|
src_y,
|
|
|
|
dst_x,
|
|
|
|
dst_y,
|
|
|
|
width,
|
|
|
|
height);
|
|
|
|
xcb_discard_reply(c, cookie.sequence);
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Asks the driver to flush any queued work necessary for serializing with the
 * X command stream, and optionally the slightly more strict requirement of
 * glFlush() equivalence (which would require flushing even if nothing had
 * been drawn to a window system framebuffer, for example).
 *
 * \param draw            drawable to flush
 * \param flags           __DRI2_FLUSH_* bits forwarded to the driver
 * \param throttle_reason reason passed to the driver's throttling logic
 */
void
loader_dri3_flush(struct loader_dri3_drawable *draw,
                  unsigned flags,
                  enum __DRI2throttleReason throttle_reason)
{
   /* The vtable may report no current context (e.g. nothing bound on this
    * thread); in that case there is nothing to flush.
    */
   __DRIcontext *dri_context = draw->vtable->get_dri_context(draw);

   if (dri_context) {
      draw->ext->flush->flush_with_flags(dri_context, draw->dri_drawable,
                                         flags, throttle_reason);
   }
}
|
|
|
|
|
|
|
|
/* Copy a sub-rectangle of the back buffer to the drawable (the
 * GLX_MESA_copy_sub_buffer path).  Coordinates are given in GL convention
 * (origin bottom-left) and converted to X convention below.
 *
 * \param draw   drawable to present into
 * \param x,y    lower-left corner of the rectangle, GL coordinates
 * \param width,height rectangle size
 * \param flush  when true, request glFlush()-equivalent flushing
 */
void
loader_dri3_copy_sub_buffer(struct loader_dri3_drawable *draw,
                            int x, int y,
                            int width, int height,
                            bool flush)
{
   struct loader_dri3_buffer *back;
   unsigned flags = __DRI2_FLUSH_DRAWABLE;

   /* Check we have the right attachments */
   if (!draw->have_back || draw->type != LOADER_DRI3_DRAWABLE_WINDOW)
      return;

   if (flush)
      flags |= __DRI2_FLUSH_CONTEXT;
   loader_dri3_flush(draw, flags, __DRI2_THROTTLE_COPYSUBBUFFER);

   back = dri3_find_back_alloc(draw);
   if (!back)
      return;

   /* Convert from GL (bottom-left origin) to X (top-left origin). */
   y = draw->height - y - height;

   if (draw->is_different_gpu) {
      /* Update the linear buffer part of the back buffer
       * for the dri3_copy_area operation
       */
      (void) loader_dri3_blit_image(draw,
                                    back->linear_buffer,
                                    back->image,
                                    0, 0, back->width, back->height,
                                    0, 0, __BLIT_FLAG_FLUSH);
   }

   loader_dri3_swapbuffer_barrier(draw);
   /* Fence the copy so the driver knows when the server is done reading. */
   dri3_fence_reset(draw->conn, back);
   dri3_copy_area(draw->conn,
                  back->pixmap,
                  draw->drawable,
                  dri3_drawable_gc(draw),
                  x, y, x, y, width, height);
   dri3_fence_trigger(draw->conn, back);
   /* Refresh the fake front (if present) after we just damaged the real
    * front.
    */
   if (draw->have_fake_front &&
       !loader_dri3_blit_image(draw,
                               dri3_front_buffer(draw)->image,
                               back->image,
                               x, y, width, height,
                               x, y, __BLIT_FLAG_FLUSH) &&
       !draw->is_different_gpu) {
      /* Local blit not available: fall back to a server-side copy into the
       * fake front, fenced so we can wait for it below.
       */
      dri3_fence_reset(draw->conn, dri3_front_buffer(draw));
      dri3_copy_area(draw->conn,
                     back->pixmap,
                     dri3_front_buffer(draw)->pixmap,
                     dri3_drawable_gc(draw),
                     x, y, x, y, width, height);
      dri3_fence_trigger(draw->conn, dri3_front_buffer(draw));
      dri3_fence_await(draw->conn, NULL, dri3_front_buffer(draw));
   }
   dri3_fence_await(draw->conn, draw, back);
}
|
|
|
|
|
|
|
|
/* Copy the full contents of one X drawable to another (used to sync the
 * fake front with the real front), bracketing the copy with the front
 * buffer's fence when a front buffer exists.
 */
void
loader_dri3_copy_drawable(struct loader_dri3_drawable *draw,
                          xcb_drawable_t dest,
                          xcb_drawable_t src)
{
   loader_dri3_flush(draw, __DRI2_FLUSH_DRAWABLE, __DRI2_THROTTLE_COPYSUBBUFFER);

   /* front may be NULL (no fake front allocated); guard all fence ops. */
   struct loader_dri3_buffer *front = dri3_front_buffer(draw);
   if (front)
      dri3_fence_reset(draw->conn, front);

   dri3_copy_area(draw->conn,
                  src, dest,
                  dri3_drawable_gc(draw),
                  0, 0, 0, 0, draw->width, draw->height);

   if (front) {
      dri3_fence_trigger(draw->conn, front);
      dri3_fence_await(draw->conn, draw, front);
   }
}
|
|
|
|
|
|
|
|
/* Update the fake front buffer from the real front (server -> client
 * direction of GLX "wait for X" semantics).  No-op without a fake front.
 */
void
loader_dri3_wait_x(struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *front;

   if (draw == NULL || !draw->have_fake_front)
      return;

   front = dri3_front_buffer(draw);

   /* Pull the real front's pixels into the fake front's pixmap. */
   loader_dri3_copy_drawable(draw, front->pixmap, draw->drawable);

   /* In the psc->is_different_gpu case, the linear buffer has been updated,
    * but not yet the tiled buffer.
    * Copy back to the tiled buffer we use for rendering.
    * Note that we don't need flushing.
    */
   if (draw->is_different_gpu)
      (void) loader_dri3_blit_image(draw,
                                    front->image,
                                    front->linear_buffer,
                                    0, 0, front->width, front->height,
                                    0, 0, 0);
}
|
|
|
|
|
|
|
|
/* Push the fake front buffer's contents out to the real front (client ->
 * server direction of GLX "wait for GL" semantics).  No-op without a fake
 * front.
 */
void
loader_dri3_wait_gl(struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *front;

   if (draw == NULL || !draw->have_fake_front)
      return;

   front = dri3_front_buffer(draw);

   /* In the psc->is_different_gpu case, we update the linear_buffer
    * before updating the real front.
    */
   if (draw->is_different_gpu)
      (void) loader_dri3_blit_image(draw,
                                    front->linear_buffer,
                                    front->image,
                                    0, 0, front->width, front->height,
                                    0, 0, __BLIT_FLAG_FLUSH);
   loader_dri3_swapbuffer_barrier(draw);
   loader_dri3_copy_drawable(draw, draw->drawable, front->pixmap);
}
|
|
|
|
|
|
|
|
/** dri3_flush_present_events
 *
 * Process any present events that have been received from the X server
 * without blocking.  Skipped when another thread is already waiting on
 * the event queue, to avoid stealing its events.
 */
static void
dri3_flush_present_events(struct loader_dri3_drawable *draw)
{
   /* Check to see if any configuration changes have occurred
    * since we were last invoked
    */
   if (draw->has_event_waiter)
      return;

   if (draw->special_event) {
      xcb_generic_event_t *ev;

      /* Drain every queued special event and dispatch it. */
      while ((ev = xcb_poll_for_special_event(draw->conn,
                                              draw->special_event)) != NULL) {
         xcb_present_generic_event_t *ge = (void *) ev;
         dri3_handle_present_event(draw, ge);
      }
   }
}
|
|
|
|
|
|
|
|
/** loader_dri3_swap_buffers_msc
 *
 * Make the current back buffer visible using the present extension
 *
 * \param target_msc,divisor,remainder  OML_sync_control scheduling triple;
 *                                      all zero means plain glXSwapBuffers()
 * \param flush_flags  forwarded to the vtable's flush_drawable
 * \param rects        optional damage rects (x, y, w, h quadruples, GL
 *                     bottom-left origin), may be NULL
 * \param n_rects      number of damage rects
 * \param force_copy   force preservation of the back buffer (EGL use)
 * \return the swap buffer count of this swap, or 0 if nothing was swapped
 */
int64_t
loader_dri3_swap_buffers_msc(struct loader_dri3_drawable *draw,
                             int64_t target_msc, int64_t divisor,
                             int64_t remainder, unsigned flush_flags,
                             const int *rects, int n_rects,
                             bool force_copy)
{
   struct loader_dri3_buffer *back;
   int64_t ret = 0;

   /* GLX spec:
    *   void glXSwapBuffers(Display *dpy, GLXDrawable draw);
    *   This operation is a no-op if draw was created with a non-double-buffered
    *   GLXFBConfig, or if draw is a GLXPixmap.
    *   ...
    *   GLX pixmaps may be created with a config that includes back buffers and
    *   stereoscopic buffers. However, glXSwapBuffers is ignored for these pixmaps.
    *   ...
    *   It is possible to create a pbuffer with back buffers and to swap the
    *   front and back buffers by calling glXSwapBuffers.
    *
    * EGL spec:
    *   EGLBoolean eglSwapBuffers(EGLDisplay dpy, EGLSurface surface);
    *   If surface is a back-buffered window surface, then the color buffer is
    *   copied to the native window associated with that surface. If surface is
    *   a single-buffered window, pixmap, or pbuffer surface, eglSwapBuffers has
    *   no effect.
    *
    * SwapBuffer effect:
    *       |           GLX             |           EGL            |
    *       | window | pixmap | pbuffer | window | pixmap | pbuffer|
    *-------+--------+--------+---------+--------+--------+--------+
    * single|   nop  |   nop  |    nop  |   nop  |   nop  |   nop  |
    * double|  swap  |   nop  |   swap  |  swap  |   NA   |   NA   |
    */
   if (!draw->have_back || draw->type == LOADER_DRI3_DRAWABLE_PIXMAP)
      return ret;

   draw->vtable->flush_drawable(draw, flush_flags);

   back = dri3_find_back_alloc(draw);
   /* Could only happen when error case, like display is already closed. */
   if (!back)
      return ret;

   mtx_lock(&draw->mtx);

   /* Enable variable refresh on the window the first time we swap. */
   if (draw->adaptive_sync && !draw->adaptive_sync_active) {
      set_adaptive_sync_property(draw->conn, draw->drawable, true);
      draw->adaptive_sync_active = true;
   }

   if (draw->is_different_gpu) {
      /* Update the linear buffer before presenting the pixmap */
      (void) loader_dri3_blit_image(draw,
                                    back->linear_buffer,
                                    back->image,
                                    0, 0, back->width, back->height,
                                    0, 0, __BLIT_FLAG_FLUSH);
   }

   /* If we need to preload the new back buffer, remember the source.
    * The force_copy parameter is used by EGL to attempt to preserve
    * the back buffer across a call to this function.
    */
   if (draw->swap_method != __DRI_ATTRIB_SWAP_UNDEFINED || force_copy)
      draw->cur_blit_source = LOADER_DRI3_BACK_ID(draw->cur_back);

   /* Exchange the back and fake front. Even though the server knows about these
    * buffers, it has no notion of back and fake front.
    */
   if (draw->have_fake_front) {
      struct loader_dri3_buffer *tmp;

      tmp = dri3_front_buffer(draw);
      draw->buffers[LOADER_DRI3_FRONT_ID] = back;
      draw->buffers[LOADER_DRI3_BACK_ID(draw->cur_back)] = tmp;

      if (draw->swap_method == __DRI_ATTRIB_SWAP_COPY || force_copy)
         draw->cur_blit_source = LOADER_DRI3_FRONT_ID;
   }

   dri3_flush_present_events(draw);

   if (draw->type == LOADER_DRI3_DRAWABLE_WINDOW) {
      dri3_fence_reset(draw->conn, back);

      /* Compute when we want the frame shown by taking the last known
       * successful MSC and adding in a swap interval for each outstanding swap
       * request. target_msc=divisor=remainder=0 means "Use glXSwapBuffers()
       * semantic"
       */
      ++draw->send_sbc;
      if (target_msc == 0 && divisor == 0 && remainder == 0)
         target_msc = draw->msc + abs(draw->swap_interval) *
                      (draw->send_sbc - draw->recv_sbc);
      else if (divisor == 0 && remainder > 0) {
         /* From the GLX_OML_sync_control spec:
          *     "If <divisor> = 0, the swap will occur when MSC becomes
          *      greater than or equal to <target_msc>."
          *
          * Note that there's no mention of the remainder. The Present
          * extension throws BadValue for remainder != 0 with divisor == 0, so
          * just drop the passed in value.
          */
         remainder = 0;
      }

      /* From the GLX_EXT_swap_control spec
       * and the EGL 1.4 spec (page 53):
       *
       *     "If <interval> is set to a value of 0, buffer swaps are not
       *      synchronized to a video frame."
       *
       * From GLX_EXT_swap_control_tear:
       *
       *     "If <interval> is negative, the minimum number of video frames
       *      between buffer swaps is the absolute value of <interval>. In this
       *      case, if abs(<interval>) video frames have already passed from
       *      the previous swap when the swap is ready to be performed, the
       *      swap will occur without synchronization to a video frame."
       *
       * Implementation note: It is possible to enable triple buffering
       * behaviour by not using XCB_PRESENT_OPTION_ASYNC, but this should not be
       * the default.
       */
      uint32_t options = XCB_PRESENT_OPTION_NONE;
      if (draw->swap_interval <= 0)
         options |= XCB_PRESENT_OPTION_ASYNC;

      /* If we need to populate the new back, but need to reuse the back
       * buffer slot due to lack of local blit capabilities, make sure
       * the server doesn't flip and we deadlock.
       */
      if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1)
         options |= XCB_PRESENT_OPTION_COPY;
#ifdef HAVE_DRI3_MODIFIERS
      if (draw->multiplanes_available)
         options |= XCB_PRESENT_OPTION_SUBOPTIMAL;
#endif
      back->busy = 1;
      back->last_swap = draw->send_sbc;

      /* Lazily create the XFixes region object reused for damage rects. */
      if (!draw->region) {
         draw->region = xcb_generate_id(draw->conn);
         xcb_xfixes_create_region(draw->conn, draw->region, 0, NULL);
      }

      xcb_xfixes_region_t region = 0;
      xcb_rectangle_t xcb_rects[64];

      /* Convert damage rects (GL bottom-left origin) to X coordinates.
       * More than 64 rects: fall through with region == 0 (full update).
       */
      if (n_rects > 0 && n_rects <= ARRAY_SIZE(xcb_rects)) {
         for (int i = 0; i < n_rects; i++) {
            const int *rect = &rects[i * 4];
            xcb_rects[i].x = rect[0];
            xcb_rects[i].y = draw->height - rect[1] - rect[3];
            xcb_rects[i].width = rect[2];
            xcb_rects[i].height = rect[3];
         }

         region = draw->region;
         xcb_xfixes_set_region(draw->conn, region, n_rects, xcb_rects);
      }

      xcb_present_pixmap(draw->conn,
                         draw->drawable,
                         back->pixmap,
                         (uint32_t) draw->send_sbc,
                         0,                                    /* valid */
                         region,                               /* update */
                         0,                                    /* x_off */
                         0,                                    /* y_off */
                         None,                                 /* target_crtc */
                         None,
                         back->sync_fence,
                         options,
                         target_msc,
                         divisor,
                         remainder, 0, NULL);
   } else {
      /* This can only be reached by double buffered GLXPbuffer. */
      assert(draw->type == LOADER_DRI3_DRAWABLE_PBUFFER);
      /* GLX does not have damage regions. */
      assert(n_rects == 0);

      /* For wait and buffer age usage. */
      draw->send_sbc++;
      draw->recv_sbc = back->last_swap = draw->send_sbc;

      /* Pixmap is imported as front buffer image when same GPU case, so just
       * locally blit back buffer image to it is enough. Otherwise front buffer
       * is a fake one which needs to be synced with pixmap by xserver remotely.
       */
      if (draw->is_different_gpu ||
          !loader_dri3_blit_image(draw,
                                  dri3_front_buffer(draw)->image,
                                  back->image,
                                  0, 0, draw->width, draw->height,
                                  0, 0, __BLIT_FLAG_FLUSH)) {
         dri3_copy_area(draw->conn, back->pixmap,
                        draw->drawable,
                        dri3_drawable_gc(draw),
                        0, 0, 0, 0, draw->width, draw->height);
      }
   }

   ret = (int64_t) draw->send_sbc;

   /* Schedule a server-side back-preserving blit if necessary.
    * This happens iff all conditions below are satisfied:
    * a) We have a fake front,
    * b) We need to preserve the back buffer,
    * c) We don't have local blit capabilities.
    */
   if (!loader_dri3_have_image_blit(draw) && draw->cur_blit_source != -1 &&
       draw->cur_blit_source != LOADER_DRI3_BACK_ID(draw->cur_back)) {
      struct loader_dri3_buffer *new_back = dri3_back_buffer(draw);
      struct loader_dri3_buffer *src = draw->buffers[draw->cur_blit_source];

      dri3_fence_reset(draw->conn, new_back);
      dri3_copy_area(draw->conn, src->pixmap,
                     new_back->pixmap,
                     dri3_drawable_gc(draw),
                     0, 0, 0, 0, draw->width, draw->height);
      dri3_fence_trigger(draw->conn, new_back);
      new_back->last_swap = src->last_swap;
   }

   xcb_flush(draw->conn);
   if (draw->stamp)
      ++(*draw->stamp);

   mtx_unlock(&draw->mtx);

   draw->ext->flush->invalidate(draw->dri_drawable);

   return ret;
}
|
|
|
|
|
|
|
|
/* Return the age (in swaps) of the current back buffer, per
 * EXT_buffer_age semantics: 0 for a never-presented buffer, otherwise
 * the number of swaps since its contents were defined.
 */
int
loader_dri3_query_buffer_age(struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *back = dri3_find_back_alloc(draw);
   int ret;

   mtx_lock(&draw->mtx);
   /* last_swap == 0 means the buffer has never been presented. */
   ret = (!back || back->last_swap == 0) ? 0 :
      draw->send_sbc - back->last_swap + 1;
   mtx_unlock(&draw->mtx);

   return ret;
}
|
|
|
|
|
|
|
|
/** loader_dri3_open
 *
 * Wrapper around xcb_dri3_open
 *
 * \param conn     X connection
 * \param root     root window used to select the screen
 * \param provider RandR provider to open, or None for the default
 * \return a device file descriptor (with FD_CLOEXEC set), or -1 on error.
 *         Ownership of the fd transfers to the caller.
 */
int
loader_dri3_open(xcb_connection_t *conn,
                 xcb_window_t root,
                 uint32_t provider)
{
   xcb_dri3_open_cookie_t cookie;
   xcb_dri3_open_reply_t *reply;
   xcb_xfixes_query_version_cookie_t fixes_cookie;
   xcb_xfixes_query_version_reply_t *fixes_reply;
   int fd;

   cookie = xcb_dri3_open(conn,
                          root,
                          provider);

   reply = xcb_dri3_open_reply(conn, cookie, NULL);
   if (!reply)
      return -1;

   /* The DRI3 open reply must carry exactly one fd. */
   if (reply->nfd != 1) {
      free(reply);
      return -1;
   }

   fd = xcb_dri3_open_reply_fds(conn, reply)[0];
   free(reply);
   /* Don't leak the fd into child processes. */
   fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);

   /* let the server know our xfixes level */
   fixes_cookie = xcb_xfixes_query_version(conn,
                                           XCB_XFIXES_MAJOR_VERSION,
                                           XCB_XFIXES_MINOR_VERSION);
   fixes_reply = xcb_xfixes_query_version_reply(conn, fixes_cookie, NULL);
   /* Reply contents are unused; free(NULL) is fine if the call failed. */
   free(fixes_reply);

   return fd;
}
|
|
|
|
|
|
|
|
/* Return the bytes-per-pixel for a __DRI_IMAGE_FORMAT_* code, or 0 for
 * unknown/none formats (callers treat 0 as "unsupported").
 */
static uint32_t
dri3_cpp_for_format(uint32_t format) {
   switch (format) {
   case  __DRI_IMAGE_FORMAT_R8:
      return 1;
   case  __DRI_IMAGE_FORMAT_RGB565:
   case  __DRI_IMAGE_FORMAT_GR88:
      return 2;
   case  __DRI_IMAGE_FORMAT_XRGB8888:
   case  __DRI_IMAGE_FORMAT_ARGB8888:
   case  __DRI_IMAGE_FORMAT_ABGR8888:
   case  __DRI_IMAGE_FORMAT_XBGR8888:
   case  __DRI_IMAGE_FORMAT_XRGB2101010:
   case  __DRI_IMAGE_FORMAT_ARGB2101010:
   case  __DRI_IMAGE_FORMAT_XBGR2101010:
   case  __DRI_IMAGE_FORMAT_ABGR2101010:
   case  __DRI_IMAGE_FORMAT_SARGB8:
   case  __DRI_IMAGE_FORMAT_SABGR8:
   case  __DRI_IMAGE_FORMAT_SXRGB8:
      return 4;
   case __DRI_IMAGE_FORMAT_ABGR16161616:
   case __DRI_IMAGE_FORMAT_XBGR16161616:
   case __DRI_IMAGE_FORMAT_XBGR16161616F:
   case __DRI_IMAGE_FORMAT_ABGR16161616F:
      return 8;
   case  __DRI_IMAGE_FORMAT_NONE:
   default:
      return 0;
   }
}
|
|
|
|
|
loader_dri3: Handle mismatched depth 30 formats for Prime renderoffload.
Detect if the display (X-Server) gpu and Prime renderoffload gpu prefer
different channel ordering for color depth 30 formats ([X/A]BGR2101010
vs. [X/A]RGB2101010) and perform format conversion during the blitImage()
detiling op from tiled backbuffer -> linear buffer.
For this we need to find the visual (= red channel mask) for the
X-Drawable used to display on the server gpu. We use the same proven
logic for finding that visual as in commit "egl/x11: Handle both depth
30 formats for eglCreateImage()".
This is mostly to allow "NVidia Optimus" at depth 30, as Intel/AMD
GPUs prefer xRGB2101010 ordering, whereas NVidia GPUs prefer
xBGR2101010 ordering, so we can offload to nouveau without getting
funky colors.
Tested on Intel single gpu, NVidia single gpu, Intel + NVidia prime
offload with DRI3/Present.
Note: An unintended but pleasant surprise of this patch is that it also
seems to make the modesetting-ddx of server 1.20.0 work at depth 30
on nouveau, at least with unredirected "classic" X rendering, and
with redirected desktop compositing under XRender accel, and with OpenGL
compositing under GLX. Only X11 compositing via OpenGL + EGL still gives
funky colors. modesetting-ddx + glamor are not yet ready to deal with
nouveau's ABGR2101010 format, and treat it as ARGB2101010, also exposing
X-visuals with ARGB2101010 style channel masks. Seems somehow this triggers
the logic in this patch on modesetting-ddx + depth 30 + DRI3 buffer sharing
and does the "wrong" channel swizzling that then cancels out the "wrong"
swizzling of glamor and we end up with the proper pixel formatting in
the scanout buffer :). This so far tested on a NVA5 Tesla card under KDE5
Plasma as shipping with Ubuntu 16.04.4 LTS.
Signed-off-by: Mario Kleiner <mario.kleiner.de@gmail.com>
Cc: Ilia Mirkin <imirkin@alum.mit.edu>
Reviewed-by: Eric Engestrom <eric.engestrom@intel.com>
2018-06-14 05:04:24 +01:00
|
|
|
/* Map format of render buffer to corresponding format for the linear_buffer
|
|
|
|
* used for sharing with the display gpu of a Prime setup (== is_different_gpu).
|
|
|
|
* Usually linear_format == format, except for depth >= 30 formats, where
|
|
|
|
* different gpu vendors have different preferences wrt. color channel ordering.
|
|
|
|
*/
|
|
|
|
static uint32_t
|
|
|
|
dri3_linear_format_for_format(struct loader_dri3_drawable *draw, uint32_t format)
|
|
|
|
{
|
|
|
|
switch (format) {
|
|
|
|
case __DRI_IMAGE_FORMAT_XRGB2101010:
|
|
|
|
case __DRI_IMAGE_FORMAT_XBGR2101010:
|
|
|
|
/* Different preferred formats for different hw */
|
|
|
|
if (dri3_get_red_mask_for_depth(draw, 30) == 0x3ff)
|
|
|
|
return __DRI_IMAGE_FORMAT_XBGR2101010;
|
|
|
|
else
|
|
|
|
return __DRI_IMAGE_FORMAT_XRGB2101010;
|
|
|
|
|
|
|
|
case __DRI_IMAGE_FORMAT_ARGB2101010:
|
|
|
|
case __DRI_IMAGE_FORMAT_ABGR2101010:
|
|
|
|
/* Different preferred formats for different hw */
|
|
|
|
if (dri3_get_red_mask_for_depth(draw, 30) == 0x3ff)
|
|
|
|
return __DRI_IMAGE_FORMAT_ABGR2101010;
|
|
|
|
else
|
|
|
|
return __DRI_IMAGE_FORMAT_ARGB2101010;
|
|
|
|
|
|
|
|
default:
|
|
|
|
return format;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-07-07 07:53:28 +01:00
|
|
|
/* the DRIimage createImage function takes __DRI_IMAGE_FORMAT codes, while
 * the createImageFromFds call takes DRM_FORMAT codes. To avoid
 * complete confusion, just deal in __DRI_IMAGE_FORMAT codes for now and
 * translate to DRM_FORMAT codes in the call to createImageFromFds
 *
 * Returns 0 for formats with no known DRM_FORMAT equivalent.
 */
static int
image_format_to_fourcc(int format)
{

   /* Convert from __DRI_IMAGE_FORMAT to DRM_FORMAT (sigh) */
   switch (format) {
   case __DRI_IMAGE_FORMAT_SARGB8: return __DRI_IMAGE_FOURCC_SARGB8888;
   case __DRI_IMAGE_FORMAT_SABGR8: return __DRI_IMAGE_FOURCC_SABGR8888;
   case __DRI_IMAGE_FORMAT_SXRGB8: return __DRI_IMAGE_FOURCC_SXRGB8888;
   case __DRI_IMAGE_FORMAT_RGB565: return DRM_FORMAT_RGB565;
   case __DRI_IMAGE_FORMAT_XRGB8888: return DRM_FORMAT_XRGB8888;
   case __DRI_IMAGE_FORMAT_ARGB8888: return DRM_FORMAT_ARGB8888;
   case __DRI_IMAGE_FORMAT_ABGR8888: return DRM_FORMAT_ABGR8888;
   case __DRI_IMAGE_FORMAT_XBGR8888: return DRM_FORMAT_XBGR8888;
   case __DRI_IMAGE_FORMAT_XRGB2101010: return DRM_FORMAT_XRGB2101010;
   case __DRI_IMAGE_FORMAT_ARGB2101010: return DRM_FORMAT_ARGB2101010;
   case __DRI_IMAGE_FORMAT_XBGR2101010: return DRM_FORMAT_XBGR2101010;
   case __DRI_IMAGE_FORMAT_ABGR2101010: return DRM_FORMAT_ABGR2101010;
   case __DRI_IMAGE_FORMAT_ABGR16161616: return DRM_FORMAT_ABGR16161616;
   case __DRI_IMAGE_FORMAT_XBGR16161616: return DRM_FORMAT_XBGR16161616;
   case __DRI_IMAGE_FORMAT_XBGR16161616F: return DRM_FORMAT_XBGR16161616F;
   case __DRI_IMAGE_FORMAT_ABGR16161616F: return DRM_FORMAT_ABGR16161616F;
   }
   return 0;
}
|
|
|
|
|
2018-03-13 20:06:00 +00:00
|
|
|
#ifdef HAVE_DRI3_MODIFIERS
|
2017-07-07 07:54:26 +01:00
|
|
|
static bool
|
|
|
|
has_supported_modifier(struct loader_dri3_drawable *draw, unsigned int format,
|
|
|
|
uint64_t *modifiers, uint32_t count)
|
|
|
|
{
|
|
|
|
uint64_t *supported_modifiers;
|
|
|
|
int32_t supported_modifiers_count;
|
|
|
|
bool found = false;
|
|
|
|
int i, j;
|
|
|
|
|
|
|
|
if (!draw->ext->image->queryDmaBufModifiers(draw->dri_screen,
|
|
|
|
format, 0, NULL, NULL,
|
|
|
|
&supported_modifiers_count) ||
|
|
|
|
supported_modifiers_count == 0)
|
|
|
|
return false;
|
|
|
|
|
|
|
|
supported_modifiers = malloc(supported_modifiers_count * sizeof(uint64_t));
|
|
|
|
if (!supported_modifiers)
|
|
|
|
return false;
|
|
|
|
|
|
|
|
draw->ext->image->queryDmaBufModifiers(draw->dri_screen, format,
|
|
|
|
supported_modifiers_count,
|
|
|
|
supported_modifiers, NULL,
|
|
|
|
&supported_modifiers_count);
|
|
|
|
|
|
|
|
for (i = 0; !found && i < supported_modifiers_count; i++) {
|
|
|
|
for (j = 0; !found && j < count; j++) {
|
|
|
|
if (supported_modifiers[i] == modifiers[j])
|
|
|
|
found = true;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
free(supported_modifiers);
|
|
|
|
return found;
|
|
|
|
}
|
2018-03-13 20:06:00 +00:00
|
|
|
#endif
|
2017-07-07 07:54:26 +01:00
|
|
|
|
2015-07-21 16:43:54 +01:00
|
|
|
/** loader_dri3_alloc_render_buffer
 *
 * Use the driver createImage function to construct a __DRIimage, then
 * get a file descriptor for that and create an X pixmap from that
 *
 * Allocate an xshmfence for synchronization
 *
 * Returns the new buffer, or NULL on any failure (all partially-acquired
 * resources are released via the goto-cleanup chain at the bottom).
 */
static struct loader_dri3_buffer *
dri3_alloc_render_buffer(struct loader_dri3_drawable *draw, unsigned int format,
                         int width, int height, int depth)
{
   struct loader_dri3_buffer *buffer;
   __DRIimage *pixmap_buffer = NULL, *linear_buffer_display_gpu = NULL;
   xcb_pixmap_t pixmap;
   xcb_sync_fence_t sync_fence;
   struct xshmfence *shm_fence;
   int buffer_fds[4], fence_fd;
   int num_planes = 0;
   uint64_t *modifiers = NULL;
   uint32_t count = 0;
   int i, mod;
   int ret;

   /* Create an xshmfence object and
    * prepare to send that to the X server
    */

   fence_fd = xshmfence_alloc_shm();
   if (fence_fd < 0)
      return NULL;

   shm_fence = xshmfence_map_shm(fence_fd);
   if (shm_fence == NULL)
      goto no_shm_fence;

   /* Allocate the image from the driver
    */
   buffer = calloc(1, sizeof *buffer);
   if (!buffer)
      goto no_buffer;

   buffer->cpp = dri3_cpp_for_format(format);
   if (!buffer->cpp)
      goto no_image;

   if (!draw->is_different_gpu) {
#ifdef HAVE_DRI3_MODIFIERS
      /* Same-GPU path: if the server and driver both speak modifiers,
       * collect the server's preferred modifier list (window-specific first,
       * falling back to screen-wide) to pass into image creation.
       */
      if (draw->multiplanes_available &&
          draw->ext->image->base.version >= 15 &&
          draw->ext->image->queryDmaBufModifiers &&
          draw->ext->image->createImageWithModifiers) {
         xcb_dri3_get_supported_modifiers_cookie_t mod_cookie;
         xcb_dri3_get_supported_modifiers_reply_t *mod_reply;
         xcb_generic_error_t *error = NULL;

         mod_cookie = xcb_dri3_get_supported_modifiers(draw->conn,
                                                       draw->window,
                                                       depth, buffer->cpp * 8);
         mod_reply = xcb_dri3_get_supported_modifiers_reply(draw->conn,
                                                            mod_cookie,
                                                            &error);
         if (!mod_reply)
            goto no_image;

         if (mod_reply->num_window_modifiers) {
            count = mod_reply->num_window_modifiers;
            modifiers = malloc(count * sizeof(uint64_t));
            if (!modifiers) {
               free(mod_reply);
               goto no_image;
            }

            memcpy(modifiers,
                   xcb_dri3_get_supported_modifiers_window_modifiers(mod_reply),
                   count * sizeof(uint64_t));

            /* Discard the window list if the driver supports none of it;
             * we then fall through to the screen-wide list below.
             */
            if (!has_supported_modifier(draw, image_format_to_fourcc(format),
                                        modifiers, count)) {
               free(modifiers);
               count = 0;
               modifiers = NULL;
            }
         }

         if (mod_reply->num_screen_modifiers && modifiers == NULL) {
            count = mod_reply->num_screen_modifiers;
            modifiers = malloc(count * sizeof(uint64_t));
            if (!modifiers) {
               /* NOTE(review): modifiers is NULL here, so this free() is a
                * no-op; kept for byte-identical code, but it is redundant.
                */
               free(modifiers);
               free(mod_reply);
               goto no_image;
            }

            memcpy(modifiers,
                   xcb_dri3_get_supported_modifiers_screen_modifiers(mod_reply),
                   count * sizeof(uint64_t));
         }

         free(mod_reply);
      }
#endif
      buffer->image = loader_dri_create_image(draw->dri_screen, draw->ext->image,
                                              width, height, format,
                                              __DRI_IMAGE_USE_SHARE |
                                              __DRI_IMAGE_USE_SCANOUT |
                                              __DRI_IMAGE_USE_BACKBUFFER |
                                              (draw->is_protected_content ?
                                               __DRI_IMAGE_USE_PROTECTED : 0),
                                              modifiers, count, buffer);
      free(modifiers);

      pixmap_buffer = buffer->image;

      if (!buffer->image)
         goto no_image;
   } else {
      /* PRIME (different render/display GPU) path: the render image is
       * created on the render GPU; a linear copy is needed for the server.
       */
      buffer->image = draw->ext->image->createImage(draw->dri_screen,
                                                    width, height,
                                                    format,
                                                    0,
                                                    buffer);

      if (!buffer->image)
         goto no_image;

      /* if driver name is same only then dri_screen_display_gpu is set.
       * This check is needed because for simplicity render gpu image extension
       * is also used for display gpu.
       */
      if (draw->dri_screen_display_gpu) {
         linear_buffer_display_gpu =
           draw->ext->image->createImage(draw->dri_screen_display_gpu,
                                         width, height,
                                         dri3_linear_format_for_format(draw, format),
                                         __DRI_IMAGE_USE_SHARE |
                                         __DRI_IMAGE_USE_LINEAR |
                                         __DRI_IMAGE_USE_BACKBUFFER |
                                         __DRI_IMAGE_USE_SCANOUT,
                                         buffer);
         pixmap_buffer = linear_buffer_display_gpu;
      }

      /* Fallback: allocate the linear buffer on the render GPU instead. */
      if (!pixmap_buffer) {
         buffer->linear_buffer =
           draw->ext->image->createImage(draw->dri_screen,
                                         width, height,
                                         dri3_linear_format_for_format(draw, format),
                                         __DRI_IMAGE_USE_SHARE |
                                         __DRI_IMAGE_USE_LINEAR |
                                         __DRI_IMAGE_USE_BACKBUFFER |
                                         __DRI_IMAGE_USE_SCANOUT |
                                         __DRI_IMAGE_USE_PRIME_BUFFER,
                                         buffer);

         pixmap_buffer = buffer->linear_buffer;
         if (!buffer->linear_buffer) {
            goto no_linear_buffer;
         }
      }
   }

   /* X want some information about the planes, so ask the image for it
    */
   if (!draw->ext->image->queryImage(pixmap_buffer, __DRI_IMAGE_ATTRIB_NUM_PLANES,
                                     &num_planes))
      num_planes = 1;

   /* Export one fd + stride + offset per plane for the X server. */
   for (i = 0; i < num_planes; i++) {
      __DRIimage *image = draw->ext->image->fromPlanar(pixmap_buffer, i, NULL);

      if (!image) {
         assert(i == 0);
         image = pixmap_buffer;
      }

      buffer_fds[i] = -1;

      ret = draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_FD,
                                         &buffer_fds[i]);
      ret &= draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_STRIDE,
                                          &buffer->strides[i]);
      ret &= draw->ext->image->queryImage(image, __DRI_IMAGE_ATTRIB_OFFSET,
                                          &buffer->offsets[i]);
      if (image != pixmap_buffer)
         draw->ext->image->destroyImage(image);

      if (!ret)
         goto no_buffer_attrib;
   }

   /* Assemble the 64-bit modifier from the two 32-bit query halves. */
   ret = draw->ext->image->queryImage(pixmap_buffer,
                                     __DRI_IMAGE_ATTRIB_MODIFIER_UPPER, &mod);
   buffer->modifier = (uint64_t) mod << 32;
   ret &= draw->ext->image->queryImage(pixmap_buffer,
                                       __DRI_IMAGE_ATTRIB_MODIFIER_LOWER, &mod);
   buffer->modifier |= (uint64_t)(mod & 0xffffffff);

   if (!ret)
      buffer->modifier = DRM_FORMAT_MOD_INVALID;

   if (draw->is_different_gpu && draw->dri_screen_display_gpu &&
       linear_buffer_display_gpu) {
      /* The linear buffer was created in the display GPU's vram, so we
       * need to make it visible to render GPU
       */
      if (draw->ext->image->base.version >= 20)
         buffer->linear_buffer =
            draw->ext->image->createImageFromFds2(draw->dri_screen,
                                                  width,
                                                  height,
                                                  image_format_to_fourcc(format),
                                                  &buffer_fds[0], num_planes,
                                                  __DRI_IMAGE_PRIME_LINEAR_BUFFER,
                                                  &buffer->strides[0],
                                                  &buffer->offsets[0],
                                                  buffer);
      else
         buffer->linear_buffer =
            draw->ext->image->createImageFromFds(draw->dri_screen,
                                                 width,
                                                 height,
                                                 image_format_to_fourcc(format),
                                                 &buffer_fds[0], num_planes,
                                                 &buffer->strides[0],
                                                 &buffer->offsets[0],
                                                 buffer);
      /* NOTE(review): jumping to no_buffer_attrib here leaves i == num_planes,
       * so the cleanup loop's first read of buffer_fds[i] touches an entry
       * never initialized (or, when num_planes == 4, one past the array).
       * Worth confirming and fixing upstream.
       */
      if (!buffer->linear_buffer)
         goto no_buffer_attrib;

      draw->ext->image->destroyImage(linear_buffer_display_gpu);
   }

   /* Hand the plane fds to the X server to back a new pixmap. */
   pixmap = xcb_generate_id(draw->conn);
#ifdef HAVE_DRI3_MODIFIERS
   if (draw->multiplanes_available &&
       buffer->modifier != DRM_FORMAT_MOD_INVALID) {
      xcb_dri3_pixmap_from_buffers(draw->conn,
                                   pixmap,
                                   draw->window,
                                   num_planes,
                                   width, height,
                                   buffer->strides[0], buffer->offsets[0],
                                   buffer->strides[1], buffer->offsets[1],
                                   buffer->strides[2], buffer->offsets[2],
                                   buffer->strides[3], buffer->offsets[3],
                                   depth, buffer->cpp * 8,
                                   buffer->modifier,
                                   buffer_fds);
   } else
#endif
   {
      xcb_dri3_pixmap_from_buffer(draw->conn,
                                  pixmap,
                                  draw->drawable,
                                  buffer->size,
                                  width, height, buffer->strides[0],
                                  depth, buffer->cpp * 8,
                                  buffer_fds[0]);
   }

   xcb_dri3_fence_from_fd(draw->conn,
                          pixmap,
                          (sync_fence = xcb_generate_id(draw->conn)),
                          false,
                          fence_fd);

   buffer->pixmap = pixmap;
   buffer->own_pixmap = true;
   buffer->sync_fence = sync_fence;
   buffer->shm_fence = shm_fence;
   buffer->width = width;
   buffer->height = height;

   /* Mark the buffer as idle
    */
   dri3_fence_set(buffer);

   return buffer;

no_buffer_attrib:
   /* Close any plane fds exported so far (fds are owned by us, not X). */
   do {
      if (buffer_fds[i] != -1)
         close(buffer_fds[i]);
   } while (--i >= 0);
   draw->ext->image->destroyImage(pixmap_buffer);
no_linear_buffer:
   /* On the PRIME path pixmap_buffer != buffer->image, so the render image
    * still needs its own destroy here.
    */
   if (draw->is_different_gpu)
      draw->ext->image->destroyImage(buffer->image);
no_image:
   free(buffer);
no_buffer:
   xshmfence_unmap_shm(shm_fence);
no_shm_fence:
   close(fence_fd);
   return NULL;
}
|
|
|
|
|
2021-11-11 09:21:48 +00:00
|
|
|
static bool
|
|
|
|
dri3_detect_drawable_is_window(struct loader_dri3_drawable *draw)
|
|
|
|
{
|
|
|
|
/* Try to select for input on the window.
|
|
|
|
*
|
|
|
|
* If the drawable is a window, this will get our events
|
|
|
|
* delivered.
|
|
|
|
*
|
|
|
|
* Otherwise, we'll get a BadWindow error back from this request which
|
|
|
|
* will let us know that the drawable is a pixmap instead.
|
|
|
|
*/
|
|
|
|
|
|
|
|
xcb_void_cookie_t cookie =
|
|
|
|
xcb_present_select_input_checked(draw->conn, draw->eid, draw->drawable,
|
|
|
|
XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
|
|
|
|
XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
|
|
|
|
XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
|
|
|
|
|
|
|
|
/* Check to see if our select input call failed. If it failed with a
|
|
|
|
* BadWindow error, then assume the drawable is a pixmap.
|
|
|
|
*/
|
|
|
|
xcb_generic_error_t *error = xcb_request_check(draw->conn, cookie);
|
|
|
|
|
|
|
|
if (error) {
|
|
|
|
if (error->error_code != BadWindow) {
|
|
|
|
free(error);
|
|
|
|
return false;
|
|
|
|
}
|
|
|
|
free(error);
|
2021-11-10 02:00:14 +00:00
|
|
|
|
|
|
|
/* pixmap can't get here, see driFetchDrawable(). */
|
|
|
|
draw->type = LOADER_DRI3_DRAWABLE_PBUFFER;
|
2021-11-11 09:21:48 +00:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2021-11-10 02:00:14 +00:00
|
|
|
draw->type = LOADER_DRI3_DRAWABLE_WINDOW;
|
2021-11-11 09:21:48 +00:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
|
|
|
static bool
|
|
|
|
dri3_setup_present_event(struct loader_dri3_drawable *draw)
|
|
|
|
{
|
2021-11-12 02:49:07 +00:00
|
|
|
/* No need to setup for pixmap drawable. */
|
|
|
|
if (draw->type == LOADER_DRI3_DRAWABLE_PIXMAP ||
|
2021-11-10 02:00:14 +00:00
|
|
|
draw->type == LOADER_DRI3_DRAWABLE_PBUFFER)
|
2021-11-12 02:49:07 +00:00
|
|
|
return true;
|
|
|
|
|
2021-11-11 09:21:48 +00:00
|
|
|
draw->eid = xcb_generate_id(draw->conn);
|
|
|
|
|
2021-11-12 02:49:07 +00:00
|
|
|
if (draw->type == LOADER_DRI3_DRAWABLE_WINDOW) {
|
|
|
|
xcb_present_select_input(draw->conn, draw->eid, draw->drawable,
|
|
|
|
XCB_PRESENT_EVENT_MASK_CONFIGURE_NOTIFY |
|
|
|
|
XCB_PRESENT_EVENT_MASK_COMPLETE_NOTIFY |
|
|
|
|
XCB_PRESENT_EVENT_MASK_IDLE_NOTIFY);
|
|
|
|
} else {
|
|
|
|
assert(draw->type == LOADER_DRI3_DRAWABLE_UNKNOWN);
|
2021-11-11 09:21:48 +00:00
|
|
|
|
2021-11-12 02:49:07 +00:00
|
|
|
if (!dri3_detect_drawable_is_window(draw))
|
|
|
|
return false;
|
|
|
|
|
2021-11-10 02:00:14 +00:00
|
|
|
if (draw->type != LOADER_DRI3_DRAWABLE_WINDOW)
|
2021-11-12 02:49:07 +00:00
|
|
|
return true;
|
|
|
|
}
|
2021-11-11 09:21:48 +00:00
|
|
|
|
|
|
|
/* Create an XCB event queue to hold present events outside of the usual
|
|
|
|
* application event queue
|
|
|
|
*/
|
|
|
|
draw->special_event = xcb_register_for_special_xge(draw->conn,
|
|
|
|
&xcb_present_id,
|
|
|
|
draw->eid,
|
|
|
|
draw->stamp);
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
|
2015-07-21 16:43:54 +01:00
|
|
|
/** loader_dri3_update_drawable
 *
 * Called the first time we use the drawable and then
 * after we receive present configure notify events to
 * track the geometry of the drawable
 *
 * Returns true on success, false if present-event setup or the geometry
 * query fails.  (Declared int but used as a boolean throughout.)
 * All work is done under draw->mtx.
 */
static int
dri3_update_drawable(struct loader_dri3_drawable *draw)
{
   mtx_lock(&draw->mtx);
   if (draw->first_init) {
      xcb_get_geometry_cookie_t geom_cookie;
      xcb_get_geometry_reply_t *geom_reply;
      xcb_window_t root_win;

      draw->first_init = false;

      if (!dri3_setup_present_event(draw)) {
         mtx_unlock(&draw->mtx);
         return false;
      }

      geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);

      geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);

      if (!geom_reply) {
         mtx_unlock(&draw->mtx);
         return false;
      }
      /* Cache size/depth and propagate the size to the owner via vtable. */
      draw->width = geom_reply->width;
      draw->height = geom_reply->height;
      draw->depth = geom_reply->depth;
      draw->vtable->set_drawable_size(draw, draw->width, draw->height);
      root_win = geom_reply->root;

      free(geom_reply);

      /* Non-window drawables use the root window for window-scoped DRI3
       * requests (e.g. get_supported_modifiers / pixmap_from_buffers).
       */
      if (draw->type != LOADER_DRI3_DRAWABLE_WINDOW)
         draw->window = root_win;
      else
         draw->window = draw->drawable;
   }
   /* Drain any queued Present events so geometry stays current. */
   dri3_flush_present_events(draw);
   mtx_unlock(&draw->mtx);
   return true;
}
|
|
|
|
|
2015-07-21 16:44:01 +01:00
|
|
|
__DRIimage *
|
|
|
|
loader_dri3_create_image(xcb_connection_t *c,
|
|
|
|
xcb_dri3_buffer_from_pixmap_reply_t *bp_reply,
|
|
|
|
unsigned int format,
|
|
|
|
__DRIscreen *dri_screen,
|
|
|
|
const __DRIimageExtension *image,
|
|
|
|
void *loaderPrivate)
|
|
|
|
{
|
|
|
|
int *fds;
|
|
|
|
__DRIimage *image_planar, *ret;
|
|
|
|
int stride, offset;
|
|
|
|
|
|
|
|
/* Get an FD for the pixmap object
|
|
|
|
*/
|
|
|
|
fds = xcb_dri3_buffer_from_pixmap_reply_fds(c, bp_reply);
|
|
|
|
|
|
|
|
stride = bp_reply->stride;
|
|
|
|
offset = 0;
|
|
|
|
|
|
|
|
/* createImageFromFds creates a wrapper __DRIimage structure which
|
|
|
|
* can deal with multiple planes for things like Yuv images. So, once
|
|
|
|
* we've gotten the planar wrapper, pull the single plane out of it and
|
|
|
|
* discard the wrapper.
|
|
|
|
*/
|
2015-11-25 05:27:04 +00:00
|
|
|
image_planar = image->createImageFromFds(dri_screen,
|
|
|
|
bp_reply->width,
|
|
|
|
bp_reply->height,
|
|
|
|
image_format_to_fourcc(format),
|
|
|
|
fds, 1,
|
|
|
|
&stride, &offset, loaderPrivate);
|
2015-07-21 16:44:01 +01:00
|
|
|
close(fds[0]);
|
|
|
|
if (!image_planar)
|
|
|
|
return NULL;
|
|
|
|
|
2015-11-25 05:27:04 +00:00
|
|
|
ret = image->fromPlanar(image_planar, 0, loaderPrivate);
|
2015-07-21 16:44:01 +01:00
|
|
|
|
2017-09-28 08:18:33 +01:00
|
|
|
if (!ret)
|
|
|
|
ret = image_planar;
|
|
|
|
else
|
|
|
|
image->destroyImage(image_planar);
|
2015-07-21 16:44:01 +01:00
|
|
|
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
2018-03-13 20:06:00 +00:00
|
|
|
#ifdef HAVE_DRI3_MODIFIERS
|
2017-07-07 07:54:26 +01:00
|
|
|
__DRIimage *
|
|
|
|
loader_dri3_create_image_from_buffers(xcb_connection_t *c,
|
|
|
|
xcb_dri3_buffers_from_pixmap_reply_t *bp_reply,
|
|
|
|
unsigned int format,
|
|
|
|
__DRIscreen *dri_screen,
|
|
|
|
const __DRIimageExtension *image,
|
|
|
|
void *loaderPrivate)
|
|
|
|
{
|
|
|
|
__DRIimage *ret;
|
|
|
|
int *fds;
|
|
|
|
uint32_t *strides_in, *offsets_in;
|
|
|
|
int strides[4], offsets[4];
|
|
|
|
unsigned error;
|
|
|
|
int i;
|
|
|
|
|
|
|
|
if (bp_reply->nfd > 4)
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
fds = xcb_dri3_buffers_from_pixmap_reply_fds(c, bp_reply);
|
|
|
|
strides_in = xcb_dri3_buffers_from_pixmap_strides(bp_reply);
|
|
|
|
offsets_in = xcb_dri3_buffers_from_pixmap_offsets(bp_reply);
|
|
|
|
for (i = 0; i < bp_reply->nfd; i++) {
|
|
|
|
strides[i] = strides_in[i];
|
|
|
|
offsets[i] = offsets_in[i];
|
|
|
|
}
|
|
|
|
|
|
|
|
ret = image->createImageFromDmaBufs2(dri_screen,
|
|
|
|
bp_reply->width,
|
|
|
|
bp_reply->height,
|
|
|
|
image_format_to_fourcc(format),
|
|
|
|
bp_reply->modifier,
|
|
|
|
fds, bp_reply->nfd,
|
|
|
|
strides, offsets,
|
|
|
|
0, 0, 0, 0, /* UNDEFINED */
|
|
|
|
&error, loaderPrivate);
|
|
|
|
|
|
|
|
for (i = 0; i < bp_reply->nfd; i++)
|
|
|
|
close(fds[i]);
|
|
|
|
|
|
|
|
return ret;
|
|
|
|
}
|
2018-03-13 20:06:00 +00:00
|
|
|
#endif
|
2017-07-07 07:54:26 +01:00
|
|
|
|
2015-07-21 16:43:54 +01:00
|
|
|
/** dri3_get_pixmap_buffer
 *
 * Get the DRM object for a pixmap from the X server and
 * wrap that with a __DRIimage structure using createImageFromFds
 *
 * The resulting buffer is cached in draw->buffers[] and reused on later
 * calls.  Returns NULL on allocation or protocol failure.
 */
static struct loader_dri3_buffer *
dri3_get_pixmap_buffer(__DRIdrawable *driDrawable, unsigned int format,
                       enum loader_dri3_buffer_type buffer_type,
                       struct loader_dri3_drawable *draw)
{
   int buf_id = loader_dri3_pixmap_buf_id(buffer_type);
   struct loader_dri3_buffer *buffer = draw->buffers[buf_id];
   xcb_drawable_t pixmap;
   xcb_sync_fence_t sync_fence;
   struct xshmfence *shm_fence;
   int width;
   int height;
   int fence_fd;
   __DRIscreen *cur_screen;

   /* Already wrapped this pixmap once: reuse the cached buffer. */
   if (buffer)
      return buffer;

   pixmap = draw->drawable;

   buffer = calloc(1, sizeof *buffer);
   if (!buffer)
      goto no_buffer;

   /* Shared-memory fence for synchronizing with the server. */
   fence_fd = xshmfence_alloc_shm();
   if (fence_fd < 0)
      goto no_fence;
   shm_fence = xshmfence_map_shm(fence_fd);
   if (shm_fence == NULL) {
      close (fence_fd);
      goto no_fence;
   }

   /* Get the currently-bound screen or revert to using the drawable's screen if
    * no contexts are currently bound. The latter case is at least necessary for
    * obs-studio, when using Window Capture (Xcomposite) as a Source.
    */
   cur_screen = draw->vtable->get_dri_screen();
   if (!cur_screen) {
      cur_screen = draw->dri_screen;
   }

   xcb_dri3_fence_from_fd(draw->conn,
                          pixmap,
                          (sync_fence = xcb_generate_id(draw->conn)),
                          false,
                          fence_fd);
#ifdef HAVE_DRI3_MODIFIERS
   /* Prefer the multi-plane/modifier-aware request when both the server
    * and the driver support it.
    */
   if (draw->multiplanes_available &&
       draw->ext->image->base.version >= 15 &&
       draw->ext->image->createImageFromDmaBufs2) {
      xcb_dri3_buffers_from_pixmap_cookie_t bps_cookie;
      xcb_dri3_buffers_from_pixmap_reply_t *bps_reply;

      bps_cookie = xcb_dri3_buffers_from_pixmap(draw->conn, pixmap);
      bps_reply = xcb_dri3_buffers_from_pixmap_reply(draw->conn, bps_cookie,
                                                     NULL);
      if (!bps_reply)
         goto no_image;
      buffer->image =
         loader_dri3_create_image_from_buffers(draw->conn, bps_reply, format,
                                               cur_screen, draw->ext->image,
                                               buffer);
      width = bps_reply->width;
      height = bps_reply->height;
      free(bps_reply);
   } else
#endif
   {
      /* Legacy single-plane path. */
      xcb_dri3_buffer_from_pixmap_cookie_t bp_cookie;
      xcb_dri3_buffer_from_pixmap_reply_t *bp_reply;

      bp_cookie = xcb_dri3_buffer_from_pixmap(draw->conn, pixmap);
      bp_reply = xcb_dri3_buffer_from_pixmap_reply(draw->conn, bp_cookie, NULL);
      if (!bp_reply)
         goto no_image;

      buffer->image = loader_dri3_create_image(draw->conn, bp_reply, format,
                                               cur_screen, draw->ext->image,
                                               buffer);
      width = bp_reply->width;
      height = bp_reply->height;
      free(bp_reply);
   }

   if (!buffer->image)
      goto no_image;

   /* own_pixmap = false: the pixmap belongs to the application, so it must
    * not be freed when this buffer is destroyed.
    */
   buffer->pixmap = pixmap;
   buffer->own_pixmap = false;
   buffer->width = width;
   buffer->height = height;
   buffer->shm_fence = shm_fence;
   buffer->sync_fence = sync_fence;

   draw->buffers[buf_id] = buffer;

   return buffer;

no_image:
   xcb_sync_destroy_fence(draw->conn, sync_fence);
   xshmfence_unmap_shm(shm_fence);
no_fence:
   free(buffer);
no_buffer:
   return NULL;
}
|
|
|
|
|
|
|
|
/** dri3_get_buffer
 *
 * Find a front or back buffer, allocating new ones as necessary
 *
 * Reallocates when the cached buffer is missing, the wrong size, or flagged
 * for reallocation; preserves old contents across the swap where required.
 * Returns NULL when no back-buffer slot is available or allocation fails.
 */
static struct loader_dri3_buffer *
dri3_get_buffer(__DRIdrawable *driDrawable,
                unsigned int format,
                enum loader_dri3_buffer_type buffer_type,
                struct loader_dri3_drawable *draw)
{
   struct loader_dri3_buffer *buffer;
   /* Back buffers always wait on their fence before being handed out. */
   bool fence_await = buffer_type == loader_dri3_buffer_back;
   int buf_id;

   if (buffer_type == loader_dri3_buffer_back) {
      draw->back_format = format;

      buf_id = dri3_find_back(draw, !draw->prefer_back_buffer_reuse);

      if (buf_id < 0)
         return NULL;
   } else {
      buf_id = LOADER_DRI3_FRONT_ID;
   }

   buffer = draw->buffers[buf_id];

   /* Allocate a new buffer if there isn't an old one, if that
    * old one is the wrong size, or if it's suboptimal
    */
   if (!buffer || buffer->width != draw->width ||
       buffer->height != draw->height ||
       buffer->reallocate) {
      struct loader_dri3_buffer *new_buffer;

      /* Allocate the new buffers
       */
      new_buffer = dri3_alloc_render_buffer(draw,
                                            format,
                                            draw->width,
                                            draw->height,
                                            draw->depth);
      if (!new_buffer)
         return NULL;

      /* When resizing, copy the contents of the old buffer, waiting for that
       * copy to complete using our fences before proceeding
       */
      if ((buffer_type == loader_dri3_buffer_back ||
           (buffer_type == loader_dri3_buffer_front && draw->have_fake_front))
          && buffer) {

         /* Fill the new buffer with data from an old buffer */
         if (!loader_dri3_blit_image(draw,
                                     new_buffer->image,
                                     buffer->image,
                                     0, 0,
                                     MIN2(buffer->width, new_buffer->width),
                                     MIN2(buffer->height, new_buffer->height),
                                     0, 0, 0) &&
             !buffer->linear_buffer) {
            /* GPU blit unavailable: fall back to a server-side copy,
             * fenced so we can wait for its completion below.
             */
            dri3_fence_reset(draw->conn, new_buffer);
            dri3_copy_area(draw->conn,
                           buffer->pixmap,
                           new_buffer->pixmap,
                           dri3_drawable_gc(draw),
                           0, 0, 0, 0,
                           draw->width, draw->height);
            dri3_fence_trigger(draw->conn, new_buffer);
            fence_await = true;
         }
         dri3_free_render_buffer(draw, buffer);
      } else if (buffer_type == loader_dri3_buffer_front) {
         /* Fill the new fake front with data from a real front */
         loader_dri3_swapbuffer_barrier(draw);
         dri3_fence_reset(draw->conn, new_buffer);
         dri3_copy_area(draw->conn,
                        draw->drawable,
                        new_buffer->pixmap,
                        dri3_drawable_gc(draw),
                        0, 0, 0, 0,
                        draw->width, draw->height);
         dri3_fence_trigger(draw->conn, new_buffer);

         if (new_buffer->linear_buffer) {
            /* PRIME: copy from the linear (shared) buffer into the
             * render image once the server-side copy has landed.
             */
            dri3_fence_await(draw->conn, draw, new_buffer);
            (void) loader_dri3_blit_image(draw,
                                          new_buffer->image,
                                          new_buffer->linear_buffer,
                                          0, 0, draw->width, draw->height,
                                          0, 0, 0);
         } else
            fence_await = true;
      }
      buffer = new_buffer;
      draw->buffers[buf_id] = buffer;
   }

   if (fence_await)
      dri3_fence_await(draw->conn, draw, buffer);

   /*
    * Do we need to preserve the content of a previous buffer?
    *
    * Note that this blit is needed only to avoid a wait for a buffer that
    * is currently in the flip chain or being scanned out from. That's really
    * a tradeoff. If we're ok with the wait we can reduce the number of back
    * buffers to 1 for SWAP_EXCHANGE, and 1 for SWAP_COPY,
    * but in the latter case we must disallow page-flipping.
    */
   if (buffer_type == loader_dri3_buffer_back &&
       draw->cur_blit_source != -1 &&
       draw->buffers[draw->cur_blit_source] &&
       buffer != draw->buffers[draw->cur_blit_source]) {

      struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];

      /* Avoid flushing here. Will probably do good for tiling hardware. */
      (void) loader_dri3_blit_image(draw,
                                    buffer->image,
                                    source->image,
                                    0, 0, draw->width, draw->height,
                                    0, 0, 0);
      buffer->last_swap = source->last_swap;
      draw->cur_blit_source = -1;
   }
   /* Return the requested buffer */
   return buffer;
}
|
|
|
|
|
|
|
|
/** dri3_free_buffers
|
|
|
|
*
|
|
|
|
* Free the front bufffer or all of the back buffers. Used
|
|
|
|
* when the application changes which buffers it needs
|
|
|
|
*/
|
|
|
|
static void
|
|
|
|
dri3_free_buffers(__DRIdrawable *driDrawable,
|
|
|
|
enum loader_dri3_buffer_type buffer_type,
|
|
|
|
struct loader_dri3_drawable *draw)
|
|
|
|
{
|
|
|
|
struct loader_dri3_buffer *buffer;
|
|
|
|
int first_id;
|
|
|
|
int n_id;
|
|
|
|
int buf_id;
|
|
|
|
|
|
|
|
switch (buffer_type) {
|
|
|
|
case loader_dri3_buffer_back:
|
|
|
|
first_id = LOADER_DRI3_BACK_ID(0);
|
|
|
|
n_id = LOADER_DRI3_MAX_BACK;
|
2017-09-14 12:09:05 +01:00
|
|
|
draw->cur_blit_source = -1;
|
2015-07-21 16:43:54 +01:00
|
|
|
break;
|
|
|
|
case loader_dri3_buffer_front:
|
|
|
|
first_id = LOADER_DRI3_FRONT_ID;
|
2017-09-14 12:09:05 +01:00
|
|
|
/* Don't free a fake front holding new backbuffer content. */
|
|
|
|
n_id = (draw->cur_blit_source == LOADER_DRI3_FRONT_ID) ? 0 : 1;
|
2021-01-26 19:11:52 +00:00
|
|
|
break;
|
|
|
|
default:
|
|
|
|
unreachable("unhandled buffer_type");
|
2015-07-21 16:43:54 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
for (buf_id = first_id; buf_id < first_id + n_id; buf_id++) {
|
|
|
|
buffer = draw->buffers[buf_id];
|
|
|
|
if (buffer) {
|
|
|
|
dri3_free_render_buffer(draw, buffer);
|
|
|
|
draw->buffers[buf_id] = NULL;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/** loader_dri3_get_buffers
|
|
|
|
*
|
|
|
|
* The published buffer allocation API.
|
|
|
|
* Returns all of the necessary buffers, allocating
|
|
|
|
* as needed.
|
|
|
|
*/
|
|
|
|
int
|
|
|
|
loader_dri3_get_buffers(__DRIdrawable *driDrawable,
|
|
|
|
unsigned int format,
|
|
|
|
uint32_t *stamp,
|
|
|
|
void *loaderPrivate,
|
|
|
|
uint32_t buffer_mask,
|
|
|
|
struct __DRIimageList *buffers)
|
|
|
|
{
|
|
|
|
struct loader_dri3_drawable *draw = loaderPrivate;
|
|
|
|
struct loader_dri3_buffer *front, *back;
|
2018-04-27 16:41:48 +01:00
|
|
|
int buf_id;
|
2015-07-21 16:43:54 +01:00
|
|
|
|
|
|
|
buffers->image_mask = 0;
|
|
|
|
buffers->front = NULL;
|
|
|
|
buffers->back = NULL;
|
|
|
|
|
|
|
|
front = NULL;
|
|
|
|
back = NULL;
|
|
|
|
|
2018-08-30 18:24:51 +01:00
|
|
|
if (!dri3_update_drawable(draw))
|
2015-07-21 16:43:54 +01:00
|
|
|
return false;
|
|
|
|
|
2020-10-02 14:20:17 +01:00
|
|
|
dri3_update_max_num_back(draw);
|
2018-04-27 16:41:48 +01:00
|
|
|
|
|
|
|
/* Free no longer needed back buffers */
|
2020-10-02 14:20:17 +01:00
|
|
|
for (buf_id = draw->cur_num_back; buf_id < LOADER_DRI3_MAX_BACK; buf_id++) {
|
2018-04-27 16:41:48 +01:00
|
|
|
if (draw->cur_blit_source != buf_id && draw->buffers[buf_id]) {
|
|
|
|
dri3_free_render_buffer(draw, draw->buffers[buf_id]);
|
|
|
|
draw->buffers[buf_id] = NULL;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-08-10 16:34:05 +01:00
|
|
|
/* pixmaps always have front buffers.
|
|
|
|
* Exchange swaps also mandate fake front buffers.
|
|
|
|
*/
|
2021-11-10 02:00:14 +00:00
|
|
|
if (draw->type != LOADER_DRI3_DRAWABLE_WINDOW ||
|
|
|
|
draw->swap_method == __DRI_ATTRIB_SWAP_EXCHANGE)
|
2015-07-21 16:43:54 +01:00
|
|
|
buffer_mask |= __DRI_IMAGE_BUFFER_FRONT;
|
|
|
|
|
|
|
|
if (buffer_mask & __DRI_IMAGE_BUFFER_FRONT) {
|
|
|
|
/* All pixmaps are owned by the server gpu.
|
|
|
|
* When we use a different gpu, we can't use the pixmap
|
|
|
|
* as buffer since it is potentially tiled a way
|
|
|
|
* our device can't understand. In this case, use
|
|
|
|
* a fake front buffer. Hopefully the pixmap
|
|
|
|
* content will get synced with the fake front
|
|
|
|
* buffer.
|
|
|
|
*/
|
2021-11-10 02:00:14 +00:00
|
|
|
if (draw->type != LOADER_DRI3_DRAWABLE_WINDOW && !draw->is_different_gpu)
|
2015-07-21 16:43:54 +01:00
|
|
|
front = dri3_get_pixmap_buffer(driDrawable,
|
|
|
|
format,
|
|
|
|
loader_dri3_buffer_front,
|
|
|
|
draw);
|
|
|
|
else
|
|
|
|
front = dri3_get_buffer(driDrawable,
|
|
|
|
format,
|
|
|
|
loader_dri3_buffer_front,
|
|
|
|
draw);
|
|
|
|
|
|
|
|
if (!front)
|
|
|
|
return false;
|
|
|
|
} else {
|
|
|
|
dri3_free_buffers(driDrawable, loader_dri3_buffer_front, draw);
|
|
|
|
draw->have_fake_front = 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (buffer_mask & __DRI_IMAGE_BUFFER_BACK) {
|
|
|
|
back = dri3_get_buffer(driDrawable,
|
|
|
|
format,
|
|
|
|
loader_dri3_buffer_back,
|
|
|
|
draw);
|
|
|
|
if (!back)
|
|
|
|
return false;
|
|
|
|
draw->have_back = 1;
|
|
|
|
} else {
|
|
|
|
dri3_free_buffers(driDrawable, loader_dri3_buffer_back, draw);
|
|
|
|
draw->have_back = 0;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (front) {
|
|
|
|
buffers->image_mask |= __DRI_IMAGE_BUFFER_FRONT;
|
|
|
|
buffers->front = front->image;
|
2021-11-10 02:00:14 +00:00
|
|
|
draw->have_fake_front =
|
|
|
|
draw->is_different_gpu ||
|
|
|
|
draw->type == LOADER_DRI3_DRAWABLE_WINDOW;
|
2015-07-21 16:43:54 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
if (back) {
|
|
|
|
buffers->image_mask |= __DRI_IMAGE_BUFFER_BACK;
|
|
|
|
buffers->back = back->image;
|
|
|
|
}
|
|
|
|
|
|
|
|
draw->stamp = stamp;
|
|
|
|
|
|
|
|
return true;
|
|
|
|
}
|
2017-02-17 15:12:21 +00:00
|
|
|
|
|
|
|
/** loader_dri3_update_drawable_geometry
|
|
|
|
*
|
|
|
|
* Get the current drawable geometry.
|
|
|
|
*/
|
|
|
|
void
|
|
|
|
loader_dri3_update_drawable_geometry(struct loader_dri3_drawable *draw)
|
|
|
|
{
|
|
|
|
xcb_get_geometry_cookie_t geom_cookie;
|
|
|
|
xcb_get_geometry_reply_t *geom_reply;
|
|
|
|
|
|
|
|
geom_cookie = xcb_get_geometry(draw->conn, draw->drawable);
|
|
|
|
|
|
|
|
geom_reply = xcb_get_geometry_reply(draw->conn, geom_cookie, NULL);
|
|
|
|
|
|
|
|
if (geom_reply) {
|
|
|
|
draw->width = geom_reply->width;
|
|
|
|
draw->height = geom_reply->height;
|
|
|
|
draw->vtable->set_drawable_size(draw, draw->width, draw->height);
|
2017-09-05 09:07:13 +01:00
|
|
|
draw->ext->flush->invalidate(draw->dri_drawable);
|
2017-02-17 15:12:21 +00:00
|
|
|
|
|
|
|
free(geom_reply);
|
|
|
|
}
|
|
|
|
}
|
2017-06-22 08:24:34 +01:00
|
|
|
|
|
|
|
|
|
|
|
/**
 * Make sure the server has flushed all pending swap buffers to hardware
 * for this drawable. Ideally we'd want to send an X protocol request to
 * have the server block our connection until the swaps are complete. That
 * would avoid the potential round-trip here.
 */
void
loader_dri3_swapbuffer_barrier(struct loader_dri3_drawable *draw)
{
   int64_t ust;
   int64_t msc;
   int64_t sbc;

   /* Waiting for sbc 0 blocks until every queued swap has completed;
    * the returned counters are not needed.
    */
   (void) loader_dri3_wait_for_sbc(draw, 0, &ust, &msc, &sbc);
}
|
2017-08-10 14:35:39 +01:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Perform any cleanup associated with a close screen operation.
|
|
|
|
* \param dri_screen[in,out] Pointer to __DRIscreen about to be closed.
|
|
|
|
*
|
|
|
|
* This function destroys the screen's cached swap context if any.
|
|
|
|
*/
|
|
|
|
void
|
|
|
|
loader_dri3_close_screen(__DRIscreen *dri_screen)
|
|
|
|
{
|
|
|
|
mtx_lock(&blit_context.mtx);
|
|
|
|
if (blit_context.ctx && blit_context.cur_screen == dri_screen) {
|
|
|
|
blit_context.core->destroyContext(blit_context.ctx);
|
|
|
|
blit_context.ctx = NULL;
|
|
|
|
}
|
|
|
|
mtx_unlock(&blit_context.mtx);
|
|
|
|
}
|
2017-08-11 08:49:54 +01:00
|
|
|
|
|
|
|
/**
|
|
|
|
* Find a backbuffer slot - potentially allocating a back buffer
|
|
|
|
*
|
|
|
|
* \param draw[in,out] Pointer to the drawable for which to find back.
|
|
|
|
* \return Pointer to a new back buffer or NULL if allocation failed or was
|
|
|
|
* not mandated.
|
|
|
|
*
|
|
|
|
* Find a potentially new back buffer, and if it's not been allocated yet and
|
|
|
|
* in addition needs initializing, then try to allocate and initialize it.
|
|
|
|
*/
|
2017-09-14 11:39:18 +01:00
|
|
|
#include <stdio.h>
|
2017-08-11 08:49:54 +01:00
|
|
|
static struct loader_dri3_buffer *
|
|
|
|
dri3_find_back_alloc(struct loader_dri3_drawable *draw)
|
|
|
|
{
|
|
|
|
struct loader_dri3_buffer *back;
|
|
|
|
int id;
|
|
|
|
|
2021-09-08 19:08:57 +01:00
|
|
|
id = dri3_find_back(draw, false);
|
2017-09-14 11:39:18 +01:00
|
|
|
if (id < 0)
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
back = draw->buffers[id];
|
|
|
|
/* Allocate a new back if we haven't got one */
|
|
|
|
if (!back && draw->back_format != __DRI_IMAGE_FORMAT_NONE &&
|
2018-08-30 18:24:51 +01:00
|
|
|
dri3_update_drawable(draw))
|
2017-09-14 11:39:18 +01:00
|
|
|
back = dri3_alloc_render_buffer(draw, draw->back_format,
|
|
|
|
draw->width, draw->height, draw->depth);
|
|
|
|
|
|
|
|
if (!back)
|
|
|
|
return NULL;
|
|
|
|
|
|
|
|
draw->buffers[id] = back;
|
|
|
|
|
|
|
|
/* If necessary, prefill the back with data according to swap_method mode. */
|
|
|
|
if (draw->cur_blit_source != -1 &&
|
|
|
|
draw->buffers[draw->cur_blit_source] &&
|
|
|
|
back != draw->buffers[draw->cur_blit_source]) {
|
|
|
|
struct loader_dri3_buffer *source = draw->buffers[draw->cur_blit_source];
|
|
|
|
|
|
|
|
dri3_fence_await(draw->conn, draw, source);
|
|
|
|
dri3_fence_await(draw->conn, draw, back);
|
|
|
|
(void) loader_dri3_blit_image(draw,
|
|
|
|
back->image,
|
|
|
|
source->image,
|
|
|
|
0, 0, draw->width, draw->height,
|
|
|
|
0, 0, 0);
|
|
|
|
back->last_swap = source->last_swap;
|
|
|
|
draw->cur_blit_source = -1;
|
2017-08-11 08:49:54 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
return back;
|
|
|
|
}
|