virgl: Fix texture transfers by using a staging resource

This commit fixes the following flaws in the implementation:

* when a resource was re-allocated, the guest side storage was also
  allocated, even though it is not needed when staging transfers are used
* when a resource needs a readback before being written to, the
  call would go through vws->transfer_get, thereby bypassing the
  staging resource, and this would fail on the host because the
  allocated IOV was too small (just one byte)
* if the texture write needed neither a flush nor a readback, the
  old code path would be used, expecting guest side backing storage
  for the texture (see the sketch of the corrected decision after
  this list)
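
A condensed sketch of the corrected map-type decision, simplified from
virgl_resource_transfer_prepare() in the diff below; the helper name, the
plain boolean parameters and the shortened enum names are placeholders for
the driver's actual state and its VIRGL_TRANSFER_MAP_* values:

  #include <stdbool.h>

  /* Sketch only: the real code also handles waits, flushes and resource
   * reallocation. */
  enum map_type {
     MAP_HW_RES,                          /* map guest-backed storage       */
     MAP_WRITE_TO_STAGING,                /* plain upload via staging       */
     MAP_READ_FROM_STAGING,               /* readback via staging           */
     MAP_WRITE_TO_STAGING_WITH_READBACK,  /* read-modify-write via staging  */
  };

  static enum map_type
  choose_map_type(bool use_staging, bool is_read, bool needs_readback)
  {
     if (!use_staging)
        return MAP_HW_RES;             /* old path: guest backing storage */
     if (is_read)
        return MAP_READ_FROM_STAGING;
     return needs_readback ? MAP_WRITE_TO_STAGING_WITH_READBACK
                           : MAP_WRITE_TO_STAGING;
  }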

v2: - actually do a readback to the staging resource when it is required
    - fix typo (Lepton)

v3: Don't use staging transfers if the host can't read back the data
    by rendering to an FBO or calling getTexImage, because in this case
    we rely on the IOV to hold the data.

v4: Also don't use staging transfers if the format is not a readback
    format. Otherwise we would have to deal with the resolve blit, which
    currently does not work correctly.

v5: add a new flag that indicates whether non-renderable textures can
    be read back (either via glGetTexImage or GBM)

v6: Restrict the use of staging texture transfers to textures that can
    be read back, and on GLES also if they are bound to scanout and
    the host uses minigbm to allocate such textures.
    For that replace the flag indicating the capability to read back
    non-renderable textures with a cap that indicates whether scanout
    textures can be read back.

v7: update virglrenderer version in the CI

v8: update use of staging (Chia-I)

v9: remove superfluous check and assignment (Chia-I)

v10: disable staging textures for arrays with a stencil format. This is a
     workaround for CI failures. (The accumulated restrictions are
     summarized in the sketch below.)
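
Taken together, the restrictions from v3-v10 amount to the following
eligibility test, a self-contained restatement of
virgl_can_copy_transfer_from_host() from the diff below; the boolean
parameters stand in for the cap and format queries the driver performs:

  #include <stdbool.h>

  /* Each parameter stands in for a driver-side query:
   * copy_both_ways   - VIRGL_CAP_V2_COPY_TRANSFER_BOTH_DIRECTIONS is set
   * readback_format  - virgl_has_readback_format() without tweaks
   * host_is_gles     - the host advertises VIRGL_CAP_FAKE_FP64
   * renderable       - single-sampled and usable as a render target
   * scanout_readback - VIRGL_CAP_V2_SCANOUT_USES_GBM, scanout bind and format
   * stencil_array    - array/3D texture with a stencil format (v10 workaround)
   */
  static bool
  can_use_staging_texture_transfer(bool is_texture, bool copy_both_ways,
                                   bool stencil_array, bool readback_format,
                                   bool host_is_gles, bool renderable,
                                   bool scanout_readback)
  {
     return is_texture && copy_both_ways &&
            !stencil_array && readback_format &&
            (!host_is_gles || renderable || scanout_readback);
  }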

Fixes: cdc480585c ("virgl/drm: New optimization for uploading textures")

Signed-off-by: Gert Wollny <gert.wollny@collabora.com>
Reviewed-by: Chia-I Wu <olvaffe@gmail.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/14495>
commit c9d99b7eec (parent 165a880f1a)
Author: Gert Wollny <gert.wollny@collabora.com>
Date:   2022-01-12 13:01:30 +01:00

7 changed files with 101 additions and 37 deletions

@ -8,7 +8,7 @@ pushd /platform/crosvm
git checkout "$CROSVM_VERSION"
git submodule update --init
VIRGLRENDERER_VERSION=1f1448ff100c19cf97ce1ad0d8fb5a9e63342fa6
VIRGLRENDERER_VERSION=e420a5aab92de8fb42fad50762f0ac3b5fcb3bfb
rm -rf third_party/virglrenderer
git clone --single-branch -b master --no-checkout https://gitlab.freedesktop.org/virgl/virglrenderer.git third_party/virglrenderer
pushd third_party/virglrenderer

@ -51,21 +51,59 @@ enum virgl_transfer_map_type {
/* Map type for read of texture data from host to guest
* using staging buffer. */
VIRGL_TRANSFER_MAP_READ_FROM_STAGING,
/* Map type for write of texture data to host using staging
* buffer that needs a readback first. */
VIRGL_TRANSFER_MAP_WRITE_TO_STAGING_WITH_READBACK,
};
/* Check if copy transfer from host can be used:
* 1. if resource is a texture
* 2. if renderer supports copy transfer from host
* 1. if resource is a texture,
* 2. if renderer supports copy transfer from host,
* 3. the host is not GLES (no fake FP64)
* 4. the format can be rendered to and the format is a readback format
* or the format is a scanout format and we can read back from scanout
*/
static bool virgl_can_copy_transfer_from_host(struct virgl_screen *vs,
struct virgl_resource *res)
static bool virgl_can_readback_from_rendertarget(struct virgl_screen *vs,
struct virgl_resource *res)
{
#if 0 /* TODO re-enable this */
if (vs->caps.caps.v2.capability_bits_v2 & VIRGL_CAP_V2_COPY_TRANSFER_BOTH_DIRECTIONS &&
res->b.target != PIPE_BUFFER)
return true;
#endif
return false;
return res->b.nr_samples < 2 &&
vs->base.is_format_supported(&vs->base, res->b.format, res->b.target,
res->b.nr_samples, res->b.nr_samples,
PIPE_BIND_RENDER_TARGET);
}
static bool virgl_can_readback_from_scanout(struct virgl_screen *vs,
struct virgl_resource *res,
int bind)
{
return (vs->caps.caps.v2.capability_bits_v2 & VIRGL_CAP_V2_SCANOUT_USES_GBM) &&
(bind & VIRGL_BIND_SCANOUT) &&
virgl_has_scanout_format(vs, res->b.format, true);
}
static bool virgl_can_use_staging(struct virgl_screen *vs,
struct virgl_resource *res)
{
return (vs->caps.caps.v2.capability_bits_v2 & VIRGL_CAP_V2_COPY_TRANSFER_BOTH_DIRECTIONS) &&
(res->b.target != PIPE_BUFFER);
}
static bool is_stencil_array(struct virgl_resource *res)
{
const struct util_format_description *descr = util_format_description(res->b.format);
return (res->b.array_size > 1 || res->b.depth0 > 1) && util_format_has_stencil(descr);
}
static bool virgl_can_copy_transfer_from_host(struct virgl_screen *vs,
struct virgl_resource *res,
int bind)
{
return virgl_can_use_staging(vs, res) &&
!is_stencil_array(res) &&
virgl_has_readback_format(&vs->base, pipe_to_virgl_format(res->b.format), false) &&
((!(vs->caps.caps.v2.capability_bits & VIRGL_CAP_FAKE_FP64)) ||
virgl_can_readback_from_rendertarget(vs, res) ||
virgl_can_readback_from_scanout(vs, res, bind));
}
/* We need to flush to properly sync the transfer with the current cmdbuf.
@ -167,7 +205,6 @@ virgl_resource_transfer_prepare(struct virgl_context *vctx,
PIPE_MAP_DISCARD_WHOLE_RESOURCE)) &&
likely(!(virgl_debug & VIRGL_DEBUG_XFER))) {
bool can_realloc = false;
bool can_staging = false;
/* A PIPE_MAP_DISCARD_WHOLE_RESOURCE transfer may be followed by
* PIPE_MAP_UNSYNCHRONIZED transfers to non-overlapping regions.
@ -177,14 +214,12 @@ virgl_resource_transfer_prepare(struct virgl_context *vctx,
*/
if (xfer->base.usage & PIPE_MAP_DISCARD_WHOLE_RESOURCE) {
can_realloc = virgl_can_rebind_resource(vctx, &res->b);
} else {
can_staging = vctx->supports_staging;
}
/* discard implies no readback */
assert(!readback);
if (can_realloc || can_staging) {
if (can_realloc || vctx->supports_staging) {
/* Both map types have some costs. Do them only when the resource is
* (or will be) busy for real. Otherwise, set wait to false.
*/
@ -193,6 +228,7 @@ virgl_resource_transfer_prepare(struct virgl_context *vctx,
map_type = (can_realloc) ?
VIRGL_TRANSFER_MAP_REALLOC :
VIRGL_TRANSFER_MAP_WRITE_TO_STAGING;
wait = false;
/* There is normally no need to flush either, unless the amount of
@ -203,13 +239,6 @@ virgl_resource_transfer_prepare(struct virgl_context *vctx,
flush = (vctx->queued_staging_res_size >
VIRGL_QUEUED_STAGING_RES_SIZE_LIMIT);
}
#if 0 /* TODO re-enable this */
/* We can use staging buffer for texture uploads from guest to host */
if (can_staging && res->b.target != PIPE_BUFFER) {
map_type = VIRGL_TRANSFER_MAP_WRITE_TO_STAGING;
}
#endif
}
}
@ -218,9 +247,11 @@ virgl_resource_transfer_prepare(struct virgl_context *vctx,
/* If we are performing readback for textures and renderer supports
* copy_transfer_from_host, then we can return here with proper map.
*/
if (virgl_can_copy_transfer_from_host(vs, res) && vctx->supports_staging &&
xfer->base.usage & PIPE_MAP_READ) {
return VIRGL_TRANSFER_MAP_READ_FROM_STAGING;
if (res->use_staging) {
if (xfer->base.usage & PIPE_MAP_READ)
return VIRGL_TRANSFER_MAP_READ_FROM_STAGING;
else
return VIRGL_TRANSFER_MAP_WRITE_TO_STAGING_WITH_READBACK;
}
/* Readback is yet another command and is transparent to the state
* trackers. It should be waited for in all cases, including when
@ -257,6 +288,10 @@ virgl_resource_transfer_prepare(struct virgl_context *vctx,
if (wait)
vws->resource_wait(vws, res->hw_res);
if (res->use_staging) {
map_type = VIRGL_TRANSFER_MAP_WRITE_TO_STAGING;
}
return map_type;
}
@ -404,6 +439,9 @@ virgl_resource_realloc(struct virgl_context *vctx, struct virgl_resource *res)
vbind = pipe_to_virgl_bind(vs, templ->bind);
vflags = pipe_to_virgl_flags(vs, templ->flags);
int alloc_size = res->use_staging ? 1 : res->metadata.total_size;
hw_res = vs->vws->resource_create(vs->vws,
templ->target,
NULL,
@ -416,7 +454,7 @@ virgl_resource_realloc(struct virgl_context *vctx, struct virgl_resource *res)
templ->last_level,
templ->nr_samples,
vflags,
res->metadata.total_size);
alloc_size);
if (!hw_res)
return false;
@ -485,6 +523,12 @@ virgl_resource_transfer_map(struct pipe_context *ctx,
/* Copy transfers don't make use of hw_res_map at the moment. */
trans->hw_res_map = NULL;
break;
case VIRGL_TRANSFER_MAP_WRITE_TO_STAGING_WITH_READBACK:
map_addr = virgl_staging_read_map(vctx, trans);
/* Copy transfers don't make use of hw_res_map at the moment. */
trans->hw_res_map = NULL;
trans->direction = VIRGL_TRANSFER_TO_HOST;
break;
case VIRGL_TRANSFER_MAP_ERROR:
default:
trans->hw_res_map = NULL;
@ -597,10 +641,12 @@ static struct pipe_resource *virgl_resource_create_front(struct pipe_screen *scr
vbind |= VIRGL_BIND_PREFER_EMULATED_BGRA;
}
// If renderer supports copy transfer from host,
// then for textures alloc minimum size of bo
// If renderer supports copy transfer from host, and we either have support
// for then for textures alloc minimum size of bo
// This size is not passed to the host
if (virgl_can_copy_transfer_from_host(vs, res))
res->use_staging = virgl_can_copy_transfer_from_host(vs, res, vbind);
if (res->use_staging)
alloc_size = 1;
else
alloc_size = res->metadata.total_size;

@ -52,7 +52,6 @@ struct virgl_resource_metadata
struct virgl_resource {
struct pipe_resource b;
uint16_t clean_mask;
struct virgl_hw_res *hw_res;
struct virgl_resource_metadata metadata;
@ -68,6 +67,10 @@ struct virgl_resource {
*/
unsigned bind_history;
uint32_t blob_mem;
uint16_t clean_mask;
uint16_t use_staging : 1;
uint16_t reserved : 15;
};
struct virgl_transfer {

@ -551,14 +551,14 @@ has_format_bit(struct virgl_supported_format_mask *mask,
bool
virgl_has_readback_format(struct pipe_screen *screen,
enum virgl_formats fmt)
enum virgl_formats fmt, bool allow_tweak)
{
struct virgl_screen *vscreen = virgl_screen(screen);
if (has_format_bit(&vscreen->caps.caps.v2.supported_readback_formats,
fmt))
return true;
if (fmt == VIRGL_FORMAT_L8_SRGB && vscreen->tweak_l8_srgb_readback) {
if (allow_tweak && fmt == VIRGL_FORMAT_L8_SRGB && vscreen->tweak_l8_srgb_readback) {
return true;
}
@ -635,6 +635,15 @@ virgl_format_check_bitmask(enum pipe_format format,
return false;
}
bool virgl_has_scanout_format(struct virgl_screen *vscreen,
enum pipe_format format,
bool may_emulate_bgra)
{
return virgl_format_check_bitmask(format,
vscreen->caps.caps.v2.scanout.bitmask,
may_emulate_bgra);
}
/**
* Query format support for creating a texture, drawing surface, etc.
* \param format the format to test

@ -74,7 +74,13 @@ virgl_screen(struct pipe_screen *pipe)
}
bool
virgl_has_readback_format(struct pipe_screen *screen, enum virgl_formats fmt);
virgl_has_readback_format(struct pipe_screen *screen, enum virgl_formats fmt,
bool allow_tweak);
bool
virgl_has_scanout_format(struct virgl_screen *vscreen,
enum pipe_format format,
bool may_emulate_bgra);
/* GL_ARB_map_buffer_alignment requires 64 as the minimum alignment value. In
* addition to complying with the extension, a high enough alignment value is

@ -130,7 +130,7 @@ static void *texture_transfer_map_resolve(struct pipe_context *ctx,
return NULL;
enum pipe_format fmt = resource->format;
if (!virgl_has_readback_format(ctx->screen, pipe_to_virgl_format(fmt))) {
if (!virgl_has_readback_format(ctx->screen, pipe_to_virgl_format(fmt), true)) {
if (util_format_fits_8unorm(util_format_description(fmt)))
fmt = PIPE_FORMAT_R8G8B8A8_UNORM;
else if (util_format_is_pure_sint(fmt))
@ -139,7 +139,7 @@ static void *texture_transfer_map_resolve(struct pipe_context *ctx,
fmt = PIPE_FORMAT_R32G32B32A32_UINT;
else
fmt = PIPE_FORMAT_R32G32B32A32_FLOAT;
assert(virgl_has_readback_format(ctx->screen, pipe_to_virgl_format(fmt)));
assert(virgl_has_readback_format(ctx->screen, pipe_to_virgl_format(fmt), true));
}
struct pipe_box dst_box = *box;
@ -225,7 +225,7 @@ static bool needs_resolve(struct pipe_screen *screen,
if (usage & PIPE_MAP_READ)
return !util_format_is_depth_or_stencil(resource->format) &&
!virgl_has_readback_format(screen, pipe_to_virgl_format(resource->format));
!virgl_has_readback_format(screen, pipe_to_virgl_format(resource->format), true);
return false;
}

@ -444,7 +444,7 @@ enum virgl_formats {
#define VIRGL_CAP_V2_STRING_MARKER (1 << 4)
#define VIRGL_CAP_V2_IMPLICIT_MSAA (1 << 6)
#define VIRGL_CAP_V2_COPY_TRANSFER_BOTH_DIRECTIONS (1 << 7)
#define VIRGL_CAP_V2_SCANOUT_USES_GBM (1 << 8)
/* virgl bind flags - these are compatible with mesa 10.5 gallium.
* but are fixed, no other should be passed to virgl either.
*/