anv: fix dynamic state emission

The problem is that we missed looking at pipeline changes. Pipelines
hold bits of dynamic state, and when the pipeline changes we might need
to reemit a packet.

v2: fix comment (Tapani)
    Add missing anv_cmd_buffer_needs_dynamic_state() use (Tapani)

Cc: mesa-stable
Fixes: 505d176a8e ("anv: disable baked in pipeline bits from dynamic emission path")
Reviewed-by: Tapani Pälli <tapani.palli@intel.com>
Part-of: <https://gitlab.freedesktop.org/mesa/mesa/-/merge_requests/15310>
Lionel Landwerlin 2022-03-09 10:38:01 +02:00 committed by Marge Bot
parent 1cd7d6ce37
commit f348103fce
4 changed files with 126 additions and 93 deletions


@@ -3543,6 +3543,31 @@ anv_pipeline_get_last_vue_prog_data(const struct anv_graphics_pipeline *pipeline
return &get_vs_prog_data(pipeline)->base;
}
static inline bool
anv_cmd_buffer_needs_dynamic_state(const struct anv_cmd_buffer *cmd_buffer,
anv_cmd_dirty_mask_t mask)
{
/* Only dynamic state */
assert((mask & ANV_CMD_DIRTY_PIPELINE) == 0);
/* If all the state is statically put into the pipeline batch, nothing to
* do.
*/
if ((cmd_buffer->state.gfx.pipeline->static_state_mask & mask) == mask)
return false;
/* Dynamic state affected by vkCmd* commands */
if (cmd_buffer->state.gfx.dirty & mask)
return true;
/* For all other states we might have part of the information in the
* anv_graphics_pipeline::dynamic_state not emitted as part of the pipeline
* batch so we need to reemit the packet associated with this state if the
* pipeline changed.
*/
return (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_PIPELINE) != 0;
}
VkResult
anv_device_init_rt_shaders(struct anv_device *device);
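
A minimal before/after sketch of a call site using the new helper (illustrative only; emit_line_stipple() is a hypothetical stand-in for the inline 3DSTATE_LINE_STIPPLE emission in the real code):

   /* Before: only vkCmd* dirty bits triggered re-emission, so a pipeline
    * change whose dynamic_state feeds part of this packet (without being
    * baked into the pipeline batch) was missed.
    */
   if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE)
      emit_line_stipple(cmd_buffer); /* hypothetical helper */

   /* After: the helper also returns true when ANV_CMD_DIRTY_PIPELINE is set
    * and the state is not fully covered by the pipeline's static_state_mask,
    * and returns false when the state is entirely baked into the pipeline
    * batch.
    */
   if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
                                          ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE))
      emit_line_stipple(cmd_buffer); /* hypothetical helper */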


@@ -3961,11 +3961,6 @@ genX(cmd_buffer_flush_state)(struct anv_cmd_buffer *cmd_buffer)
if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_PIPELINE) {
anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->base.batch);
/* Remove from dynamic state emission all of stuff that is baked into
* the pipeline.
*/
cmd_buffer->state.gfx.dirty &= ~pipeline->static_state_mask;
/* If the pipeline changed, we may need to re-allocate push constant
* space in the URB.
*/
@@ -4039,23 +4034,23 @@ genX(cmd_buffer_flush_state)(struct anv_cmd_buffer *cmd_buffer)
cmd_buffer_emit_clip(cmd_buffer);
if (pipeline->dynamic_states & ANV_CMD_DIRTY_DYNAMIC_RASTERIZER_DISCARD_ENABLE) {
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_DYNAMIC_RASTERIZER_DISCARD_ENABLE | ANV_CMD_DIRTY_XFB_ENABLE))
cmd_buffer_emit_streamout(cmd_buffer);
}
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_RASTERIZER_DISCARD_ENABLE))
cmd_buffer_emit_streamout(cmd_buffer);
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_DYNAMIC_SCISSOR |
ANV_CMD_DIRTY_RENDER_TARGETS |
ANV_CMD_DIRTY_DYNAMIC_VIEWPORT |
ANV_CMD_DIRTY_PIPELINE)) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_SCISSOR |
ANV_CMD_DIRTY_RENDER_TARGETS |
ANV_CMD_DIRTY_DYNAMIC_VIEWPORT)) {
cmd_buffer_emit_viewport(cmd_buffer);
cmd_buffer_emit_depth_viewport(cmd_buffer,
pipeline->depth_clamp_enable);
}
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_DYNAMIC_SCISSOR |
ANV_CMD_DIRTY_RENDER_TARGETS |
ANV_CMD_DIRTY_DYNAMIC_VIEWPORT))
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_SCISSOR |
ANV_CMD_DIRTY_RENDER_TARGETS |
ANV_CMD_DIRTY_DYNAMIC_VIEWPORT))
cmd_buffer_emit_scissor(cmd_buffer);
genX(cmd_buffer_flush_dynamic_state)(cmd_buffer);


@@ -110,7 +110,8 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
struct anv_graphics_pipeline *pipeline = cmd_buffer->state.gfx.pipeline;
struct anv_dynamic_state *d = &cmd_buffer->state.gfx.dynamic;
if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) {
uint32_t topology;
if (anv_pipeline_has_stage(pipeline, MESA_SHADER_TESS_EVAL))
topology = pipeline->topology;
@@ -120,21 +121,21 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
cmd_buffer->state.gfx.primitive_topology = topology;
}
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE |
ANV_CMD_DIRTY_RENDER_TARGETS |
ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS |
ANV_CMD_DIRTY_DYNAMIC_CULL_MODE |
ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_RENDER_TARGETS |
ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS |
ANV_CMD_DIRTY_DYNAMIC_CULL_MODE |
ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) {
/* Take dynamic primitive topology in to account with
* 3DSTATE_SF::MultisampleRasterizationMode
*/
uint32_t ms_rast_mode = 0;
if (cmd_buffer->state.gfx.pipeline->dynamic_states &
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) {
VkPrimitiveTopology primitive_topology =
cmd_buffer->state.gfx.dynamic.primitive_topology;
@@ -166,8 +167,9 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
anv_batch_emit_merge(&cmd_buffer->batch, sf_dw, pipeline->gfx7.sf);
}
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE)) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE)) {
struct anv_state cc_state =
anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
GENX(COLOR_CALC_STATE_length) * 4,
@@ -187,7 +189,8 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
}
}
if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE)) {
anv_batch_emit(&cmd_buffer->batch, GENX(3DSTATE_LINE_STIPPLE), ls) {
ls.LineStipplePattern = d->line_stipple.pattern;
ls.LineStippleInverseRepeatCount =
@@ -196,15 +199,15 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
}
}
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE |
ANV_CMD_DIRTY_RENDER_TARGETS |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_RENDER_TARGETS |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)) {
uint32_t depth_stencil_dw[GENX(DEPTH_STENCIL_STATE_length)];
struct GENX(DEPTH_STENCIL_STATE) depth_stencil = {
@@ -245,9 +248,9 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
}
if (cmd_buffer->state.gfx.gfx7.index_buffer &&
cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE |
ANV_CMD_DIRTY_INDEX_BUFFER |
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_RESTART_ENABLE)) {
anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_INDEX_BUFFER |
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_RESTART_ENABLE)) {
struct anv_buffer *buffer = cmd_buffer->state.gfx.gfx7.index_buffer;
uint32_t offset = cmd_buffer->state.gfx.gfx7.index_offset;
@@ -277,8 +280,9 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
* threads or if we have dirty dynamic primitive topology state and
* need to toggle 3DSTATE_WM::MultisampleRasterizationMode dynamically.
*/
if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE ||
cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE |
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) {
const uint8_t color_writes = cmd_buffer->state.gfx.dynamic.color_writes;
bool dirty_color_blend =
@@ -311,14 +315,16 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
}
if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_SAMPLE_LOCATIONS) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_SAMPLE_LOCATIONS)) {
genX(emit_multisample)(&cmd_buffer->batch,
cmd_buffer->state.gfx.dynamic.sample_locations.samples,
cmd_buffer->state.gfx.dynamic.sample_locations.locations);
}
if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE ||
cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_LOGIC_OP) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE |
ANV_CMD_DIRTY_DYNAMIC_LOGIC_OP)) {
const uint8_t color_writes = cmd_buffer->state.gfx.dynamic.color_writes;
bool dirty_color_blend =
cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE;


@@ -323,13 +323,15 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
struct anv_dynamic_state *d = &cmd_buffer->state.gfx.dynamic;
#if GFX_VER >= 11
if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_SHADING_RATE) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_SHADING_RATE)) {
genX(emit_shading_rate)(&cmd_buffer->batch, pipeline,
&cmd_buffer->state.gfx.dynamic);
}
#endif /* GFX_VER >= 11 */
if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) {
uint32_t topology;
if (anv_pipeline_has_stage(pipeline, MESA_SHADER_TESS_EVAL))
topology = pipeline->topology;
@@ -343,8 +345,8 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
}
}
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE |
ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH)) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH)) {
uint32_t sf_dw[GENX(3DSTATE_SF_length)];
struct GENX(3DSTATE_SF) sf = {
GENX(3DSTATE_SF_header),
@@ -362,12 +364,12 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
anv_batch_emit_merge(&cmd_buffer->batch, sf_dw, pipeline->gfx8.sf);
}
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS |
ANV_CMD_DIRTY_DYNAMIC_CULL_MODE |
ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS |
ANV_CMD_DIRTY_DYNAMIC_CULL_MODE |
ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) {
/* Take dynamic primitive topology in to account with
* 3DSTATE_RASTER::APIMode
* 3DSTATE_RASTER::DXMultisampleRasterizationEnable
@@ -377,8 +379,8 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
bool msaa_raster_enable = false;
bool aa_enable = 0;
if (cmd_buffer->state.gfx.pipeline->dynamic_states &
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) {
VkPrimitiveTopology primitive_topology =
cmd_buffer->state.gfx.dynamic.primitive_topology;
@@ -421,8 +423,9 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
* using a big old #if switch here.
*/
#if GFX_VER == 8
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE)) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE)) {
struct anv_state cc_state =
anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
GENX(COLOR_CALC_STATE_length) * 4,
@@ -443,15 +446,15 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
}
}
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE |
ANV_CMD_DIRTY_RENDER_TARGETS |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_RENDER_TARGETS |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)) {
uint32_t wm_depth_stencil_dw[GENX(3DSTATE_WM_DEPTH_STENCIL_length)];
struct GENX(3DSTATE_WM_DEPTH_STENCIL) wm_depth_stencil = {
@@ -490,7 +493,8 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
want_depth_pma_fix(cmd_buffer));
}
#else
if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS)) {
struct anv_state cc_state =
anv_cmd_buffer_alloc_dynamic_state(cmd_buffer,
GENX(COLOR_CALC_STATE_length) * 4,
@@ -509,16 +513,16 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
}
}
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE |
ANV_CMD_DIRTY_RENDER_TARGETS |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_RENDER_TARGETS |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE |
ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)) {
uint32_t dwords[GENX(3DSTATE_WM_DEPTH_STENCIL_length)];
struct GENX(3DSTATE_WM_DEPTH_STENCIL) wm_depth_stencil = {
GENX(3DSTATE_WM_DEPTH_STENCIL_header),
@@ -561,9 +565,9 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
#endif
#if GFX_VER >= 12
if(cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE)) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS |
ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE)) {
anv_batch_emit(&cmd_buffer->batch, GENX(3DSTATE_DEPTH_BOUNDS), db) {
db.DepthBoundsTestValueModifyDisable = false;
db.DepthBoundsTestEnableModifyDisable = false;
@@ -574,7 +578,8 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
}
#endif
if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE)) {
anv_batch_emit(&cmd_buffer->batch, GENX(3DSTATE_LINE_STIPPLE), ls) {
ls.LineStipplePattern = d->line_stipple.pattern;
ls.LineStippleInverseRepeatCount =
@@ -583,9 +588,9 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
}
}
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE |
ANV_CMD_DIRTY_INDEX_BUFFER |
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_RESTART_ENABLE)) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_INDEX_BUFFER |
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_RESTART_ENABLE)) {
anv_batch_emit(&cmd_buffer->batch, GENX(3DSTATE_VF), vf) {
#if GFX_VERx10 >= 125
vf.GeometryDistributionEnable = true;
@@ -596,8 +601,8 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
}
#if GFX_VERx10 >= 125
if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE |
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_RESTART_ENABLE)) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_RESTART_ENABLE)) {
anv_batch_emit(&cmd_buffer->batch, GENX(3DSTATE_VFG), vfg) {
/* If 3DSTATE_TE: TE Enable == 1 then RR_STRICT else RR_FREE*/
vfg.DistributionMode =
@@ -626,14 +631,16 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer)
}
#endif
if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_SAMPLE_LOCATIONS) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_SAMPLE_LOCATIONS)) {
genX(emit_sample_pattern)(&cmd_buffer->batch,
cmd_buffer->state.gfx.dynamic.sample_locations.samples,
cmd_buffer->state.gfx.dynamic.sample_locations.locations);
}
if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE ||
cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_LOGIC_OP) {
if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer,
ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE |
ANV_CMD_DIRTY_DYNAMIC_LOGIC_OP)) {
const uint8_t color_writes = cmd_buffer->state.gfx.dynamic.color_writes;
/* 3DSTATE_WM in the hope we can avoid spawning fragment shaders
* threads.