diff --git a/src/intel/vulkan/anv_private.h b/src/intel/vulkan/anv_private.h index c67a9d9d68b..d8f198204bd 100644 --- a/src/intel/vulkan/anv_private.h +++ b/src/intel/vulkan/anv_private.h @@ -3519,31 +3519,6 @@ anv_pipeline_get_last_vue_prog_data(const struct anv_graphics_pipeline *pipeline return &get_vs_prog_data(pipeline)->base; } -static inline bool -anv_cmd_buffer_needs_dynamic_state(const struct anv_cmd_buffer *cmd_buffer, - anv_cmd_dirty_mask_t mask) -{ - /* Only dynamic state */ - assert((mask & ANV_CMD_DIRTY_PIPELINE) == 0); - - /* If all the state is statically put into the pipeline batch, nothing to - * do. - */ - if ((cmd_buffer->state.gfx.pipeline->static_state_mask & mask) == mask) - return false; - - /* Dynamic state affected by vkCmd* commands */ - if (cmd_buffer->state.gfx.dirty & mask) - return true; - - /* For all other states we might have part of the information in the - * anv_graphics_pipeline::dynamic_state not emitted as part of the pipeline - * batch so we need to reemit the packet associated with this state if the - * pipeline changed. - */ - return (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_PIPELINE) != 0; -} - VkResult anv_device_init_rt_shaders(struct anv_device *device); diff --git a/src/intel/vulkan/genX_cmd_buffer.c b/src/intel/vulkan/genX_cmd_buffer.c index 97bc5de06e8..f1fbfe0c6d0 100644 --- a/src/intel/vulkan/genX_cmd_buffer.c +++ b/src/intel/vulkan/genX_cmd_buffer.c @@ -3965,6 +3965,11 @@ genX(cmd_buffer_flush_state)(struct anv_cmd_buffer *cmd_buffer) if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_PIPELINE) { anv_batch_emit_batch(&cmd_buffer->batch, &pipeline->base.batch); + /* Remove from dynamic state emission all of stuff that is baked into + * the pipeline. + */ + cmd_buffer->state.gfx.dirty &= ~pipeline->static_state_mask; + /* If the pipeline changed, we may need to re-allocate push constant * space in the URB. 
*/ @@ -4038,23 +4043,23 @@ genX(cmd_buffer_flush_state)(struct anv_cmd_buffer *cmd_buffer) cmd_buffer_emit_clip(cmd_buffer); - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_RASTERIZER_DISCARD_ENABLE)) - cmd_buffer_emit_streamout(cmd_buffer); + if (pipeline->dynamic_states & ANV_CMD_DIRTY_DYNAMIC_RASTERIZER_DISCARD_ENABLE) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_DYNAMIC_RASTERIZER_DISCARD_ENABLE | ANV_CMD_DIRTY_XFB_ENABLE)) + cmd_buffer_emit_streamout(cmd_buffer); + } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_SCISSOR | - ANV_CMD_DIRTY_RENDER_TARGETS | - ANV_CMD_DIRTY_DYNAMIC_VIEWPORT)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_DYNAMIC_SCISSOR | + ANV_CMD_DIRTY_RENDER_TARGETS | + ANV_CMD_DIRTY_DYNAMIC_VIEWPORT | + ANV_CMD_DIRTY_PIPELINE)) { cmd_buffer_emit_viewport(cmd_buffer); cmd_buffer_emit_depth_viewport(cmd_buffer, pipeline->depth_clamp_enable); } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_SCISSOR | - ANV_CMD_DIRTY_RENDER_TARGETS | - ANV_CMD_DIRTY_DYNAMIC_VIEWPORT)) + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_DYNAMIC_SCISSOR | + ANV_CMD_DIRTY_RENDER_TARGETS | + ANV_CMD_DIRTY_DYNAMIC_VIEWPORT)) cmd_buffer_emit_scissor(cmd_buffer); genX(cmd_buffer_flush_dynamic_state)(cmd_buffer); diff --git a/src/intel/vulkan/gfx7_cmd_buffer.c b/src/intel/vulkan/gfx7_cmd_buffer.c index 9d9ead4b695..3d0b14277f1 100644 --- a/src/intel/vulkan/gfx7_cmd_buffer.c +++ b/src/intel/vulkan/gfx7_cmd_buffer.c @@ -110,8 +110,7 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) struct anv_graphics_pipeline *pipeline = cmd_buffer->state.gfx.pipeline; struct anv_dynamic_state *d = &cmd_buffer->state.gfx.dynamic; - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) { + if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY) { uint32_t topology; if (anv_pipeline_has_stage(pipeline, 
MESA_SHADER_TESS_EVAL)) topology = pipeline->topology; @@ -121,22 +120,24 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) cmd_buffer->state.gfx.primitive_topology = topology; } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_RENDER_TARGETS | - ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS | - ANV_CMD_DIRTY_DYNAMIC_CULL_MODE | - ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS_ENABLE | - ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE | + ANV_CMD_DIRTY_RENDER_TARGETS | + ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS | + ANV_CMD_DIRTY_DYNAMIC_CULL_MODE | + ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS_ENABLE | + ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) { /* Take dynamic primitive topology in to account with * 3DSTATE_SF::MultisampleRasterizationMode */ uint32_t ms_rast_mode = 0; - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) { - VkPrimitiveTopology primitive_topology = d->primitive_topology; + if (cmd_buffer->state.gfx.pipeline->dynamic_states & + ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY) { + VkPrimitiveTopology primitive_topology = + cmd_buffer->state.gfx.dynamic.primitive_topology; + VkPolygonMode dynamic_raster_mode = genX(raster_polygon_mode)(cmd_buffer->state.gfx.pipeline, primitive_topology); @@ -165,9 +166,8 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) anv_batch_emit_merge(&cmd_buffer->batch, sf_dw, pipeline->gfx7.sf); } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE)) { struct anv_state cc_state = anv_cmd_buffer_alloc_dynamic_state(cmd_buffer, GENX(COLOR_CALC_STATE_length) * 
4, @@ -187,8 +187,7 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) } } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE)) { + if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE) { anv_batch_emit(&cmd_buffer->batch, GENX(3DSTATE_LINE_STIPPLE), ls) { ls.LineStipplePattern = d->line_stipple.pattern; ls.LineStippleInverseRepeatCount = @@ -197,15 +196,15 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) } } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_RENDER_TARGETS | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE | + ANV_CMD_DIRTY_RENDER_TARGETS | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)) { uint32_t depth_stencil_dw[GENX(DEPTH_STENCIL_STATE_length)]; struct GENX(DEPTH_STENCIL_STATE) depth_stencil = { @@ -246,9 +245,9 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) } if (cmd_buffer->state.gfx.gfx7.index_buffer && - anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_INDEX_BUFFER | - ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_RESTART_ENABLE)) { + cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE | + ANV_CMD_DIRTY_INDEX_BUFFER | + ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_RESTART_ENABLE)) { struct anv_buffer *buffer = cmd_buffer->state.gfx.gfx7.index_buffer; uint32_t offset = cmd_buffer->state.gfx.gfx7.index_offset; @@ -278,9 +277,9 @@ 
genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) * threads or if we have dirty dynamic primitive topology state and * need to toggle 3DSTATE_WM::MultisampleRasterizationMode dynamically. */ - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE | - ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE | + ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE | + ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) { VkPolygonMode dynamic_raster_mode; VkPrimitiveTopology primitive_topology = d->primitive_topology; dynamic_raster_mode = @@ -302,18 +301,17 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) anv_batch_emit_merge(&cmd_buffer->batch, dwords, pipeline->gfx7.wm); } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_SAMPLE_LOCATIONS)) { + if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_SAMPLE_LOCATIONS) { genX(emit_multisample)(&cmd_buffer->batch, pipeline->rasterization_samples, d->sample_locations.locations); } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE | - ANV_CMD_DIRTY_DYNAMIC_LOGIC_OP)) { - const uint8_t color_writes = d->color_writes; + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE | + ANV_CMD_DIRTY_DYNAMIC_LOGIC_OP)) { + const uint8_t color_writes = cmd_buffer->state.gfx.dynamic.color_writes; + /* Blend states of each RT */ uint32_t blend_dws[GENX(BLEND_STATE_length) + MAX_RTS * GENX(BLEND_STATE_ENTRY_length)]; uint32_t *dws = blend_dws; diff --git a/src/intel/vulkan/gfx8_cmd_buffer.c b/src/intel/vulkan/gfx8_cmd_buffer.c index b9518fe5db8..68ca35033d3 100644 --- a/src/intel/vulkan/gfx8_cmd_buffer.c +++ b/src/intel/vulkan/gfx8_cmd_buffer.c @@ -323,14 +323,11 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) struct anv_dynamic_state *d = &cmd_buffer->state.gfx.dynamic; #if GFX_VER >= 11 - if 
(anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_SHADING_RATE)) { + if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_SHADING_RATE) genX(emit_shading_rate)(&cmd_buffer->batch, pipeline, d); - } #endif /* GFX_VER >= 11 */ - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) { + if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY) { uint32_t topology; if (anv_pipeline_has_stage(pipeline, MESA_SHADER_TESS_EVAL)) topology = pipeline->topology; @@ -344,8 +341,8 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) } } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE | + ANV_CMD_DIRTY_DYNAMIC_LINE_WIDTH)) { uint32_t sf_dw[GENX(3DSTATE_SF_length)]; struct GENX(3DSTATE_SF) sf = { GENX(3DSTATE_SF_header), @@ -363,12 +360,12 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) anv_batch_emit_merge(&cmd_buffer->batch, sf_dw, pipeline->gfx8.sf); } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS | - ANV_CMD_DIRTY_DYNAMIC_CULL_MODE | - ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS_ENABLE | - ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS | + ANV_CMD_DIRTY_DYNAMIC_CULL_MODE | + ANV_CMD_DIRTY_DYNAMIC_FRONT_FACE | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_BIAS_ENABLE | + ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) { /* Take dynamic primitive topology in to account with * 3DSTATE_RASTER::APIMode * 3DSTATE_RASTER::DXMultisampleRasterizationEnable @@ -378,8 +375,8 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) bool msaa_raster_enable = false; bool aa_enable = 0; - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY)) { + if 
(cmd_buffer->state.gfx.pipeline->dynamic_states & + ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_TOPOLOGY) { VkPrimitiveTopology primitive_topology = cmd_buffer->state.gfx.dynamic.primitive_topology; @@ -422,9 +419,8 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) * using a big old #if switch here. */ #if GFX_VER == 8 - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE)) { struct anv_state cc_state = anv_cmd_buffer_alloc_dynamic_state(cmd_buffer, GENX(COLOR_CALC_STATE_length) * 4, @@ -445,15 +441,15 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) } } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_RENDER_TARGETS | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE | + ANV_CMD_DIRTY_RENDER_TARGETS | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)) { uint32_t wm_depth_stencil_dw[GENX(3DSTATE_WM_DEPTH_STENCIL_length)]; struct GENX(3DSTATE_WM_DEPTH_STENCIL) wm_depth_stencil = { @@ -492,8 +488,7 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) want_depth_pma_fix(cmd_buffer)); } #else - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS)) { + if (cmd_buffer->state.gfx.dirty & 
ANV_CMD_DIRTY_DYNAMIC_BLEND_CONSTANTS) { struct anv_state cc_state = anv_cmd_buffer_alloc_dynamic_state(cmd_buffer, GENX(COLOR_CALC_STATE_length) * 4, @@ -512,16 +507,16 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) } } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_RENDER_TARGETS | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE | - ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE | + ANV_CMD_DIRTY_RENDER_TARGETS | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_COMPARE_MASK | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_WRITE_MASK | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_REFERENCE | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_TEST_ENABLE | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_WRITE_ENABLE | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_COMPARE_OP | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_TEST_ENABLE | + ANV_CMD_DIRTY_DYNAMIC_STENCIL_OP)) { uint32_t dwords[GENX(3DSTATE_WM_DEPTH_STENCIL_length)]; struct GENX(3DSTATE_WM_DEPTH_STENCIL) wm_depth_stencil = { GENX(3DSTATE_WM_DEPTH_STENCIL_header), @@ -564,9 +559,9 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) #endif #if GFX_VER >= 12 - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS | - ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE)) { + if(cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS | + ANV_CMD_DIRTY_DYNAMIC_DEPTH_BOUNDS_TEST_ENABLE)) { anv_batch_emit(&cmd_buffer->batch, GENX(3DSTATE_DEPTH_BOUNDS), db) { db.DepthBoundsTestEnable = d->depth_bounds_test_enable; db.DepthBoundsTestMinValue = d->depth_bounds.min; @@ -575,8 +570,7 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) } #endif - if 
(anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE)) { + if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_LINE_STIPPLE) { anv_batch_emit(&cmd_buffer->batch, GENX(3DSTATE_LINE_STIPPLE), ls) { ls.LineStipplePattern = d->line_stipple.pattern; ls.LineStippleInverseRepeatCount = @@ -585,9 +579,9 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) } } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_INDEX_BUFFER | - ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_RESTART_ENABLE)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE | + ANV_CMD_DIRTY_INDEX_BUFFER | + ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_RESTART_ENABLE)) { anv_batch_emit(&cmd_buffer->batch, GENX(3DSTATE_VF), vf) { #if GFX_VERx10 >= 125 vf.GeometryDistributionEnable = true; @@ -598,8 +592,8 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) } #if GFX_VERx10 >= 125 - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_RESTART_ENABLE)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE | + ANV_CMD_DIRTY_DYNAMIC_PRIMITIVE_RESTART_ENABLE)) { anv_batch_emit(&cmd_buffer->batch, GENX(3DSTATE_VFG), vfg) { /* If 3DSTATE_TE: TE Enable == 1 then RR_STRICT else RR_FREE*/ vfg.DistributionMode = @@ -628,16 +622,15 @@ genX(cmd_buffer_flush_dynamic_state)(struct anv_cmd_buffer *cmd_buffer) } #endif - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_SAMPLE_LOCATIONS)) { + if (cmd_buffer->state.gfx.dirty & ANV_CMD_DIRTY_DYNAMIC_SAMPLE_LOCATIONS) { genX(emit_sample_pattern)(&cmd_buffer->batch, pipeline->rasterization_samples, d->sample_locations.locations); } - if (anv_cmd_buffer_needs_dynamic_state(cmd_buffer, - ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE | - ANV_CMD_DIRTY_DYNAMIC_LOGIC_OP)) { + if (cmd_buffer->state.gfx.dirty & (ANV_CMD_DIRTY_PIPELINE | + ANV_CMD_DIRTY_DYNAMIC_COLOR_BLEND_STATE | + ANV_CMD_DIRTY_DYNAMIC_LOGIC_OP)) { const uint8_t color_writes = 
d->color_writes; /* 3DSTATE_WM in the hope we can avoid spawning fragment shaders * threads.