nir: Fix OpAtomicCounterIDecrement for uniform atomic counters
From the SPIR-V 1.0 specification, section 3.32.18, "Atomic Instructions": "OpAtomicIDecrement: <skip> The instruction's result is the Original Value." However, we were implementing it, for uniform atomic counters, as a pre-decrement operation, as was the one available from GLSL. Renamed the former nir intrinsic 'atomic_counter_dec*' to 'atomic_counter_pre_dec*' for clarification purposes, as it implements a pre-decrement operation as specified for GLSL. From GLSL 4.50 spec, section 8.10, "Atomic Counter Functions": "uint atomicCounterDecrement (atomic_uint c) Atomically 1. decrements the counter for c, and 2. returns the value resulting from the decrement operation. These two steps are done atomically with respect to the atomic counter functions in this table." Added a new nir intrinsic 'atomic_counter_post_dec*' which implements a post-decrement operation as required by SPIR-V. v2: (Timothy Arceri) * Add extra spec quotes on commit message * Use "post" instead of "pos" to avoid confusion with "position" Signed-off-by: Antia Puentes <apuentes@igalia.com> Signed-off-by: Alejandro Piñeiro <apinheiro@igalia.com> Reviewed-by: Timothy Arceri <tarceri@itsqueeze.com>
This commit is contained in:
parent
6677e131b8
commit
fbcebfc5bf
|
@ -53,8 +53,12 @@ lower_deref_instr(nir_builder *b, nir_intrinsic_instr *instr,
|
|||
op = nir_intrinsic_atomic_counter_inc;
|
||||
break;
|
||||
|
||||
case nir_intrinsic_atomic_counter_dec_deref:
|
||||
op = nir_intrinsic_atomic_counter_dec;
|
||||
case nir_intrinsic_atomic_counter_pre_dec_deref:
|
||||
op = nir_intrinsic_atomic_counter_pre_dec;
|
||||
break;
|
||||
|
||||
case nir_intrinsic_atomic_counter_post_dec_deref:
|
||||
op = nir_intrinsic_atomic_counter_post_dec;
|
||||
break;
|
||||
|
||||
case nir_intrinsic_atomic_counter_add_deref:
|
||||
|
|
|
@ -630,7 +630,7 @@ nir_visitor::visit(ir_call *ir)
|
|||
op = nir_intrinsic_atomic_counter_inc_deref;
|
||||
break;
|
||||
case ir_intrinsic_atomic_counter_predecrement:
|
||||
op = nir_intrinsic_atomic_counter_dec_deref;
|
||||
op = nir_intrinsic_atomic_counter_pre_dec_deref;
|
||||
break;
|
||||
case ir_intrinsic_atomic_counter_add:
|
||||
op = nir_intrinsic_atomic_counter_add_deref;
|
||||
|
@ -831,7 +831,7 @@ nir_visitor::visit(ir_call *ir)
|
|||
switch (op) {
|
||||
case nir_intrinsic_atomic_counter_read_deref:
|
||||
case nir_intrinsic_atomic_counter_inc_deref:
|
||||
case nir_intrinsic_atomic_counter_dec_deref:
|
||||
case nir_intrinsic_atomic_counter_pre_dec_deref:
|
||||
case nir_intrinsic_atomic_counter_add_deref:
|
||||
case nir_intrinsic_atomic_counter_min_deref:
|
||||
case nir_intrinsic_atomic_counter_max_deref:
|
||||
|
|
|
@ -270,7 +270,8 @@ def atomic3(name):
|
|||
intrinsic(name, src_comp=[1, 1, 1], dest_comp=1, indices=[BASE])
|
||||
|
||||
atomic("atomic_counter_inc")
|
||||
atomic("atomic_counter_dec")
|
||||
atomic("atomic_counter_pre_dec")
|
||||
atomic("atomic_counter_post_dec")
|
||||
atomic("atomic_counter_read", flags=[CAN_ELIMINATE])
|
||||
atomic2("atomic_counter_add")
|
||||
atomic2("atomic_counter_min")
|
||||
|
|
|
@ -71,7 +71,8 @@ lower_instr(nir_intrinsic_instr *instr, unsigned ssbo_offset, nir_builder *b)
|
|||
return true;
|
||||
case nir_intrinsic_atomic_counter_inc:
|
||||
case nir_intrinsic_atomic_counter_add:
|
||||
case nir_intrinsic_atomic_counter_dec:
|
||||
case nir_intrinsic_atomic_counter_pre_dec:
|
||||
case nir_intrinsic_atomic_counter_post_dec:
|
||||
/* inc and dec get remapped to add: */
|
||||
op = nir_intrinsic_ssbo_atomic_add;
|
||||
break;
|
||||
|
@ -119,7 +120,8 @@ lower_instr(nir_intrinsic_instr *instr, unsigned ssbo_offset, nir_builder *b)
|
|||
nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
|
||||
new_instr->src[2] = nir_src_for_ssa(temp);
|
||||
break;
|
||||
case nir_intrinsic_atomic_counter_dec:
|
||||
case nir_intrinsic_atomic_counter_pre_dec:
|
||||
case nir_intrinsic_atomic_counter_post_dec:
|
||||
/* remapped to ssbo_atomic_add: { buffer_idx, offset, -1 } */
|
||||
/* NOTE semantic difference so we adjust the return value below */
|
||||
temp = nir_imm_int(b, -1);
|
||||
|
@ -148,7 +150,7 @@ lower_instr(nir_intrinsic_instr *instr, unsigned ssbo_offset, nir_builder *b)
|
|||
nir_instr_insert_before(&instr->instr, &new_instr->instr);
|
||||
nir_instr_remove(&instr->instr);
|
||||
|
||||
if (instr->intrinsic == nir_intrinsic_atomic_counter_dec) {
|
||||
if (instr->intrinsic == nir_intrinsic_atomic_counter_pre_dec) {
|
||||
b->cursor = nir_after_instr(&new_instr->instr);
|
||||
nir_ssa_def *result = nir_iadd(b, &new_instr->dest.ssa, temp);
|
||||
nir_ssa_def_rewrite_uses(&instr->dest.ssa, nir_src_for_ssa(result));
|
||||
|
|
|
@ -2519,7 +2519,7 @@ get_uniform_nir_atomic_op(struct vtn_builder *b, SpvOp opcode)
|
|||
OP(AtomicExchange, exchange)
|
||||
OP(AtomicCompareExchange, comp_swap)
|
||||
OP(AtomicIIncrement, inc_deref)
|
||||
OP(AtomicIDecrement, dec_deref)
|
||||
OP(AtomicIDecrement, post_dec_deref)
|
||||
OP(AtomicIAdd, add_deref)
|
||||
OP(AtomicISub, add_deref)
|
||||
OP(AtomicUMin, min_deref)
|
||||
|
|
Loading…
Reference in New Issue