Path: blob/21.2-virgl/src/compiler/nir/nir_lower_atomics_to_ssbo.c
/*
 * Copyright © 2017 Red Hat
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Rob Clark <[email protected]>
 */

#include "nir.h"
#include "nir_builder.h"

#if defined(_WIN32) && !defined(snprintf)
#define snprintf _snprintf
#endif

/*
 * Remap atomic counters to SSBOs, starting from the shader's next SSBO slot
 * (info.num_ssbos).
 */

static bool
lower_instr(nir_intrinsic_instr *instr, unsigned ssbo_offset, nir_builder *b)
{
   nir_intrinsic_op op;

   b->cursor = nir_before_instr(&instr->instr);

   switch (instr->intrinsic) {
   case nir_intrinsic_memory_barrier_atomic_counter:
      /* Atomic counters are now SSBOs so memoryBarrierAtomicCounter() is now
       * memoryBarrierBuffer().
       */
      instr->intrinsic = nir_intrinsic_memory_barrier_buffer;
      return true;

   case nir_intrinsic_atomic_counter_inc:
   case nir_intrinsic_atomic_counter_add:
   case nir_intrinsic_atomic_counter_pre_dec:
   case nir_intrinsic_atomic_counter_post_dec:
      /* inc and dec get remapped to add: */
      op = nir_intrinsic_ssbo_atomic_add;
      break;
   case nir_intrinsic_atomic_counter_read:
      op = nir_intrinsic_load_ssbo;
      break;
   case nir_intrinsic_atomic_counter_min:
      op = nir_intrinsic_ssbo_atomic_umin;
      break;
   case nir_intrinsic_atomic_counter_max:
      op = nir_intrinsic_ssbo_atomic_umax;
      break;
   case nir_intrinsic_atomic_counter_and:
      op = nir_intrinsic_ssbo_atomic_and;
      break;
   case nir_intrinsic_atomic_counter_or:
      op = nir_intrinsic_ssbo_atomic_or;
      break;
   case nir_intrinsic_atomic_counter_xor:
      op = nir_intrinsic_ssbo_atomic_xor;
      break;
   case nir_intrinsic_atomic_counter_exchange:
      op = nir_intrinsic_ssbo_atomic_exchange;
      break;
   case nir_intrinsic_atomic_counter_comp_swap:
      op = nir_intrinsic_ssbo_atomic_comp_swap;
      break;
   default:
      return false;
   }

   nir_ssa_def *buffer = nir_imm_int(b, ssbo_offset + nir_intrinsic_base(instr));
   nir_ssa_def *temp = NULL;
   nir_intrinsic_instr *new_instr =
         nir_intrinsic_instr_create(ralloc_parent(instr), op);

   /* a couple instructions need special handling since they don't map
    * 1:1 with ssbo atomics
    */
   switch (instr->intrinsic) {
   case nir_intrinsic_atomic_counter_inc:
      /* remapped to ssbo_atomic_add: { buffer_idx, offset, +1 } */
      temp = nir_imm_int(b, +1);
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
      new_instr->src[2] = nir_src_for_ssa(temp);
      break;
   case nir_intrinsic_atomic_counter_pre_dec:
   case nir_intrinsic_atomic_counter_post_dec:
      /* remapped to ssbo_atomic_add: { buffer_idx, offset, -1 } */
      /* NOTE semantic difference so we adjust the return value below */
      temp = nir_imm_int(b, -1);
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
      new_instr->src[2] = nir_src_for_ssa(temp);
      break;
   case nir_intrinsic_atomic_counter_read:
      /* remapped to load_ssbo: { buffer_idx, offset } */
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
      break;
   default:
      /* remapped to ssbo_atomic_x: { buffer_idx, offset, data, (compare)? } */
      new_instr->src[0] = nir_src_for_ssa(buffer);
      nir_src_copy(&new_instr->src[1], &instr->src[0], new_instr);
      nir_src_copy(&new_instr->src[2], &instr->src[1], new_instr);
      if (op == nir_intrinsic_ssbo_atomic_comp_swap ||
          op == nir_intrinsic_ssbo_atomic_fcomp_swap)
         nir_src_copy(&new_instr->src[3], &instr->src[2], new_instr);
      break;
   }

   if (new_instr->intrinsic == nir_intrinsic_load_ssbo) {
      nir_intrinsic_set_align(new_instr, 4, 0);

      /* we could be replacing an intrinsic with fixed # of dest
       * num_components with one that has variable number.  So
       * best to take this from the dest:
       */
      new_instr->num_components = instr->dest.ssa.num_components;
   }

   nir_ssa_dest_init(&new_instr->instr, &new_instr->dest,
                     instr->dest.ssa.num_components,
                     instr->dest.ssa.bit_size, NULL);
   nir_instr_insert_before(&instr->instr, &new_instr->instr);
   nir_instr_remove(&instr->instr);

   if (instr->intrinsic == nir_intrinsic_atomic_counter_pre_dec) {
      b->cursor = nir_after_instr(&new_instr->instr);
      nir_ssa_def *result = nir_iadd(b, &new_instr->dest.ssa, temp);
      nir_ssa_def_rewrite_uses(&instr->dest.ssa, result);
   } else {
      nir_ssa_def_rewrite_uses(&instr->dest.ssa, &new_instr->dest.ssa);
   }

   return true;
}
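
/* A sketch of the non-1:1 rewrite above, in illustrative (not verbatim
 * nir_print) syntax.  An increment
 *
 *    ssa_2 = atomic_counter_inc (ssa_1) (base=0)
 *
 * becomes
 *
 *    ssa_2 = ssbo_atomic_add (buffer_idx, ssa_1, +1)
 *
 * where buffer_idx = ssbo_offset + base.  ssbo_atomic_add returns the
 * value from *before* the add, which matches atomic_counter_inc and
 * atomic_counter_post_dec, but atomic_counter_pre_dec returns the value
 * *after* the decrement, hence the extra iadd of -1 emitted above for
 * that one case.
 */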

static bool
is_atomic_uint(const struct glsl_type *type)
{
   if (glsl_get_base_type(type) == GLSL_TYPE_ARRAY)
      return is_atomic_uint(glsl_get_array_element(type));
   return glsl_get_base_type(type) == GLSL_TYPE_ATOMIC_UINT;
}

bool
nir_lower_atomics_to_ssbo(nir_shader *shader)
{
   unsigned ssbo_offset = shader->info.num_ssbos;
   bool progress = false;

   nir_foreach_function(function, shader) {
      if (function->impl) {
         nir_builder builder;
         nir_builder_init(&builder, function->impl);
         nir_foreach_block(block, function->impl) {
            nir_foreach_instr_safe(instr, block) {
               if (instr->type == nir_instr_type_intrinsic)
                  progress |= lower_instr(nir_instr_as_intrinsic(instr),
                                          ssbo_offset, &builder);
            }
         }

         nir_metadata_preserve(function->impl, nir_metadata_block_index |
                                               nir_metadata_dominance);
      }
   }
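
   /* A sketch of the variable rewrite below, for a hypothetical input
    * declaration such as:
    *
    *    layout(binding = 0) uniform atomic_uint counter0;
    *
    * The uniform is dropped and re-declared as an SSBO variable named
    * "counter0": an unsized uint array bound at ssbo_offset + 0, so the
    * byte offsets the intrinsics already carry keep working unchanged.
    */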
   if (progress) {
      /* replace atomic_uint uniforms with ssbo's: */
      unsigned replaced = 0;
      nir_foreach_uniform_variable_safe(var, shader) {
         if (is_atomic_uint(var->type)) {
            exec_node_remove(&var->node);

            if (replaced & (1 << var->data.binding))
               continue;

            nir_variable *ssbo;
            char name[16];

            /* A length of 0 is used to denote unsized arrays */
            const struct glsl_type *type = glsl_array_type(glsl_uint_type(), 0, 0);

            snprintf(name, sizeof(name), "counter%d", var->data.binding);

            ssbo = nir_variable_create(shader, nir_var_mem_ssbo, type, name);
            ssbo->data.binding = ssbo_offset + var->data.binding;
            ssbo->data.explicit_binding = var->data.explicit_binding;

            /* We can't use num_abos, because it only represents the number
             * of active atomic counters, and currently, unlike SSBOs, they
             * aren't compacted, so num_abos actually isn't a bound on the
             * index passed to nir_intrinsic_atomic_counter_*.  E.g. if we
             * have a single atomic counter declared like:
             *
             *    layout(binding = 1) uniform atomic_uint counter0;
             *
             * then when we lower accesses to it the atomic_counter_*
             * intrinsics will have 1 as the index but num_abos will still
             * be 1.
             */
            shader->info.num_ssbos = MAX2(shader->info.num_ssbos,
                                          ssbo->data.binding + 1);

            struct glsl_struct_field field = {
               .type = type,
               .name = "counters",
               .location = -1,
            };

            ssbo->interface_type =
               glsl_interface_type(&field, 1, GLSL_INTERFACE_PACKING_STD430,
                                   false, "counters");

            replaced |= (1 << var->data.binding);
         }
      }

      shader->info.num_abos = 0;
   }

   return progress;
}