Path: blob/21.2-virgl/src/gallium/frontends/lavapipe/lvp_lower_vulkan_resource.c
4565 views
/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "lvp_private.h"
#include "nir.h"
#include "nir_builder.h"
#include "lvp_lower_vulkan_resource.h"

/* Filter callback for nir_shader_lower_instructions(): selects the
 * instructions this pass rewrites — the Vulkan resource/descriptor
 * intrinsics and every texture instruction (whose deref sources get
 * flattened to plain indices below).
 */
static bool
lower_vulkan_resource_index(const nir_instr *instr, const void *data_cb)
{
   if (instr->type == nir_instr_type_intrinsic) {
      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
      switch (intrin->intrinsic) {
      case nir_intrinsic_vulkan_resource_index:
      case nir_intrinsic_vulkan_resource_reindex:
      case nir_intrinsic_load_vulkan_descriptor:
      case nir_intrinsic_get_ssbo_size:
         return true;
      default:
         return false;
      }
   }
   if (instr->type == nir_instr_type_tex) {
      return true;
   }
   return false;
}

/* Lower nir_intrinsic_vulkan_resource_index: map a (descriptor set, binding)
 * pair to a single flattened per-stage buffer slot.  The slot is computed by
 * summing the UBO (or SSBO) counts of all preceding descriptor sets for this
 * stage, then adding this binding's per-stage index within its own set.
 * data_cb is the struct lvp_pipeline_layout * passed to the lowering pass.
 */
static nir_ssa_def *lower_vri_intrin_vri(struct nir_builder *b,
                                         nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   unsigned desc_set_idx = nir_intrinsic_desc_set(intrin);
   unsigned binding_idx = nir_intrinsic_binding(intrin);
   struct lvp_pipeline_layout *layout = data_cb;
   struct lvp_descriptor_set_binding_layout *binding = &layout->set[desc_set_idx].layout->binding[binding_idx];
   int value = 0;
   bool is_ubo = (binding->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
                  binding->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);

   /* Skip past the buffer slots used by all lower-numbered sets. */
   for (unsigned s = 0; s < desc_set_idx; s++) {
      if (is_ubo)
         value += layout->set[s].layout->stage[b->shader->info.stage].const_buffer_count;
      else
         value += layout->set[s].layout->stage[b->shader->info.stage].shader_buffer_count;
   }
   if (is_ubo)
      /* NOTE(review): the +1 skips constant-buffer slot 0 — presumably
       * reserved (e.g. for push constants); confirm against lavapipe's
       * constant-buffer binding code. */
      value += binding->stage[b->shader->info.stage].const_buffer_index + 1;
   else
      value += binding->stage[b->shader->info.stage].shader_buffer_index;

   /* The SSA size for indices is the same as for pointers. We use
    * nir_addr_format_32bit_index_offset so we need a vec2. We don't need all
    * that data so just stuff a 0 in the second component.
    */
   if (nir_src_is_const(intrin->src[0])) {
      /* Constant array index: fold it into the slot at compile time. */
      value += nir_src_comp_as_int(intrin->src[0], 0);
      return nir_imm_ivec2(b, value, 0);
   } else
      /* Dynamic array index: emit the addition at runtime. */
      return nir_vec2(b, nir_iadd_imm(b, intrin->src[0].ssa, value),
                         nir_imm_int(b, 0));
}

/* Lower nir_intrinsic_vulkan_resource_reindex: add the delta (src[1]) to the
 * already-flattened index (src[0]), keeping the vec2(index, 0) shape produced
 * by lower_vri_intrin_vri above.
 */
static nir_ssa_def *lower_vri_intrin_vrri(struct nir_builder *b,
                                          nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   nir_ssa_def *old_index = nir_ssa_for_src(b, intrin->src[0], 1);
   nir_ssa_def *delta = nir_ssa_for_src(b, intrin->src[1], 1);
   return nir_vec2(b, nir_iadd(b, old_index, delta),
                      nir_imm_int(b, 0));
}

/* Lower nir_intrinsic_load_vulkan_descriptor: the descriptor "pointer" is
 * just the flattened index itself, repackaged as vec2(index, 0).
 */
static nir_ssa_def *lower_vri_intrin_lvd(struct nir_builder *b,
                                         nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   nir_ssa_def *index = nir_ssa_for_src(b, intrin->src[0], 1);
   return nir_vec2(b, index, nir_imm_int(b, 0));
}

/* Replace one deref source (sampler or texture) of a tex instruction with a
 * flattened per-stage index, using the same set-walk scheme as the buffer
 * lowering above.  A constant array deref is folded into the index; a dynamic
 * one becomes a sampler/texture offset source.
 *
 * Returns a bitmask of the texture slots this instruction may use (for
 * shader_info::textures_used), or 0 when lowering the sampler deref or when
 * the requested deref source is absent.
 */
static unsigned
lower_vri_instr_tex_deref(nir_tex_instr *tex,
                          nir_tex_src_type deref_src_type,
                          gl_shader_stage stage,
                          struct lvp_pipeline_layout *layout)
{
   int deref_src_idx = nir_tex_instr_src_index(tex, deref_src_type);

   if (deref_src_idx < 0)
      return 0;

   nir_deref_instr *deref_instr = nir_src_as_deref(tex->src[deref_src_idx].src);
   nir_variable *var = nir_deref_instr_get_variable(deref_instr);
   unsigned desc_set_idx = var->data.descriptor_set;
   unsigned binding_idx = var->data.binding;
   int value = 0;
   struct lvp_descriptor_set_binding_layout *binding = &layout->set[desc_set_idx].layout->binding[binding_idx];
   nir_tex_instr_remove_src(tex, deref_src_idx);
   /* Slots consumed by all lower-numbered descriptor sets for this stage. */
   for (unsigned s = 0; s < desc_set_idx; s++) {
      if (deref_src_type == nir_tex_src_sampler_deref)
         value += layout->set[s].layout->stage[stage].sampler_count;
      else
         value += layout->set[s].layout->stage[stage].sampler_view_count;
   }
   if (deref_src_type == nir_tex_src_sampler_deref)
      value += binding->stage[stage].sampler_index;
   else
      value += binding->stage[stage].sampler_view_index;

   if (deref_instr->deref_type == nir_deref_type_array) {
      if (nir_src_is_const(deref_instr->arr.index))
         value += nir_src_as_uint(deref_instr->arr.index);
      else {
         /* Dynamic indexing: keep the base in sampler/texture_index and
          * carry the runtime offset as an extra tex source. */
         if (deref_src_type == nir_tex_src_sampler_deref)
            nir_tex_instr_add_src(tex, nir_tex_src_sampler_offset, deref_instr->arr.index);
         else
            nir_tex_instr_add_src(tex, nir_tex_src_texture_offset, deref_instr->arr.index);
      }
   }
   if (deref_src_type == nir_tex_src_sampler_deref)
      tex->sampler_index = value;
   else
      tex->texture_index = value;

   if (deref_src_type == nir_tex_src_sampler_deref)
      return 0;

   if (deref_instr->deref_type == nir_deref_type_array) {
      /* Dynamically indexed array: conservatively mark the whole array. */
      assert(glsl_type_is_array(var->type));
      assert(value >= 0);
      unsigned size = glsl_get_aoa_size(var->type);
      return u_bit_consecutive(value, size);
   } else
      return 1u << value;
}

/* Lower both deref sources of a tex instruction and record the texture slots
 * it can touch in shader_info::textures_used.
 */
static void lower_vri_instr_tex(struct nir_builder *b,
                                nir_tex_instr *tex, void *data_cb)
{
   struct lvp_pipeline_layout *layout = data_cb;
   unsigned textures_used;

   lower_vri_instr_tex_deref(tex, nir_tex_src_sampler_deref, b->shader->info.stage, layout);
   textures_used = lower_vri_instr_tex_deref(tex, nir_tex_src_texture_deref, b->shader->info.stage, layout);
   while (textures_used) {
      int i = u_bit_scan(&textures_used);
      BITSET_SET(b->shader->info.textures_used, i);
   }
}

/* Main lowering callback: dispatch to the per-intrinsic helpers, or fix up
 * tex instructions in place.  Returning NULL tells
 * nir_shader_lower_instructions that the instruction was not replaced.
 */
static nir_ssa_def *lower_vri_instr(struct nir_builder *b,
                                    nir_instr *instr, void *data_cb)
{
   if (instr->type == nir_instr_type_intrinsic) {
      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
      switch (intrin->intrinsic) {
      case nir_intrinsic_vulkan_resource_index:
         return lower_vri_intrin_vri(b, instr, data_cb);

      case nir_intrinsic_vulkan_resource_reindex:
         return lower_vri_intrin_vrri(b, instr, data_cb);

      case nir_intrinsic_load_vulkan_descriptor:
         return lower_vri_intrin_lvd(b, instr, data_cb);

      case nir_intrinsic_get_ssbo_size: {
         /* The result of the load_vulkan_descriptor is a vec2(index, offset)
          * but we only want the index in get_ssbo_size.
          */
         b->cursor = nir_before_instr(&intrin->instr);
         nir_ssa_def *index = nir_ssa_for_src(b, intrin->src[0], 1);
         nir_instr_rewrite_src(&intrin->instr, &intrin->src[0],
                               nir_src_for_ssa(index));
         return NULL;
      }

      default:
         return NULL;
      }
   }
   if (instr->type == nir_instr_type_tex)
      lower_vri_instr_tex(b, nir_instr_as_tex(instr), data_cb);
   return NULL;
}

/* Entry point: rewrite all Vulkan resource references in the shader to the
 * flattened per-stage gallium slots described by the pipeline layout, then
 * rebase every sampler/image uniform variable's binding the same way (and
 * collapse its descriptor_set to 0, since sets no longer exist after
 * lowering).
 */
void lvp_lower_pipeline_layout(const struct lvp_device *device,
                               struct lvp_pipeline_layout *layout,
                               nir_shader *shader)
{
   nir_shader_lower_instructions(shader, lower_vulkan_resource_index, lower_vri_instr, layout);
   nir_foreach_uniform_variable(var, shader) {
      const struct glsl_type *type = var->type;
      enum glsl_base_type base_type =
         glsl_get_base_type(glsl_without_array(type));
      unsigned desc_set_idx = var->data.descriptor_set;
      unsigned binding_idx = var->data.binding;
      struct lvp_descriptor_set_binding_layout *binding = &layout->set[desc_set_idx].layout->binding[binding_idx];
      int value = 0;
      var->data.descriptor_set = 0;
      if (base_type == GLSL_TYPE_SAMPLER) {
         if (binding->type == VK_DESCRIPTOR_TYPE_SAMPLER) {
            /* Pure sampler: flatten into the sampler slot space. */
            for (unsigned s = 0; s < desc_set_idx; s++)
               value += layout->set[s].layout->stage[shader->info.stage].sampler_count;
            value += binding->stage[shader->info.stage].sampler_index;
         } else {
            /* Combined image/sampler or sampled image: sampler-view space. */
            for (unsigned s = 0; s < desc_set_idx; s++)
               value += layout->set[s].layout->stage[shader->info.stage].sampler_view_count;
            value += binding->stage[shader->info.stage].sampler_view_index;
         }
         var->data.binding = value;
      }
      if (base_type == GLSL_TYPE_IMAGE) {
         /* Storage image: flatten into the image slot space.
          * (descriptor_set was already zeroed above; this store is redundant
          * but harmless.) */
         var->data.descriptor_set = 0;
         for (unsigned s = 0; s < desc_set_idx; s++)
            value += layout->set[s].layout->stage[shader->info.stage].image_count;
         value += binding->stage[shader->info.stage].image_index;
         var->data.binding = value;
      }
   }
}