GitHub Repository: PojavLauncherTeam/mesa
Path: src/gallium/frontends/lavapipe/lvp_lower_vulkan_resource.c (branch: 21.2-virgl)

/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "lvp_private.h"
#include "nir.h"
#include "nir_builder.h"
#include "lvp_lower_vulkan_resource.h"

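/* Filter callback for nir_shader_lower_instructions(): match every
 * instruction that still references Vulkan descriptors, i.e. the resource
 * index/reindex/load intrinsics, get_ssbo_size, and texture instructions
 * (whose texture/sampler derefs still point at descriptor-set variables).
 */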
static bool
lower_vulkan_resource_index(const nir_instr *instr, const void *data_cb)
{
   if (instr->type == nir_instr_type_intrinsic) {
      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
      switch (intrin->intrinsic) {
      case nir_intrinsic_vulkan_resource_index:
      case nir_intrinsic_vulkan_resource_reindex:
      case nir_intrinsic_load_vulkan_descriptor:
      case nir_intrinsic_get_ssbo_size:
         return true;
      default:
         return false;
      }
   }
   if (instr->type == nir_instr_type_tex) {
      return true;
   }
   return false;
}

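/* Lower vulkan_resource_index: flatten a (descriptor set, binding) pair
 * into a single per-stage gallium buffer slot by counting the matching
 * bindings in all lower-numbered sets, then adding this binding's index
 * within its own set.  UBO slots are biased by 1 because constant buffer
 * slot 0 is reserved for the push-constant block.
 */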
static nir_ssa_def *lower_vri_intrin_vri(struct nir_builder *b,
                                         nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   unsigned desc_set_idx = nir_intrinsic_desc_set(intrin);
   unsigned binding_idx = nir_intrinsic_binding(intrin);
   struct lvp_pipeline_layout *layout = data_cb;
   struct lvp_descriptor_set_binding_layout *binding = &layout->set[desc_set_idx].layout->binding[binding_idx];
   int value = 0;
   bool is_ubo = (binding->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
                  binding->type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC);

   for (unsigned s = 0; s < desc_set_idx; s++) {
      if (is_ubo)
         value += layout->set[s].layout->stage[b->shader->info.stage].const_buffer_count;
      else
         value += layout->set[s].layout->stage[b->shader->info.stage].shader_buffer_count;
   }
   if (is_ubo)
      value += binding->stage[b->shader->info.stage].const_buffer_index + 1;
   else
      value += binding->stage[b->shader->info.stage].shader_buffer_index;

   /* The SSA size for indices is the same as for pointers.  We use
    * nir_address_format_32bit_index_offset so we need a vec2.  We don't need
    * all that data so just stuff a 0 in the second component.
    */
   if (nir_src_is_const(intrin->src[0])) {
      value += nir_src_comp_as_int(intrin->src[0], 0);
      return nir_imm_ivec2(b, value, 0);
   } else
      return nir_vec2(b, nir_iadd_imm(b, intrin->src[0].ssa, value),
                      nir_imm_int(b, 0));
}

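/* Lower vulkan_resource_reindex: the base index was already flattened by
 * lower_vri_intrin_vri, so reindexing reduces to adding the delta.
 */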
static nir_ssa_def *lower_vri_intrin_vrri(struct nir_builder *b,
                                          nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   nir_ssa_def *old_index = nir_ssa_for_src(b, intrin->src[0], 1);
   nir_ssa_def *delta = nir_ssa_for_src(b, intrin->src[1], 1);
   return nir_vec2(b, nir_iadd(b, old_index, delta),
                   nir_imm_int(b, 0));
}

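/* Lower load_vulkan_descriptor: the flattened index already is the
 * descriptor, so just repack it as an (index, 0) vec2.
 */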
static nir_ssa_def *lower_vri_intrin_lvd(struct nir_builder *b,
                                         nir_instr *instr, void *data_cb)
{
   nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
   nir_ssa_def *index = nir_ssa_for_src(b, intrin->src[0], 1);
   return nir_vec2(b, index, nir_imm_int(b, 0));
}

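/* Rewrite one texture or sampler deref source into a flat per-stage index,
 * using the same set-then-binding flattening as the buffer path.  A
 * non-constant array index is appended as a texture/sampler offset source
 * instead.  Returns the bitmask of texture slots the instruction can
 * address (always 0 for the sampler deref, which needs no used-mask).
 */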
static unsigned
lower_vri_instr_tex_deref(nir_tex_instr *tex,
                          nir_tex_src_type deref_src_type,
                          gl_shader_stage stage,
                          struct lvp_pipeline_layout *layout)
{
   int deref_src_idx = nir_tex_instr_src_index(tex, deref_src_type);

   if (deref_src_idx < 0)
      return 0;

   nir_deref_instr *deref_instr = nir_src_as_deref(tex->src[deref_src_idx].src);
   nir_variable *var = nir_deref_instr_get_variable(deref_instr);
   unsigned desc_set_idx = var->data.descriptor_set;
   unsigned binding_idx = var->data.binding;
   int value = 0;
   struct lvp_descriptor_set_binding_layout *binding = &layout->set[desc_set_idx].layout->binding[binding_idx];
   nir_tex_instr_remove_src(tex, deref_src_idx);
   for (unsigned s = 0; s < desc_set_idx; s++) {
      if (deref_src_type == nir_tex_src_sampler_deref)
         value += layout->set[s].layout->stage[stage].sampler_count;
      else
         value += layout->set[s].layout->stage[stage].sampler_view_count;
   }
   if (deref_src_type == nir_tex_src_sampler_deref)
      value += binding->stage[stage].sampler_index;
   else
      value += binding->stage[stage].sampler_view_index;

   if (deref_instr->deref_type == nir_deref_type_array) {
      if (nir_src_is_const(deref_instr->arr.index))
         value += nir_src_as_uint(deref_instr->arr.index);
      else {
         if (deref_src_type == nir_tex_src_sampler_deref)
            nir_tex_instr_add_src(tex, nir_tex_src_sampler_offset, deref_instr->arr.index);
         else
            nir_tex_instr_add_src(tex, nir_tex_src_texture_offset, deref_instr->arr.index);
      }
   }
   if (deref_src_type == nir_tex_src_sampler_deref)
      tex->sampler_index = value;
   else
      tex->texture_index = value;

   if (deref_src_type == nir_tex_src_sampler_deref)
      return 0;

   if (deref_instr->deref_type == nir_deref_type_array) {
      assert(glsl_type_is_array(var->type));
      assert(value >= 0);
      unsigned size = glsl_get_aoa_size(var->type);
      return u_bit_consecutive(value, size);
   } else
      return 1u << value;
}

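/* Lower both deref sources of a texture instruction and record every
 * texture slot it may address in shader_info::textures_used.
 */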
static void lower_vri_instr_tex(struct nir_builder *b,
                                nir_tex_instr *tex, void *data_cb)
{
   struct lvp_pipeline_layout *layout = data_cb;
   unsigned textures_used;

   lower_vri_instr_tex_deref(tex, nir_tex_src_sampler_deref, b->shader->info.stage, layout);
   textures_used = lower_vri_instr_tex_deref(tex, nir_tex_src_texture_deref, b->shader->info.stage, layout);
   while (textures_used) {
      int i = u_bit_scan(&textures_used);
      BITSET_SET(b->shader->info.textures_used, i);
   }
}

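/* Lowering callback for nir_shader_lower_instructions().  Returns the
 * replacement SSA def for intrinsics that produce one; get_ssbo_size and
 * texture instructions are rewritten in place, so those paths return NULL.
 */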
static nir_ssa_def *lower_vri_instr(struct nir_builder *b,
                                    nir_instr *instr, void *data_cb)
{
   if (instr->type == nir_instr_type_intrinsic) {
      nir_intrinsic_instr *intrin = nir_instr_as_intrinsic(instr);
      switch (intrin->intrinsic) {
      case nir_intrinsic_vulkan_resource_index:
         return lower_vri_intrin_vri(b, instr, data_cb);

      case nir_intrinsic_vulkan_resource_reindex:
         return lower_vri_intrin_vrri(b, instr, data_cb);

      case nir_intrinsic_load_vulkan_descriptor:
         return lower_vri_intrin_lvd(b, instr, data_cb);

      case nir_intrinsic_get_ssbo_size: {
         /* The result of the load_vulkan_descriptor is a vec2(index, offset)
          * but we only want the index in get_ssbo_size.
          */
         b->cursor = nir_before_instr(&intrin->instr);
         nir_ssa_def *index = nir_ssa_for_src(b, intrin->src[0], 1);
         nir_instr_rewrite_src(&intrin->instr, &intrin->src[0],
                               nir_src_for_ssa(index));
         return NULL;
      }

      default:
         return NULL;
      }
   }
   if (instr->type == nir_instr_type_tex)
      lower_vri_instr_tex(b, nir_instr_as_tex(instr), data_cb);
   return NULL;
}

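/* Entry point: rewrite every Vulkan descriptor reference in the shader to
 * the flat per-stage slots used by the gallium driver underneath.  After
 * the instruction pass, sampler and image uniform variables are remapped
 * the same way so their bindings match the lowered instructions.
 */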
void lvp_lower_pipeline_layout(const struct lvp_device *device,
                               struct lvp_pipeline_layout *layout,
                               nir_shader *shader)
{
   nir_shader_lower_instructions(shader, lower_vulkan_resource_index, lower_vri_instr, layout);
   nir_foreach_uniform_variable(var, shader) {
      const struct glsl_type *type = var->type;
      enum glsl_base_type base_type =
         glsl_get_base_type(glsl_without_array(type));
      unsigned desc_set_idx = var->data.descriptor_set;
      unsigned binding_idx = var->data.binding;
      struct lvp_descriptor_set_binding_layout *binding = &layout->set[desc_set_idx].layout->binding[binding_idx];
      int value = 0;
      var->data.descriptor_set = 0;
      if (base_type == GLSL_TYPE_SAMPLER) {
         if (binding->type == VK_DESCRIPTOR_TYPE_SAMPLER) {
            for (unsigned s = 0; s < desc_set_idx; s++)
               value += layout->set[s].layout->stage[shader->info.stage].sampler_count;
            value += binding->stage[shader->info.stage].sampler_index;
         } else {
            for (unsigned s = 0; s < desc_set_idx; s++)
               value += layout->set[s].layout->stage[shader->info.stage].sampler_view_count;
            value += binding->stage[shader->info.stage].sampler_view_index;
         }
         var->data.binding = value;
      }
      if (base_type == GLSL_TYPE_IMAGE) {
         var->data.descriptor_set = 0;
         for (unsigned s = 0; s < desc_set_idx; s++)
            value += layout->set[s].layout->stage[shader->info.stage].image_count;
         value += binding->stage[shader->info.stage].image_index;
         var->data.binding = value;
      }
   }
}