GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/gallium/drivers/zink/zink_descriptors_lazy.c
/*
 * Copyright © 2021 Valve Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Mike Blumenkrantz <[email protected]>
 */
#include "tgsi/tgsi_from_mesa.h"

#include "zink_context.h"
#include "zink_compiler.h"
#include "zink_descriptors.h"
#include "zink_program.h"
#include "zink_resource.h"
#include "zink_screen.h"

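/*
 * Overview (editorial note, inferred from the code in this file): "lazy"
 * descriptor handling skips hashing and caching of descriptor sets entirely.
 * Sets are allocated in bulk from per-batch-state pools and rewritten in
 * full through VkDescriptorUpdateTemplates whenever anything in them
 * changes. Set 0 is always the uniform-buffer "push" set (a real
 * VK_KHR_push_descriptor set when available, an ordinary set otherwise);
 * sets 1..ZINK_DESCRIPTOR_TYPES correspond to the zink_descriptor_type enum
 * values shifted up by one.
 */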
struct zink_descriptor_data_lazy {
   struct zink_descriptor_data base;
   VkDescriptorUpdateTemplateEntry push_entries[PIPE_SHADER_TYPES];
   bool push_state_changed[2]; //gfx, compute
   uint8_t state_changed[2]; //gfx, compute
};

struct zink_descriptor_pool {
   VkDescriptorPool pool;
   VkDescriptorSet sets[ZINK_DEFAULT_MAX_DESCS];
   unsigned set_idx;
   unsigned sets_alloc;
};

struct zink_batch_descriptor_data_lazy {
   struct zink_batch_descriptor_data base;
   struct hash_table pools[ZINK_DESCRIPTOR_TYPES];
   struct zink_descriptor_pool *push_pool[2];
   struct zink_program *pg[2]; //gfx, compute
   VkDescriptorSetLayout dsl[2][ZINK_DESCRIPTOR_TYPES];
   unsigned push_usage[2];
};

ALWAYS_INLINE static struct zink_descriptor_data_lazy *
dd_lazy(struct zink_context *ctx)
{
   return (struct zink_descriptor_data_lazy*)ctx->dd;
}

ALWAYS_INLINE static struct zink_batch_descriptor_data_lazy *
bdd_lazy(struct zink_batch_state *bs)
{
   return (struct zink_batch_descriptor_data_lazy*)bs->dd;
}

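/*
 * Fill in one VkDescriptorUpdateTemplateEntry for a shader binding. Each
 * entry points at the matching descriptor-info array inside struct
 * zink_context (di.ubos/textures/tbos/ssbos/images/texel_images), so a
 * single template-based update call can read everything directly out of the
 * context with no per-descriptor marshaling.
 */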
static void
init_template_entry(struct zink_shader *shader, enum zink_descriptor_type type,
                    unsigned idx, unsigned offset, VkDescriptorUpdateTemplateEntry *entry, unsigned *entry_idx, bool flatten_dynamic)
{
   int index = shader->bindings[type][idx].index;
   enum pipe_shader_type stage = pipe_shader_type_from_mesa(shader->nir->info.stage);
   entry->dstArrayElement = 0;
   entry->dstBinding = shader->bindings[type][idx].binding;
   if (shader->bindings[type][idx].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC && flatten_dynamic)
      /* filter out DYNAMIC type here */
      entry->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
   else
      entry->descriptorType = shader->bindings[type][idx].type;
   switch (shader->bindings[type][idx].type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      entry->descriptorCount = 1;
      entry->offset = offsetof(struct zink_context, di.ubos[stage][index + offset]);
      entry->stride = sizeof(VkDescriptorBufferInfo);
      break;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      entry->descriptorCount = shader->bindings[type][idx].size;
      entry->offset = offsetof(struct zink_context, di.textures[stage][index + offset]);
      entry->stride = sizeof(VkDescriptorImageInfo);
      break;
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      entry->descriptorCount = shader->bindings[type][idx].size;
      entry->offset = offsetof(struct zink_context, di.tbos[stage][index + offset]);
      entry->stride = sizeof(VkBufferView);
      break;
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      entry->descriptorCount = 1;
      entry->offset = offsetof(struct zink_context, di.ssbos[stage][index + offset]);
      entry->stride = sizeof(VkDescriptorBufferInfo);
      break;
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      entry->descriptorCount = shader->bindings[type][idx].size;
      entry->offset = offsetof(struct zink_context, di.images[stage][index + offset]);
      entry->stride = sizeof(VkDescriptorImageInfo);
      break;
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      entry->descriptorCount = shader->bindings[type][idx].size;
      entry->offset = offsetof(struct zink_context, di.texel_images[stage][index + offset]);
      entry->stride = sizeof(VkBufferView);
      break;
   default:
      unreachable("unknown type");
   }
   (*entry_idx)++;
}

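/*
 * Create the descriptor set layouts, pool sizes, and (when templates are
 * supported) update templates for a program. Dynamic UBOs are diverted to
 * the shared push set (set 0); every other binding lands in the set for its
 * zink_descriptor_type, with dummy layouts injected to keep set indices
 * contiguous.
 */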
bool
zink_descriptor_program_init_lazy(struct zink_context *ctx, struct zink_program *pg)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   VkDescriptorSetLayoutBinding bindings[ZINK_DESCRIPTOR_TYPES][PIPE_SHADER_TYPES * 32];
   VkDescriptorUpdateTemplateEntry entries[ZINK_DESCRIPTOR_TYPES][PIPE_SHADER_TYPES * 32];
   unsigned num_bindings[ZINK_DESCRIPTOR_TYPES] = {0};
   uint8_t has_bindings = 0;

   struct zink_shader **stages;
   if (pg->is_compute)
      stages = &((struct zink_compute_program*)pg)->shader;
   else
      stages = ((struct zink_gfx_program*)pg)->shaders;

   if (!pg->dd)
      pg->dd = (void*)rzalloc(pg, struct zink_program_descriptor_data);
   if (!pg->dd)
      return false;

   unsigned push_count = 0;
   unsigned entry_idx[ZINK_DESCRIPTOR_TYPES] = {0};

   unsigned num_shaders = pg->is_compute ? 1 : ZINK_SHADER_COUNT;
   bool have_push = screen->info.have_KHR_push_descriptor;
   for (int i = 0; i < num_shaders; i++) {
      struct zink_shader *shader = stages[i];
      if (!shader)
         continue;

      enum pipe_shader_type stage = pipe_shader_type_from_mesa(shader->nir->info.stage);
      VkShaderStageFlagBits stage_flags = zink_shader_stage(stage);
      for (int j = 0; j < ZINK_DESCRIPTOR_TYPES; j++) {
         for (int k = 0; k < shader->num_bindings[j]; k++) {
            /* dynamic ubos handled in push */
            if (shader->bindings[j][k].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
               pg->dd->push_usage |= BITFIELD64_BIT(stage);

               push_count++;
               continue;
            }

            assert(num_bindings[j] < ARRAY_SIZE(bindings[j]));
            VkDescriptorSetLayoutBinding *binding = &bindings[j][num_bindings[j]];
            binding->binding = shader->bindings[j][k].binding;
            binding->descriptorType = shader->bindings[j][k].type;
            binding->descriptorCount = shader->bindings[j][k].size;
            binding->stageFlags = stage_flags;
            binding->pImmutableSamplers = NULL;

            enum zink_descriptor_size_index idx = zink_vktype_to_size_idx(shader->bindings[j][k].type);
            pg->dd->sizes[idx].descriptorCount += shader->bindings[j][k].size;
            pg->dd->sizes[idx].type = shader->bindings[j][k].type;
            switch (shader->bindings[j][k].type) {
            case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
               init_template_entry(shader, j, k, 0, &entries[j][entry_idx[j]], &entry_idx[j], screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY);
               break;
            case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
               for (unsigned l = 0; l < shader->bindings[j][k].size; l++)
                  init_template_entry(shader, j, k, l, &entries[j][entry_idx[j]], &entry_idx[j], screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY);
               break;
            default:
               break;
            }
            num_bindings[j]++;
            has_bindings |= BITFIELD_BIT(j);
         }
      }
   }
   pg->dd->binding_usage = has_bindings;
   if (!has_bindings && !push_count) {
      ralloc_free(pg->dd);
      pg->dd = NULL;

      pg->layout = zink_pipeline_layout_create(screen, pg);
      return !!pg->layout;
   }

   pg->dsl[pg->num_dsl++] = push_count ? ctx->dd->push_dsl[pg->is_compute]->layout : ctx->dd->dummy_dsl->layout;
   if (has_bindings) {
      u_foreach_bit(type, has_bindings) {
         for (unsigned i = 0; i < type; i++) {
            /* push set is always 0 */
            if (!pg->dsl[i + 1]) {
               /* inject a null dsl */
               pg->dsl[pg->num_dsl++] = ctx->dd->dummy_dsl->layout;
               pg->dd->binding_usage |= BITFIELD_BIT(i);
            }
         }
         pg->dd->layouts[pg->num_dsl] = zink_descriptor_util_layout_get(ctx, type, bindings[type], num_bindings[type], &pg->dd->layout_key[type]);
         pg->dd->layout_key[type]->use_count++;
         pg->dsl[pg->num_dsl] = pg->dd->layouts[pg->num_dsl]->layout;
         pg->num_dsl++;
      }
      for (unsigned i = 0; i < ARRAY_SIZE(pg->dd->sizes); i++)
         pg->dd->sizes[i].descriptorCount *= ZINK_DEFAULT_MAX_DESCS;
   }

   pg->layout = zink_pipeline_layout_create(screen, pg);
   if (!pg->layout)
      return false;
   if (!screen->info.have_KHR_descriptor_update_template || screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_NOTEMPLATES)
      return true;

   VkDescriptorUpdateTemplateCreateInfo template[ZINK_DESCRIPTOR_TYPES + 1] = {0};
   /* type of template */
   VkDescriptorUpdateTemplateType types[ZINK_DESCRIPTOR_TYPES + 1] = {VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET};
   if (have_push && screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY)
      types[0] = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR;

   /* number of descriptors in template */
   unsigned wd_count[ZINK_DESCRIPTOR_TYPES + 1];
   if (push_count)
      wd_count[0] = pg->is_compute ? 1 : ZINK_SHADER_COUNT;
   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++)
      wd_count[i + 1] = pg->dd->layout_key[i] ? pg->dd->layout_key[i]->num_descriptors : 0;

   VkDescriptorUpdateTemplateEntry *push_entries[2] = {
      dd_lazy(ctx)->push_entries,
      &dd_lazy(ctx)->push_entries[PIPE_SHADER_COMPUTE],
   };
   for (unsigned i = 0; i < pg->num_dsl; i++) {
      bool is_push = i == 0;
      /* no need for empty templates */
      if (pg->dsl[i] == ctx->dd->dummy_dsl->layout ||
          (!is_push && pg->dd->layouts[i]->desc_template))
         continue;
      template[i].sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
      assert(wd_count[i]);
      template[i].descriptorUpdateEntryCount = wd_count[i];
      if (is_push)
         template[i].pDescriptorUpdateEntries = push_entries[pg->is_compute];
      else
         template[i].pDescriptorUpdateEntries = entries[i - 1];
      template[i].templateType = types[i];
      template[i].descriptorSetLayout = pg->dsl[i];
      template[i].pipelineBindPoint = pg->is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS;
      template[i].pipelineLayout = pg->layout;
      template[i].set = i;
      VkDescriptorUpdateTemplateKHR t;
      if (screen->vk.CreateDescriptorUpdateTemplate(screen->dev, &template[i], NULL, &t) != VK_SUCCESS)
         return false;
      if (is_push)
         pg->dd->push_template = t;
      else
         pg->dd->layouts[i]->desc_template = t;
   }
   return true;
}

void
zink_descriptor_program_deinit_lazy(struct zink_screen *screen, struct zink_program *pg)
{
   for (unsigned i = 0; pg->num_dsl && i < ZINK_DESCRIPTOR_TYPES; i++) {
      if (pg->dd->layout_key[i])
         pg->dd->layout_key[i]->use_count--;
   }
   if (pg->dd && pg->dd->push_template)
      screen->vk.DestroyDescriptorUpdateTemplate(screen->dev, pg->dd->push_template, NULL);
   ralloc_free(pg->dd);
}

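/* thin wrapper over vkCreateDescriptorPool; all pools created here hold up
 * to ZINK_DEFAULT_MAX_DESCS sets, and failure returns VK_NULL_HANDLE so
 * callers can back off gracefully
 */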
static VkDescriptorPool
create_pool(struct zink_screen *screen, unsigned num_type_sizes, VkDescriptorPoolSize *sizes, unsigned flags)
{
   VkDescriptorPool pool;
   VkDescriptorPoolCreateInfo dpci = {0};
   dpci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
   dpci.pPoolSizes = sizes;
   dpci.poolSizeCount = num_type_sizes;
   dpci.flags = flags;
   dpci.maxSets = ZINK_DEFAULT_MAX_DESCS;
   if (vkCreateDescriptorPool(screen->dev, &dpci, 0, &pool) != VK_SUCCESS) {
      debug_printf("vkCreateDescriptorPool failed\n");
      return VK_NULL_HANDLE;
   }
   return pool;
}

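/* find (or lazily create) the batch-state pool for this program's layout
 * key and descriptor type; pools are keyed by layout so programs with
 * identical layouts share allocations within a batch
 */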
static struct zink_descriptor_pool *
get_descriptor_pool_lazy(struct zink_context *ctx, struct zink_program *pg, enum zink_descriptor_type type, struct zink_batch_state *bs)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   struct hash_entry *he = _mesa_hash_table_search(&bdd_lazy(bs)->pools[type], pg->dd->layout_key[type]);
   if (he)
      return he->data;
   struct zink_descriptor_pool *pool = rzalloc(bs, struct zink_descriptor_pool);
   if (!pool)
      return NULL;
   unsigned idx = zink_descriptor_type_to_size_idx(type);
   VkDescriptorPoolSize *size = &pg->dd->sizes[idx];
   /* this is a sampler/image set with no images, only texel buffers */
   if (!size->descriptorCount)
      size++;
   pool->pool = create_pool(screen, zink_descriptor_program_num_sizes(pg, type), size, 0);
   if (!pool->pool) {
      ralloc_free(pool);
      return NULL;
   }
   _mesa_hash_table_insert(&bdd_lazy(bs)->pools[type], pg->dd->layout_key[type], pool);
   return pool;
}

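/* hand out the next free set from the pool, growing the backing allocation
 * geometrically (x10, capped at ZINK_DEFAULT_MAX_DESCS); once the pool is
 * completely full, wait on the batch fence so the pool can be recycled and
 * then retry
 */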
static VkDescriptorSet
get_descriptor_set_lazy(struct zink_context *ctx, struct zink_program *pg, enum zink_descriptor_type type, struct zink_descriptor_pool *pool, bool is_compute)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   if (!pool)
      return VK_NULL_HANDLE;

   if (pool->set_idx < pool->sets_alloc)
      return pool->sets[pool->set_idx++];

   /* allocate up to $current * 10, e.g., 10 -> 100 or 100 -> 1000 */
   unsigned sets_to_alloc = MIN2(MAX2(pool->sets_alloc * 10, 10), ZINK_DEFAULT_MAX_DESCS) - pool->sets_alloc;
   if (!sets_to_alloc) { //pool full
      zink_fence_wait(&ctx->base);
      return get_descriptor_set_lazy(ctx, pg, type, pool, is_compute);
   }
   if (!zink_descriptor_util_alloc_sets(screen, pg ? pg->dsl[type + 1] : ctx->dd->push_dsl[is_compute]->layout,
                                        pool->pool, &pool->sets[pool->sets_alloc], sets_to_alloc))
      return VK_NULL_HANDLE;
   pool->sets_alloc += sets_to_alloc;
   return pool->sets[pool->set_idx++];
}

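/* allocate all descriptor sets needed for this draw/dispatch; allocation
 * can trigger a flush (see get_descriptor_set_lazy), which swaps batch
 * states and orphans any sets fetched so far, in which case all the
 * changed-set bits are re-marked and the function restarts itself
 */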
static bool
populate_sets(struct zink_context *ctx, struct zink_program *pg, uint8_t *changed_sets, bool need_push, VkDescriptorSet *sets)
{
   struct zink_batch_state *bs = ctx->batch.state;
   if (need_push && !zink_screen(ctx->base.screen)->info.have_KHR_push_descriptor) {
      struct zink_descriptor_pool *pool = bdd_lazy(bs)->push_pool[pg->is_compute];
      sets[0] = get_descriptor_set_lazy(ctx, NULL, 0, pool, pg->is_compute);
      if (!sets[0])
         return false;
   } else
      sets[0] = VK_NULL_HANDLE;
   /* may have flushed */
   if (bs != ctx->batch.state)
      *changed_sets = pg->dd->binding_usage;
   bs = ctx->batch.state;
   u_foreach_bit(type, *changed_sets) {
      if (pg->dd->layout_key[type]) {
         struct zink_descriptor_pool *pool = get_descriptor_pool_lazy(ctx, pg, type, bs);
         sets[type + 1] = get_descriptor_set_lazy(ctx, pg, type, pool, pg->is_compute);
         if (ctx->batch.state != bs && (sets[0] || type != ffs(*changed_sets))) {
            /* sets are allocated by batch state, so if flush occurs on anything
             * but the first set that has been fetched here, get all new sets
             */
            *changed_sets = pg->dd->binding_usage;
            if (pg->dd->push_usage)
               need_push = true;
            return populate_sets(ctx, pg, changed_sets, need_push, sets);
         }
      } else
         sets[type + 1] = ctx->dd->dummy_set;
      if (!sets[type + 1])
         return false;
   }
   return true;
}

void
zink_descriptor_set_update_lazy(struct zink_context *ctx, struct zink_program *pg, enum zink_descriptor_type type, VkDescriptorSet set)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   screen->vk.UpdateDescriptorSetWithTemplate(screen->dev, set, pg->dd->layouts[type + 1]->desc_template, ctx);
}

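/*
 * Main per-draw/per-dispatch entry point: work out which sets are dirty for
 * the bound program, allocate and rewrite them, then bind them on the
 * current cmdbuf. The push set (set 0) goes through
 * vkCmdPushDescriptorSetWithTemplateKHR when the extension is present and
 * is otherwise updated and bound like a regular set.
 */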
void
zink_descriptors_update_lazy(struct zink_context *ctx, bool is_compute)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   struct zink_batch *batch = &ctx->batch;
   struct zink_batch_state *bs = ctx->batch.state;
   struct zink_program *pg = is_compute ? &ctx->curr_compute->base : &ctx->curr_program->base;

   bool batch_changed = !bdd_lazy(bs)->pg[is_compute];
   if (batch_changed) {
      /* update all sets and bind null sets */
      dd_lazy(ctx)->state_changed[is_compute] = pg->dd->binding_usage;
      dd_lazy(ctx)->push_state_changed[is_compute] = !!pg->dd->push_usage;
   }

   if (pg != bdd_lazy(bs)->pg[is_compute]) {
      /* if we don't already know that we have to update all sets,
       * check to see if any dsls changed
       *
       * also always update the dsl pointers on program change
       */
      for (unsigned i = 0; i < ARRAY_SIZE(bdd_lazy(bs)->dsl[is_compute]); i++) {
         /* push set is already detected, start at 1 */
         if (bdd_lazy(bs)->dsl[is_compute][i] != pg->dsl[i + 1])
            dd_lazy(ctx)->state_changed[is_compute] |= BITFIELD_BIT(i);
         bdd_lazy(bs)->dsl[is_compute][i] = pg->dsl[i + 1];
      }
      dd_lazy(ctx)->push_state_changed[is_compute] |= bdd_lazy(bs)->push_usage[is_compute] != pg->dd->push_usage;
      bdd_lazy(bs)->push_usage[is_compute] = pg->dd->push_usage;
   }
   bdd_lazy(bs)->pg[is_compute] = pg;

   VkDescriptorSet desc_sets[5];
   uint8_t changed_sets = pg->dd->binding_usage & dd_lazy(ctx)->state_changed[is_compute];
   bool need_push = pg->dd->push_usage &&
                    (dd_lazy(ctx)->push_state_changed[is_compute] || batch_changed);
   if (!populate_sets(ctx, pg, &changed_sets, need_push, desc_sets)) {
      debug_printf("ZINK: couldn't get descriptor sets!\n");
      return;
   }
   if (ctx->batch.state != bs) {
      /* recheck: populate may have overflowed the pool and triggered a flush */
      batch_changed = true;
      dd_lazy(ctx)->state_changed[is_compute] = pg->dd->binding_usage;
      changed_sets = pg->dd->binding_usage & dd_lazy(ctx)->state_changed[is_compute];
      dd_lazy(ctx)->push_state_changed[is_compute] = !!pg->dd->push_usage;
   }
   bs = ctx->batch.state;

   if (pg->dd->binding_usage && changed_sets) {
      u_foreach_bit(type, changed_sets) {
         if (pg->dd->layout_key[type])
            screen->vk.UpdateDescriptorSetWithTemplate(screen->dev, desc_sets[type + 1], pg->dd->layouts[type + 1]->desc_template, ctx);
         assert(type + 1 < pg->num_dsl);
         vkCmdBindDescriptorSets(bs->cmdbuf,
                                 is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
                                 /* set index incremented by 1 to account for push set */
                                 pg->layout, type + 1, 1, &desc_sets[type + 1],
                                 0, NULL);
      }
      dd_lazy(ctx)->state_changed[is_compute] = false;
   }

   if (pg->dd->push_usage && dd_lazy(ctx)->push_state_changed[is_compute]) {
      if (screen->info.have_KHR_push_descriptor)
         screen->vk.CmdPushDescriptorSetWithTemplateKHR(batch->state->cmdbuf, pg->dd->push_template,
                                                        pg->layout, 0, ctx);
      else {
         assert(desc_sets[0]);
         screen->vk.UpdateDescriptorSetWithTemplate(screen->dev, desc_sets[0], pg->dd->push_template, ctx);
         vkCmdBindDescriptorSets(batch->state->cmdbuf,
                                 is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
                                 pg->layout, 0, 1, &desc_sets[0],
                                 0, NULL);
      }
      dd_lazy(ctx)->push_state_changed[is_compute] = false;
   } else if (dd_lazy(ctx)->push_state_changed[is_compute]) {
      vkCmdBindDescriptorSets(bs->cmdbuf,
                              is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
                              pg->layout, 0, 1, &ctx->dd->dummy_set,
                              0, NULL);
      dd_lazy(ctx)->push_state_changed[is_compute] = false;
   }
   /* set again in case of flushing */
   bdd_lazy(bs)->pg[is_compute] = pg;
   ctx->dd->pg[is_compute] = pg;
}

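/* mark descriptor state dirty for a shader stage: invalidations that
 * include UBO slot 0 flip the push flag, since that slot lives in the push
 * set; everything else just sets the dirty bit for its descriptor type
 * (start/count are otherwise unused since lazy mode always rewrites whole
 * sets)
 */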
void
zink_context_invalidate_descriptor_state_lazy(struct zink_context *ctx, enum pipe_shader_type shader, enum zink_descriptor_type type, unsigned start, unsigned count)
{
   if (type == ZINK_DESCRIPTOR_TYPE_UBO && !start)
      dd_lazy(ctx)->push_state_changed[shader == PIPE_SHADER_COMPUTE] = true;
   else
      dd_lazy(ctx)->state_changed[shader == PIPE_SHADER_COMPUTE] |= BITFIELD_BIT(type);
}

void
zink_batch_descriptor_deinit_lazy(struct zink_screen *screen, struct zink_batch_state *bs)
{
   if (!bs->dd)
      return;
   if (screen->info.have_KHR_descriptor_update_template) {
      for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
         hash_table_foreach(&bdd_lazy(bs)->pools[i], entry) {
            struct zink_descriptor_pool *pool = (void*)entry->data;
            vkDestroyDescriptorPool(screen->dev, pool->pool, NULL);
         }
      }
      if (bdd_lazy(bs)->push_pool[0])
         vkDestroyDescriptorPool(screen->dev, bdd_lazy(bs)->push_pool[0]->pool, NULL);
      if (bdd_lazy(bs)->push_pool[1])
         vkDestroyDescriptorPool(screen->dev, bdd_lazy(bs)->push_pool[1]->pool, NULL);
   }
   ralloc_free(bs->dd);
}

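/* called when a batch state is recycled: pools whose layout keys are still
 * referenced by live programs are rewound (set_idx = 0) so their sets can
 * be reused, while pools for dead layouts are destroyed outright
 */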
void
zink_batch_descriptor_reset_lazy(struct zink_screen *screen, struct zink_batch_state *bs)
{
   if (!screen->info.have_KHR_descriptor_update_template)
      return;
   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
      hash_table_foreach(&bdd_lazy(bs)->pools[i], entry) {
         const struct zink_descriptor_layout_key *key = entry->key;
         struct zink_descriptor_pool *pool = (void*)entry->data;
         if (key->use_count)
            pool->set_idx = 0;
         else {
            vkDestroyDescriptorPool(screen->dev, pool->pool, NULL);
            ralloc_free(pool);
            _mesa_hash_table_remove(&bdd_lazy(bs)->pools[i], entry);
         }
      }
   }
   for (unsigned i = 0; i < 2; i++) {
      bdd_lazy(bs)->pg[i] = NULL;
      if (bdd_lazy(bs)->push_pool[i])
         bdd_lazy(bs)->push_pool[i]->set_idx = 0;
   }
}

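/* per-batch-state setup: one pool hash table per descriptor type, plus
 * dedicated fallback pools for the gfx ([0]) and compute ([1]) push sets
 * when VK_KHR_push_descriptor is unavailable
 */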
bool
zink_batch_descriptor_init_lazy(struct zink_screen *screen, struct zink_batch_state *bs)
{
   bs->dd = (void*)rzalloc(bs, struct zink_batch_descriptor_data_lazy);
   if (!bs->dd)
      return false;
   if (!screen->info.have_KHR_descriptor_update_template)
      return true;
   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
      if (!_mesa_hash_table_init(&bdd_lazy(bs)->pools[i], bs->dd, _mesa_hash_pointer, _mesa_key_pointer_equal))
         return false;
   }
   if (!screen->info.have_KHR_push_descriptor) {
      VkDescriptorPoolSize sizes;
      sizes.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
      sizes.descriptorCount = ZINK_SHADER_COUNT * ZINK_DEFAULT_MAX_DESCS;
      bdd_lazy(bs)->push_pool[0] = rzalloc(bs, struct zink_descriptor_pool);
      bdd_lazy(bs)->push_pool[0]->pool = create_pool(screen, 1, &sizes, 0);
      sizes.descriptorCount = ZINK_DEFAULT_MAX_DESCS;
      bdd_lazy(bs)->push_pool[1] = rzalloc(bs, struct zink_descriptor_pool);
      bdd_lazy(bs)->push_pool[1]->pool = create_pool(screen, 1, &sizes, 0);
   }
   return true;
}

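/* context-level setup: pre-bake one push-set template entry per shader
 * stage (the binding number matches the Vulkan stage index), fetch the
 * shared push and dummy set layouts, and allocate the persistent dummy set
 * used to fill unused slots
 */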
bool
zink_descriptors_init_lazy(struct zink_context *ctx)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   ctx->dd = (void*)rzalloc(ctx, struct zink_descriptor_data_lazy);
   if (!ctx->dd)
      return false;

   if (screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_NOTEMPLATES)
      printf("ZINK: CACHED/NOTEMPLATES DESCRIPTORS\n");
   else if (screen->info.have_KHR_descriptor_update_template) {
      for (unsigned i = 0; i < PIPE_SHADER_TYPES; i++) {
         VkDescriptorUpdateTemplateEntry *entry = &dd_lazy(ctx)->push_entries[i];
         entry->dstBinding = tgsi_processor_to_shader_stage(i);
         entry->descriptorCount = 1;
         entry->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
         entry->offset = offsetof(struct zink_context, di.ubos[i][0]);
         entry->stride = sizeof(VkDescriptorBufferInfo);
      }
      if (screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY)
         printf("ZINK: USING LAZY DESCRIPTORS\n");
   }
   struct zink_descriptor_layout_key *layout_key;
   if (!zink_descriptor_util_push_layouts_get(ctx, ctx->dd->push_dsl, ctx->dd->push_layout_keys))
      return false;

   ctx->dd->dummy_dsl = zink_descriptor_util_layout_get(ctx, 0, NULL, 0, &layout_key);
   if (!ctx->dd->dummy_dsl)
      return false;
   VkDescriptorPoolSize null_size = {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1};
   ctx->dd->dummy_pool = create_pool(screen, 1, &null_size, 0);
   zink_descriptor_util_alloc_sets(screen, ctx->dd->dummy_dsl->layout,
                                   ctx->dd->dummy_pool, &ctx->dd->dummy_set, 1);
   zink_descriptor_util_init_null_set(ctx, ctx->dd->dummy_set);
   return true;
}

void
zink_descriptors_deinit_lazy(struct zink_context *ctx)
{
   if (ctx->dd) {
      struct zink_screen *screen = zink_screen(ctx->base.screen);
      if (ctx->dd->dummy_pool)
         vkDestroyDescriptorPool(screen->dev, ctx->dd->dummy_pool, NULL);
      if (screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY &&
          screen->info.have_KHR_push_descriptor) {
         vkDestroyDescriptorSetLayout(screen->dev, ctx->dd->push_dsl[0]->layout, NULL);
         vkDestroyDescriptorSetLayout(screen->dev, ctx->dd->push_dsl[1]->layout, NULL);
      }
   }
   ralloc_free(ctx->dd);
}