Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
PojavLauncherTeam
GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/broadcom/vulkan/v3dv_descriptor_set.c
4560 views
1
/*
2
* Copyright © 2019 Raspberry Pi
3
*
4
* Permission is hereby granted, free of charge, to any person obtaining a
5
* copy of this software and associated documentation files (the "Software"),
6
* to deal in the Software without restriction, including without limitation
7
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
* and/or sell copies of the Software, and to permit persons to whom the
9
* Software is furnished to do so, subject to the following conditions:
10
*
11
* The above copyright notice and this permission notice (including the next
12
* paragraph) shall be included in all copies or substantial portions of the
13
* Software.
14
*
15
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21
* IN THE SOFTWARE.
22
*/
23
24
#include "vk_descriptors.h"
25
#include "vk_util.h"
26
27
#include "v3dv_private.h"
28
29
/*
30
* For a given descriptor defined by the descriptor_set it belongs, its
31
* binding layout, and array_index, it returns the map region assigned to it
32
* from the descriptor pool bo.
33
*/
34
static void*
35
descriptor_bo_map(struct v3dv_device *device,
36
struct v3dv_descriptor_set *set,
37
const struct v3dv_descriptor_set_binding_layout *binding_layout,
38
uint32_t array_index)
39
{
40
assert(v3dv_X(device, descriptor_bo_size)(binding_layout->type) > 0);
41
return set->pool->bo->map +
42
set->base_offset + binding_layout->descriptor_offset +
43
array_index * v3dv_X(device, descriptor_bo_size)(binding_layout->type);
44
}
45
46
static bool
47
descriptor_type_is_dynamic(VkDescriptorType type)
48
{
49
switch (type) {
50
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
51
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
52
return true;
53
break;
54
default:
55
return false;
56
}
57
}
58
59
/*
60
* Tries to get a real descriptor using a descriptor map index from the
61
* descriptor_state + pipeline_layout.
62
*/
63
/* Resolves a descriptor map slot (set/binding/array-index triple stored at
 * map->...[index]) into the actual v3dv_descriptor held by the currently
 * bound descriptor set.
 *
 * For dynamic uniform/storage buffers, also writes the resolved dynamic
 * offset into *dynamic_offset.  NOTE(review): dynamic_offset is only
 * dereferenced for dynamic descriptor types, so callers that never hit a
 * dynamic binding may pass NULL — confirm against call sites.
 */
struct v3dv_descriptor *
v3dv_descriptor_map_get_descriptor(struct v3dv_descriptor_state *descriptor_state,
                                   struct v3dv_descriptor_map *map,
                                   struct v3dv_pipeline_layout *pipeline_layout,
                                   uint32_t index,
                                   uint32_t *dynamic_offset)
{
   assert(index < map->num_desc);

   uint32_t set_number = map->set[index];
   /* The referenced set must currently be bound (valid is a bitmask of
    * bound set indices).
    */
   assert((descriptor_state->valid & 1 << set_number));

   struct v3dv_descriptor_set *set =
      descriptor_state->descriptor_sets[set_number];
   assert(set);

   uint32_t binding_number = map->binding[index];
   assert(binding_number < set->layout->binding_count);

   const struct v3dv_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding_number];

   uint32_t array_index = map->array_index[index];
   assert(array_index < binding_layout->array_size);

   if (descriptor_type_is_dynamic(binding_layout->type)) {
      /* Dynamic offsets are stored flat per pipeline layout; each set has a
       * starting index and each binding an index relative to that start.
       */
      uint32_t dynamic_offset_index =
         pipeline_layout->set[set_number].dynamic_offset_start +
         binding_layout->dynamic_offset_index + array_index;

      *dynamic_offset = descriptor_state->dynamic_offsets[dynamic_offset_index];
   }

   return &set->descriptors[binding_layout->descriptor_index + array_index];
}
98
99
/* Equivalent to map_get_descriptor but it returns a reloc with the bo
100
* associated with that descriptor (suballocation of the descriptor pool bo)
101
*
102
* It also returns the descriptor type, so the caller could do extra
103
* validation or adding extra offsets if the bo contains more that one field.
104
*/
105
/* Equivalent to map_get_descriptor but it returns a reloc with the bo
 * associated with that descriptor (suballocation of the descriptor pool bo)
 *
 * It also returns the descriptor type, so the caller could do extra
 * validation or adding extra offsets if the bo contains more that one field.
 */
static struct v3dv_cl_reloc
v3dv_descriptor_map_get_descriptor_bo(struct v3dv_device *device,
                                      struct v3dv_descriptor_state *descriptor_state,
                                      struct v3dv_descriptor_map *map,
                                      struct v3dv_pipeline_layout *pipeline_layout,
                                      uint32_t index,
                                      VkDescriptorType *out_type)
{
   assert(index < map->num_desc);

   uint32_t set_number = map->set[index];
   /* The referenced set must currently be bound */
   assert(descriptor_state->valid & 1 << set_number);

   struct v3dv_descriptor_set *set =
      descriptor_state->descriptor_sets[set_number];
   assert(set);

   uint32_t binding_number = map->binding[index];
   assert(binding_number < set->layout->binding_count);

   const struct v3dv_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding_number];

   /* Only descriptor types with per-descriptor GPU data can be resolved to
    * a bo region.
    */
   assert(v3dv_X(device, descriptor_bo_size)(binding_layout->type) > 0);
   *out_type = binding_layout->type;

   uint32_t array_index = map->array_index[index];
   assert(array_index < binding_layout->array_size);

   /* Same offset computation as descriptor_bo_map, but expressed as a reloc
    * (bo + offset) so it can be emitted into a CL.
    */
   struct v3dv_cl_reloc reloc = {
      .bo = set->pool->bo,
      .offset = set->base_offset + binding_layout->descriptor_offset +
      array_index * v3dv_X(device, descriptor_bo_size)(binding_layout->type),
   };

   return reloc;
}
142
143
/*
144
* The difference between this method and v3dv_descriptor_map_get_descriptor,
145
* is that if the sampler are added as immutable when creating the set layout,
146
* they are bound to the set layout, so not part of the descriptor per
147
* se. This method return early in that case.
148
*/
149
/*
 * The difference between this method and v3dv_descriptor_map_get_descriptor,
 * is that if the samplers were added as immutable when creating the set
 * layout, they are bound to the set layout, so not part of the descriptor
 * per se. This method returns early in that case.
 */
const struct v3dv_sampler *
v3dv_descriptor_map_get_sampler(struct v3dv_descriptor_state *descriptor_state,
                                struct v3dv_descriptor_map *map,
                                struct v3dv_pipeline_layout *pipeline_layout,
                                uint32_t index)
{
   assert(index < map->num_desc);

   uint32_t set_number = map->set[index];
   /* The referenced set must currently be bound */
   assert(descriptor_state->valid & 1 << set_number);

   struct v3dv_descriptor_set *set =
      descriptor_state->descriptor_sets[set_number];
   assert(set);

   uint32_t binding_number = map->binding[index];
   assert(binding_number < set->layout->binding_count);

   const struct v3dv_descriptor_set_binding_layout *binding_layout =
      &set->layout->binding[binding_number];

   uint32_t array_index = map->array_index[index];
   assert(array_index < binding_layout->array_size);

   /* Immutable samplers live in the set layout allocation itself, keyed by
    * a non-zero offset; only SAMPLER / COMBINED_IMAGE_SAMPLER bindings may
    * have them.
    */
   if (binding_layout->immutable_samplers_offset != 0) {
      assert(binding_layout->type == VK_DESCRIPTOR_TYPE_SAMPLER ||
             binding_layout->type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

      const struct v3dv_sampler *immutable_samplers =
         v3dv_immutable_samplers(set->layout, binding_layout);

      assert(immutable_samplers);
      const struct v3dv_sampler *sampler = &immutable_samplers[array_index];
      assert(sampler);

      return sampler;
   }

   /* Mutable case: the sampler comes from the written descriptor */
   struct v3dv_descriptor *descriptor =
      &set->descriptors[binding_layout->descriptor_index + array_index];

   assert(descriptor->type == VK_DESCRIPTOR_TYPE_SAMPLER ||
          descriptor->type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);

   assert(descriptor->sampler);

   return descriptor->sampler;
}
197
198
199
struct v3dv_cl_reloc
200
v3dv_descriptor_map_get_sampler_state(struct v3dv_device *device,
201
struct v3dv_descriptor_state *descriptor_state,
202
struct v3dv_descriptor_map *map,
203
struct v3dv_pipeline_layout *pipeline_layout,
204
uint32_t index)
205
{
206
VkDescriptorType type;
207
struct v3dv_cl_reloc reloc =
208
v3dv_descriptor_map_get_descriptor_bo(device, descriptor_state, map,
209
pipeline_layout,
210
index, &type);
211
212
assert(type == VK_DESCRIPTOR_TYPE_SAMPLER ||
213
type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
214
215
if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
216
reloc.offset += v3dv_X(device, combined_image_sampler_sampler_state_offset)();
217
218
return reloc;
219
}
220
221
const struct v3dv_format*
222
v3dv_descriptor_map_get_texture_format(struct v3dv_descriptor_state *descriptor_state,
223
struct v3dv_descriptor_map *map,
224
struct v3dv_pipeline_layout *pipeline_layout,
225
uint32_t index,
226
VkFormat *out_vk_format)
227
{
228
struct v3dv_descriptor *descriptor =
229
v3dv_descriptor_map_get_descriptor(descriptor_state, map,
230
pipeline_layout, index, NULL);
231
232
switch (descriptor->type) {
233
case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
234
case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
235
assert(descriptor->buffer_view);
236
*out_vk_format = descriptor->buffer_view->vk_format;
237
return descriptor->buffer_view->format;
238
case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
239
case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
240
case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
241
case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
242
assert(descriptor->image_view);
243
*out_vk_format = descriptor->image_view->vk_format;
244
return descriptor->image_view->format;
245
default:
246
unreachable("descriptor type doesn't has a texture format");
247
}
248
}
249
250
struct v3dv_bo*
251
v3dv_descriptor_map_get_texture_bo(struct v3dv_descriptor_state *descriptor_state,
252
struct v3dv_descriptor_map *map,
253
struct v3dv_pipeline_layout *pipeline_layout,
254
uint32_t index)
255
256
{
257
struct v3dv_descriptor *descriptor =
258
v3dv_descriptor_map_get_descriptor(descriptor_state, map,
259
pipeline_layout, index, NULL);
260
261
switch (descriptor->type) {
262
case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
263
case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
264
assert(descriptor->buffer_view);
265
return descriptor->buffer_view->buffer->mem->bo;
266
case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
267
case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
268
case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
269
case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
270
assert(descriptor->image_view);
271
return descriptor->image_view->image->mem->bo;
272
default:
273
unreachable("descriptor type doesn't has a texture bo");
274
}
275
}
276
277
struct v3dv_cl_reloc
278
v3dv_descriptor_map_get_texture_shader_state(struct v3dv_device *device,
279
struct v3dv_descriptor_state *descriptor_state,
280
struct v3dv_descriptor_map *map,
281
struct v3dv_pipeline_layout *pipeline_layout,
282
uint32_t index)
283
{
284
VkDescriptorType type;
285
struct v3dv_cl_reloc reloc =
286
v3dv_descriptor_map_get_descriptor_bo(device,
287
descriptor_state, map,
288
pipeline_layout,
289
index, &type);
290
291
assert(type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
292
type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
293
type == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT ||
294
type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ||
295
type == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER ||
296
type == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
297
298
if (type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
299
reloc.offset += v3dv_X(device, combined_image_sampler_texture_state_offset)();
300
301
return reloc;
302
}
303
304
/*
305
* As anv and tu already points:
306
*
307
* "Pipeline layouts. These have nothing to do with the pipeline. They are
308
* just multiple descriptor set layouts pasted together."
309
*/
310
311
/* Creates a pipeline layout: records the per-set layouts, assigns each set
 * a starting index into the flattened dynamic-offset array, accumulates the
 * union of shader stages, and computes the total push constant size.
 */
VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreatePipelineLayout(VkDevice _device,
                          const VkPipelineLayoutCreateInfo *pCreateInfo,
                          const VkAllocationCallbacks *pAllocator,
                          VkPipelineLayout *pPipelineLayout)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_pipeline_layout *layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_object_zalloc(&device->vk, pAllocator, sizeof(*layout),
                             VK_OBJECT_TYPE_PIPELINE_LAYOUT);
   if (layout == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   uint32_t dynamic_offset_count = 0;
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, set_layout,
                       pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;

      /* Each set's dynamic offsets start right after the previous set's */
      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         dynamic_offset_count += set_layout->binding[b].array_size *
            set_layout->binding[b].dynamic_offset_count;
      }

      layout->shader_stages |= set_layout->shader_stages;
   }

   /* Push constant size is the largest end (offset + size) over all ranges */
   layout->push_constant_size = 0;
   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size =
         MAX2(layout->push_constant_size, range->offset + range->size);
   }

   /* NOTE(review): rounded up to 4096 — presumably a page-sized allocation
    * granularity for the push constant buffer; confirm against its users.
    */
   layout->push_constant_size = align(layout->push_constant_size, 4096);

   layout->dynamic_offset_count = dynamic_offset_count;

   *pPipelineLayout = v3dv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}
360
361
VKAPI_ATTR void VKAPI_CALL
362
v3dv_DestroyPipelineLayout(VkDevice _device,
363
VkPipelineLayout _pipelineLayout,
364
const VkAllocationCallbacks *pAllocator)
365
{
366
V3DV_FROM_HANDLE(v3dv_device, device, _device);
367
V3DV_FROM_HANDLE(v3dv_pipeline_layout, pipeline_layout, _pipelineLayout);
368
369
if (!pipeline_layout)
370
return;
371
vk_object_free(&device->vk, pAllocator, pipeline_layout);
372
}
373
374
/* Creates a descriptor pool.  The pool owns two kinds of storage:
 *
 *  - Host memory for the v3dv_descriptor_set / v3dv_descriptor structs.
 *    When FREE_DESCRIPTOR_SET_BIT is NOT set, sets can't be freed
 *    individually, so it is pre-allocated as a single bump region right
 *    after the pool struct (host_memory_base/ptr/end).  Otherwise only an
 *    entry table is reserved and sets are allocated individually.
 *
 *  - A GPU bo for descriptor data that must live at a GPU address (texture
 *    and sampler shader state), sized from the pool sizes.
 */
VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreateDescriptorPool(VkDevice _device,
                          const VkDescriptorPoolCreateInfo *pCreateInfo,
                          const VkAllocationCallbacks *pAllocator,
                          VkDescriptorPool *pDescriptorPool)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_descriptor_pool *pool;
   /* size is for the vulkan object descriptor pool. The final size would
    * depend on some of FREE_DESCRIPTOR flags used
    */
   uint64_t size = sizeof(struct v3dv_descriptor_pool);
   /* bo_size is for the descriptor related info that we need to have on a GPU
    * address (so on v3dv_bo_alloc allocated memory), like for example the
    * texture sampler state. Note that not all the descriptors use it
    */
   uint32_t bo_size = 0;
   uint32_t descriptor_count = 0;

   assert(pCreateInfo->poolSizeCount > 0);
   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      /* Verify supported descriptor type */
      switch(pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         break;
      default:
         unreachable("Unimplemented descriptor type");
         break;
      }

      assert(pCreateInfo->pPoolSizes[i].descriptorCount > 0);
      descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
      /* Per-type GPU footprint; zero for types with no GPU-side data */
      bo_size += v3dv_X(device, descriptor_bo_size)(pCreateInfo->pPoolSizes[i].type) *
         pCreateInfo->pPoolSizes[i].descriptorCount;
   }

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      /* Bump-allocator mode: reserve worst-case host storage for all sets
       * and descriptors up front.
       */
      uint64_t host_size =
         pCreateInfo->maxSets * sizeof(struct v3dv_descriptor_set);
      host_size += sizeof(struct v3dv_descriptor) * descriptor_count;
      size += host_size;
   } else {
      /* Free-able mode: only reserve the entry-tracking table */
      size += sizeof(struct v3dv_descriptor_pool_entry) * pCreateInfo->maxSets;
   }

   pool = vk_object_zalloc(&device->vk, pAllocator, size,
                           VK_OBJECT_TYPE_DESCRIPTOR_POOL);

   if (!pool)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      pool->host_memory_base = (uint8_t*)pool + sizeof(struct v3dv_descriptor_pool);
      pool->host_memory_ptr = pool->host_memory_base;
      pool->host_memory_end = (uint8_t*)pool + size;
   }

   pool->max_entry_count = pCreateInfo->maxSets;

   if (bo_size > 0) {
      pool->bo = v3dv_bo_alloc(device, bo_size, "descriptor pool bo", true);
      if (!pool->bo)
         goto out_of_device_memory;

      bool ok = v3dv_bo_map(device, pool->bo, pool->bo->size);
      if (!ok)
         goto out_of_device_memory;

      pool->current_offset = 0;
   } else {
      pool->bo = NULL;
   }

   *pDescriptorPool = v3dv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;

out_of_device_memory:
   /* NOTE(review): if v3dv_bo_map failed, the bo allocated above is freed
    * together with the pool object here only if v3dv_bo_alloc registered it
    * elsewhere — can't tell from this file; verify there is no bo leak on
    * the map-failure path.
    */
   vk_object_free(&device->vk, pAllocator, pool);
   return vk_error(device->instance, VK_ERROR_OUT_OF_DEVICE_MEMORY);
}
465
466
/* Destroys a descriptor set allocated from a free-able pool (one without a
 * host_memory_base bump region).  When free_bo is set, the set's entry is
 * removed from the pool's entry table so its bo region becomes reusable.
 */
static void
descriptor_set_destroy(struct v3dv_device *device,
                       struct v3dv_descriptor_pool *pool,
                       struct v3dv_descriptor_set *set,
                       bool free_bo)
{
   /* Callers must never pass sets from bump-allocated pools */
   assert(!pool->host_memory_base);

   /* The !host_memory_base re-check looks redundant with the assert above,
    * but the assert compiles out in NDEBUG builds, so keep both.
    */
   if (free_bo && !pool->host_memory_base) {
      for (uint32_t i = 0; i < pool->entry_count; i++) {
         if (pool->entries[i].set == set) {
            /* Keep the entry table compact and ordered by shifting the
             * tail down one slot.
             */
            memmove(&pool->entries[i], &pool->entries[i+1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }
   }
   vk_object_free(&device->vk, NULL, set);
}
486
487
VKAPI_ATTR void VKAPI_CALL
488
v3dv_DestroyDescriptorPool(VkDevice _device,
489
VkDescriptorPool _pool,
490
const VkAllocationCallbacks *pAllocator)
491
{
492
V3DV_FROM_HANDLE(v3dv_device, device, _device);
493
V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, _pool);
494
495
if (!pool)
496
return;
497
498
if (!pool->host_memory_base) {
499
for(int i = 0; i < pool->entry_count; ++i) {
500
descriptor_set_destroy(device, pool, pool->entries[i].set, false);
501
}
502
}
503
504
if (pool->bo) {
505
v3dv_bo_free(device, pool->bo);
506
pool->bo = NULL;
507
}
508
509
vk_object_free(&device->vk, pAllocator, pool);
510
}
511
512
VKAPI_ATTR VkResult VKAPI_CALL
513
v3dv_ResetDescriptorPool(VkDevice _device,
514
VkDescriptorPool descriptorPool,
515
VkDescriptorPoolResetFlags flags)
516
{
517
V3DV_FROM_HANDLE(v3dv_device, device, _device);
518
V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, descriptorPool);
519
520
if (!pool->host_memory_base) {
521
for(int i = 0; i < pool->entry_count; ++i) {
522
descriptor_set_destroy(device, pool, pool->entries[i].set, false);
523
}
524
} else {
525
/* We clean-up the host memory, so when allocating a new set from the
526
* pool, it is already 0
527
*/
528
uint32_t host_size = pool->host_memory_end - pool->host_memory_base;
529
memset(pool->host_memory_base, 0, host_size);
530
}
531
532
pool->entry_count = 0;
533
pool->host_memory_ptr = pool->host_memory_base;
534
pool->current_offset = 0;
535
536
return VK_SUCCESS;
537
}
538
539
VKAPI_ATTR VkResult VKAPI_CALL
540
v3dv_CreateDescriptorSetLayout(VkDevice _device,
541
const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
542
const VkAllocationCallbacks *pAllocator,
543
VkDescriptorSetLayout *pSetLayout)
544
{
545
V3DV_FROM_HANDLE(v3dv_device, device, _device);
546
struct v3dv_descriptor_set_layout *set_layout;
547
548
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
549
550
uint32_t num_bindings = 0;
551
uint32_t immutable_sampler_count = 0;
552
for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
553
num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);
554
555
/* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
556
*
557
* "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
558
* VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
559
* pImmutableSamplers can be used to initialize a set of immutable
560
* samplers. [...] If descriptorType is not one of these descriptor
561
* types, then pImmutableSamplers is ignored.
562
*
563
* We need to be careful here and only parse pImmutableSamplers if we
564
* have one of the right descriptor types.
565
*/
566
VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
567
if ((desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
568
desc_type == VK_DESCRIPTOR_TYPE_SAMPLER) &&
569
pCreateInfo->pBindings[j].pImmutableSamplers) {
570
immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
571
}
572
}
573
574
uint32_t samplers_offset = sizeof(struct v3dv_descriptor_set_layout) +
575
num_bindings * sizeof(set_layout->binding[0]);
576
uint32_t size = samplers_offset +
577
immutable_sampler_count * sizeof(struct v3dv_sampler);
578
579
set_layout = vk_object_zalloc(&device->vk, pAllocator, size,
580
VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
581
582
if (!set_layout)
583
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
584
585
/* We just allocate all the immutable samplers at the end of the struct */
586
struct v3dv_sampler *samplers = (void*) &set_layout->binding[num_bindings];
587
588
assert(pCreateInfo->bindingCount == 0 || num_bindings > 0);
589
590
VkDescriptorSetLayoutBinding *bindings = NULL;
591
VkResult result = vk_create_sorted_bindings(pCreateInfo->pBindings,
592
pCreateInfo->bindingCount, &bindings);
593
if (result != VK_SUCCESS) {
594
vk_object_free(&device->vk, pAllocator, set_layout);
595
return vk_error(device->instance, result);
596
}
597
598
memset(set_layout->binding, 0,
599
size - sizeof(struct v3dv_descriptor_set_layout));
600
601
set_layout->binding_count = num_bindings;
602
set_layout->flags = pCreateInfo->flags;
603
set_layout->shader_stages = 0;
604
set_layout->bo_size = 0;
605
606
uint32_t descriptor_count = 0;
607
uint32_t dynamic_offset_count = 0;
608
609
for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
610
const VkDescriptorSetLayoutBinding *binding = bindings + i;
611
uint32_t binding_number = binding->binding;
612
613
switch (binding->descriptorType) {
614
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
615
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
616
break;
617
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
618
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
619
set_layout->binding[binding_number].dynamic_offset_count = 1;
620
break;
621
case VK_DESCRIPTOR_TYPE_SAMPLER:
622
case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
623
case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
624
case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
625
case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
626
case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
627
case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
628
/* Nothing here, just to keep the descriptor type filtering below */
629
break;
630
default:
631
unreachable("Unknown descriptor type\n");
632
break;
633
}
634
635
set_layout->binding[binding_number].type = binding->descriptorType;
636
set_layout->binding[binding_number].array_size = binding->descriptorCount;
637
set_layout->binding[binding_number].descriptor_index = descriptor_count;
638
set_layout->binding[binding_number].dynamic_offset_index = dynamic_offset_count;
639
640
if ((binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
641
binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
642
binding->pImmutableSamplers) {
643
644
set_layout->binding[binding_number].immutable_samplers_offset = samplers_offset;
645
646
for (uint32_t i = 0; i < binding->descriptorCount; i++)
647
samplers[i] = *v3dv_sampler_from_handle(binding->pImmutableSamplers[i]);
648
649
samplers += binding->descriptorCount;
650
samplers_offset += sizeof(struct v3dv_sampler) * binding->descriptorCount;
651
}
652
653
descriptor_count += binding->descriptorCount;
654
dynamic_offset_count += binding->descriptorCount *
655
set_layout->binding[binding_number].dynamic_offset_count;
656
657
set_layout->shader_stages |= binding->stageFlags;
658
659
set_layout->binding[binding_number].descriptor_offset = set_layout->bo_size;
660
set_layout->bo_size +=
661
v3dv_X(device, descriptor_bo_size)(set_layout->binding[binding_number].type) *
662
binding->descriptorCount;
663
}
664
665
free(bindings);
666
667
set_layout->descriptor_count = descriptor_count;
668
set_layout->dynamic_offset_count = dynamic_offset_count;
669
670
*pSetLayout = v3dv_descriptor_set_layout_to_handle(set_layout);
671
672
return VK_SUCCESS;
673
}
674
675
VKAPI_ATTR void VKAPI_CALL
676
v3dv_DestroyDescriptorSetLayout(VkDevice _device,
677
VkDescriptorSetLayout _set_layout,
678
const VkAllocationCallbacks *pAllocator)
679
{
680
V3DV_FROM_HANDLE(v3dv_device, device, _device);
681
V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, set_layout, _set_layout);
682
683
if (!set_layout)
684
return;
685
686
vk_object_free(&device->vk, pAllocator, set_layout);
687
}
688
689
static inline VkResult
690
out_of_pool_memory(const struct v3dv_device *device,
691
const struct v3dv_descriptor_pool *pool)
692
{
693
/* Don't log OOPM errors for internal driver pools, we handle these properly
694
* by allocating a new pool, so they don't point to real issues.
695
*/
696
if (!pool->is_driver_internal)
697
return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY)
698
else
699
return VK_ERROR_OUT_OF_POOL_MEMORY;
700
}
701
702
/* Allocates a descriptor set from a pool: host memory for the set struct
 * and descriptors (bump region or heap, depending on pool flags), plus a
 * suballocation of the pool bo when the layout needs GPU-visible data.
 * Finally pre-writes any immutable sampler states into the bo region.
 */
static VkResult
descriptor_set_create(struct v3dv_device *device,
                      struct v3dv_descriptor_pool *pool,
                      const struct v3dv_descriptor_set_layout *layout,
                      struct v3dv_descriptor_set **out_set)
{
   struct v3dv_descriptor_set *set;
   uint32_t descriptor_count = layout->descriptor_count;
   /* Descriptors are stored inline right after the set struct */
   unsigned mem_size = sizeof(struct v3dv_descriptor_set) +
                       sizeof(struct v3dv_descriptor) * descriptor_count;

   if (pool->host_memory_base) {
      /* Bump-allocator pool: carve the set out of the pre-allocated region
       * (already zeroed by pool create/reset).
       */
      if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
         return out_of_pool_memory(device, pool);

      set = (struct v3dv_descriptor_set*)pool->host_memory_ptr;
      pool->host_memory_ptr += mem_size;

      vk_object_base_init(&device->vk, &set->base, VK_OBJECT_TYPE_DESCRIPTOR_SET);
   } else {
      set = vk_object_zalloc(&device->vk, NULL, mem_size,
                             VK_OBJECT_TYPE_DESCRIPTOR_SET);

      if (!set)
         return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   set->pool = pool;

   set->layout = layout;

   /* FIXME: VK_EXT_descriptor_indexing introduces
    * VARIABLE_DESCRIPTOR_LAYOUT_COUNT. That would affect the layout_size used
    * below for bo allocation
    */

   uint32_t offset = 0;
   /* Default insertion point: append at the end of the entry table */
   uint32_t index = pool->entry_count;

   if (layout->bo_size) {
      if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
         vk_object_free(&device->vk, NULL, set);
         return out_of_pool_memory(device, pool);
      }

      /* We first try to allocate linearly first, so that we don't spend time
       * looking for gaps if the app only allocates & resets via the pool.
       *
       * If that fails, we try to find a gap from previously freed subregions
       * iterating through the descriptor pool entries. Note that we are not
       * doing that if we have a pool->host_memory_base. We only have that if
       * VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT is not set, so in
       * that case the user can't free subregions, so it doesn't make sense to
       * even try (or track those subregions).
       */
      if (pool->current_offset + layout->bo_size <= pool->bo->size) {
         offset = pool->current_offset;
         pool->current_offset += layout->bo_size;
      } else if (!pool->host_memory_base) {
         /* Entries are kept sorted by offset, so scan for the first gap
          * between consecutive entries that is big enough.
          */
         for (index = 0; index < pool->entry_count; index++) {
            if (pool->entries[index].offset - offset >= layout->bo_size)
               break;
            offset = pool->entries[index].offset + pool->entries[index].size;
         }
         if (pool->bo->size - offset < layout->bo_size) {
            vk_object_free(&device->vk, NULL, set);
            return out_of_pool_memory(device, pool);
         }
         /* Shift the tail up one slot so the table stays sorted after we
          * insert at 'index' below.
          */
         memmove(&pool->entries[index + 1], &pool->entries[index],
                 sizeof(pool->entries[0]) * (pool->entry_count - index));
      } else {
         assert(pool->host_memory_base);
         return out_of_pool_memory(device, pool);
      }

      set->base_offset = offset;
   }

   if (!pool->host_memory_base) {
      pool->entries[index].set = set;
      pool->entries[index].offset = offset;
      pool->entries[index].size = layout->bo_size;
      pool->entry_count++;
   }

   /* Go through and fill out immutable samplers if we have any */
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers_offset == 0)
         continue;

      const struct v3dv_sampler *samplers =
         (const struct v3dv_sampler *)((const char *)layout +
                                       layout->binding[b].immutable_samplers_offset);

      for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
         /* Combined image/samplers keep texture state first; skip to the
          * sampler state portion of the region.
          */
         uint32_t combined_offset =
            layout->binding[b].type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ?
            v3dv_X(device, combined_image_sampler_sampler_state_offset)() : 0;

         void *desc_map = descriptor_bo_map(device, set, &layout->binding[b], i);
         desc_map += combined_offset;

         memcpy(desc_map,
                samplers[i].sampler_state,
                sizeof(samplers[i].sampler_state));
      }
   }

   *out_set = set;

   return VK_SUCCESS;
}
814
815
/* Allocates descriptorSetCount sets from the given pool.  On any failure,
 * the sets created so far are freed and all output handles are set to
 * VK_NULL_HANDLE, as required by the Vulkan spec.
 */
VKAPI_ATTR VkResult VKAPI_CALL
v3dv_AllocateDescriptorSets(VkDevice _device,
                            const VkDescriptorSetAllocateInfo *pAllocateInfo,
                            VkDescriptorSet *pDescriptorSets)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct v3dv_descriptor_set *set = NULL;
   uint32_t i = 0;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      V3DV_FROM_HANDLE(v3dv_descriptor_set_layout, layout,
                       pAllocateInfo->pSetLayouts[i]);

      result = descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = v3dv_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS) {
      /* 'i' is the number of sets successfully created before the failure */
      v3dv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                              i, pDescriptorSets);
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
      }
   }

   return result;
}
848
849
VKAPI_ATTR VkResult VKAPI_CALL
850
v3dv_FreeDescriptorSets(VkDevice _device,
851
VkDescriptorPool descriptorPool,
852
uint32_t count,
853
const VkDescriptorSet *pDescriptorSets)
854
{
855
V3DV_FROM_HANDLE(v3dv_device, device, _device);
856
V3DV_FROM_HANDLE(v3dv_descriptor_pool, pool, descriptorPool);
857
858
for (uint32_t i = 0; i < count; i++) {
859
V3DV_FROM_HANDLE(v3dv_descriptor_set, set, pDescriptorSets[i]);
860
if (set && !pool->host_memory_base)
861
descriptor_set_destroy(device, pool, set, true);
862
}
863
864
return VK_SUCCESS;
865
}
866
867
static void
868
descriptor_bo_copy(struct v3dv_device *device,
869
struct v3dv_descriptor_set *dst_set,
870
const struct v3dv_descriptor_set_binding_layout *dst_binding_layout,
871
uint32_t dst_array_index,
872
struct v3dv_descriptor_set *src_set,
873
const struct v3dv_descriptor_set_binding_layout *src_binding_layout,
874
uint32_t src_array_index)
875
{
876
assert(dst_binding_layout->type == src_binding_layout->type);
877
878
void *dst_map = descriptor_bo_map(device, dst_set, dst_binding_layout, dst_array_index);
879
void *src_map = descriptor_bo_map(device, src_set, src_binding_layout, src_array_index);
880
881
memcpy(dst_map, src_map, v3dv_X(device, descriptor_bo_size)(src_binding_layout->type));
882
}
883
884
static void
885
write_buffer_descriptor(struct v3dv_descriptor *descriptor,
886
VkDescriptorType desc_type,
887
const VkDescriptorBufferInfo *buffer_info)
888
{
889
V3DV_FROM_HANDLE(v3dv_buffer, buffer, buffer_info->buffer);
890
891
descriptor->type = desc_type;
892
descriptor->buffer = buffer;
893
descriptor->offset = buffer_info->offset;
894
if (buffer_info->range == VK_WHOLE_SIZE) {
895
descriptor->range = buffer->size - buffer_info->offset;
896
} else {
897
assert(descriptor->range <= UINT32_MAX);
898
descriptor->range = buffer_info->range;
899
}
900
}
901
902
/* Fills a host-side image/sampler descriptor and writes the corresponding
 * texture and/or sampler shader state into the set's pool bo region.
 * iview and/or sampler may be NULL depending on desc_type.
 */
static void
write_image_descriptor(struct v3dv_device *device,
                       struct v3dv_descriptor *descriptor,
                       VkDescriptorType desc_type,
                       struct v3dv_descriptor_set *set,
                       const struct v3dv_descriptor_set_binding_layout *binding_layout,
                       struct v3dv_image_view *iview,
                       struct v3dv_sampler *sampler,
                       uint32_t array_index)
{
   descriptor->type = desc_type;
   descriptor->sampler = sampler;
   descriptor->image_view = iview;

   void *desc_map = descriptor_bo_map(device, set,
                                      binding_layout, array_index);

   if (iview) {
      /* NOTE(review): index 1 appears to select an alternate shader state
       * for cube-array storage images; confirm against where
       * texture_shader_state[] is filled in.
       */
      const uint32_t tex_state_index =
         iview->type != VK_IMAGE_VIEW_TYPE_CUBE_ARRAY ||
         desc_type != VK_DESCRIPTOR_TYPE_STORAGE_IMAGE ? 0 : 1;
      memcpy(desc_map,
             iview->texture_shader_state[tex_state_index],
             sizeof(iview->texture_shader_state[0]));
      /* Advance to the sampler-state portion of the region (texture state
       * comes first for combined image/samplers).
       */
      desc_map += v3dv_X(device, combined_image_sampler_sampler_state_offset)();
   }

   if (sampler && !binding_layout->immutable_samplers_offset) {
      /* For immutable samplers this was already done as part of the
       * descriptor set create, as that info can't change later
       */
      memcpy(desc_map,
             sampler->sampler_state,
             sizeof(sampler->sampler_state));
   }
}
938
939
940
static void
941
write_buffer_view_descriptor(struct v3dv_device *device,
942
struct v3dv_descriptor *descriptor,
943
VkDescriptorType desc_type,
944
struct v3dv_descriptor_set *set,
945
const struct v3dv_descriptor_set_binding_layout *binding_layout,
946
struct v3dv_buffer_view *bview,
947
uint32_t array_index)
948
{
949
assert(bview);
950
descriptor->type = desc_type;
951
descriptor->buffer_view = bview;
952
953
void *desc_map = descriptor_bo_map(device, set, binding_layout, array_index);
954
955
memcpy(desc_map,
956
bview->texture_shader_state,
957
sizeof(bview->texture_shader_state));
958
}
959
960
/*
 * vkUpdateDescriptorSets: processes all writes first, then all copies.
 *
 * For each write, the host-side descriptor slot for the destination
 * binding/array element is located and the descriptor type dispatches to the
 * matching write_* helper, which also fills the shader-visible state in the
 * descriptor pool BO where the type requires it.
 *
 * NOTE(review): a descriptorCount that overflows the destination binding
 * into consecutive bindings (permitted by the Vulkan spec) does not appear
 * to be handled here — confirm against the spec/CTS expectations.
 */
VKAPI_ATTR void VKAPI_CALL
v3dv_UpdateDescriptorSets(VkDevice _device,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
      V3DV_FROM_HANDLE(v3dv_descriptor_set, set, writeset->dstSet);

      const struct v3dv_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + writeset->dstBinding;

      /* Host-side descriptor for the first array element being written. */
      struct v3dv_descriptor *descriptor = set->descriptors;

      descriptor += binding_layout->descriptor_index;
      descriptor += writeset->dstArrayElement;

      for (uint32_t j = 0; j < writeset->descriptorCount; ++j) {
         switch(writeset->descriptorType) {

         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
            const VkDescriptorBufferInfo *buffer_info = writeset->pBufferInfo + j;
            write_buffer_descriptor(descriptor, writeset->descriptorType,
                                    buffer_info);
            break;
         }
         case VK_DESCRIPTOR_TYPE_SAMPLER: {
            /* If we are here we shouldn't be modifying a immutable sampler,
             * so we don't ensure that would work or not crash. But let the
             * validation layers check that
             */
            const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
            V3DV_FROM_HANDLE(v3dv_sampler, sampler, image_info->sampler);
            write_image_descriptor(device, descriptor, writeset->descriptorType,
                                   set, binding_layout, NULL, sampler,
                                   writeset->dstArrayElement + j);

            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
            const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
            V3DV_FROM_HANDLE(v3dv_image_view, iview, image_info->imageView);
            write_image_descriptor(device, descriptor, writeset->descriptorType,
                                   set, binding_layout, iview, NULL,
                                   writeset->dstArrayElement + j);

            break;
         }
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
            /* Combined descriptors pass both the view and the sampler so the
             * helper writes texture state followed by sampler state.
             */
            const VkDescriptorImageInfo *image_info = writeset->pImageInfo + j;
            V3DV_FROM_HANDLE(v3dv_image_view, iview, image_info->imageView);
            V3DV_FROM_HANDLE(v3dv_sampler, sampler, image_info->sampler);
            write_image_descriptor(device, descriptor, writeset->descriptorType,
                                   set, binding_layout, iview, sampler,
                                   writeset->dstArrayElement + j);

            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
            V3DV_FROM_HANDLE(v3dv_buffer_view, buffer_view,
                             writeset->pTexelBufferView[j]);
            write_buffer_view_descriptor(device, descriptor, writeset->descriptorType,
                                         set, binding_layout, buffer_view,
                                         writeset->dstArrayElement + j);
            break;
         }
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         descriptor++;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
      V3DV_FROM_HANDLE(v3dv_descriptor_set, src_set,
                       copyset->srcSet);
      V3DV_FROM_HANDLE(v3dv_descriptor_set, dst_set,
                       copyset->dstSet);

      const struct v3dv_descriptor_set_binding_layout *src_binding_layout =
         src_set->layout->binding + copyset->srcBinding;
      const struct v3dv_descriptor_set_binding_layout *dst_binding_layout =
         dst_set->layout->binding + copyset->dstBinding;

      assert(src_binding_layout->type == dst_binding_layout->type);

      struct v3dv_descriptor *src_descriptor = src_set->descriptors;
      struct v3dv_descriptor *dst_descriptor = dst_set->descriptors;

      src_descriptor += src_binding_layout->descriptor_index;
      src_descriptor += copyset->srcArrayElement;

      dst_descriptor += dst_binding_layout->descriptor_index;
      dst_descriptor += copyset->dstArrayElement;

      for (uint32_t j = 0; j < copyset->descriptorCount; j++) {
         /* Copy the host-side descriptor... */
         *dst_descriptor = *src_descriptor;
         dst_descriptor++;
         src_descriptor++;

         /* ...and for descriptor types that keep shader-visible state in the
          * pool BO, copy that state as well.
          */
         if (v3dv_X(device, descriptor_bo_size)(src_binding_layout->type) > 0) {
            descriptor_bo_copy(device,
                               dst_set, dst_binding_layout,
                               j + copyset->dstArrayElement,
                               src_set, src_binding_layout,
                               j + copyset->srcArrayElement);
         }

      }
   }
}
1082
1083
VKAPI_ATTR void VKAPI_CALL
1084
v3dv_GetDescriptorSetLayoutSupport(
1085
VkDevice _device,
1086
const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
1087
VkDescriptorSetLayoutSupport *pSupport)
1088
{
1089
V3DV_FROM_HANDLE(v3dv_device, device, _device);
1090
VkDescriptorSetLayoutBinding *bindings = NULL;
1091
VkResult result = vk_create_sorted_bindings(
1092
pCreateInfo->pBindings, pCreateInfo->bindingCount, &bindings);
1093
if (result != VK_SUCCESS) {
1094
pSupport->supported = false;
1095
return;
1096
}
1097
1098
bool supported = true;
1099
1100
uint32_t desc_host_size = sizeof(struct v3dv_descriptor);
1101
uint32_t host_size = sizeof(struct v3dv_descriptor_set);
1102
uint32_t bo_size = 0;
1103
for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
1104
const VkDescriptorSetLayoutBinding *binding = bindings + i;
1105
1106
if ((UINT32_MAX - host_size) / desc_host_size < binding->descriptorCount) {
1107
supported = false;
1108
break;
1109
}
1110
1111
uint32_t desc_bo_size = v3dv_X(device, descriptor_bo_size)(binding->descriptorType);
1112
if (desc_bo_size > 0 &&
1113
(UINT32_MAX - bo_size) / desc_bo_size < binding->descriptorCount) {
1114
supported = false;
1115
break;
1116
}
1117
1118
host_size += binding->descriptorCount * desc_host_size;
1119
bo_size += binding->descriptorCount * desc_bo_size;
1120
}
1121
1122
free(bindings);
1123
1124
pSupport->supported = supported;
1125
}
1126
1127
VkResult
1128
v3dv_CreateDescriptorUpdateTemplate(
1129
VkDevice _device,
1130
const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
1131
const VkAllocationCallbacks *pAllocator,
1132
VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
1133
{
1134
V3DV_FROM_HANDLE(v3dv_device, device, _device);
1135
struct v3dv_descriptor_update_template *template;
1136
1137
size_t size = sizeof(*template) +
1138
pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
1139
template = vk_object_alloc(&device->vk, pAllocator, size,
1140
VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
1141
if (template == NULL)
1142
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1143
1144
template->bind_point = pCreateInfo->pipelineBindPoint;
1145
1146
assert(pCreateInfo->templateType ==
1147
VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET);
1148
template->set = pCreateInfo->set;
1149
1150
template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
1151
for (uint32_t i = 0; i < template->entry_count; i++) {
1152
const VkDescriptorUpdateTemplateEntry *pEntry =
1153
&pCreateInfo->pDescriptorUpdateEntries[i];
1154
1155
template->entries[i] = (struct v3dv_descriptor_template_entry) {
1156
.type = pEntry->descriptorType,
1157
.binding = pEntry->dstBinding,
1158
.array_element = pEntry->dstArrayElement,
1159
.array_count = pEntry->descriptorCount,
1160
.offset = pEntry->offset,
1161
.stride = pEntry->stride,
1162
};
1163
}
1164
1165
*pDescriptorUpdateTemplate =
1166
v3dv_descriptor_update_template_to_handle(template);
1167
1168
return VK_SUCCESS;
1169
}
1170
1171
void
1172
v3dv_DestroyDescriptorUpdateTemplate(
1173
VkDevice _device,
1174
VkDescriptorUpdateTemplate descriptorUpdateTemplate,
1175
const VkAllocationCallbacks *pAllocator)
1176
{
1177
V3DV_FROM_HANDLE(v3dv_device, device, _device);
1178
V3DV_FROM_HANDLE(v3dv_descriptor_update_template, template,
1179
descriptorUpdateTemplate);
1180
1181
if (!template)
1182
return;
1183
1184
vk_object_free(&device->vk, pAllocator, template);
1185
}
1186
1187
void
1188
v3dv_UpdateDescriptorSetWithTemplate(
1189
VkDevice _device,
1190
VkDescriptorSet descriptorSet,
1191
VkDescriptorUpdateTemplate descriptorUpdateTemplate,
1192
const void *pData)
1193
{
1194
V3DV_FROM_HANDLE(v3dv_device, device, _device);
1195
V3DV_FROM_HANDLE(v3dv_descriptor_set, set, descriptorSet);
1196
V3DV_FROM_HANDLE(v3dv_descriptor_update_template, template,
1197
descriptorUpdateTemplate);
1198
1199
for (int i = 0; i < template->entry_count; i++) {
1200
const struct v3dv_descriptor_template_entry *entry =
1201
&template->entries[i];
1202
1203
const struct v3dv_descriptor_set_binding_layout *binding_layout =
1204
set->layout->binding + entry->binding;
1205
1206
struct v3dv_descriptor *descriptor =
1207
set->descriptors +
1208
binding_layout->descriptor_index +
1209
entry->array_element;
1210
1211
switch (entry->type) {
1212
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
1213
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
1214
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
1215
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
1216
for (uint32_t j = 0; j < entry->array_count; j++) {
1217
const VkDescriptorBufferInfo *info =
1218
pData + entry->offset + j * entry->stride;
1219
write_buffer_descriptor(descriptor + j, entry->type, info);
1220
}
1221
break;
1222
1223
case VK_DESCRIPTOR_TYPE_SAMPLER:
1224
case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1225
case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1226
case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1227
case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1228
for (uint32_t j = 0; j < entry->array_count; j++) {
1229
const VkDescriptorImageInfo *info =
1230
pData + entry->offset + j * entry->stride;
1231
V3DV_FROM_HANDLE(v3dv_image_view, iview, info->imageView);
1232
V3DV_FROM_HANDLE(v3dv_sampler, sampler, info->sampler);
1233
write_image_descriptor(device, descriptor + j, entry->type,
1234
set, binding_layout, iview, sampler,
1235
entry->array_element + j);
1236
}
1237
break;
1238
1239
case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
1240
case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
1241
for (uint32_t j = 0; j < entry->array_count; j++) {
1242
const VkBufferView *_bview =
1243
pData + entry->offset + j * entry->stride;
1244
V3DV_FROM_HANDLE(v3dv_buffer_view, bview, *_bview);
1245
write_buffer_view_descriptor(device, descriptor + j, entry->type,
1246
set, binding_layout, bview,
1247
entry->array_element + j);
1248
}
1249
break;
1250
1251
default:
1252
unreachable("Unsupported descriptor type");
1253
}
1254
}
1255
}
1256
1257