GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/virtio/vulkan/vn_descriptor_set.c
/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_descriptor_set.h"

#include "venus-protocol/vn_protocol_driver_descriptor_pool.h"
#include "venus-protocol/vn_protocol_driver_descriptor_set.h"
#include "venus-protocol/vn_protocol_driver_descriptor_set_layout.h"
#include "venus-protocol/vn_protocol_driver_descriptor_update_template.h"

#include "vn_device.h"

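/* A note on the conventions used below: the vn_call_* helpers from the
 * generated venus-protocol headers appear to encode a command and wait for
 * the renderer's reply, which is why they show up where an output value is
 * needed, while the vn_async_* helpers seem to encode without waiting and
 * are used where the driver already knows what it will return to the
 * application.  The exact semantics live in the generated headers above.
 */
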
/* descriptor set layout commands */

void
vn_GetDescriptorSetLayoutSupport(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO per-device cache */
   vn_call_vkGetDescriptorSetLayoutSupport(dev->instance, device, pCreateInfo,
                                           pSupport);
}

VkResult
vn_CreateDescriptorSetLayout(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorSetLayout *pSetLayout)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   uint32_t max_binding = 0;
   VkDescriptorSetLayoutBinding *local_bindings = NULL;
   VkDescriptorSetLayoutCreateInfo local_create_info;
   if (pCreateInfo->bindingCount) {
      /* the encoder does not ignore
       * VkDescriptorSetLayoutBinding::pImmutableSamplers when it should
       */
      const size_t binding_size =
         sizeof(*pCreateInfo->pBindings) * pCreateInfo->bindingCount;
      local_bindings = vk_alloc(alloc, binding_size, VN_DEFAULT_ALIGN,
                                VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
      if (!local_bindings)
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

      memcpy(local_bindings, pCreateInfo->pBindings, binding_size);
      for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
         VkDescriptorSetLayoutBinding *binding = &local_bindings[i];

         if (max_binding < binding->binding)
            max_binding = binding->binding;

         switch (binding->descriptorType) {
         case VK_DESCRIPTOR_TYPE_SAMPLER:
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            break;
         default:
            binding->pImmutableSamplers = NULL;
            break;
         }
      }

      local_create_info = *pCreateInfo;
      local_create_info.pBindings = local_bindings;
      pCreateInfo = &local_create_info;
   }

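   /* layout->bindings is indexed by VkDescriptorSetLayoutBinding::binding,
    * which need not be dense, so the array must span max_binding + 1
    * entries even when bindingCount is smaller.
    */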
   const size_t layout_size =
      offsetof(struct vn_descriptor_set_layout, bindings[max_binding + 1]);
   struct vn_descriptor_set_layout *layout =
      vk_zalloc(alloc, layout_size, VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!layout) {
      vk_free(alloc, local_bindings);
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   vn_object_base_init(&layout->base, VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
                       &dev->base);

   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding =
         &pCreateInfo->pBindings[i];
      struct vn_descriptor_set_layout_binding *dst =
         &layout->bindings[binding->binding];

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         dst->has_immutable_samplers = binding->pImmutableSamplers;
         break;
      default:
         break;
      }
   }

   VkDescriptorSetLayout layout_handle =
      vn_descriptor_set_layout_to_handle(layout);
   vn_async_vkCreateDescriptorSetLayout(dev->instance, device, pCreateInfo,
                                        NULL, &layout_handle);

   vk_free(alloc, local_bindings);

   *pSetLayout = layout_handle;

   return VK_SUCCESS;
}

void
vn_DestroyDescriptorSetLayout(VkDevice device,
                              VkDescriptorSetLayout descriptorSetLayout,
                              const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_set_layout *layout =
      vn_descriptor_set_layout_from_handle(descriptorSetLayout);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!layout)
      return;

   vn_async_vkDestroyDescriptorSetLayout(dev->instance, device,
                                         descriptorSetLayout, NULL);

   vn_object_base_fini(&layout->base);
   vk_free(alloc, layout);
}

/* descriptor pool commands */

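/* The pool snapshots the allocator it was created with (pool->allocator
 * below) so that the driver-side set objects can still be freed at reset
 * or destroy time, where the application may pass different allocation
 * callbacks or none at all.
 */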
VkResult
vn_CreateDescriptorPool(VkDevice device,
                        const VkDescriptorPoolCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkDescriptorPool *pDescriptorPool)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_descriptor_pool *pool =
      vk_zalloc(alloc, sizeof(*pool), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&pool->base, VK_OBJECT_TYPE_DESCRIPTOR_POOL,
                       &dev->base);

   pool->allocator = *alloc;
   list_inithead(&pool->descriptor_sets);

   VkDescriptorPool pool_handle = vn_descriptor_pool_to_handle(pool);
   vn_async_vkCreateDescriptorPool(dev->instance, device, pCreateInfo, NULL,
                                   &pool_handle);

   *pDescriptorPool = pool_handle;

   return VK_SUCCESS;
}

void
vn_DestroyDescriptorPool(VkDevice device,
                         VkDescriptorPool descriptorPool,
                         const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_pool *pool =
      vn_descriptor_pool_from_handle(descriptorPool);
   const VkAllocationCallbacks *alloc;

   if (!pool)
      return;

   alloc = pAllocator ? pAllocator : &pool->allocator;

   /* We must emit vkDestroyDescriptorPool before freeing the sets in
    * pool->descriptor_sets. Otherwise, another thread might reuse their
    * object ids while they still refer to the sets in the renderer.
    */
   vn_async_vkDestroyDescriptorPool(dev->instance, device, descriptorPool,
                                    NULL);

   list_for_each_entry_safe(struct vn_descriptor_set, set,
                            &pool->descriptor_sets, head) {
      list_del(&set->head);

      vn_object_base_fini(&set->base);
      vk_free(alloc, set);
   }

   vn_object_base_fini(&pool->base);
   vk_free(alloc, pool);
}

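/* vkResetDescriptorPool implicitly frees every set allocated from the
 * pool, so the driver-side shadow objects tracked in pool->descriptor_sets
 * are freed here as well.
 */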
VkResult
vn_ResetDescriptorPool(VkDevice device,
                       VkDescriptorPool descriptorPool,
                       VkDescriptorPoolResetFlags flags)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_pool *pool =
      vn_descriptor_pool_from_handle(descriptorPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   vn_async_vkResetDescriptorPool(dev->instance, device, descriptorPool,
                                  flags);

   list_for_each_entry_safe(struct vn_descriptor_set, set,
                            &pool->descriptor_sets, head) {
      list_del(&set->head);

      vn_object_base_fini(&set->base);
      vk_free(alloc, set);
   }

   return VK_SUCCESS;
}

/* descriptor set commands */

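/* Driver-side vn_descriptor_set objects are created optimistically before
 * the renderer is asked; vn_call_vkAllocateDescriptorSets then validates
 * the allocation synchronously, since it can legitimately fail (e.g. with
 * VK_ERROR_OUT_OF_POOL_MEMORY), and the shadow objects are unwound on
 * failure.
 */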
VkResult
vn_AllocateDescriptorSets(VkDevice device,
                          const VkDescriptorSetAllocateInfo *pAllocateInfo,
                          VkDescriptorSet *pDescriptorSets)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_pool *pool =
      vn_descriptor_pool_from_handle(pAllocateInfo->descriptorPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      struct vn_descriptor_set *set =
         vk_zalloc(alloc, sizeof(*set), VN_DEFAULT_ALIGN,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (!set) {
         for (uint32_t j = 0; j < i; j++) {
            set = vn_descriptor_set_from_handle(pDescriptorSets[j]);
            list_del(&set->head);
            vk_free(alloc, set);
         }
         memset(pDescriptorSets, 0,
                sizeof(*pDescriptorSets) * pAllocateInfo->descriptorSetCount);
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
      }

      vn_object_base_init(&set->base, VK_OBJECT_TYPE_DESCRIPTOR_SET,
                          &dev->base);
      set->layout =
         vn_descriptor_set_layout_from_handle(pAllocateInfo->pSetLayouts[i]);
      list_addtail(&set->head, &pool->descriptor_sets);

      VkDescriptorSet set_handle = vn_descriptor_set_to_handle(set);
      pDescriptorSets[i] = set_handle;
   }

   VkResult result = vn_call_vkAllocateDescriptorSets(
      dev->instance, device, pAllocateInfo, pDescriptorSets);
   if (result != VK_SUCCESS) {
      for (uint32_t i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         struct vn_descriptor_set *set =
            vn_descriptor_set_from_handle(pDescriptorSets[i]);
         list_del(&set->head);
         vk_free(alloc, set);
      }
      memset(pDescriptorSets, 0,
             sizeof(*pDescriptorSets) * pAllocateInfo->descriptorSetCount);
      return vn_error(dev->instance, result);
   }

   return VK_SUCCESS;
}

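/* vkFreeDescriptorSets lists no failure codes in the spec, so the renderer
 * call can be fire-and-forget while the driver frees its shadow objects
 * immediately and returns VK_SUCCESS.
 */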
VkResult
vn_FreeDescriptorSets(VkDevice device,
                      VkDescriptorPool descriptorPool,
                      uint32_t descriptorSetCount,
                      const VkDescriptorSet *pDescriptorSets)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_pool *pool =
      vn_descriptor_pool_from_handle(descriptorPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   vn_async_vkFreeDescriptorSets(dev->instance, device, descriptorPool,
                                 descriptorSetCount, pDescriptorSets);

   for (uint32_t i = 0; i < descriptorSetCount; i++) {
      struct vn_descriptor_set *set =
         vn_descriptor_set_from_handle(pDescriptorSets[i]);

      if (!set)
         continue;

      list_del(&set->head);

      vn_object_base_fini(&set->base);
      vk_free(alloc, set);
   }

   return VK_SUCCESS;
}

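/* Helper that carves a single allocation into the struct plus trailing
 * arrays, laid out as:
 *
 *   [vn_update_descriptor_sets][writes][images][buffers][views]
 *
 * Keeping everything in one block means one vk_alloc/vk_free pair per
 * update or per template.
 */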
static struct vn_update_descriptor_sets *
vn_update_descriptor_sets_alloc(uint32_t write_count,
                                uint32_t image_count,
                                uint32_t buffer_count,
                                uint32_t view_count,
                                const VkAllocationCallbacks *alloc,
                                VkSystemAllocationScope scope)
{
   const size_t writes_offset = sizeof(struct vn_update_descriptor_sets);
   const size_t images_offset =
      writes_offset + sizeof(VkWriteDescriptorSet) * write_count;
   const size_t buffers_offset =
      images_offset + sizeof(VkDescriptorImageInfo) * image_count;
   const size_t views_offset =
      buffers_offset + sizeof(VkDescriptorBufferInfo) * buffer_count;
   const size_t alloc_size = views_offset + sizeof(VkBufferView) * view_count;

   void *storage = vk_alloc(alloc, alloc_size, VN_DEFAULT_ALIGN, scope);
   if (!storage)
      return NULL;

   struct vn_update_descriptor_sets *update = storage;
   update->write_count = write_count;
   update->writes = storage + writes_offset;
   update->images = storage + images_offset;
   update->buffers = storage + buffers_offset;
   update->views = storage + views_offset;

   return update;
}

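/* Deep-copies the application's writes so they can be sanitized: only the
 * image-info arrays are duplicated (buffer and texel-buffer writes keep
 * pointing at the application arrays, which only need to live for the
 * duration of the encode), and handles made irrelevant by the descriptor
 * type or by immutable samplers are cleared to VK_NULL_HANDLE rather than
 * left dangling for the encoder to chase.
 */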
static struct vn_update_descriptor_sets *
vn_update_descriptor_sets_parse_writes(uint32_t write_count,
                                       const VkWriteDescriptorSet *writes,
                                       const VkAllocationCallbacks *alloc)
{
   uint32_t img_count = 0;
   for (uint32_t i = 0; i < write_count; i++) {
      const VkWriteDescriptorSet *write = &writes[i];
      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         img_count += write->descriptorCount;
         break;
      default:
         break;
      }
   }

   struct vn_update_descriptor_sets *update =
      vn_update_descriptor_sets_alloc(write_count, img_count, 0, 0, alloc,
                                      VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   if (!update)
      return NULL;

   /* the encoder does not ignore
    * VkWriteDescriptorSet::{pImageInfo,pBufferInfo,pTexelBufferView} when it
    * should
    *
    * TODO make the encoder smarter
    */
   memcpy(update->writes, writes, sizeof(*writes) * write_count);
   img_count = 0;
   for (uint32_t i = 0; i < write_count; i++) {
      const struct vn_descriptor_set *set =
         vn_descriptor_set_from_handle(writes[i].dstSet);
      const struct vn_descriptor_set_layout_binding *binding =
         &set->layout->bindings[writes[i].dstBinding];
      VkWriteDescriptorSet *write = &update->writes[i];
      VkDescriptorImageInfo *imgs = &update->images[img_count];

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         memcpy(imgs, write->pImageInfo,
                sizeof(*imgs) * write->descriptorCount);
         img_count += write->descriptorCount;

         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            switch (write->descriptorType) {
            case VK_DESCRIPTOR_TYPE_SAMPLER:
               imgs[j].imageView = VK_NULL_HANDLE;
               break;
            case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
               if (binding->has_immutable_samplers)
                  imgs[j].sampler = VK_NULL_HANDLE;
               break;
            case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
            case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
               imgs[j].sampler = VK_NULL_HANDLE;
               break;
            default:
               break;
            }
         }

         write->pImageInfo = imgs;
         write->pBufferInfo = NULL;
         write->pTexelBufferView = NULL;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         write->pImageInfo = NULL;
         write->pBufferInfo = NULL;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         write->pImageInfo = NULL;
         write->pTexelBufferView = NULL;
         break;
      default:
         write->pImageInfo = NULL;
         write->pBufferInfo = NULL;
         write->pTexelBufferView = NULL;
         break;
      }
   }

   return update;
}

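/* Note that only the writes need parsing; the copies (pDescriptorCopies)
 * carry no pointers that need sanitizing, so they are passed through to
 * the renderer untouched.
 */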
void
vn_UpdateDescriptorSets(VkDevice device,
                        uint32_t descriptorWriteCount,
                        const VkWriteDescriptorSet *pDescriptorWrites,
                        uint32_t descriptorCopyCount,
                        const VkCopyDescriptorSet *pDescriptorCopies)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   struct vn_update_descriptor_sets *update =
      vn_update_descriptor_sets_parse_writes(descriptorWriteCount,
                                             pDescriptorWrites, alloc);
   if (!update) {
      /* TODO update one-by-one? */
      vn_log(dev->instance, "TODO descriptor set update ignored due to OOM");
      return;
   }

   vn_async_vkUpdateDescriptorSets(dev->instance, device, update->write_count,
                                   update->writes, descriptorCopyCount,
                                   pDescriptorCopies);

   vk_free(alloc, update);
}

/* descriptor update template commands */

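/* Builds a reusable VkWriteDescriptorSet array once at template-creation
 * time.  Each write already points into the update's scratch image, buffer,
 * and view arrays, so at update time only dstSet and the array contents
 * need to be patched from pData.
 */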
static struct vn_update_descriptor_sets *
vn_update_descriptor_sets_parse_template(
   const VkDescriptorUpdateTemplateCreateInfo *create_info,
   const VkAllocationCallbacks *alloc,
   struct vn_descriptor_update_template_entry *entries)
{
   uint32_t img_count = 0;
   uint32_t buf_count = 0;
   uint32_t view_count = 0;
   for (uint32_t i = 0; i < create_info->descriptorUpdateEntryCount; i++) {
      const VkDescriptorUpdateTemplateEntry *entry =
         &create_info->pDescriptorUpdateEntries[i];

      switch (entry->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         img_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         view_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         buf_count += entry->descriptorCount;
         break;
      default:
         unreachable("unhandled descriptor type");
         break;
      }
   }

   struct vn_update_descriptor_sets *update = vn_update_descriptor_sets_alloc(
      create_info->descriptorUpdateEntryCount, img_count, buf_count,
      view_count, alloc, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!update)
      return NULL;

   img_count = 0;
   buf_count = 0;
   view_count = 0;
   for (uint32_t i = 0; i < create_info->descriptorUpdateEntryCount; i++) {
      const VkDescriptorUpdateTemplateEntry *entry =
         &create_info->pDescriptorUpdateEntries[i];
      VkWriteDescriptorSet *write = &update->writes[i];

      write->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
      write->pNext = NULL;
      write->dstBinding = entry->dstBinding;
      write->dstArrayElement = entry->dstArrayElement;
      write->descriptorCount = entry->descriptorCount;
      write->descriptorType = entry->descriptorType;

      entries[i].offset = entry->offset;
      entries[i].stride = entry->stride;

      switch (entry->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         write->pImageInfo = &update->images[img_count];
         write->pBufferInfo = NULL;
         write->pTexelBufferView = NULL;
         img_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         write->pImageInfo = NULL;
         write->pBufferInfo = NULL;
         write->pTexelBufferView = &update->views[view_count];
         view_count += entry->descriptorCount;
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         write->pImageInfo = NULL;
         write->pBufferInfo = &update->buffers[buf_count];
         write->pTexelBufferView = NULL;
         buf_count += entry->descriptorCount;
         break;
      default:
         break;
      }
   }

   return update;
}

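/* Descriptor update templates have no renderer-side object: the template
 * is a purely driver-side structure, and updates made through it are
 * replayed as ordinary vkUpdateDescriptorSets commands.
 */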
VkResult
vn_CreateDescriptorUpdateTemplate(
   VkDevice device,
   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   const size_t templ_size =
      offsetof(struct vn_descriptor_update_template,
               entries[pCreateInfo->descriptorUpdateEntryCount + 1]);
   struct vn_descriptor_update_template *templ = vk_zalloc(
      alloc, templ_size, VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!templ)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&templ->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE, &dev->base);

   templ->update = vn_update_descriptor_sets_parse_template(
      pCreateInfo, alloc, templ->entries);
   if (!templ->update) {
      vk_free(alloc, templ);
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   mtx_init(&templ->mutex, mtx_plain);

   /* no host object */
   VkDescriptorUpdateTemplate templ_handle =
      vn_descriptor_update_template_to_handle(templ);
   *pDescriptorUpdateTemplate = templ_handle;

   return VK_SUCCESS;
}

void
vn_DestroyDescriptorUpdateTemplate(
   VkDevice device,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_update_template *templ =
      vn_descriptor_update_template_from_handle(descriptorUpdateTemplate);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!templ)
      return;

   /* no host object */
   vk_free(alloc, templ->update);
   mtx_destroy(&templ->mutex);

   vn_object_base_fini(&templ->base);
   vk_free(alloc, templ);
}

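/* Walks pData using each entry's offset/stride to fill in the writes that
 * were pre-built at template creation, then emits a single
 * vkUpdateDescriptorSets.  The template's scratch update is shared, hence
 * the mutex; the in-function comment below notes that duplicating the
 * update per call could avoid the locking.
 */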
void
vn_UpdateDescriptorSetWithTemplate(
   VkDevice device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_descriptor_set *set =
      vn_descriptor_set_from_handle(descriptorSet);
   struct vn_descriptor_update_template *templ =
      vn_descriptor_update_template_from_handle(descriptorUpdateTemplate);
   struct vn_update_descriptor_sets *update = templ->update;

   /* duplicate update instead to avoid locking? */
   mtx_lock(&templ->mutex);

   for (uint32_t i = 0; i < update->write_count; i++) {
      const struct vn_descriptor_update_template_entry *entry =
         &templ->entries[i];
      const struct vn_descriptor_set_layout_binding *binding =
         &set->layout->bindings[update->writes[i].dstBinding];
      VkWriteDescriptorSet *write = &update->writes[i];

      write->dstSet = vn_descriptor_set_to_handle(set);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            const bool need_sampler =
               (write->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER ||
                write->descriptorType ==
                   VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
               !binding->has_immutable_samplers;
            const bool need_view =
               write->descriptorType != VK_DESCRIPTOR_TYPE_SAMPLER;
            const VkDescriptorImageInfo *src =
               pData + entry->offset + entry->stride * j;
            VkDescriptorImageInfo *dst =
               (VkDescriptorImageInfo *)&write->pImageInfo[j];

            dst->sampler = need_sampler ? src->sampler : VK_NULL_HANDLE;
            dst->imageView = need_view ? src->imageView : VK_NULL_HANDLE;
            dst->imageLayout = src->imageLayout;
         }
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            const VkBufferView *src =
               pData + entry->offset + entry->stride * j;
            VkBufferView *dst = (VkBufferView *)&write->pTexelBufferView[j];
            *dst = *src;
         }
         break;
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            const VkDescriptorBufferInfo *src =
               pData + entry->offset + entry->stride * j;
            VkDescriptorBufferInfo *dst =
               (VkDescriptorBufferInfo *)&write->pBufferInfo[j];
            *dst = *src;
         }
         break;
      default:
         unreachable("unhandled descriptor type");
         break;
      }
   }

   vn_async_vkUpdateDescriptorSets(dev->instance, device, update->write_count,
                                   update->writes, 0, NULL);

   mtx_unlock(&templ->mutex);
}