GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/gallium/frontends/lavapipe/lvp_descriptor_set.c
/*
 * Copyright © 2019 Red Hat.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include "lvp_private.h"
#include "vk_descriptors.h"
#include "vk_util.h"
#include "u_math.h"

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);
      /* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
       *
       *    "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
       *    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
       *    pImmutableSamplers can be used to initialize a set of immutable
       *    samplers. [...] If descriptorType is not one of these descriptor
       *    types, then pImmutableSamplers is ignored."
       *
       * We need to be careful here and only parse pImmutableSamplers if we
       * have one of the right descriptor types.
       */
      VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
      if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
           desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   size_t size = sizeof(struct lvp_descriptor_set_layout) +
                 num_bindings * sizeof(set_layout->binding[0]) +
                 immutable_sampler_count * sizeof(struct lvp_sampler *);

   set_layout = vk_zalloc2(&device->vk.alloc, pAllocator, size, 8,
                           VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set_layout)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &set_layout->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
   set_layout->ref_cnt = 1;
   /* We just allocate all the samplers at the end of the struct */
   struct lvp_sampler **samplers =
      (struct lvp_sampler **)&set_layout->binding[num_bindings];

   set_layout->alloc = pAllocator;
   set_layout->binding_count = num_bindings;
   set_layout->shader_stages = 0;
   set_layout->size = 0;

   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(pCreateInfo->pBindings,
                                               pCreateInfo->bindingCount,
                                               &bindings);
   if (result != VK_SUCCESS) {
      vk_object_base_finish(&set_layout->base);
      vk_free2(&device->vk.alloc, pAllocator, set_layout);
      return vk_error(device->instance, result);
   }

   uint32_t dynamic_offset_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + j;
      uint32_t b = binding->binding;

      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].valid = true;
      set_layout->size += binding->descriptorCount;

      for (gl_shader_stage stage = MESA_SHADER_VERTEX; stage < MESA_SHADER_STAGES; stage++) {
         set_layout->binding[b].stage[stage].const_buffer_index = -1;
         set_layout->binding[b].stage[stage].shader_buffer_index = -1;
         set_layout->binding[b].stage[stage].sampler_index = -1;
         set_layout->binding[b].stage[stage].sampler_view_index = -1;
         set_layout->binding[b].stage[stage].image_index = -1;
      }

      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
          binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
         set_layout->binding[b].dynamic_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
      }
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         lvp_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_index = set_layout->stage[s].sampler_count;
            set_layout->stage[s].sampler_count += binding->descriptorCount;
         }
         if (binding->pImmutableSamplers) {
            set_layout->binding[b].immutable_samplers = samplers;
            samplers += binding->descriptorCount;

            for (uint32_t i = 0; i < binding->descriptorCount; i++)
               set_layout->binding[b].immutable_samplers[i] =
                  lvp_sampler_from_handle(binding->pImmutableSamplers[i]);
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         lvp_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].const_buffer_index = set_layout->stage[s].const_buffer_count;
            set_layout->stage[s].const_buffer_count += binding->descriptorCount;
         }
         break;
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         lvp_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].shader_buffer_index = set_layout->stage[s].shader_buffer_count;
            set_layout->stage[s].shader_buffer_count += binding->descriptorCount;
         }
         break;

      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         lvp_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].image_index = set_layout->stage[s].image_count;
            set_layout->stage[s].image_count += binding->descriptorCount;
         }
         break;
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         lvp_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_view_index = set_layout->stage[s].sampler_view_count;
            set_layout->stage[s].sampler_view_count += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   free(bindings);

   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = lvp_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}
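
/* A minimal application-side sketch (hypothetical, not part of this driver)
 * of a layout that exercises the immutable-sampler path above. `my_sampler`
 * is assumed to be a previously created VkSampler:
 *
 *    VkDescriptorSetLayoutBinding binding = {
 *       .binding = 0,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
 *       .descriptorCount = 1,
 *       .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
 *       .pImmutableSamplers = &my_sampler,  // counted into immutable_sampler_count
 *    };
 *    VkDescriptorSetLayoutCreateInfo info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
 *       .bindingCount = 1,
 *       .pBindings = &binding,
 *    };
 *    VkDescriptorSetLayout layout;
 *    vkCreateDescriptorSetLayout(device, &info, NULL, &layout);
 */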

void
lvp_descriptor_set_layout_destroy(struct lvp_device *device,
                                  struct lvp_descriptor_set_layout *layout)
{
   assert(layout->ref_cnt == 0);
   vk_object_base_finish(&layout->base);
   vk_free2(&device->vk.alloc, layout->alloc, layout);
}

VKAPI_ATTR void VKAPI_CALL lvp_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_set_layout, set_layout, _set_layout);

   if (!_set_layout)
      return;

   lvp_descriptor_set_layout_unref(device, set_layout);
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &layout->base,
                       VK_OBJECT_TYPE_PIPELINE_LAYOUT);
   layout->num_sets = pCreateInfo->setLayoutCount;

   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      LVP_FROM_HANDLE(lvp_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;
      lvp_descriptor_set_layout_ref(set_layout);
   }

   layout->push_constant_size = 0;
   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size = MAX2(layout->push_constant_size,
                                        range->offset + range->size);
   }
   layout->push_constant_size = align(layout->push_constant_size, 16);
   *pPipelineLayout = lvp_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}
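
/* Worked example (hypothetical values) of the push-constant sizing above:
 * two ranges, one covering bytes [0, 16) and one covering [8, 28), make
 * the running maximum of offset + size equal 28, and align(28, 16) rounds
 * the final push_constant_size up to 32 bytes:
 *
 *    VkPushConstantRange ranges[2] = {
 *       { VK_SHADER_STAGE_VERTEX_BIT,   0, 16 },   // offset + size = 16
 *       { VK_SHADER_STAGE_FRAGMENT_BIT, 8, 20 },   // offset + size = 28
 *    };
 *    // MAX2(16, 28) = 28; align(28, 16) = 32
 */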

VKAPI_ATTR void VKAPI_CALL lvp_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!_pipelineLayout)
      return;
   for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
      lvp_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);

   vk_object_base_finish(&pipeline_layout->base);
   vk_free2(&device->vk.alloc, pAllocator, pipeline_layout);
}

VkResult
lvp_descriptor_set_create(struct lvp_device *device,
                          struct lvp_descriptor_set_layout *layout,
                          struct lvp_descriptor_set **out_set)
{
   struct lvp_descriptor_set *set;
   size_t size = sizeof(*set) + layout->size * sizeof(set->descriptors[0]);

   set = vk_alloc(&device->vk.alloc /* XXX: Use the pool */, size, 8,
                  VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!set)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   /* A descriptor set may not be 100% filled. Clear the set so we can
    * later detect holes in it.
    */
   memset(set, 0, size);

   vk_object_base_init(&device->vk, &set->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_SET);
   set->layout = layout;
   lvp_descriptor_set_layout_ref(layout);

   /* Go through and fill out immutable samplers if we have any */
   struct lvp_descriptor *desc = set->descriptors;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++)
            desc[i].info.sampler = layout->binding[b].immutable_samplers[i];
      }
      desc += layout->binding[b].array_size;
   }

   *out_set = set;

   return VK_SUCCESS;
}
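
/* Layout of the flattened descriptors[] array, illustrated with
 * hypothetical bindings: descriptor_index is the running sum of the
 * preceding bindings' descriptorCount values (see
 * lvp_CreateDescriptorSetLayout), so a set layout with binding 0
 * (descriptorCount = 2) and binding 1 (descriptorCount = 3) stores
 * binding 0 in descriptors[0..1] and binding 1 in descriptors[2..4],
 * giving layout->size == 5 entries in total.
 */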

void
lvp_descriptor_set_destroy(struct lvp_device *device,
                           struct lvp_descriptor_set *set)
{
   lvp_descriptor_set_layout_unref(device, set->layout);
   vk_object_base_finish(&set->base);
   vk_free(&device->vk.alloc, set);
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, pAllocateInfo->descriptorPool);
   VkResult result = VK_SUCCESS;
   struct lvp_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      LVP_FROM_HANDLE(lvp_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = lvp_descriptor_set_create(device, layout, &set);
      if (result != VK_SUCCESS)
         break;

      list_addtail(&set->link, &pool->sets);
      pDescriptorSets[i] = lvp_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS)
      lvp_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}
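
/* Hypothetical application-side allocation against this entry point;
 * `pool` and `layout` are assumed to have been created already:
 *
 *    VkDescriptorSetAllocateInfo alloc_info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
 *       .descriptorPool = pool,
 *       .descriptorSetCount = 1,
 *       .pSetLayouts = &layout,
 *    };
 *    VkDescriptorSet set;
 *    vkAllocateDescriptorSets(device, &alloc_info, &set);
 *
 * On failure, the loop above frees the i sets that were already allocated
 * before returning the error code.
 */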

VKAPI_ATTR VkResult VKAPI_CALL lvp_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   for (uint32_t i = 0; i < count; i++) {
      LVP_FROM_HANDLE(lvp_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;
      list_del(&set->link);
      lvp_descriptor_set_destroy(device, set);
   }
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL lvp_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      LVP_FROM_HANDLE(lvp_descriptor_set, set, write->dstSet);
      const struct lvp_descriptor_set_binding_layout *bind_layout =
         &set->layout->binding[write->dstBinding];
      struct lvp_descriptor *desc =
         &set->descriptors[bind_layout->descriptor_index];
      desc += write->dstArrayElement;

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_sampler, sampler,
                            write->pImageInfo[j].sampler);

            desc[j] = (struct lvp_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .info.sampler = sampler,
            };
         }
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);
            desc[j].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
            desc[j].info.iview = iview;
            /*
             * All consecutive bindings updated via a single VkWriteDescriptorSet structure, except those
             * with a descriptorCount of zero, must all either use immutable samplers or must all not
             * use immutable samplers
             */
            if (bind_layout->immutable_samplers) {
               desc[j].info.sampler = bind_layout->immutable_samplers[j];
            } else {
               LVP_FROM_HANDLE(lvp_sampler, sampler,
                               write->pImageInfo[j].sampler);

               desc[j].info.sampler = sampler;
            }
         }
         break;

      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            write->pImageInfo[j].imageView);

            desc[j] = (struct lvp_descriptor) {
               .type = write->descriptorType,
               .info.iview = iview,
            };
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            write->pTexelBufferView[j]);

            desc[j] = (struct lvp_descriptor) {
               .type = write->descriptorType,
               .info.buffer_view = bview,
            };
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            LVP_FROM_HANDLE(lvp_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(buffer);
            desc[j] = (struct lvp_descriptor) {
               .type = write->descriptorType,
               .info.offset = write->pBufferInfo[j].offset,
               .info.buffer = buffer,
               .info.range = write->pBufferInfo[j].range,
            };
         }
         break;

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      LVP_FROM_HANDLE(lvp_descriptor_set, src, copy->srcSet);
      LVP_FROM_HANDLE(lvp_descriptor_set, dst, copy->dstSet);

      const struct lvp_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct lvp_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct lvp_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct lvp_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      for (uint32_t j = 0; j < copy->descriptorCount; j++)
         dst_desc[j] = src_desc[j];
   }
}
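
/* Hypothetical application-side write that lands in the uniform-buffer
 * case above; `set` and `buf` are assumed to exist:
 *
 *    VkDescriptorBufferInfo buffer_info = {
 *       .buffer = buf,
 *       .offset = 0,
 *       .range = VK_WHOLE_SIZE,
 *    };
 *    VkWriteDescriptorSet write = {
 *       .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
 *       .dstSet = set,
 *       .dstBinding = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .pBufferInfo = &buffer_info,
 *    };
 *    vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
 */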

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_descriptor_pool *pool;
   size_t size = sizeof(struct lvp_descriptor_pool);
   pool = vk_zalloc2(&device->vk.alloc, pAllocator, size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &pool->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_POOL);
   pool->flags = pCreateInfo->flags;
   list_inithead(&pool->sets);
   *pDescriptorPool = lvp_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;
}
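
/* Note: this pool does not preallocate descriptor memory from
 * pCreateInfo->pPoolSizes; sets are allocated directly from the device
 * allocator (see the XXX in lvp_descriptor_set_create) and the pool only
 * tracks them in a list so they can be reclaimed on reset or destroy.
 */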

static void lvp_reset_descriptor_pool(struct lvp_device *device,
                                      struct lvp_descriptor_pool *pool)
{
   struct lvp_descriptor_set *set, *tmp;
   LIST_FOR_EACH_ENTRY_SAFE(set, tmp, &pool->sets, link) {
      lvp_descriptor_set_layout_unref(device, set->layout);
      list_del(&set->link);
      vk_free(&device->vk.alloc, set);
   }
}

VKAPI_ATTR void VKAPI_CALL lvp_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, _pool);

   if (!_pool)
      return;

   lvp_reset_descriptor_pool(device, pool);
   vk_object_base_finish(&pool->base);
   vk_free2(&device->vk.alloc, pAllocator, pool);
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    VkDescriptorPoolResetFlags                  flags)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_pool, pool, _pool);

   lvp_reset_descriptor_pool(device, pool);
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL lvp_GetDescriptorSetLayoutSupport(VkDevice device,
                                                             const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
                                                             VkDescriptorSetLayoutSupport* pSupport)
{
   pSupport->supported = true;
}

VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDescriptorUpdateTemplate(VkDevice _device,
                                                                  const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
                                                                  const VkAllocationCallbacks *pAllocator,
                                                                  VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
   const size_t size = sizeof(struct lvp_descriptor_update_template) +
                       sizeof(VkDescriptorUpdateTemplateEntry) * entry_count;

   struct lvp_descriptor_update_template *templ;

   templ = vk_alloc2(&device->vk.alloc, pAllocator, size, 8, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!templ)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &templ->base,
                       VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);

   templ->type = pCreateInfo->templateType;
   templ->bind_point = pCreateInfo->pipelineBindPoint;
   templ->set = pCreateInfo->set;
   /* This parameter is ignored if templateType is not VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR */
   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR)
      templ->pipeline_layout = lvp_pipeline_layout_from_handle(pCreateInfo->pipelineLayout);
   else
      templ->pipeline_layout = NULL;
   templ->entry_count = entry_count;

   VkDescriptorUpdateTemplateEntry *entries = (VkDescriptorUpdateTemplateEntry *)(templ + 1);
   for (unsigned i = 0; i < entry_count; i++) {
      entries[i] = pCreateInfo->pDescriptorUpdateEntries[i];
   }

   *pDescriptorUpdateTemplate = lvp_descriptor_update_template_to_handle(templ);
   return VK_SUCCESS;
}
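
/* Hypothetical application-side template matching the code above: each
 * entry's offset/stride describe where that descriptor's data lives in the
 * pData blob later passed to vkUpdateDescriptorSetWithTemplate. `layout`,
 * `set`, and `buf` are assumed to exist:
 *
 *    VkDescriptorUpdateTemplateEntry entry = {
 *       .dstBinding = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .offset = 0,                               // byte offset into pData
 *       .stride = sizeof(VkDescriptorBufferInfo),  // step between array elements
 *    };
 *    VkDescriptorUpdateTemplateCreateInfo info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
 *       .descriptorUpdateEntryCount = 1,
 *       .pDescriptorUpdateEntries = &entry,
 *       .templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
 *       .descriptorSetLayout = layout,
 *    };
 *    VkDescriptorUpdateTemplate templ;
 *    vkCreateDescriptorUpdateTemplate(device, &info, NULL, &templ);
 *
 *    VkDescriptorBufferInfo data = { buf, 0, VK_WHOLE_SIZE };
 *    vkUpdateDescriptorSetWithTemplate(device, set, templ, &data);
 */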

VKAPI_ATTR void VKAPI_CALL lvp_DestroyDescriptorUpdateTemplate(VkDevice _device,
                                                               VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                               const VkAllocationCallbacks *pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, descriptorUpdateTemplate);

   if (!templ)
      return;

   vk_object_base_finish(&templ->base);
   vk_free2(&device->vk.alloc, pAllocator, templ);
}

VKAPI_ATTR void VKAPI_CALL lvp_UpdateDescriptorSetWithTemplate(VkDevice _device,
                                                               VkDescriptorSet descriptorSet,
                                                               VkDescriptorUpdateTemplate descriptorUpdateTemplate,
                                                               const void *pData)
{
   LVP_FROM_HANDLE(lvp_descriptor_set, set, descriptorSet);
   LVP_FROM_HANDLE(lvp_descriptor_update_template, templ, descriptorUpdateTemplate);
   uint32_t i, j;

   for (i = 0; i < templ->entry_count; ++i) {
      VkDescriptorUpdateTemplateEntry *entry = &templ->entry[i];
      const uint8_t *pSrc = ((const uint8_t *) pData) + entry->offset;
      const struct lvp_descriptor_set_binding_layout *bind_layout =
         &set->layout->binding[entry->dstBinding];
      struct lvp_descriptor *desc =
         &set->descriptors[bind_layout->descriptor_index];
      for (j = 0; j < entry->descriptorCount; ++j) {
         unsigned idx = j + entry->dstArrayElement;
         switch (entry->descriptorType) {
         case VK_DESCRIPTOR_TYPE_SAMPLER: {
            LVP_FROM_HANDLE(lvp_sampler, sampler,
                            *(VkSampler *)pSrc);
            desc[idx] = (struct lvp_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .info.sampler = sampler,
            };
            break;
         }
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
            VkDescriptorImageInfo *info = (VkDescriptorImageInfo *)pSrc;
            desc[idx] = (struct lvp_descriptor) {
               .type = entry->descriptorType,
               .info.iview = lvp_image_view_from_handle(info->imageView),
               .info.sampler = lvp_sampler_from_handle(info->sampler),
            };
            break;
         }
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
            LVP_FROM_HANDLE(lvp_image_view, iview,
                            ((VkDescriptorImageInfo *)pSrc)->imageView);
            desc[idx] = (struct lvp_descriptor) {
               .type = entry->descriptorType,
               .info.iview = iview,
            };
            break;
         }
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
            LVP_FROM_HANDLE(lvp_buffer_view, bview,
                            *(VkBufferView *)pSrc);
            desc[idx] = (struct lvp_descriptor) {
               .type = entry->descriptorType,
               .info.buffer_view = bview,
            };
            break;
         }
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            VkDescriptorBufferInfo *info = (VkDescriptorBufferInfo *)pSrc;
            desc[idx] = (struct lvp_descriptor) {
               .type = entry->descriptorType,
               .info.offset = info->offset,
               .info.buffer = lvp_buffer_from_handle(info->buffer),
               .info.range = info->range,
            };
            break;
         }
         default:
            break;
         }
         pSrc += entry->stride;
      }
   }
}