GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/freedreno/vulkan/tu_descriptor_set.c
/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

/**
 * @file
 *
 * We use the bindless descriptor model, which maps fairly closely to how
 * Vulkan descriptor sets work. The two exceptions are input attachments and
 * dynamic descriptors, which have to be patched when recording command
 * buffers. We reserve an extra descriptor set for these. This descriptor set
 * contains all the input attachments in the pipeline, in order, and then all
 * the dynamic descriptors. The dynamic descriptors are stored in the CPU-side
 * data structure for each tu_descriptor_set, and then combined into one big
 * descriptor set at CmdBindDescriptorSets time/draw time.
 */
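
/* For illustration: a pipeline with two input attachments and one dynamic
 * UBO would see the driver-managed set laid out roughly as
 *
 *    slot 0: input attachment 0
 *    slot 1: input attachment 1
 *    slot 2: dynamic UBO 0
 *
 * with the attachment and buffer addresses patched in while recording the
 * command buffer.
 */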

#include "tu_private.h"

#include <assert.h>
#include <fcntl.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>

#include "util/mesa-sha1.h"
#include "vk_descriptors.h"
#include "vk_util.h"

static uint32_t
descriptor_size(VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      /* These are remapped to the special driver-managed descriptor set,
       * hence they don't take up any space in the original descriptor set.
       * Input attachments don't use descriptor sets at all.
       */
      return 0;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      /* We make offsets and sizes all 16 dwords, to match how the hardware
       * interprets indices passed to sample/load/store instructions in
       * multiples of 16 dwords. This means that "normal" descriptors are all
       * of size 16, with padding for smaller descriptors like uniform storage
       * descriptors which are less than 16 dwords. However, combined images
       * and samplers are actually two descriptors, so they take two 16-dword
       * slots.
       */
      return A6XX_TEX_CONST_DWORDS * 4 * 2;
   default:
      return A6XX_TEX_CONST_DWORDS * 4;
   }
}
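
/* For reference, assuming A6XX_TEX_CONST_DWORDS is 16: an image, buffer,
 * or sampler descriptor occupies 64 bytes, a combined image+sampler
 * occupies 128 bytes (the image descriptor followed by the sampler), and
 * dynamic buffers and input attachments occupy no space in the
 * application-visible set.
 */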

VKAPI_ATTR VkResult VKAPI_CALL
tu_CreateDescriptorSetLayout(
   VkDevice _device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorSetLayout *pSetLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(
         pCreateInfo->pNext,
         DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);

   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   uint32_t ycbcr_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);
      if ((pCreateInfo->pBindings[j].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           pCreateInfo->pBindings[j].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers) {
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;

         bool has_ycbcr_sampler = false;
         for (unsigned i = 0; i < pCreateInfo->pBindings[j].descriptorCount; ++i) {
            if (tu_sampler_from_handle(pCreateInfo->pBindings[j].pImmutableSamplers[i])->ycbcr_sampler)
               has_ycbcr_sampler = true;
         }

         if (has_ycbcr_sampler)
            ycbcr_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
      }
   }

   uint32_t samplers_offset =
      offsetof(struct tu_descriptor_set_layout, binding[num_bindings]);

   /* note: we only need to store TEX_SAMP_DWORDS for immutable samplers,
    * but using struct tu_sampler makes things simpler */
   uint32_t size = samplers_offset +
      immutable_sampler_count * sizeof(struct tu_sampler) +
      ycbcr_sampler_count * sizeof(struct tu_sampler_ycbcr_conversion);

   set_layout = vk_object_zalloc(&device->vk, pAllocator, size,
                                 VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
   if (!set_layout)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->flags = pCreateInfo->flags;

   /* We just allocate all the immutable samplers at the end of the struct */
   struct tu_sampler *samplers = (void*) &set_layout->binding[num_bindings];
   struct tu_sampler_ycbcr_conversion *ycbcr_samplers =
      (void*) &samplers[immutable_sampler_count];

   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount, &bindings);
   if (result != VK_SUCCESS) {
      vk_object_free(&device->vk, pAllocator, set_layout);
      return vk_error(device->instance, result);
   }

   set_layout->binding_count = num_bindings;
   set_layout->shader_stages = 0;
   set_layout->has_immutable_samplers = false;
   set_layout->size = 0;
   set_layout->dynamic_ubo = 0;

   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + j;
      uint32_t b = binding->binding;

      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].offset = set_layout->size;
      set_layout->binding[b].dynamic_offset_offset = dynamic_offset_count;
      set_layout->binding[b].size = descriptor_size(binding->descriptorType);
      set_layout->binding[b].shader_stages = binding->stageFlags;

      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         /* It is terribly ill-defined how many samplers would be valid */
         assert(!binding->pImmutableSamplers);
         assert(binding->binding == num_bindings - 1);

         set_layout->has_variable_descriptors = true;
      }

      if ((binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers_offset = samplers_offset;
         set_layout->has_immutable_samplers = true;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            samplers[i] = *tu_sampler_from_handle(binding->pImmutableSamplers[i]);

         samplers += binding->descriptorCount;
         samplers_offset += sizeof(struct tu_sampler) * binding->descriptorCount;

         bool has_ycbcr_sampler = false;
         for (unsigned i = 0; i < pCreateInfo->pBindings[j].descriptorCount; ++i) {
            if (tu_sampler_from_handle(binding->pImmutableSamplers[i])->ycbcr_sampler)
               has_ycbcr_sampler = true;
         }

         if (has_ycbcr_sampler) {
            set_layout->binding[b].ycbcr_samplers_offset =
               (const char*)ycbcr_samplers - (const char*)set_layout;
            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               struct tu_sampler *sampler = tu_sampler_from_handle(binding->pImmutableSamplers[i]);
               if (sampler->ycbcr_sampler)
                  ycbcr_samplers[i] = *sampler->ycbcr_sampler;
               else
                  ycbcr_samplers[i].ycbcr_model = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
            }
            ycbcr_samplers += binding->descriptorCount;
         } else {
            set_layout->binding[b].ycbcr_samplers_offset = 0;
         }
      }

      set_layout->size +=
         binding->descriptorCount * set_layout->binding[b].size;
      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
          binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
         if (binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
            STATIC_ASSERT(MAX_DYNAMIC_BUFFERS <= 8 * sizeof(set_layout->dynamic_ubo));
            set_layout->dynamic_ubo |=
               ((1u << binding->descriptorCount) - 1) << dynamic_offset_count;
         }

         dynamic_offset_count += binding->descriptorCount;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   free(bindings);

   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = tu_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
tu_DestroyDescriptorSetLayout(VkDevice _device,
                              VkDescriptorSetLayout _set_layout,
                              const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   vk_object_free(&device->vk, pAllocator, set_layout);
}

VKAPI_ATTR void VKAPI_CALL
tu_GetDescriptorSetLayoutSupport(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount, &bindings);
   if (result != VK_SUCCESS) {
      pSupport->supported = false;
      return;
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(
         pCreateInfo->pNext,
         DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);
   VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *variable_count =
      vk_find_struct(
         (void *) pCreateInfo->pNext,
         DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT);
   if (variable_count) {
      variable_count->maxVariableDescriptorCount = 0;
   }

   bool supported = true;
   uint64_t size = 0;
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;

      uint64_t descriptor_sz = descriptor_size(binding->descriptorType);
      uint64_t descriptor_alignment = 8;

      if (size && !ALIGN_POT(size, descriptor_alignment)) {
         supported = false;
      }
      size = ALIGN_POT(size, descriptor_alignment);

      uint64_t max_count = UINT64_MAX;
      if (descriptor_sz)
         max_count = (UINT64_MAX - size) / descriptor_sz;

      if (max_count < binding->descriptorCount) {
         supported = false;
      }
      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          variable_count &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         variable_count->maxVariableDescriptorCount =
            MIN2(UINT32_MAX, max_count);
      }
      size += binding->descriptorCount * descriptor_sz;
   }

   free(bindings);

   pSupport->supported = supported;
}

/*
 * Pipeline layouts. These have nothing to do with the pipeline. They are
 * just multiple descriptor set layouts pasted together.
 */

VKAPI_ATTR VkResult VKAPI_CALL
tu_CreatePipelineLayout(VkDevice _device,
                        const VkPipelineLayoutCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkPipelineLayout *pPipelineLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_pipeline_layout *layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_object_alloc(&device->vk, pAllocator, sizeof(*layout),
                            VK_OBJECT_TYPE_PIPELINE_LAYOUT);
   if (layout == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;
   layout->dynamic_offset_count = 0;

   unsigned dynamic_offset_count = 0;

   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                     pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;
      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      dynamic_offset_count += set_layout->dynamic_offset_count;
   }

   layout->dynamic_offset_count = dynamic_offset_count;
   layout->push_constant_size = 0;

   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size =
         MAX2(layout->push_constant_size, range->offset + range->size);
   }

   layout->push_constant_size = align(layout->push_constant_size, 16);
   *pPipelineLayout = tu_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}
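
/* Worked example of the concatenation above: a pipeline layout whose set 0
 * holds two dynamic UBOs and whose set 1 holds one dynamic SSBO gets
 * dynamic_offset_start = 0 for set 0 and dynamic_offset_start = 2 for
 * set 1, with a total dynamic_offset_count of 3.
 */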

VKAPI_ATTR void VKAPI_CALL
tu_DestroyPipelineLayout(VkDevice _device,
                         VkPipelineLayout _pipelineLayout,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   vk_object_free(&device->vk, pAllocator, pipeline_layout);
}

#define EMPTY 1

static VkResult
tu_descriptor_set_create(struct tu_device *device,
                         struct tu_descriptor_pool *pool,
                         const struct tu_descriptor_set_layout *layout,
                         const uint32_t *variable_count,
                         struct tu_descriptor_set **out_set)
{
   struct tu_descriptor_set *set;
   unsigned dynamic_offset = sizeof(struct tu_descriptor_set);
   unsigned mem_size = dynamic_offset +
      A6XX_TEX_CONST_DWORDS * 4 * layout->dynamic_offset_count;

   if (pool->host_memory_base) {
      if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);

      set = (struct tu_descriptor_set*)pool->host_memory_ptr;
      pool->host_memory_ptr += mem_size;
   } else {
      set = vk_alloc2(&device->vk.alloc, NULL, mem_size, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

      if (!set)
         return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   memset(set, 0, mem_size);
   vk_object_base_init(&device->vk, &set->base, VK_OBJECT_TYPE_DESCRIPTOR_SET);

   if (layout->dynamic_offset_count) {
      set->dynamic_descriptors = (uint32_t *)((uint8_t*)set + dynamic_offset);
   }

   set->layout = layout;
   set->pool = pool;
   uint32_t layout_size = layout->size;
   if (variable_count) {
      assert(layout->has_variable_descriptors);
      uint32_t stride = layout->binding[layout->binding_count - 1].size;
      layout_size = layout->binding[layout->binding_count - 1].offset +
                    *variable_count * stride;
   }

   if (layout_size) {
      set->size = layout_size;

      if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
         vk_object_free(&device->vk, NULL, set);
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
      }

      /* Try to allocate linearly first, so that we don't spend
       * time looking for gaps if the app only allocates &
       * resets via the pool. */
      if (pool->current_offset + layout_size <= pool->size) {
         set->mapped_ptr = (uint32_t*)(pool->bo.map + pool->current_offset);
         set->va = pool->bo.iova + pool->current_offset;
         if (!pool->host_memory_base) {
            pool->entries[pool->entry_count].offset = pool->current_offset;
            pool->entries[pool->entry_count].size = layout_size;
            pool->entries[pool->entry_count].set = set;
            pool->entry_count++;
         }
         pool->current_offset += layout_size;
      } else if (!pool->host_memory_base) {
         uint64_t offset = 0;
         int index;

         for (index = 0; index < pool->entry_count; ++index) {
            if (pool->entries[index].offset - offset >= layout_size)
               break;
            offset = pool->entries[index].offset + pool->entries[index].size;
         }

         if (pool->size - offset < layout_size) {
            vk_object_free(&device->vk, NULL, set);
            return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
         }

         set->mapped_ptr = (uint32_t*)(pool->bo.map + offset);
         set->va = pool->bo.iova + offset;
         memmove(&pool->entries[index + 1], &pool->entries[index],
                 sizeof(pool->entries[0]) * (pool->entry_count - index));
         pool->entries[index].offset = offset;
         pool->entries[index].size = layout_size;
         pool->entries[index].set = set;
         pool->entry_count++;
      } else
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
   }

   if (layout->has_immutable_samplers) {
      for (unsigned i = 0; i < layout->binding_count; ++i) {
         if (!layout->binding[i].immutable_samplers_offset)
            continue;

         unsigned offset = layout->binding[i].offset / 4;
         if (layout->binding[i].type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
            offset += A6XX_TEX_CONST_DWORDS;

         const struct tu_sampler *samplers =
            (const struct tu_sampler *)((const char *)layout +
                               layout->binding[i].immutable_samplers_offset);
         for (unsigned j = 0; j < layout->binding[i].array_size; ++j) {
            memcpy(set->mapped_ptr + offset, samplers[j].descriptor,
                   sizeof(samplers[j].descriptor));
            offset += layout->binding[i].size / 4;
         }
      }
   }

   *out_set = set;
   return VK_SUCCESS;
}
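
/* Sub-allocation sketch: with a 4 KiB pool BO, three sets of 1 KiB, 1 KiB
 * and 2 KiB are placed linearly by the fast path above. If the middle set
 * is later freed (only possible without host_memory_base, i.e. with
 * VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT), the first-fit scan
 * can place a new set of up to 1 KiB back into the resulting gap.
 */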

static void
tu_descriptor_set_destroy(struct tu_device *device,
                          struct tu_descriptor_pool *pool,
                          struct tu_descriptor_set *set,
                          bool free_bo)
{
   assert(!pool->host_memory_base);

   if (free_bo && set->size && !pool->host_memory_base) {
      uint32_t offset = (uint8_t*)set->mapped_ptr - (uint8_t*)pool->bo.map;
      for (int i = 0; i < pool->entry_count; ++i) {
         if (pool->entries[i].offset == offset) {
            memmove(&pool->entries[i], &pool->entries[i+1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }
   }

   vk_object_free(&device->vk, NULL, set);
}

VKAPI_ATTR VkResult VKAPI_CALL
tu_CreateDescriptorPool(VkDevice _device,
                        const VkDescriptorPoolCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkDescriptorPool *pDescriptorPool)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_pool *pool;
   uint64_t size = sizeof(struct tu_descriptor_pool);
   uint64_t bo_size = 0, bo_count = 0, dynamic_count = 0;
   VkResult ret;

   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      if (pCreateInfo->pPoolSizes[i].type != VK_DESCRIPTOR_TYPE_SAMPLER)
         bo_count += pCreateInfo->pPoolSizes[i].descriptorCount;

      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         dynamic_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      default:
         break;
      }

      bo_size += descriptor_size(pCreateInfo->pPoolSizes[i].type) *
                 pCreateInfo->pPoolSizes[i].descriptorCount;
   }

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      uint64_t host_size = pCreateInfo->maxSets * sizeof(struct tu_descriptor_set);
      host_size += sizeof(struct tu_bo*) * bo_count;
      host_size += A6XX_TEX_CONST_DWORDS * 4 * dynamic_count;
      size += host_size;
   } else {
      size += sizeof(struct tu_descriptor_pool_entry) * pCreateInfo->maxSets;
   }

   pool = vk_object_zalloc(&device->vk, pAllocator, size,
                           VK_OBJECT_TYPE_DESCRIPTOR_POOL);
   if (!pool)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      pool->host_memory_base = (uint8_t*)pool + sizeof(struct tu_descriptor_pool);
      pool->host_memory_ptr = pool->host_memory_base;
      pool->host_memory_end = (uint8_t*)pool + size;
   }

   if (bo_size) {
      ret = tu_bo_init_new(device, &pool->bo, bo_size, TU_BO_ALLOC_ALLOW_DUMP);
      if (ret)
         goto fail_alloc;

      ret = tu_bo_map(device, &pool->bo);
      if (ret)
         goto fail_map;
   }
   pool->size = bo_size;
   pool->max_entry_count = pCreateInfo->maxSets;

   *pDescriptorPool = tu_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;

fail_map:
   tu_bo_finish(device, &pool->bo);
fail_alloc:
   vk_object_free(&device->vk, pAllocator, pool);
   return ret;
}

VKAPI_ATTR void VKAPI_CALL
tu_DestroyDescriptorPool(VkDevice _device,
                         VkDescriptorPool _pool,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   }

   if (pool->size)
      tu_bo_finish(device, &pool->bo);

   vk_object_free(&device->vk, pAllocator, pool);
}

VKAPI_ATTR VkResult VKAPI_CALL
tu_ResetDescriptorPool(VkDevice _device,
                       VkDescriptorPool descriptorPool,
                       VkDescriptorPoolResetFlags flags)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   if (!pool->host_memory_base) {
      for (int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
      pool->entry_count = 0;
   }

   pool->current_offset = 0;
   pool->host_memory_ptr = pool->host_memory_base;

   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
tu_AllocateDescriptorSets(VkDevice _device,
                          const VkDescriptorSetAllocateInfo *pAllocateInfo,
                          VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   uint32_t i;
   struct tu_descriptor_set *set = NULL;

   const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *variable_counts =
      vk_find_struct_const(pAllocateInfo->pNext, DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT);
   const uint32_t zero = 0;

   /* allocate a set of buffers for each shader to contain descriptors */
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, layout,
                     pAllocateInfo->pSetLayouts[i]);

      const uint32_t *variable_count = NULL;
      if (variable_counts) {
         if (i < variable_counts->descriptorSetCount)
            variable_count = variable_counts->pDescriptorCounts + i;
         else
            variable_count = &zero;
      }

      assert(!(layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));

      result = tu_descriptor_set_create(device, pool, layout, variable_count, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = tu_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS) {
      tu_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                            i, pDescriptorSets);
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
      }
   }
   return result;
}

VKAPI_ATTR VkResult VKAPI_CALL
tu_FreeDescriptorSets(VkDevice _device,
                      VkDescriptorPool descriptorPool,
                      uint32_t count,
                      const VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      TU_FROM_HANDLE(tu_descriptor_set, set, pDescriptorSets[i]);

      if (set && !pool->host_memory_base)
         tu_descriptor_set_destroy(device, pool, set, true);
   }
   return VK_SUCCESS;
}

static void
write_texel_buffer_descriptor(uint32_t *dst, const VkBufferView buffer_view)
{
   if (buffer_view == VK_NULL_HANDLE) {
      memset(dst, 0, A6XX_TEX_CONST_DWORDS * sizeof(uint32_t));
   } else {
      TU_FROM_HANDLE(tu_buffer_view, view, buffer_view);

      memcpy(dst, view->descriptor, sizeof(view->descriptor));
   }
}

static uint32_t get_range(struct tu_buffer *buf, VkDeviceSize offset,
                          VkDeviceSize range)
{
   if (range == VK_WHOLE_SIZE) {
      return buf->size - offset;
   } else {
      return range;
   }
}

static void
write_buffer_descriptor(const struct tu_device *device,
                        uint32_t *dst,
                        const VkDescriptorBufferInfo *buffer_info)
{
   if (buffer_info->buffer == VK_NULL_HANDLE) {
      memset(dst, 0, A6XX_TEX_CONST_DWORDS * sizeof(uint32_t));
      return;
   }

   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);

   assert((buffer_info->offset & 63) == 0); /* minStorageBufferOffsetAlignment */
   uint64_t va = tu_buffer_iova(buffer) + buffer_info->offset;
   uint32_t range = get_range(buffer, buffer_info->offset, buffer_info->range);
   /* newer a6xx allows using a 16-bit descriptor for both 16-bit and 32-bit
    * access */
   if (device->physical_device->info->a6xx.storage_16bit) {
      dst[0] = A6XX_IBO_0_TILE_MODE(TILE6_LINEAR) | A6XX_IBO_0_FMT(FMT6_16_UINT);
      dst[1] = DIV_ROUND_UP(range, 2);
   } else {
      dst[0] = A6XX_IBO_0_TILE_MODE(TILE6_LINEAR) | A6XX_IBO_0_FMT(FMT6_32_UINT);
      dst[1] = DIV_ROUND_UP(range, 4);
   }
   dst[2] =
      A6XX_IBO_2_UNK4 | A6XX_IBO_2_TYPE(A6XX_TEX_1D) | A6XX_IBO_2_UNK31;
   dst[3] = 0;
   dst[4] = A6XX_IBO_4_BASE_LO(va);
   dst[5] = A6XX_IBO_5_BASE_HI(va >> 32);
   for (int i = 6; i < A6XX_TEX_CONST_DWORDS; i++)
      dst[i] = 0;
}
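
/* Example of the encoding above, assuming storage_16bit: a 256-byte range
 * becomes dst[1] = 128 16-bit texels; without storage_16bit the same range
 * becomes dst[1] = 64 32-bit texels. Either way the buffer is exposed to
 * the shader as a linear "1D texture" over its contents.
 */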

static void
write_ubo_descriptor(uint32_t *dst, const VkDescriptorBufferInfo *buffer_info)
{
   if (buffer_info->buffer == VK_NULL_HANDLE) {
      dst[0] = dst[1] = 0;
      return;
   }

   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);

   uint32_t range = get_range(buffer, buffer_info->offset, buffer_info->range);
   /* The HW range is in vec4 units */
   range = ALIGN_POT(range, 16) / 16;
   uint64_t va = tu_buffer_iova(buffer) + buffer_info->offset;

   dst[0] = A6XX_UBO_0_BASE_LO(va);
   dst[1] = A6XX_UBO_1_BASE_HI(va >> 32) | A6XX_UBO_1_SIZE(range);
}

static void
write_image_descriptor(uint32_t *dst,
                       VkDescriptorType descriptor_type,
                       const VkDescriptorImageInfo *image_info)
{
   if (image_info->imageView == VK_NULL_HANDLE) {
      memset(dst, 0, A6XX_TEX_CONST_DWORDS * sizeof(uint32_t));
      return;
   }

   TU_FROM_HANDLE(tu_image_view, iview, image_info->imageView);

   if (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
      memcpy(dst, iview->storage_descriptor, sizeof(iview->storage_descriptor));
   } else {
      memcpy(dst, iview->descriptor, sizeof(iview->descriptor));
   }
}

static void
write_combined_image_sampler_descriptor(uint32_t *dst,
                                        VkDescriptorType descriptor_type,
                                        const VkDescriptorImageInfo *image_info,
                                        bool has_sampler)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   write_image_descriptor(dst, descriptor_type, image_info);
   /* copy over sampler state */
   if (has_sampler) {
      memcpy(dst + A6XX_TEX_CONST_DWORDS, sampler->descriptor, sizeof(sampler->descriptor));
   }
}

static void
write_sampler_descriptor(uint32_t *dst, const VkDescriptorImageInfo *image_info)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   memcpy(dst, sampler->descriptor, sizeof(sampler->descriptor));
}

/* note: this is used with immutable samplers in push descriptors */
static void
write_sampler_push(uint32_t *dst, const struct tu_sampler *sampler)
{
   memcpy(dst, sampler->descriptor, sizeof(sampler->descriptor));
}

void
tu_update_descriptor_sets(const struct tu_device *device,
                          VkDescriptorSet dstSetOverride,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies)
{
   uint32_t i, j;
   for (i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
      TU_FROM_HANDLE(tu_descriptor_set, set, dstSetOverride ?: writeset->dstSet);
      const struct tu_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + writeset->dstBinding;
      uint32_t *ptr = set->mapped_ptr;
      /* for immutable samplers with push descriptors: */
      const bool copy_immutable_samplers =
         dstSetOverride && binding_layout->immutable_samplers_offset;
      const struct tu_sampler *samplers =
         tu_immutable_samplers(set->layout, binding_layout);

      ptr += binding_layout->offset / 4;

      ptr += (binding_layout->size / 4) * writeset->dstArrayElement;
      for (j = 0; j < writeset->descriptorCount; ++j) {
         switch (writeset->descriptorType) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: {
            assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
            unsigned idx = writeset->dstArrayElement + j;
            idx += binding_layout->dynamic_offset_offset;
            write_ubo_descriptor(set->dynamic_descriptors + A6XX_TEX_CONST_DWORDS * idx,
                                 writeset->pBufferInfo + j);
            break;
         }
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            write_ubo_descriptor(ptr, writeset->pBufferInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
            unsigned idx = writeset->dstArrayElement + j;
            idx += binding_layout->dynamic_offset_offset;
            write_buffer_descriptor(device, set->dynamic_descriptors + A6XX_TEX_CONST_DWORDS * idx,
                                    writeset->pBufferInfo + j);
            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            write_buffer_descriptor(device, ptr, writeset->pBufferInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            write_texel_buffer_descriptor(ptr, writeset->pTexelBufferView[j]);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            write_image_descriptor(ptr, writeset->descriptorType, writeset->pImageInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            write_combined_image_sampler_descriptor(ptr,
                                                    writeset->descriptorType,
                                                    writeset->pImageInfo + j,
                                                    !binding_layout->immutable_samplers_offset);

            if (copy_immutable_samplers)
               write_sampler_push(ptr + A6XX_TEX_CONST_DWORDS, &samplers[writeset->dstArrayElement + j]);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLER:
            if (!binding_layout->immutable_samplers_offset)
               write_sampler_descriptor(ptr, writeset->pImageInfo + j);
            else if (copy_immutable_samplers)
               write_sampler_push(ptr, &samplers[writeset->dstArrayElement + j]);
            break;
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            /* nothing in descriptor set - framebuffer state is used instead */
            break;
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         ptr += binding_layout->size / 4;
      }
   }

   for (i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
      TU_FROM_HANDLE(tu_descriptor_set, src_set,
                     copyset->srcSet);
      TU_FROM_HANDLE(tu_descriptor_set, dst_set,
                     copyset->dstSet);
      const struct tu_descriptor_set_binding_layout *src_binding_layout =
         src_set->layout->binding + copyset->srcBinding;
      const struct tu_descriptor_set_binding_layout *dst_binding_layout =
         dst_set->layout->binding + copyset->dstBinding;
      uint32_t *src_ptr = src_set->mapped_ptr;
      uint32_t *dst_ptr = dst_set->mapped_ptr;

      src_ptr += src_binding_layout->offset / 4;
      dst_ptr += dst_binding_layout->offset / 4;

      src_ptr += src_binding_layout->size * copyset->srcArrayElement / 4;
      dst_ptr += dst_binding_layout->size * copyset->dstArrayElement / 4;

      for (j = 0; j < copyset->descriptorCount; ++j) {
         switch (src_binding_layout->type) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            unsigned src_idx = copyset->srcArrayElement + j;
            unsigned dst_idx = copyset->dstArrayElement + j;
            src_idx += src_binding_layout->dynamic_offset_offset;
            dst_idx += dst_binding_layout->dynamic_offset_offset;

            uint32_t *src_dynamic, *dst_dynamic;
            src_dynamic = src_set->dynamic_descriptors + src_idx * A6XX_TEX_CONST_DWORDS;
            dst_dynamic = dst_set->dynamic_descriptors + dst_idx * A6XX_TEX_CONST_DWORDS;
            memcpy(dst_dynamic, src_dynamic, A6XX_TEX_CONST_DWORDS * 4);
            break;
         }
         default:
            memcpy(dst_ptr, src_ptr, src_binding_layout->size);
         }

         src_ptr += src_binding_layout->size / 4;
         dst_ptr += dst_binding_layout->size / 4;
      }
   }
}

VKAPI_ATTR void VKAPI_CALL
tu_UpdateDescriptorSets(VkDevice _device,
                        uint32_t descriptorWriteCount,
                        const VkWriteDescriptorSet *pDescriptorWrites,
                        uint32_t descriptorCopyCount,
                        const VkCopyDescriptorSet *pDescriptorCopies)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   tu_update_descriptor_sets(device, VK_NULL_HANDLE,
                             descriptorWriteCount, pDescriptorWrites,
                             descriptorCopyCount, pDescriptorCopies);
}

VKAPI_ATTR VkResult VKAPI_CALL
tu_CreateDescriptorUpdateTemplate(
   VkDevice _device,
   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                  pCreateInfo->descriptorSetLayout);
   const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
   const size_t size =
      sizeof(struct tu_descriptor_update_template) +
      sizeof(struct tu_descriptor_update_template_entry) * entry_count;
   struct tu_descriptor_update_template *templ;

   templ = vk_object_alloc(&device->vk, pAllocator, size,
                           VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
   if (!templ)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   templ->entry_count = entry_count;

   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR) {
      TU_FROM_HANDLE(tu_pipeline_layout, pipeline_layout, pCreateInfo->pipelineLayout);

      /* For push descriptors, descriptorSetLayout should be ignored; the
       * layout instead comes from pipelineLayout and set.
       */
      assert(pCreateInfo->set < MAX_SETS);
      set_layout = pipeline_layout->set[pCreateInfo->set].layout;

      templ->bind_point = pCreateInfo->pipelineBindPoint;
   }

   for (uint32_t i = 0; i < entry_count; i++) {
      const VkDescriptorUpdateTemplateEntry *entry = &pCreateInfo->pDescriptorUpdateEntries[i];

      const struct tu_descriptor_set_binding_layout *binding_layout =
         set_layout->binding + entry->dstBinding;
      uint32_t dst_offset, dst_stride;
      const struct tu_sampler *immutable_samplers = NULL;

      /* dst_offset is an offset into dynamic_descriptors when the descriptor
       * is dynamic, and an offset into mapped_ptr otherwise.
       */
      switch (entry->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         dst_offset = (binding_layout->dynamic_offset_offset +
                       entry->dstArrayElement) * A6XX_TEX_CONST_DWORDS;
         dst_stride = A6XX_TEX_CONST_DWORDS;
         break;
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR &&
             binding_layout->immutable_samplers_offset) {
            immutable_samplers =
               tu_immutable_samplers(set_layout, binding_layout) + entry->dstArrayElement;
         }
         FALLTHROUGH;
      default:
         dst_offset = binding_layout->offset / 4;
         dst_offset += (binding_layout->size * entry->dstArrayElement) / 4;
         dst_stride = binding_layout->size / 4;
      }

      templ->entry[i] = (struct tu_descriptor_update_template_entry) {
         .descriptor_type = entry->descriptorType,
         .descriptor_count = entry->descriptorCount,
         .src_offset = entry->offset,
         .src_stride = entry->stride,
         .dst_offset = dst_offset,
         .dst_stride = dst_stride,
         .has_sampler = !binding_layout->immutable_samplers_offset,
         .immutable_samplers = immutable_samplers,
      };
   }

   *pDescriptorUpdateTemplate =
      tu_descriptor_update_template_to_handle(templ);

   return VK_SUCCESS;
}
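
/* Worked example: a template entry targeting a non-dynamic binding whose
 * descriptors are 16 dwords (64 bytes) each and whose binding starts at
 * byte offset 128, with dstArrayElement = 2, yields
 * dst_offset = 128/4 + (64 * 2)/4 = 64 dwords and dst_stride = 16 dwords,
 * which is exactly how tu_update_descriptor_set_with_template() walks the
 * set below.
 */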

VKAPI_ATTR void VKAPI_CALL
tu_DestroyDescriptorUpdateTemplate(
   VkDevice _device,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);

   if (!templ)
      return;

   vk_object_free(&device->vk, pAllocator, templ);
}

void
tu_update_descriptor_set_with_template(
   const struct tu_device *device,
   struct tu_descriptor_set *set,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);

   for (uint32_t i = 0; i < templ->entry_count; i++) {
      uint32_t *ptr = set->mapped_ptr;
      const void *src = ((const char *) pData) + templ->entry[i].src_offset;
      const struct tu_sampler *samplers = templ->entry[i].immutable_samplers;

      ptr += templ->entry[i].dst_offset;
      unsigned dst_offset = templ->entry[i].dst_offset;
      for (unsigned j = 0; j < templ->entry[i].descriptor_count; ++j) {
         switch (templ->entry[i].descriptor_type) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: {
            assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
            write_ubo_descriptor(set->dynamic_descriptors + dst_offset, src);
            break;
         }
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            write_ubo_descriptor(ptr, src);
            break;
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
            write_buffer_descriptor(device, set->dynamic_descriptors + dst_offset, src);
            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            write_buffer_descriptor(device, ptr, src);
            break;
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            write_texel_buffer_descriptor(ptr, *(VkBufferView *) src);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
            write_image_descriptor(ptr, templ->entry[i].descriptor_type, src);
            break;
         }
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            write_combined_image_sampler_descriptor(ptr,
                                                    templ->entry[i].descriptor_type,
                                                    src,
                                                    templ->entry[i].has_sampler);
            if (samplers)
               write_sampler_push(ptr + A6XX_TEX_CONST_DWORDS, &samplers[j]);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLER:
            if (templ->entry[i].has_sampler)
               write_sampler_descriptor(ptr, src);
            else if (samplers)
               write_sampler_push(ptr, &samplers[j]);
            break;
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            /* nothing in descriptor set - framebuffer state is used instead */
            break;
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         src = (char *) src + templ->entry[i].src_stride;
         ptr += templ->entry[i].dst_stride;
         dst_offset += templ->entry[i].dst_stride;
      }
   }
}

VKAPI_ATTR void VKAPI_CALL
tu_UpdateDescriptorSetWithTemplate(
   VkDevice _device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set, set, descriptorSet);

   tu_update_descriptor_set_with_template(device, set, descriptorUpdateTemplate, pData);
}

VKAPI_ATTR VkResult VKAPI_CALL
tu_CreateSamplerYcbcrConversion(
   VkDevice _device,
   const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkSamplerYcbcrConversion *pYcbcrConversion)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_sampler_ycbcr_conversion *conversion;

   conversion = vk_object_alloc(&device->vk, pAllocator, sizeof(*conversion),
                                VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION);
   if (!conversion)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   conversion->format = pCreateInfo->format;
   conversion->ycbcr_model = pCreateInfo->ycbcrModel;
   conversion->ycbcr_range = pCreateInfo->ycbcrRange;
   conversion->components = pCreateInfo->components;
   conversion->chroma_offsets[0] = pCreateInfo->xChromaOffset;
   conversion->chroma_offsets[1] = pCreateInfo->yChromaOffset;
   conversion->chroma_filter = pCreateInfo->chromaFilter;

   *pYcbcrConversion = tu_sampler_ycbcr_conversion_to_handle(conversion);
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
tu_DestroySamplerYcbcrConversion(VkDevice _device,
                                 VkSamplerYcbcrConversion ycbcrConversion,
                                 const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_sampler_ycbcr_conversion, ycbcr_conversion, ycbcrConversion);

   if (!ycbcr_conversion)
      return;

   vk_object_free(&device->vk, pAllocator, ycbcr_conversion);
}