GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/intel/vulkan/anv_descriptor_set.c
1
/*
2
* Copyright © 2015 Intel Corporation
3
*
4
* Permission is hereby granted, free of charge, to any person obtaining a
5
* copy of this software and associated documentation files (the "Software"),
6
* to deal in the Software without restriction, including without limitation
7
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
* and/or sell copies of the Software, and to permit persons to whom the
9
* Software is furnished to do so, subject to the following conditions:
10
*
11
* The above copyright notice and this permission notice (including the next
12
* paragraph) shall be included in all copies or substantial portions of the
13
* Software.
14
*
15
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21
* IN THE SOFTWARE.
22
*/
23
24
#include <assert.h>
25
#include <stdbool.h>
26
#include <string.h>
27
#include <unistd.h>
28
#include <fcntl.h>
29
30
#include "util/mesa-sha1.h"
31
#include "vk_util.h"
32
33
#include "anv_private.h"
34
35
/*
36
* Descriptor set layouts.
37
*/
38
39
static enum anv_descriptor_data
40
anv_descriptor_data_for_type(const struct anv_physical_device *device,
41
VkDescriptorType type)
42
{
43
enum anv_descriptor_data data = 0;
44
45
switch (type) {
46
case VK_DESCRIPTOR_TYPE_SAMPLER:
47
data = ANV_DESCRIPTOR_SAMPLER_STATE;
48
if (device->has_bindless_samplers)
49
data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
50
break;
51
52
case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
53
data = ANV_DESCRIPTOR_SURFACE_STATE |
54
ANV_DESCRIPTOR_SAMPLER_STATE;
55
if (device->has_bindless_images || device->has_bindless_samplers)
56
data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
57
break;
58
59
case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
60
case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
61
data = ANV_DESCRIPTOR_SURFACE_STATE;
62
if (device->has_bindless_images)
63
data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;
64
break;
65
66
case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
67
data = ANV_DESCRIPTOR_SURFACE_STATE;
68
break;
69
70
case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
71
case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
72
data = ANV_DESCRIPTOR_SURFACE_STATE;
73
if (device->info.ver < 9)
74
data |= ANV_DESCRIPTOR_IMAGE_PARAM;
75
if (device->has_bindless_images)
76
data |= ANV_DESCRIPTOR_STORAGE_IMAGE;
77
break;
78
79
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
80
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
81
data = ANV_DESCRIPTOR_SURFACE_STATE |
82
ANV_DESCRIPTOR_BUFFER_VIEW;
83
break;
84
85
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
86
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
87
data = ANV_DESCRIPTOR_SURFACE_STATE;
88
break;
89
90
case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
91
data = ANV_DESCRIPTOR_INLINE_UNIFORM;
92
break;
93
94
case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:
95
data = ANV_DESCRIPTOR_ADDRESS_RANGE;
96
break;
97
98
default:
99
unreachable("Unsupported descriptor type");
100
}
101
102
/* On gfx8 and above when we have softpin enabled, we also need to push
103
* SSBO address ranges so that we can use A64 messages in the shader.
104
*/
105
if (device->has_a64_buffer_access &&
106
(type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||
107
type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
108
type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
109
type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC))
110
data |= ANV_DESCRIPTOR_ADDRESS_RANGE;
111
112
/* On Ivy Bridge and Bay Trail, we need to swizzle textures in the shader.
113
* Do not handle VK_DESCRIPTOR_TYPE_STORAGE_IMAGE and
114
* VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT because they already must
115
* have identity swizzle.
116
*/
117
if (device->info.verx10 == 70 &&
118
(type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||
119
type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))
120
data |= ANV_DESCRIPTOR_TEXTURE_SWIZZLE;
121
122
return data;
123
}
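/* Illustrative example: on a physical device with has_a64_buffer_access set,
 * VK_DESCRIPTOR_TYPE_STORAGE_BUFFER resolves to
 *
 *    ANV_DESCRIPTOR_SURFACE_STATE | ANV_DESCRIPTOR_BUFFER_VIEW |
 *    ANV_DESCRIPTOR_ADDRESS_RANGE
 *
 * via the switch above plus the A64 SSBO handling.
 */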
124
125
static unsigned
126
anv_descriptor_data_size(enum anv_descriptor_data data)
127
{
128
unsigned size = 0;
129
130
if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE)
131
size += sizeof(struct anv_sampled_image_descriptor);
132
133
if (data & ANV_DESCRIPTOR_STORAGE_IMAGE)
134
size += sizeof(struct anv_storage_image_descriptor);
135
136
if (data & ANV_DESCRIPTOR_IMAGE_PARAM)
137
size += BRW_IMAGE_PARAM_SIZE * 4;
138
139
if (data & ANV_DESCRIPTOR_ADDRESS_RANGE)
140
size += sizeof(struct anv_address_range_descriptor);
141
142
if (data & ANV_DESCRIPTOR_TEXTURE_SWIZZLE)
143
size += sizeof(struct anv_texture_swizzle_descriptor);
144
145
return size;
146
}
147
148
static bool
149
anv_needs_descriptor_buffer(VkDescriptorType desc_type,
150
enum anv_descriptor_data desc_data)
151
{
152
if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT ||
153
anv_descriptor_data_size(desc_data) > 0)
154
return true;
155
return false;
156
}
157
158
/** Returns the size in bytes of each descriptor with the given layout */
159
unsigned
160
anv_descriptor_size(const struct anv_descriptor_set_binding_layout *layout)
161
{
162
if (layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {
163
assert(layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);
164
return layout->array_size;
165
}
166
167
unsigned size = anv_descriptor_data_size(layout->data);
168
169
/* For multi-planar bindings, we make every descriptor consume the maximum
170
* number of planes so we don't have to bother with walking arrays and
171
* adding things up every time. Fortunately, YCbCr samplers aren't all
172
* that common and likely won't be in the middle of big arrays.
173
*/
174
if (layout->max_plane_count > 1)
175
size *= layout->max_plane_count;
176
177
return size;
178
}
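/* Illustrative example: a VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER binding
 * whose immutable YCbCr samplers give max_plane_count == 3 consumes
 * 3 * anv_descriptor_data_size(layout->data) bytes per array element; when
 * ANV_DESCRIPTOR_SAMPLED_IMAGE is the only sized flag set, that is
 * 3 * sizeof(struct anv_sampled_image_descriptor).
 */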
179
180
/** Returns the size in bytes of each descriptor of the given type
181
*
182
* This version of the function does not have access to the entire layout so
183
* it may only work on certain descriptor types where the descriptor size is
184
* entirely determined by the descriptor type. Whenever possible, code should
185
* use anv_descriptor_size() instead.
186
*/
187
unsigned
188
anv_descriptor_type_size(const struct anv_physical_device *pdevice,
189
VkDescriptorType type)
190
{
191
assert(type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT &&
192
type != VK_DESCRIPTOR_TYPE_SAMPLER &&
193
type != VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE &&
194
type != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);
195
196
return anv_descriptor_data_size(anv_descriptor_data_for_type(pdevice, type));
197
}
198
199
static bool
200
anv_descriptor_data_supports_bindless(const struct anv_physical_device *pdevice,
201
enum anv_descriptor_data data,
202
bool sampler)
203
{
204
if (data & ANV_DESCRIPTOR_ADDRESS_RANGE) {
205
assert(pdevice->has_a64_buffer_access);
206
return true;
207
}
208
209
if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
210
assert(pdevice->has_bindless_images || pdevice->has_bindless_samplers);
211
return sampler ? pdevice->has_bindless_samplers :
212
pdevice->has_bindless_images;
213
}
214
215
if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
216
assert(pdevice->has_bindless_images);
217
return true;
218
}
219
220
return false;
221
}
222
223
bool
224
anv_descriptor_supports_bindless(const struct anv_physical_device *pdevice,
225
const struct anv_descriptor_set_binding_layout *binding,
226
bool sampler)
227
{
228
return anv_descriptor_data_supports_bindless(pdevice, binding->data,
229
sampler);
230
}
231
232
bool
233
anv_descriptor_requires_bindless(const struct anv_physical_device *pdevice,
234
const struct anv_descriptor_set_binding_layout *binding,
235
bool sampler)
236
{
237
if (pdevice->always_use_bindless)
238
return anv_descriptor_supports_bindless(pdevice, binding, sampler);
239
240
static const VkDescriptorBindingFlagBitsEXT flags_requiring_bindless =
241
VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT |
242
VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT |
243
VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;
244
245
return (binding->flags & flags_requiring_bindless) != 0;
246
}
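/* Illustrative note: a binding with
 * VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT, for instance, is forced onto
 * the bindless path here because its contents may legally be rewritten after
 * the command buffer using it has been recorded, so a binding table snapshot
 * taken at record time could go stale.
 */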
247
248
void anv_GetDescriptorSetLayoutSupport(
249
VkDevice _device,
250
const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
251
VkDescriptorSetLayoutSupport* pSupport)
252
{
253
ANV_FROM_HANDLE(anv_device, device, _device);
254
const struct anv_physical_device *pdevice = device->physical;
255
256
uint32_t surface_count[MESA_VULKAN_SHADER_STAGES] = { 0, };
257
VkDescriptorType varying_desc_type = VK_DESCRIPTOR_TYPE_MAX_ENUM;
258
bool needs_descriptor_buffer = false;
259
260
const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =
261
vk_find_struct_const(pCreateInfo->pNext,
262
DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);
263
264
for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
265
const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];
266
267
VkDescriptorBindingFlags flags = 0;
268
if (binding_flags_info && binding_flags_info->bindingCount > 0) {
269
assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
270
flags = binding_flags_info->pBindingFlags[b];
271
}
272
273
enum anv_descriptor_data desc_data =
274
anv_descriptor_data_for_type(pdevice, binding->descriptorType);
275
276
if (anv_needs_descriptor_buffer(binding->descriptorType, desc_data))
277
needs_descriptor_buffer = true;
278
279
if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)
280
varying_desc_type = binding->descriptorType;
281
282
switch (binding->descriptorType) {
283
case VK_DESCRIPTOR_TYPE_SAMPLER:
284
/* There is no real limit on samplers */
285
break;
286
287
case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
288
/* Inline uniforms don't use a binding */
289
break;
290
291
case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
292
if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))
293
break;
294
295
if (binding->pImmutableSamplers) {
296
for (uint32_t i = 0; i < binding->descriptorCount; i++) {
297
ANV_FROM_HANDLE(anv_sampler, sampler,
298
binding->pImmutableSamplers[i]);
299
anv_foreach_stage(s, binding->stageFlags)
300
surface_count[s] += sampler->n_planes;
301
}
302
} else {
303
anv_foreach_stage(s, binding->stageFlags)
304
surface_count[s] += binding->descriptorCount;
305
}
306
break;
307
308
default:
309
if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))
310
break;
311
312
anv_foreach_stage(s, binding->stageFlags)
313
surface_count[s] += binding->descriptorCount;
314
break;
315
}
316
}
317
318
for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {
319
if (needs_descriptor_buffer)
320
surface_count[s] += 1;
321
}
322
323
VkDescriptorSetVariableDescriptorCountLayoutSupport *vdcls =
324
vk_find_struct(pSupport->pNext,
325
DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT);
326
if (vdcls != NULL) {
327
if (varying_desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
328
vdcls->maxVariableDescriptorCount = MAX_INLINE_UNIFORM_BLOCK_SIZE;
329
} else if (varying_desc_type != VK_DESCRIPTOR_TYPE_MAX_ENUM) {
330
vdcls->maxVariableDescriptorCount = UINT16_MAX;
331
} else {
332
vdcls->maxVariableDescriptorCount = 0;
333
}
334
}
335
336
bool supported = true;
337
for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {
338
/* Our maximum binding table size is 240 and we need to reserve 8 for
339
* render targets.
340
*/
341
if (surface_count[s] > MAX_BINDING_TABLE_SIZE - MAX_RTS)
342
supported = false;
343
}
344
345
pSupport->supported = supported;
346
}
347
348
VkResult anv_CreateDescriptorSetLayout(
349
VkDevice _device,
350
const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
351
const VkAllocationCallbacks* pAllocator,
352
VkDescriptorSetLayout* pSetLayout)
353
{
354
ANV_FROM_HANDLE(anv_device, device, _device);
355
356
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
357
358
uint32_t num_bindings = 0;
359
uint32_t immutable_sampler_count = 0;
360
for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
361
num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);
362
363
/* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:
364
*
365
* "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or
366
* VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then
367
* pImmutableSamplers can be used to initialize a set of immutable
368
* samplers. [...] If descriptorType is not one of these descriptor
369
* types, then pImmutableSamplers is ignored."
370
*
371
* We need to be careful here and only parse pImmutableSamplers if we
372
* have one of the right descriptor types.
373
*/
374
VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;
375
if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||
376
desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
377
pCreateInfo->pBindings[j].pImmutableSamplers)
378
immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
379
}
380
381
/* We need to allocate decriptor set layouts off the device allocator
382
* with DEVICE scope because they are reference counted and may not be
383
* destroyed when vkDestroyDescriptorSetLayout is called.
384
*/
385
VK_MULTIALLOC(ma);
386
VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_layout, set_layout, 1);
387
VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_binding_layout,
388
bindings, num_bindings);
389
VK_MULTIALLOC_DECL(&ma, struct anv_sampler *, samplers,
390
immutable_sampler_count);
391
392
if (!vk_object_multizalloc(&device->vk, &ma, NULL,
393
VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT))
394
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
395
396
set_layout->ref_cnt = 1;
397
set_layout->binding_count = num_bindings;
398
399
for (uint32_t b = 0; b < num_bindings; b++) {
400
/* Initialize all binding_layout entries to -1 */
401
memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));
402
403
set_layout->binding[b].flags = 0;
404
set_layout->binding[b].data = 0;
405
set_layout->binding[b].max_plane_count = 0;
406
set_layout->binding[b].array_size = 0;
407
set_layout->binding[b].immutable_samplers = NULL;
408
}
409
410
/* Initialize all samplers to 0 */
411
memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));
412
413
uint32_t buffer_view_count = 0;
414
uint32_t dynamic_offset_count = 0;
415
uint32_t descriptor_buffer_size = 0;
416
417
for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
418
const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
419
uint32_t b = binding->binding;
420
/* We temporarily store pCreateInfo->pBindings[] index (plus one) in the
421
* immutable_samplers pointer. This provides us with a quick-and-dirty
422
* way to sort the bindings by binding number.
423
*/
424
set_layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);
425
}
426
427
const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *binding_flags_info =
428
vk_find_struct_const(pCreateInfo->pNext,
429
DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);
430
431
for (uint32_t b = 0; b < num_bindings; b++) {
432
/* We stashed the pCreateInfo->pBindings[] index (plus one) in the
433
* immutable_samplers pointer. Check for NULL (empty binding) and then
434
* reset it and compute the index.
435
*/
436
if (set_layout->binding[b].immutable_samplers == NULL)
437
continue;
438
const uint32_t info_idx =
439
(uintptr_t)(void *)set_layout->binding[b].immutable_samplers - 1;
440
set_layout->binding[b].immutable_samplers = NULL;
441
442
const VkDescriptorSetLayoutBinding *binding =
443
&pCreateInfo->pBindings[info_idx];
444
445
if (binding->descriptorCount == 0)
446
continue;
447
448
set_layout->binding[b].type = binding->descriptorType;
449
450
if (binding_flags_info && binding_flags_info->bindingCount > 0) {
451
assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);
452
set_layout->binding[b].flags =
453
binding_flags_info->pBindingFlags[info_idx];
454
455
/* From the Vulkan spec:
456
*
457
* "If VkDescriptorSetLayoutCreateInfo::flags includes
458
* VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then
459
* all elements of pBindingFlags must not include
460
* VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
461
* VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, or
462
* VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT"
463
*/
464
if (pCreateInfo->flags &
465
VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {
466
assert(!(set_layout->binding[b].flags &
467
(VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT |
468
VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |
469
VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)));
470
}
471
}
472
473
set_layout->binding[b].data =
474
anv_descriptor_data_for_type(device->physical,
475
binding->descriptorType);
476
set_layout->binding[b].array_size = binding->descriptorCount;
477
set_layout->binding[b].descriptor_index = set_layout->descriptor_count;
478
set_layout->descriptor_count += binding->descriptorCount;
479
480
if (set_layout->binding[b].data & ANV_DESCRIPTOR_BUFFER_VIEW) {
481
set_layout->binding[b].buffer_view_index = buffer_view_count;
482
buffer_view_count += binding->descriptorCount;
483
}
484
485
switch (binding->descriptorType) {
486
case VK_DESCRIPTOR_TYPE_SAMPLER:
487
case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
488
set_layout->binding[b].max_plane_count = 1;
489
if (binding->pImmutableSamplers) {
490
set_layout->binding[b].immutable_samplers = samplers;
491
samplers += binding->descriptorCount;
492
493
for (uint32_t i = 0; i < binding->descriptorCount; i++) {
494
ANV_FROM_HANDLE(anv_sampler, sampler,
495
binding->pImmutableSamplers[i]);
496
497
set_layout->binding[b].immutable_samplers[i] = sampler;
498
if (set_layout->binding[b].max_plane_count < sampler->n_planes)
499
set_layout->binding[b].max_plane_count = sampler->n_planes;
500
}
501
}
502
break;
503
504
case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
505
set_layout->binding[b].max_plane_count = 1;
506
break;
507
508
default:
509
break;
510
}
511
512
switch (binding->descriptorType) {
513
case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
514
case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
515
set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
516
set_layout->dynamic_offset_stages[dynamic_offset_count] = binding->stageFlags;
517
dynamic_offset_count += binding->descriptorCount;
518
assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);
519
break;
520
521
default:
522
break;
523
}
524
525
if (binding->descriptorType ==
526
VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
527
/* Inline uniform blocks are specified to use the descriptor array
528
* size as the size in bytes of the block.
529
*/
530
descriptor_buffer_size = align_u32(descriptor_buffer_size,
531
ANV_UBO_ALIGNMENT);
532
set_layout->binding[b].descriptor_offset = descriptor_buffer_size;
533
descriptor_buffer_size += binding->descriptorCount;
534
} else {
535
set_layout->binding[b].descriptor_offset = descriptor_buffer_size;
536
descriptor_buffer_size += anv_descriptor_size(&set_layout->binding[b]) *
537
binding->descriptorCount;
538
}
539
540
set_layout->shader_stages |= binding->stageFlags;
541
}
542
543
set_layout->buffer_view_count = buffer_view_count;
544
set_layout->dynamic_offset_count = dynamic_offset_count;
545
set_layout->descriptor_buffer_size = descriptor_buffer_size;
546
547
*pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);
548
549
return VK_SUCCESS;
550
}
551
552
void
553
anv_descriptor_set_layout_destroy(struct anv_device *device,
554
struct anv_descriptor_set_layout *layout)
555
{
556
assert(layout->ref_cnt == 0);
557
vk_object_free(&device->vk, NULL, layout);
558
}
559
560
static const struct anv_descriptor_set_binding_layout *
561
set_layout_dynamic_binding(const struct anv_descriptor_set_layout *set_layout)
562
{
563
if (set_layout->binding_count == 0)
564
return NULL;
565
566
const struct anv_descriptor_set_binding_layout *last_binding =
567
&set_layout->binding[set_layout->binding_count - 1];
568
if (!(last_binding->flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT))
569
return NULL;
570
571
return last_binding;
572
}
573
574
static uint32_t
575
set_layout_descriptor_count(const struct anv_descriptor_set_layout *set_layout,
576
uint32_t var_desc_count)
577
{
578
const struct anv_descriptor_set_binding_layout *dynamic_binding =
579
set_layout_dynamic_binding(set_layout);
580
if (dynamic_binding == NULL)
581
return set_layout->descriptor_count;
582
583
assert(var_desc_count <= dynamic_binding->array_size);
584
uint32_t shrink = dynamic_binding->array_size - var_desc_count;
585
586
if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
587
return set_layout->descriptor_count;
588
589
return set_layout->descriptor_count - shrink;
590
}
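/* Illustrative example: if the last binding has
 * VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT with array_size == 1024
 * and the set is allocated with var_desc_count == 16, shrink is 1008 and the
 * effective count is set_layout->descriptor_count - 1008 (inline uniform
 * blocks keep the full count, as handled above).
 */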
591
592
static uint32_t
593
set_layout_buffer_view_count(const struct anv_descriptor_set_layout *set_layout,
594
uint32_t var_desc_count)
595
{
596
const struct anv_descriptor_set_binding_layout *dynamic_binding =
597
set_layout_dynamic_binding(set_layout);
598
if (dynamic_binding == NULL)
599
return set_layout->buffer_view_count;
600
601
assert(var_desc_count <= dynamic_binding->array_size);
602
uint32_t shrink = dynamic_binding->array_size - var_desc_count;
603
604
if (!(dynamic_binding->data & ANV_DESCRIPTOR_BUFFER_VIEW))
605
return set_layout->buffer_view_count;
606
607
return set_layout->buffer_view_count - shrink;
608
}
609
610
uint32_t
611
anv_descriptor_set_layout_descriptor_buffer_size(const struct anv_descriptor_set_layout *set_layout,
612
uint32_t var_desc_count)
613
{
614
const struct anv_descriptor_set_binding_layout *dynamic_binding =
615
set_layout_dynamic_binding(set_layout);
616
if (dynamic_binding == NULL)
617
return ALIGN(set_layout->descriptor_buffer_size, ANV_UBO_ALIGNMENT);
618
619
assert(var_desc_count <= dynamic_binding->array_size);
620
uint32_t shrink = dynamic_binding->array_size - var_desc_count;
621
uint32_t set_size;
622
623
if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
624
/* Inline uniform blocks are specified to use the descriptor array
625
* size as the size in bytes of the block.
626
*/
627
set_size = set_layout->descriptor_buffer_size - shrink;
628
} else {
629
set_size = set_layout->descriptor_buffer_size -
630
shrink * anv_descriptor_size(dynamic_binding);
631
}
632
633
return ALIGN(set_size, ANV_UBO_ALIGNMENT);
634
}
635
636
void anv_DestroyDescriptorSetLayout(
637
VkDevice _device,
638
VkDescriptorSetLayout _set_layout,
639
const VkAllocationCallbacks* pAllocator)
640
{
641
ANV_FROM_HANDLE(anv_device, device, _device);
642
ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);
643
644
if (!set_layout)
645
return;
646
647
anv_descriptor_set_layout_unref(device, set_layout);
648
}
649
650
#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, &(x), sizeof(x));
651
652
static void
653
sha1_update_immutable_sampler(struct mesa_sha1 *ctx,
654
const struct anv_sampler *sampler)
655
{
656
if (!sampler->conversion)
657
return;
658
659
/* The only thing that affects the shader is ycbcr conversion */
660
_mesa_sha1_update(ctx, sampler->conversion,
661
sizeof(*sampler->conversion));
662
}
663
664
static void
665
sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,
666
const struct anv_descriptor_set_binding_layout *layout)
667
{
668
SHA1_UPDATE_VALUE(ctx, layout->flags);
669
SHA1_UPDATE_VALUE(ctx, layout->data);
670
SHA1_UPDATE_VALUE(ctx, layout->max_plane_count);
671
SHA1_UPDATE_VALUE(ctx, layout->array_size);
672
SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);
673
SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);
674
SHA1_UPDATE_VALUE(ctx, layout->buffer_view_index);
675
SHA1_UPDATE_VALUE(ctx, layout->descriptor_offset);
676
677
if (layout->immutable_samplers) {
678
for (uint16_t i = 0; i < layout->array_size; i++)
679
sha1_update_immutable_sampler(ctx, layout->immutable_samplers[i]);
680
}
681
}
682
683
static void
684
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
685
const struct anv_descriptor_set_layout *layout)
686
{
687
SHA1_UPDATE_VALUE(ctx, layout->binding_count);
688
SHA1_UPDATE_VALUE(ctx, layout->descriptor_count);
689
SHA1_UPDATE_VALUE(ctx, layout->shader_stages);
690
SHA1_UPDATE_VALUE(ctx, layout->buffer_view_count);
691
SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);
692
SHA1_UPDATE_VALUE(ctx, layout->descriptor_buffer_size);
693
694
for (uint16_t i = 0; i < layout->binding_count; i++)
695
sha1_update_descriptor_set_binding_layout(ctx, &layout->binding[i]);
696
}
697
698
/*
699
* Pipeline layouts. These have nothing to do with the pipeline. They are
700
* just multiple descriptor set layouts pasted together
701
*/
702
703
VkResult anv_CreatePipelineLayout(
704
VkDevice _device,
705
const VkPipelineLayoutCreateInfo* pCreateInfo,
706
const VkAllocationCallbacks* pAllocator,
707
VkPipelineLayout* pPipelineLayout)
708
{
709
ANV_FROM_HANDLE(anv_device, device, _device);
710
struct anv_pipeline_layout *layout;
711
712
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);
713
714
layout = vk_object_alloc(&device->vk, pAllocator, sizeof(*layout),
715
VK_OBJECT_TYPE_PIPELINE_LAYOUT);
716
if (layout == NULL)
717
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
718
719
layout->num_sets = pCreateInfo->setLayoutCount;
720
721
unsigned dynamic_offset_count = 0;
722
723
for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
724
ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
725
pCreateInfo->pSetLayouts[set]);
726
layout->set[set].layout = set_layout;
727
anv_descriptor_set_layout_ref(set_layout);
728
729
layout->set[set].dynamic_offset_start = dynamic_offset_count;
730
for (uint32_t b = 0; b < set_layout->binding_count; b++) {
731
if (set_layout->binding[b].dynamic_offset_index < 0)
732
continue;
733
734
dynamic_offset_count += set_layout->binding[b].array_size;
735
}
736
}
737
assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);
738
739
struct mesa_sha1 ctx;
740
_mesa_sha1_init(&ctx);
741
for (unsigned s = 0; s < layout->num_sets; s++) {
742
sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
743
_mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
744
sizeof(layout->set[s].dynamic_offset_start));
745
}
746
_mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
747
_mesa_sha1_final(&ctx, layout->sha1);
748
749
*pPipelineLayout = anv_pipeline_layout_to_handle(layout);
750
751
return VK_SUCCESS;
752
}
753
754
void anv_DestroyPipelineLayout(
755
VkDevice _device,
756
VkPipelineLayout _pipelineLayout,
757
const VkAllocationCallbacks* pAllocator)
758
{
759
ANV_FROM_HANDLE(anv_device, device, _device);
760
ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);
761
762
if (!pipeline_layout)
763
return;
764
765
for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
766
anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);
767
768
vk_object_free(&device->vk, pAllocator, pipeline_layout);
769
}
770
771
/*
772
* Descriptor pools.
773
*
774
* These are implemented using a big pool of memory and a free-list for the
775
* host memory allocations and a state_stream and a free list for the buffer
776
* view surface state. The spec allows us to fail to allocate due to
777
* fragmentation in all cases but two: 1) after pool reset, allocating up
778
* until the pool size with no freeing must succeed and 2) allocating and
779
* freeing only descriptor sets with the same layout. Case 1) is easy enough,
780
* and the free lists let us recycle blocks for case 2).
781
*/
782
783
/* The vma heap reserves 0 to mean NULL; we have to offset by some amount to
784
* ensure we can allocate the entire BO without hitting zero. The actual
785
* amount doesn't matter.
786
*/
787
#define POOL_HEAP_OFFSET 64
788
789
#define EMPTY 1
790
791
VkResult anv_CreateDescriptorPool(
792
VkDevice _device,
793
const VkDescriptorPoolCreateInfo* pCreateInfo,
794
const VkAllocationCallbacks* pAllocator,
795
VkDescriptorPool* pDescriptorPool)
796
{
797
ANV_FROM_HANDLE(anv_device, device, _device);
798
struct anv_descriptor_pool *pool;
799
800
const VkDescriptorPoolInlineUniformBlockCreateInfoEXT *inline_info =
801
vk_find_struct_const(pCreateInfo->pNext,
802
DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT);
803
804
uint32_t descriptor_count = 0;
805
uint32_t buffer_view_count = 0;
806
uint32_t descriptor_bo_size = 0;
807
for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
808
enum anv_descriptor_data desc_data =
809
anv_descriptor_data_for_type(device->physical,
810
pCreateInfo->pPoolSizes[i].type);
811
812
if (desc_data & ANV_DESCRIPTOR_BUFFER_VIEW)
813
buffer_view_count += pCreateInfo->pPoolSizes[i].descriptorCount;
814
815
unsigned desc_data_size = anv_descriptor_data_size(desc_data) *
816
pCreateInfo->pPoolSizes[i].descriptorCount;
817
818
/* Combined image sampler descriptors can take up to 3 slots if they
819
* hold a YCbCr image.
820
*/
821
if (pCreateInfo->pPoolSizes[i].type ==
822
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
823
desc_data_size *= 3;
824
825
if (pCreateInfo->pPoolSizes[i].type ==
826
VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {
827
/* Inline uniform blocks are specified to use the descriptor array
828
* size as the size in bytes of the block.
829
*/
830
assert(inline_info);
831
desc_data_size += pCreateInfo->pPoolSizes[i].descriptorCount;
832
}
833
834
descriptor_bo_size += desc_data_size;
835
836
descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
837
}
838
/* We have to align descriptor buffer allocations to 32B so that we can
839
* push descriptor buffers. This means that each descriptor buffer
840
* allocated may burn up to 32B of extra space to get the right alignment.
841
* (Technically, it's at most 28B because we're always going to start at
842
* least 4B aligned but we're being conservative here.) Allocate enough
843
* extra space that we can chop it into maxSets pieces and align each one
844
* of them to 32B.
845
*/
846
descriptor_bo_size += ANV_UBO_ALIGNMENT * pCreateInfo->maxSets;
847
/* We align inline uniform blocks to ANV_UBO_ALIGNMENT */
848
if (inline_info) {
849
descriptor_bo_size +=
850
ANV_UBO_ALIGNMENT * inline_info->maxInlineUniformBlockBindings;
851
}
852
descriptor_bo_size = ALIGN(descriptor_bo_size, 4096);
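/* Illustrative example: pool sizes adding up to 10000 bytes of descriptor
 * data with maxSets == 8 and no inline uniform blocks give
 * ALIGN(10000 + 8 * ANV_UBO_ALIGNMENT, 4096) bytes of descriptor BO, i.e.
 * the per-set alignment slack plus a round-up to a whole page.
 */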
853
854
const size_t pool_size =
855
pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
856
descriptor_count * sizeof(struct anv_descriptor) +
857
buffer_view_count * sizeof(struct anv_buffer_view);
858
const size_t total_size = sizeof(*pool) + pool_size;
859
860
pool = vk_object_alloc(&device->vk, pAllocator, total_size,
861
VK_OBJECT_TYPE_DESCRIPTOR_POOL);
862
if (!pool)
863
return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
864
865
pool->size = pool_size;
866
pool->next = 0;
867
pool->free_list = EMPTY;
868
869
if (descriptor_bo_size > 0) {
870
VkResult result = anv_device_alloc_bo(device,
871
"descriptors",
872
descriptor_bo_size,
873
ANV_BO_ALLOC_MAPPED |
874
ANV_BO_ALLOC_SNOOPED,
875
0 /* explicit_address */,
876
&pool->bo);
877
if (result != VK_SUCCESS) {
878
vk_object_free(&device->vk, pAllocator, pool);
879
return result;
880
}
881
882
util_vma_heap_init(&pool->bo_heap, POOL_HEAP_OFFSET, descriptor_bo_size);
883
} else {
884
pool->bo = NULL;
885
}
886
887
anv_state_stream_init(&pool->surface_state_stream,
888
&device->surface_state_pool, 4096);
889
pool->surface_state_free_list = NULL;
890
891
list_inithead(&pool->desc_sets);
892
893
*pDescriptorPool = anv_descriptor_pool_to_handle(pool);
894
895
return VK_SUCCESS;
896
}
897
898
void anv_DestroyDescriptorPool(
899
VkDevice _device,
900
VkDescriptorPool _pool,
901
const VkAllocationCallbacks* pAllocator)
902
{
903
ANV_FROM_HANDLE(anv_device, device, _device);
904
ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);
905
906
if (!pool)
907
return;
908
909
list_for_each_entry_safe(struct anv_descriptor_set, set,
910
&pool->desc_sets, pool_link) {
911
anv_descriptor_set_layout_unref(device, set->layout);
912
}
913
914
if (pool->bo) {
915
util_vma_heap_finish(&pool->bo_heap);
916
anv_device_release_bo(device, pool->bo);
917
}
918
anv_state_stream_finish(&pool->surface_state_stream);
919
920
vk_object_free(&device->vk, pAllocator, pool);
921
}
922
923
VkResult anv_ResetDescriptorPool(
924
VkDevice _device,
925
VkDescriptorPool descriptorPool,
926
VkDescriptorPoolResetFlags flags)
927
{
928
ANV_FROM_HANDLE(anv_device, device, _device);
929
ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);
930
931
list_for_each_entry_safe(struct anv_descriptor_set, set,
932
&pool->desc_sets, pool_link) {
933
anv_descriptor_set_layout_unref(device, set->layout);
934
}
935
list_inithead(&pool->desc_sets);
936
937
pool->next = 0;
938
pool->free_list = EMPTY;
939
940
if (pool->bo) {
941
util_vma_heap_finish(&pool->bo_heap);
942
util_vma_heap_init(&pool->bo_heap, POOL_HEAP_OFFSET, pool->bo->size);
943
}
944
945
anv_state_stream_finish(&pool->surface_state_stream);
946
anv_state_stream_init(&pool->surface_state_stream,
947
&device->surface_state_pool, 4096);
948
pool->surface_state_free_list = NULL;
949
950
return VK_SUCCESS;
951
}
952
953
struct pool_free_list_entry {
954
uint32_t next;
955
uint32_t size;
956
};
957
958
static VkResult
959
anv_descriptor_pool_alloc_set(struct anv_descriptor_pool *pool,
960
uint32_t size,
961
struct anv_descriptor_set **set)
962
{
963
if (size <= pool->size - pool->next) {
964
*set = (struct anv_descriptor_set *) (pool->data + pool->next);
965
(*set)->size = size;
966
pool->next += size;
967
return VK_SUCCESS;
968
} else {
969
struct pool_free_list_entry *entry;
970
uint32_t *link = &pool->free_list;
971
for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
972
entry = (struct pool_free_list_entry *) (pool->data + f);
973
if (size <= entry->size) {
974
*link = entry->next;
975
*set = (struct anv_descriptor_set *) entry;
976
(*set)->size = entry->size;
977
return VK_SUCCESS;
978
}
979
link = &entry->next;
980
}
981
982
if (pool->free_list != EMPTY) {
983
return vk_error(VK_ERROR_FRAGMENTED_POOL);
984
} else {
985
return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY);
986
}
987
}
988
}
989
990
static void
991
anv_descriptor_pool_free_set(struct anv_descriptor_pool *pool,
992
struct anv_descriptor_set *set)
993
{
994
/* Put the descriptor set allocation back on the free list. */
995
const uint32_t index = (char *) set - pool->data;
996
if (index + set->size == pool->next) {
997
pool->next = index;
998
} else {
999
struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
1000
entry->next = pool->free_list;
1001
entry->size = set->size;
1002
pool->free_list = (char *) entry - pool->data;
1003
}
1004
}
1005
1006
struct surface_state_free_list_entry {
1007
void *next;
1008
struct anv_state state;
1009
};
1010
1011
static struct anv_state
1012
anv_descriptor_pool_alloc_state(struct anv_descriptor_pool *pool)
1013
{
1014
struct surface_state_free_list_entry *entry =
1015
pool->surface_state_free_list;
1016
1017
if (entry) {
1018
struct anv_state state = entry->state;
1019
pool->surface_state_free_list = entry->next;
1020
assert(state.alloc_size == 64);
1021
return state;
1022
} else {
1023
return anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
1024
}
1025
}
1026
1027
static void
1028
anv_descriptor_pool_free_state(struct anv_descriptor_pool *pool,
1029
struct anv_state state)
1030
{
1031
/* Put the buffer view surface state back on the free list. */
1032
struct surface_state_free_list_entry *entry = state.map;
1033
entry->next = pool->surface_state_free_list;
1034
entry->state = state;
1035
pool->surface_state_free_list = entry;
1036
}
1037
1038
size_t
1039
anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout,
1040
uint32_t var_desc_count)
1041
{
1042
const uint32_t descriptor_count =
1043
set_layout_descriptor_count(layout, var_desc_count);
1044
const uint32_t buffer_view_count =
1045
set_layout_buffer_view_count(layout, var_desc_count);
1046
1047
return sizeof(struct anv_descriptor_set) +
1048
descriptor_count * sizeof(struct anv_descriptor) +
1049
buffer_view_count * sizeof(struct anv_buffer_view);
1050
}
1051
1052
VkResult
1053
anv_descriptor_set_create(struct anv_device *device,
1054
struct anv_descriptor_pool *pool,
1055
struct anv_descriptor_set_layout *layout,
1056
uint32_t var_desc_count,
1057
struct anv_descriptor_set **out_set)
1058
{
1059
struct anv_descriptor_set *set;
1060
const size_t size = anv_descriptor_set_layout_size(layout, var_desc_count);
1061
1062
VkResult result = anv_descriptor_pool_alloc_set(pool, size, &set);
1063
if (result != VK_SUCCESS)
1064
return result;
1065
1066
uint32_t descriptor_buffer_size =
1067
anv_descriptor_set_layout_descriptor_buffer_size(layout, var_desc_count);
1068
if (descriptor_buffer_size) {
1069
uint64_t pool_vma_offset =
1070
util_vma_heap_alloc(&pool->bo_heap, descriptor_buffer_size,
1071
ANV_UBO_ALIGNMENT);
1072
if (pool_vma_offset == 0) {
1073
anv_descriptor_pool_free_set(pool, set);
1074
return vk_error(VK_ERROR_FRAGMENTED_POOL);
1075
}
1076
assert(pool_vma_offset >= POOL_HEAP_OFFSET &&
1077
pool_vma_offset - POOL_HEAP_OFFSET <= INT32_MAX);
1078
set->desc_mem.offset = pool_vma_offset - POOL_HEAP_OFFSET;
1079
set->desc_mem.alloc_size = descriptor_buffer_size;
1080
set->desc_mem.map = pool->bo->map + set->desc_mem.offset;
1081
1082
set->desc_addr = (struct anv_address) {
1083
.bo = pool->bo,
1084
.offset = set->desc_mem.offset,
1085
};
1086
1087
enum isl_format format =
1088
anv_isl_format_for_descriptor_type(device,
1089
VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);
1090
1091
set->desc_surface_state = anv_descriptor_pool_alloc_state(pool);
1092
anv_fill_buffer_surface_state(device, set->desc_surface_state, format,
1093
ISL_SURF_USAGE_CONSTANT_BUFFER_BIT,
1094
set->desc_addr,
1095
descriptor_buffer_size, 1);
1096
} else {
1097
set->desc_mem = ANV_STATE_NULL;
1098
set->desc_addr = (struct anv_address) { .bo = NULL, .offset = 0 };
1099
set->desc_surface_state = ANV_STATE_NULL;
1100
}
1101
1102
vk_object_base_init(&device->vk, &set->base,
1103
VK_OBJECT_TYPE_DESCRIPTOR_SET);
1104
set->pool = pool;
1105
set->layout = layout;
1106
anv_descriptor_set_layout_ref(layout);
1107
1108
set->buffer_view_count =
1109
set_layout_buffer_view_count(layout, var_desc_count);
1110
set->descriptor_count =
1111
set_layout_descriptor_count(layout, var_desc_count);
1112
1113
set->buffer_views =
1114
(struct anv_buffer_view *) &set->descriptors[set->descriptor_count];
1115
1116
/* By defining the descriptors to be zero now, we can later verify that
1117
* a descriptor has not been populated with user data.
1118
*/
1119
memset(set->descriptors, 0,
1120
sizeof(struct anv_descriptor) * set->descriptor_count);
1121
1122
/* Go through and fill out immutable samplers if we have any */
1123
struct anv_descriptor *desc = set->descriptors;
1124
for (uint32_t b = 0; b < layout->binding_count; b++) {
1125
if (layout->binding[b].immutable_samplers) {
1126
for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
1127
/* The type will get changed to COMBINED_IMAGE_SAMPLER in
1128
* UpdateDescriptorSets if needed. However, if the descriptor
1129
* set has an immutable sampler, UpdateDescriptorSets may never
1130
* touch it, so we need to make sure it's 100% valid now.
1131
*
1132
* We don't need to actually provide a sampler because the helper
1133
* will always write in the immutable sampler regardless of what
1134
* is in the sampler parameter.
1135
*/
1136
VkDescriptorImageInfo info = { };
1137
anv_descriptor_set_write_image_view(device, set, &info,
1138
VK_DESCRIPTOR_TYPE_SAMPLER,
1139
b, i);
1140
}
1141
}
1142
desc += layout->binding[b].array_size;
1143
}
1144
1145
/* Allocate surface state for the buffer views. */
1146
for (uint32_t b = 0; b < set->buffer_view_count; b++) {
1147
set->buffer_views[b].surface_state =
1148
anv_descriptor_pool_alloc_state(pool);
1149
}
1150
1151
list_addtail(&set->pool_link, &pool->desc_sets);
1152
1153
*out_set = set;
1154
1155
return VK_SUCCESS;
1156
}
1157
1158
void
1159
anv_descriptor_set_destroy(struct anv_device *device,
1160
struct anv_descriptor_pool *pool,
1161
struct anv_descriptor_set *set)
1162
{
1163
anv_descriptor_set_layout_unref(device, set->layout);
1164
1165
if (set->desc_mem.alloc_size) {
1166
util_vma_heap_free(&pool->bo_heap,
1167
(uint64_t)set->desc_mem.offset + POOL_HEAP_OFFSET,
1168
set->desc_mem.alloc_size);
1169
anv_descriptor_pool_free_state(pool, set->desc_surface_state);
1170
}
1171
1172
for (uint32_t b = 0; b < set->buffer_view_count; b++)
1173
anv_descriptor_pool_free_state(pool, set->buffer_views[b].surface_state);
1174
1175
list_del(&set->pool_link);
1176
1177
vk_object_base_finish(&set->base);
1178
anv_descriptor_pool_free_set(pool, set);
1179
}
1180
1181
VkResult anv_AllocateDescriptorSets(
1182
VkDevice _device,
1183
const VkDescriptorSetAllocateInfo* pAllocateInfo,
1184
VkDescriptorSet* pDescriptorSets)
1185
{
1186
ANV_FROM_HANDLE(anv_device, device, _device);
1187
ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);
1188
1189
VkResult result = VK_SUCCESS;
1190
struct anv_descriptor_set *set;
1191
uint32_t i;
1192
1193
const VkDescriptorSetVariableDescriptorCountAllocateInfo *vdcai =
1194
vk_find_struct_const(pAllocateInfo->pNext,
1195
DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);
1196
1197
for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
1198
ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
1199
pAllocateInfo->pSetLayouts[i]);
1200
1201
uint32_t var_desc_count = 0;
1202
if (vdcai != NULL && vdcai->descriptorSetCount > 0) {
1203
assert(vdcai->descriptorSetCount == pAllocateInfo->descriptorSetCount);
1204
var_desc_count = vdcai->pDescriptorCounts[i];
1205
}
1206
1207
result = anv_descriptor_set_create(device, pool, layout,
1208
var_desc_count, &set);
1209
if (result != VK_SUCCESS)
1210
break;
1211
1212
pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
1213
}
1214
1215
if (result != VK_SUCCESS)
1216
anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
1217
i, pDescriptorSets);
1218
1219
return result;
1220
}
1221
1222
VkResult anv_FreeDescriptorSets(
1223
VkDevice _device,
1224
VkDescriptorPool descriptorPool,
1225
uint32_t count,
1226
const VkDescriptorSet* pDescriptorSets)
1227
{
1228
ANV_FROM_HANDLE(anv_device, device, _device);
1229
ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);
1230
1231
for (uint32_t i = 0; i < count; i++) {
1232
ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);
1233
1234
if (!set)
1235
continue;
1236
1237
anv_descriptor_set_destroy(device, pool, set);
1238
}
1239
1240
return VK_SUCCESS;
1241
}
1242
1243
static void
1244
anv_descriptor_set_write_image_param(uint32_t *param_desc_map,
1245
const struct brw_image_param *param)
1246
{
1247
#define WRITE_PARAM_FIELD(field, FIELD) \
1248
for (unsigned i = 0; i < ARRAY_SIZE(param->field); i++) \
1249
param_desc_map[BRW_IMAGE_PARAM_##FIELD##_OFFSET + i] = param->field[i]
1250
1251
WRITE_PARAM_FIELD(offset, OFFSET);
1252
WRITE_PARAM_FIELD(size, SIZE);
1253
WRITE_PARAM_FIELD(stride, STRIDE);
1254
WRITE_PARAM_FIELD(tiling, TILING);
1255
WRITE_PARAM_FIELD(swizzling, SWIZZLING);
1256
WRITE_PARAM_FIELD(size, SIZE);
1257
1258
#undef WRITE_PARAM_FIELD
1259
}
1260
1261
static uint32_t
1262
anv_surface_state_to_handle(struct anv_state state)
1263
{
1264
/* Bits 31:12 of the bindless surface offset in the extended message
1265
* descriptor is bits 25:6 of the byte-based address.
1266
*/
1267
assert(state.offset >= 0);
1268
uint32_t offset = state.offset;
1269
assert((offset & 0x3f) == 0 && offset < (1 << 26));
1270
return offset << 6;
1271
}
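/* Illustrative example: a surface state at byte offset 0x40 in the state pool
 * becomes handle 0x40 << 6 == 0x1000, i.e. address bits 25:6 land in bits
 * 31:12 of the extended message descriptor exactly as described above.
 */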
1272
1273
void
1274
anv_descriptor_set_write_image_view(struct anv_device *device,
1275
struct anv_descriptor_set *set,
1276
const VkDescriptorImageInfo * const info,
1277
VkDescriptorType type,
1278
uint32_t binding,
1279
uint32_t element)
1280
{
1281
const struct anv_descriptor_set_binding_layout *bind_layout =
1282
&set->layout->binding[binding];
1283
struct anv_descriptor *desc =
1284
&set->descriptors[bind_layout->descriptor_index + element];
1285
struct anv_image_view *image_view = NULL;
1286
struct anv_sampler *sampler = NULL;
1287
1288
/* We get called with just VK_DESCRIPTOR_TYPE_SAMPLER as part of descriptor
1289
* set initialization to set the bindless samplers.
1290
*/
1291
assert(type == bind_layout->type ||
1292
type == VK_DESCRIPTOR_TYPE_SAMPLER);
1293
1294
switch (type) {
1295
case VK_DESCRIPTOR_TYPE_SAMPLER:
1296
sampler = bind_layout->immutable_samplers ?
1297
bind_layout->immutable_samplers[element] :
1298
anv_sampler_from_handle(info->sampler);
1299
break;
1300
1301
case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
1302
image_view = anv_image_view_from_handle(info->imageView);
1303
sampler = bind_layout->immutable_samplers ?
1304
bind_layout->immutable_samplers[element] :
1305
anv_sampler_from_handle(info->sampler);
1306
break;
1307
1308
case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
1309
case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
1310
case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
1311
image_view = anv_image_view_from_handle(info->imageView);
1312
break;
1313
1314
default:
1315
unreachable("invalid descriptor type");
1316
}
1317
1318
*desc = (struct anv_descriptor) {
1319
.type = type,
1320
.layout = info->imageLayout,
1321
.image_view = image_view,
1322
.sampler = sampler,
1323
};
1324
1325
void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
1326
element * anv_descriptor_size(bind_layout);
1327
memset(desc_map, 0, anv_descriptor_size(bind_layout));
1328
1329
if (bind_layout->data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
1330
struct anv_sampled_image_descriptor desc_data[3];
1331
memset(desc_data, 0, sizeof(desc_data));
1332
1333
if (image_view) {
1334
for (unsigned p = 0; p < image_view->n_planes; p++) {
1335
struct anv_surface_state sstate =
1336
(desc->layout == VK_IMAGE_LAYOUT_GENERAL) ?
1337
image_view->planes[p].general_sampler_surface_state :
1338
image_view->planes[p].optimal_sampler_surface_state;
1339
desc_data[p].image = anv_surface_state_to_handle(sstate.state);
1340
}
1341
}
1342
1343
if (sampler) {
1344
for (unsigned p = 0; p < sampler->n_planes; p++)
1345
desc_data[p].sampler = sampler->bindless_state.offset + p * 32;
1346
}
1347
1348
/* We may have max_plane_count == 0 if this isn't a sampled image, but it
1349
* can be no more than the size of our array of handles.
1350
*/
1351
assert(bind_layout->max_plane_count <= ARRAY_SIZE(desc_data));
1352
memcpy(desc_map, desc_data,
1353
MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));
1354
}
1355
1356
if (image_view == NULL)
1357
return;
1358
1359
if (bind_layout->data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
1360
assert(!(bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM));
1361
assert(image_view->n_planes == 1);
1362
struct anv_storage_image_descriptor desc_data = {
1363
.read_write = anv_surface_state_to_handle(
1364
image_view->planes[0].storage_surface_state.state),
1365
.write_only = anv_surface_state_to_handle(
1366
image_view->planes[0].writeonly_storage_surface_state.state),
1367
};
1368
memcpy(desc_map, &desc_data, sizeof(desc_data));
1369
}
1370
1371
if (bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM) {
1372
/* Storage images can only ever have one plane */
1373
assert(image_view->n_planes == 1);
1374
const struct brw_image_param *image_param =
1375
&image_view->planes[0].storage_image_param;
1376
1377
anv_descriptor_set_write_image_param(desc_map, image_param);
1378
}
1379
1380
if (bind_layout->data & ANV_DESCRIPTOR_TEXTURE_SWIZZLE) {
1381
assert(!(bind_layout->data & ANV_DESCRIPTOR_SAMPLED_IMAGE));
1382
assert(image_view);
1383
struct anv_texture_swizzle_descriptor desc_data[3];
1384
memset(desc_data, 0, sizeof(desc_data));
1385
1386
for (unsigned p = 0; p < image_view->n_planes; p++) {
1387
desc_data[p] = (struct anv_texture_swizzle_descriptor) {
1388
.swizzle = {
1389
(uint8_t)image_view->planes[p].isl.swizzle.r,
1390
(uint8_t)image_view->planes[p].isl.swizzle.g,
1391
(uint8_t)image_view->planes[p].isl.swizzle.b,
1392
(uint8_t)image_view->planes[p].isl.swizzle.a,
1393
},
1394
};
1395
}
1396
memcpy(desc_map, desc_data,
1397
MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));
1398
}
1399
}
1400
1401
void
1402
anv_descriptor_set_write_buffer_view(struct anv_device *device,
1403
struct anv_descriptor_set *set,
1404
VkDescriptorType type,
1405
struct anv_buffer_view *buffer_view,
1406
uint32_t binding,
1407
uint32_t element)
1408
{
1409
const struct anv_descriptor_set_binding_layout *bind_layout =
1410
&set->layout->binding[binding];
1411
struct anv_descriptor *desc =
1412
&set->descriptors[bind_layout->descriptor_index + element];
1413
1414
assert(type == bind_layout->type);
1415
1416
void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
1417
element * anv_descriptor_size(bind_layout);
1418
1419
if (buffer_view == NULL) {
1420
*desc = (struct anv_descriptor) { .type = type, };
1421
memset(desc_map, 0, anv_descriptor_size(bind_layout));
1422
return;
1423
}
1424
1425
*desc = (struct anv_descriptor) {
1426
.type = type,
1427
.buffer_view = buffer_view,
1428
};
1429
1430
if (bind_layout->data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {
1431
struct anv_sampled_image_descriptor desc_data = {
1432
.image = anv_surface_state_to_handle(buffer_view->surface_state),
1433
};
1434
memcpy(desc_map, &desc_data, sizeof(desc_data));
1435
}
1436
1437
if (bind_layout->data & ANV_DESCRIPTOR_STORAGE_IMAGE) {
1438
assert(!(bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM));
1439
struct anv_storage_image_descriptor desc_data = {
1440
.read_write = anv_surface_state_to_handle(
1441
buffer_view->storage_surface_state),
1442
.write_only = anv_surface_state_to_handle(
1443
buffer_view->writeonly_storage_surface_state),
1444
};
1445
memcpy(desc_map, &desc_data, sizeof(desc_data));
1446
}
1447
1448
if (bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM) {
1449
anv_descriptor_set_write_image_param(desc_map,
1450
&buffer_view->storage_image_param);
1451
}
1452
}
1453
1454
void
1455
anv_descriptor_set_write_buffer(struct anv_device *device,
1456
struct anv_descriptor_set *set,
1457
struct anv_state_stream *alloc_stream,
1458
VkDescriptorType type,
1459
struct anv_buffer *buffer,
1460
uint32_t binding,
1461
uint32_t element,
1462
VkDeviceSize offset,
1463
VkDeviceSize range)
1464
{
1465
const struct anv_descriptor_set_binding_layout *bind_layout =
1466
&set->layout->binding[binding];
1467
struct anv_descriptor *desc =
1468
&set->descriptors[bind_layout->descriptor_index + element];
1469
1470
assert(type == bind_layout->type);
1471
1472
void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
1473
element * anv_descriptor_size(bind_layout);
1474
1475
if (buffer == NULL) {
1476
*desc = (struct anv_descriptor) { .type = type, };
1477
memset(desc_map, 0, anv_descriptor_size(bind_layout));
1478
return;
1479
}
1480
1481
struct anv_address bind_addr = anv_address_add(buffer->address, offset);
1482
uint64_t bind_range = anv_buffer_get_range(buffer, offset, range);
1483
1484
/* We report a bounds checking alignment of 32B for the sake of block
1485
* messages which read an entire register worth at a time.
1486
*/
1487
if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
1488
type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
1489
bind_range = align_u64(bind_range, ANV_UBO_ALIGNMENT);
1490
1491
if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
1492
type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
1493
*desc = (struct anv_descriptor) {
1494
.type = type,
1495
.buffer = buffer,
1496
.offset = offset,
1497
.range = range,
1498
};
1499
} else {
1500
assert(bind_layout->data & ANV_DESCRIPTOR_BUFFER_VIEW);
1501
struct anv_buffer_view *bview =
1502
&set->buffer_views[bind_layout->buffer_view_index + element];
1503
1504
bview->format = anv_isl_format_for_descriptor_type(device, type);
1505
bview->range = bind_range;
1506
bview->address = bind_addr;
1507
1508
/* If we're writing descriptors through a push command, we need to
1509
* allocate the surface state from the command buffer. Otherwise it will
1510
* be allocated by the descriptor pool when calling
1511
* vkAllocateDescriptorSets. */
1512
if (alloc_stream)
1513
bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);
1514
1515
isl_surf_usage_flags_t usage =
1516
(type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||
1517
type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ?
1518
ISL_SURF_USAGE_CONSTANT_BUFFER_BIT :
1519
ISL_SURF_USAGE_STORAGE_BIT;
1520
1521
anv_fill_buffer_surface_state(device, bview->surface_state,
1522
bview->format, usage,
1523
bind_addr, bind_range, 1);
1524
1525
*desc = (struct anv_descriptor) {
1526
.type = type,
1527
.buffer_view = bview,
1528
};
1529
}
1530
1531
if (bind_layout->data & ANV_DESCRIPTOR_ADDRESS_RANGE) {
1532
struct anv_address_range_descriptor desc_data = {
1533
.address = anv_address_physical(bind_addr),
1534
.range = bind_range,
1535
};
1536
memcpy(desc_map, &desc_data, sizeof(desc_data));
1537
}
1538
}
1539
1540
void
1541
anv_descriptor_set_write_inline_uniform_data(struct anv_device *device,
1542
struct anv_descriptor_set *set,
1543
uint32_t binding,
1544
const void *data,
1545
size_t offset,
1546
size_t size)
1547
{
1548
const struct anv_descriptor_set_binding_layout *bind_layout =
1549
&set->layout->binding[binding];
1550
1551
assert(bind_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM);
1552
1553
void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset;
1554
1555
memcpy(desc_map + offset, data, size);
1556
}
1557
1558
void
1559
anv_descriptor_set_write_acceleration_structure(struct anv_device *device,
1560
struct anv_descriptor_set *set,
1561
struct anv_acceleration_structure *accel,
1562
uint32_t binding,
1563
uint32_t element)
1564
{
1565
const struct anv_descriptor_set_binding_layout *bind_layout =
1566
&set->layout->binding[binding];
1567
struct anv_descriptor *desc =
1568
&set->descriptors[bind_layout->descriptor_index + element];
1569
1570
assert(bind_layout->type == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);
1571
*desc = (struct anv_descriptor) {
1572
.type = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
1573
};
1574
1575
struct anv_address_range_descriptor desc_data = { };
1576
if (accel != NULL) {
1577
desc_data.address = anv_address_physical(accel->address);
1578
desc_data.range = accel->size;
1579
}
1580
assert(anv_descriptor_size(bind_layout) == sizeof(desc_data));
1581
1582
void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +
1583
element * sizeof(desc_data);
1584
memcpy(desc_map, &desc_data, sizeof(desc_data));
1585
}
1586
1587
void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(device, set,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);

            anv_descriptor_set_write_buffer(device, set,
                                            NULL,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {
         const VkWriteDescriptorSetInlineUniformBlockEXT *inline_write =
            vk_find_struct_const(write->pNext,
                                 WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT);
         assert(inline_write->dataSize == write->descriptorCount);
         anv_descriptor_set_write_inline_uniform_data(device, set,
                                                      write->dstBinding,
                                                      inline_write->pData,
                                                      write->dstArrayElement,
                                                      inline_write->dataSize);
         break;
      }

      case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
         const VkWriteDescriptorSetAccelerationStructureKHR *accel_write =
            vk_find_struct_const(write, WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR);
         assert(accel_write->accelerationStructureCount ==
                write->descriptorCount);
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_acceleration_structure, accel,
                            accel_write->pAccelerationStructures[j]);
            anv_descriptor_set_write_acceleration_structure(device, set, accel,
                                                            write->dstBinding,
                                                            write->dstArrayElement + j);
         }
         break;
      }

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct anv_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct anv_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      if (src_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {
         assert(src_layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);
         memcpy(dst->desc_mem.map + dst_layout->descriptor_offset +
                copy->dstArrayElement,
                src->desc_mem.map + src_layout->descriptor_offset +
                copy->srcArrayElement,
                copy->descriptorCount);
      } else {
         for (uint32_t j = 0; j < copy->descriptorCount; j++)
            dst_desc[j] = src_desc[j];

         unsigned desc_size = anv_descriptor_size(src_layout);
         if (desc_size > 0) {
            assert(desc_size == anv_descriptor_size(dst_layout));
            memcpy(dst->desc_mem.map + dst_layout->descriptor_offset +
                   copy->dstArrayElement * desc_size,
                   src->desc_mem.map + src_layout->descriptor_offset +
                   copy->srcArrayElement * desc_size,
                   copy->descriptorCount * desc_size);
         }
      }
   }
}

/*
 * Descriptor update templates.
 */

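/* Walk a descriptor update template and dispatch each entry to the per-type
 * write helpers. `data` is the application-provided blob, indexed by each
 * entry's offset and stride. `alloc_stream` may be NULL (as in
 * anv_UpdateDescriptorSetWithTemplate below) or, presumably for the push
 * descriptor path, a command-buffer state stream used for transient surface
 * state allocations.
 */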
void
anv_descriptor_set_write_template(struct anv_device *device,
                                  struct anv_descriptor_set *set,
                                  struct anv_state_stream *alloc_stream,
                                  const struct anv_descriptor_update_template *template,
                                  const void *data)
{
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct anv_descriptor_template_entry *entry =
         &template->entries[i];

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(device, set,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(device, set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(device, set,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:
         anv_descriptor_set_write_inline_uniform_data(device, set,
                                                      entry->binding,
                                                      data + entry->offset,
                                                      entry->array_element,
                                                      entry->array_count);
         break;

      default:
         break;
      }
   }
}

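/* Create a descriptor update template: a flat copy of the application's
 * VkDescriptorUpdateTemplateEntry array (type, binding, array range, and the
 * offset/stride into the update data blob) that
 * anv_descriptor_set_write_template can replay later without re-parsing the
 * create info.
 *
 * Illustrative application-side usage (a minimal sketch; `dev`, `layout`,
 * `tmpl` and the `Data` struct are hypothetical):
 *
 *    struct Data { VkDescriptorBufferInfo ubo; };
 *    VkDescriptorUpdateTemplateEntry entry = {
 *       .dstBinding = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .offset = offsetof(struct Data, ubo),
 *       .stride = sizeof(VkDescriptorBufferInfo),
 *    };
 *    VkDescriptorUpdateTemplateCreateInfo info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
 *       .descriptorUpdateEntryCount = 1,
 *       .pDescriptorUpdateEntries = &entry,
 *       .templateType = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
 *       .descriptorSetLayout = layout,
 *    };
 *    vkCreateDescriptorUpdateTemplate(dev, &info, NULL, &tmpl);
 */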
VkResult anv_CreateDescriptorUpdateTemplate(
    VkDevice                                    _device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_object_alloc(&device->vk, pAllocator, size,
                              VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
   if (template == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   template->bind_point = pCreateInfo->pipelineBindPoint;

   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
      template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntry *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct anv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      anv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}

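/* Destroy a descriptor update template. NULL handles are silently ignored,
 * as the Vulkan spec requires.
 */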
void anv_DestroyDescriptorUpdateTemplate(
    VkDevice                                    _device,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   if (!template)
      return;

   vk_object_free(&device->vk, pAllocator, template);
}

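/* vkUpdateDescriptorSetWithTemplate entry point: replay the template against
 * `set` using the application-provided data blob. No state stream is passed,
 * so buffer descriptors use the set's own surface state allocations rather
 * than a transient command-buffer stream.
 */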
void anv_UpdateDescriptorSetWithTemplate(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   anv_descriptor_set_write_template(device, set, NULL, template, pData);
}