Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
PojavLauncherTeam
GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/virtio/vulkan/vn_device.c
4560 views
1
/*
2
* Copyright 2019 Google LLC
3
* SPDX-License-Identifier: MIT
4
*
5
* based in part on anv and radv which are:
6
* Copyright © 2015 Intel Corporation
7
* Copyright © 2016 Red Hat.
8
* Copyright © 2016 Bas Nieuwenhuizen
9
*/
10
11
#include "vn_device.h"
12
13
#include <stdio.h>
14
15
#include "git_sha1.h"
16
#include "util/driconf.h"
17
#include "util/mesa-sha1.h"
18
#include "venus-protocol/vn_protocol_driver_device.h"
19
#include "venus-protocol/vn_protocol_driver_info.h"
20
#include "venus-protocol/vn_protocol_driver_instance.h"
21
#include "venus-protocol/vn_protocol_driver_transport.h"
22
23
#include "vn_android.h"
24
#include "vn_device_memory.h"
25
#include "vn_icd.h"
26
#include "vn_queue.h"
27
#include "vn_renderer.h"
28
29
/* require and request at least Vulkan 1.1 at both instance and device levels
 */
#define VN_MIN_RENDERER_VERSION VK_API_VERSION_1_1

/* max advertised version at both instance and device levels */
#ifdef ANDROID
/* NOTE(review): Android builds cap the advertised version at 1.1 while other
 * platforms advertise 1.2 — presumably a platform/loader limitation; confirm.
 */
#define VN_MAX_API_VERSION VK_MAKE_VERSION(1, 1, VK_HEADER_VERSION)
#else
#define VN_MAX_API_VERSION VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION)
#endif
39
40
/* Map an extension member of an extension table to its array index: take the
 * byte offset of member `ext` inside `tbl`, reinterpret that location as an
 * element of the table's `extensions` bool array, and subtract the array base
 * to obtain the index.
 */
#define VN_EXTENSION_TABLE_INDEX(tbl, ext) \
   ((const bool *)((const void *)(&(tbl)) + \
                   offsetof(__typeof__(tbl), ext)) - \
    (tbl).extensions)
44
45
/*
 * Instance extensions add instance-level or physical-device-level
 * functionalities. It seems renderer support is either unnecessary or
 * optional. We should be able to advertise them or lie about them locally.
 */
static const struct vk_instance_extension_table
   vn_instance_supported_extensions = {
      /* promoted to VK_VERSION_1_1 */
      .KHR_device_group_creation = true,
      .KHR_external_fence_capabilities = true,
      .KHR_external_memory_capabilities = true,
      .KHR_external_semaphore_capabilities = true,
      .KHR_get_physical_device_properties2 = true,

#ifdef VN_USE_WSI_PLATFORM
      /* surface extensions, only when a WSI platform is built in */
      .KHR_get_surface_capabilities2 = true,
      .KHR_surface = true,
      .KHR_surface_protected_capabilities = true,
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
      .KHR_wayland_surface = true,
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
      .KHR_xcb_surface = true,
#endif
#ifdef VK_USE_PLATFORM_XLIB_KHR
      .KHR_xlib_surface = true,
#endif
};
74
75
/* driconf options recognized by this driver (consumed by the WSI code) */
static const driOptionDescription vn_dri_options[] = {
   /* clang-format off */
   DRI_CONF_SECTION_PERFORMANCE
      DRI_CONF_VK_X11_ENSURE_MIN_IMAGE_COUNT(false)
      DRI_CONF_VK_X11_OVERRIDE_MIN_IMAGE_COUNT(0)
      DRI_CONF_VK_X11_STRICT_IMAGE_COUNT(false)
   DRI_CONF_SECTION_END
   DRI_CONF_SECTION_DEBUG
      DRI_CONF_VK_WSI_FORCE_BGRA8_UNORM_FIRST(false)
   DRI_CONF_SECTION_END
   /* clang-format on */
};
87
88
static VkResult
89
vn_instance_init_renderer_versions(struct vn_instance *instance)
90
{
91
uint32_t instance_version = 0;
92
VkResult result =
93
vn_call_vkEnumerateInstanceVersion(instance, &instance_version);
94
if (result != VK_SUCCESS) {
95
if (VN_DEBUG(INIT))
96
vn_log(instance, "failed to enumerate renderer instance version");
97
return result;
98
}
99
100
if (instance_version < VN_MIN_RENDERER_VERSION) {
101
if (VN_DEBUG(INIT)) {
102
vn_log(instance, "unsupported renderer instance version %d.%d",
103
VK_VERSION_MAJOR(instance_version),
104
VK_VERSION_MINOR(instance_version));
105
}
106
return VK_ERROR_INITIALIZATION_FAILED;
107
}
108
109
if (VN_DEBUG(INIT)) {
110
vn_log(instance, "renderer instance version %d.%d.%d",
111
VK_VERSION_MAJOR(instance_version),
112
VK_VERSION_MINOR(instance_version),
113
VK_VERSION_PATCH(instance_version));
114
}
115
116
/* request at least VN_MIN_RENDERER_VERSION internally */
117
instance->renderer_api_version =
118
MAX2(instance->base.base.app_info.api_version, VN_MIN_RENDERER_VERSION);
119
120
/* instance version for internal use is capped */
121
instance_version = MIN3(instance_version, instance->renderer_api_version,
122
instance->renderer_info.vk_xml_version);
123
assert(instance_version >= VN_MIN_RENDERER_VERSION);
124
125
instance->renderer_version = instance_version;
126
127
return VK_SUCCESS;
128
}
129
130
/* Create the command ring shared between the driver and the renderer.
 *
 * Allocates and maps the ring shmem, initializes the local ring bookkeeping,
 * registers the ring with the renderer via vkCreateRingMESA, and sets up the
 * indirect upload encoder used for large command streams.
 *
 * Returns VK_ERROR_OUT_OF_HOST_MEMORY when the ring shmem cannot be
 * allocated; VK_SUCCESS otherwise.
 */
static VkResult
vn_instance_init_ring(struct vn_instance *instance)
{
   /* 32-bit seqno for renderer roundtrips */
   const size_t extra_size = sizeof(uint32_t);
   struct vn_ring_layout layout;
   vn_ring_get_layout(extra_size, &layout);

   instance->ring.shmem =
      vn_renderer_shmem_create(instance->renderer, layout.shmem_size);
   if (!instance->ring.shmem) {
      if (VN_DEBUG(INIT))
         vn_log(instance, "failed to allocate/map ring shmem");
      return VK_ERROR_OUT_OF_HOST_MEMORY;
   }

   mtx_init(&instance->ring.mutex, mtx_plain);

   struct vn_ring *ring = &instance->ring.ring;
   vn_ring_init(ring, instance->renderer, &layout,
                instance->ring.shmem->mmap_ptr);

   /* the ring's address doubles as a process-unique id for the renderer */
   instance->ring.id = (uintptr_t)ring;

   /* describe the ring layout to the renderer */
   const struct VkRingCreateInfoMESA info = {
      .sType = VK_STRUCTURE_TYPE_RING_CREATE_INFO_MESA,
      .resourceId = instance->ring.shmem->res_id,
      .size = layout.shmem_size,
      /* 50ull * 1000 * 1000 — presumably nanoseconds (50 ms); confirm against
       * the venus protocol spec
       */
      .idleTimeout = 50ull * 1000 * 1000,
      .headOffset = layout.head_offset,
      .tailOffset = layout.tail_offset,
      .statusOffset = layout.status_offset,
      .bufferOffset = layout.buffer_offset,
      .bufferSize = layout.buffer_size,
      .extraOffset = layout.extra_offset,
      .extraSize = layout.extra_size,
   };

   /* encode vkCreateRingMESA into a small local buffer and submit it outside
    * the ring (the ring does not exist on the renderer side yet)
    */
   uint32_t create_ring_data[64];
   struct vn_cs_encoder local_enc = VN_CS_ENCODER_INITIALIZER_LOCAL(
      create_ring_data, sizeof(create_ring_data));
   vn_encode_vkCreateRingMESA(&local_enc, 0, instance->ring.id, &info);
   vn_renderer_submit_simple(instance->renderer, create_ring_data,
                             vn_cs_encoder_get_len(&local_enc));

   /* indirect upload encoder with 1 MiB buffers */
   vn_cs_encoder_init_indirect(&instance->ring.upload, instance,
                               1 * 1024 * 1024);

   return VK_SUCCESS;
}
180
181
static void
182
vn_instance_init_experimental_features(struct vn_instance *instance)
183
{
184
if (instance->renderer_info.vk_mesa_venus_protocol_spec_version !=
185
100000) {
186
if (VN_DEBUG(INIT))
187
vn_log(instance, "renderer supports no experimental features");
188
return;
189
}
190
191
size_t size = sizeof(instance->experimental);
192
vn_call_vkGetVenusExperimentalFeatureData100000MESA(
193
instance, &size, &instance->experimental);
194
if (VN_DEBUG(INIT)) {
195
vn_log(instance,
196
"VkVenusExperimentalFeatures100000MESA is as below:"
197
"\n\tmemoryResourceAllocationSize = %u"
198
"\n\tglobalFencing = %u",
199
instance->experimental.memoryResourceAllocationSize,
200
instance->experimental.globalFencing);
201
}
202
}
203
204
/* Connect to the renderer and negotiate protocol versions.
 *
 * Creates the renderer connection, initializes roundtrip state, fetches the
 * renderer info, and clamps the renderer-reported versions (vk.xml and the
 * protocol extension spec versions) to what this driver build was generated
 * against.  Fails when the wire format does not match exactly or the
 * renderer's vk.xml version is below VN_MIN_RENDERER_VERSION.
 */
static VkResult
vn_instance_init_renderer(struct vn_instance *instance)
{
   const VkAllocationCallbacks *alloc = &instance->base.base.alloc;

   VkResult result = vn_renderer_create(instance, alloc, &instance->renderer);
   if (result != VK_SUCCESS)
      return result;

   mtx_init(&instance->roundtrip_mutex, mtx_plain);
   /* start at 1: the ring's extra seqno starts at 0, so a seqno of 0 would
    * already look completed to vn_instance_wait_roundtrip()
    */
   instance->roundtrip_next = 1;

   vn_renderer_get_info(instance->renderer, &instance->renderer_info);

   /* the wire format must match exactly */
   uint32_t version = vn_info_wire_format_version();
   if (instance->renderer_info.wire_format_version != version) {
      if (VN_DEBUG(INIT)) {
         vn_log(instance, "wire format version %d != %d",
                instance->renderer_info.wire_format_version, version);
      }
      return VK_ERROR_INITIALIZATION_FAILED;
   }

   /* clamp the renderer's vk.xml version to ours, and reject renderers below
    * the minimum
    */
   version = vn_info_vk_xml_version();
   if (instance->renderer_info.vk_xml_version > version)
      instance->renderer_info.vk_xml_version = version;
   if (instance->renderer_info.vk_xml_version < VN_MIN_RENDERER_VERSION) {
      if (VN_DEBUG(INIT)) {
         vn_log(instance, "vk xml version %d.%d.%d < %d.%d.%d",
                VK_VERSION_MAJOR(instance->renderer_info.vk_xml_version),
                VK_VERSION_MINOR(instance->renderer_info.vk_xml_version),
                VK_VERSION_PATCH(instance->renderer_info.vk_xml_version),
                VK_VERSION_MAJOR(VN_MIN_RENDERER_VERSION),
                VK_VERSION_MINOR(VN_MIN_RENDERER_VERSION),
                VK_VERSION_PATCH(VN_MIN_RENDERER_VERSION));
      }
      return VK_ERROR_INITIALIZATION_FAILED;
   }

   /* clamp protocol extension spec versions to what we were generated with */
   version = vn_info_extension_spec_version("VK_EXT_command_serialization");
   if (instance->renderer_info.vk_ext_command_serialization_spec_version >
       version) {
      instance->renderer_info.vk_ext_command_serialization_spec_version =
         version;
   }

   version = vn_info_extension_spec_version("VK_MESA_venus_protocol");
   if (instance->renderer_info.vk_mesa_venus_protocol_spec_version >
       version) {
      instance->renderer_info.vk_mesa_venus_protocol_spec_version = version;
   }

   if (VN_DEBUG(INIT)) {
      vn_log(instance, "connected to renderer");
      vn_log(instance, "wire format version %d",
             instance->renderer_info.wire_format_version);
      vn_log(instance, "vk xml version %d.%d.%d",
             VK_VERSION_MAJOR(instance->renderer_info.vk_xml_version),
             VK_VERSION_MINOR(instance->renderer_info.vk_xml_version),
             VK_VERSION_PATCH(instance->renderer_info.vk_xml_version));
      vn_log(
         instance, "VK_EXT_command_serialization spec version %d",
         instance->renderer_info.vk_ext_command_serialization_spec_version);
      vn_log(instance, "VK_MESA_venus_protocol spec version %d",
             instance->renderer_info.vk_mesa_venus_protocol_spec_version);
   }

   return VK_SUCCESS;
}
273
274
VkResult
275
vn_instance_submit_roundtrip(struct vn_instance *instance,
276
uint32_t *roundtrip_seqno)
277
{
278
uint32_t write_ring_extra_data[8];
279
struct vn_cs_encoder local_enc = VN_CS_ENCODER_INITIALIZER_LOCAL(
280
write_ring_extra_data, sizeof(write_ring_extra_data));
281
282
/* submit a vkWriteRingExtraMESA through the renderer */
283
mtx_lock(&instance->roundtrip_mutex);
284
const uint32_t seqno = instance->roundtrip_next++;
285
vn_encode_vkWriteRingExtraMESA(&local_enc, 0, instance->ring.id, 0, seqno);
286
VkResult result =
287
vn_renderer_submit_simple(instance->renderer, write_ring_extra_data,
288
vn_cs_encoder_get_len(&local_enc));
289
mtx_unlock(&instance->roundtrip_mutex);
290
291
*roundtrip_seqno = seqno;
292
return result;
293
}
294
295
void
296
vn_instance_wait_roundtrip(struct vn_instance *instance,
297
uint32_t roundtrip_seqno)
298
{
299
const struct vn_ring *ring = &instance->ring.ring;
300
const volatile atomic_uint *ptr = ring->shared.extra;
301
uint32_t iter = 0;
302
do {
303
const uint32_t cur = atomic_load_explicit(ptr, memory_order_acquire);
304
if (cur >= roundtrip_seqno || roundtrip_seqno - cur >= INT32_MAX)
305
break;
306
vn_relax(&iter);
307
} while (true);
308
}
309
310
/* A prepared ring submission.  cs_data points at the data to be copied into
 * the ring: the command stream itself (direct), or an encoded
 * vkExecuteCommandStreamsMESA referencing it (indirect).  cs_data may alias
 * local_cs_data, a cs buffer, or a heap allocation (see
 * vn_instance_submission_cleanup).
 */
struct vn_instance_submission {
   uint32_t local_cs_data[64]; /* inline storage for small submissions */

   void *cs_data;
   size_t cs_size;
   struct vn_ring_submit *submit;
};
317
318
/* Build an indirect submission: encode a vkExecuteCommandStreamsMESA that
 * references the cs buffers by renderer resource id instead of copying the
 * command stream data into the ring.
 *
 * Returns the encoded data (submit->local_cs_data or a heap allocation,
 * freed later by vn_instance_submission_cleanup) and stores its length in
 * *cs_size, or returns NULL on allocation failure.
 */
static void *
vn_instance_submission_indirect_cs(struct vn_instance_submission *submit,
                                   const struct vn_cs_encoder *cs,
                                   size_t *cs_size)
{
   /* descriptor array: inline for up to 8 buffers, heap beyond that */
   VkCommandStreamDescriptionMESA local_descs[8];
   VkCommandStreamDescriptionMESA *descs = local_descs;
   if (cs->buffer_count > ARRAY_SIZE(local_descs)) {
      descs =
         malloc(sizeof(VkCommandStreamDescriptionMESA) * cs->buffer_count);
      if (!descs)
         return NULL;
   }

   /* describe each non-empty buffer by resource id/offset/size */
   uint32_t desc_count = 0;
   for (uint32_t i = 0; i < cs->buffer_count; i++) {
      const struct vn_cs_encoder_buffer *buf = &cs->buffers[i];
      if (buf->committed_size) {
         descs[desc_count++] = (VkCommandStreamDescriptionMESA){
            .resourceId = buf->shmem->res_id,
            .offset = buf->offset,
            .size = buf->committed_size,
         };
      }
   }

   const size_t exec_size = vn_sizeof_vkExecuteCommandStreamsMESA(
      desc_count, descs, NULL, 0, NULL, 0);
   void *exec_data = submit->local_cs_data;
   if (exec_size > sizeof(submit->local_cs_data)) {
      exec_data = malloc(exec_size);
      if (!exec_data)
         goto out; /* returns NULL; descs freed below */
   }

   struct vn_cs_encoder local_enc =
      VN_CS_ENCODER_INITIALIZER_LOCAL(exec_data, exec_size);
   vn_encode_vkExecuteCommandStreamsMESA(&local_enc, 0, desc_count, descs,
                                         NULL, 0, NULL, 0);

   *cs_size = vn_cs_encoder_get_len(&local_enc);

out:
   if (descs != local_descs)
      free(descs);

   return exec_data;
}
366
367
static void *
368
vn_instance_submission_direct_cs(struct vn_instance_submission *submit,
369
const struct vn_cs_encoder *cs,
370
size_t *cs_size)
371
{
372
if (cs->buffer_count == 1) {
373
*cs_size = cs->buffers[0].committed_size;
374
return cs->buffers[0].base;
375
}
376
377
assert(vn_cs_encoder_get_len(cs) <= sizeof(submit->local_cs_data));
378
void *dst = submit->local_cs_data;
379
for (uint32_t i = 0; i < cs->buffer_count; i++) {
380
const struct vn_cs_encoder_buffer *buf = &cs->buffers[i];
381
memcpy(dst, buf->base, buf->committed_size);
382
dst += buf->committed_size;
383
}
384
385
*cs_size = dst - (void *)submit->local_cs_data;
386
return submit->local_cs_data;
387
}
388
389
/* Acquire a vn_ring_submit and attach references to every shmem the renderer
 * may access while the submission is pending: the cs buffers (indirect only;
 * a direct submission copies its data into the ring) plus the optional
 * extra_shmem, stored last.  Returns NULL when no submit can be acquired.
 */
static struct vn_ring_submit *
vn_instance_submission_get_ring_submit(struct vn_ring *ring,
                                       const struct vn_cs_encoder *cs,
                                       struct vn_renderer_shmem *extra_shmem,
                                       bool direct)
{
   const uint32_t shmem_count =
      (direct ? 0 : cs->buffer_count) + (extra_shmem ? 1 : 0);
   struct vn_ring_submit *submit = vn_ring_get_submit(ring, shmem_count);
   if (!submit)
      return NULL;

   submit->shmem_count = shmem_count;
   if (!direct) {
      for (uint32_t i = 0; i < cs->buffer_count; i++) {
         submit->shmems[i] =
            vn_renderer_shmem_ref(ring->renderer, cs->buffers[i].shmem);
      }
   }
   if (extra_shmem) {
      submit->shmems[shmem_count - 1] =
         vn_renderer_shmem_ref(ring->renderer, extra_shmem);
   }

   return submit;
}
415
416
static void
417
vn_instance_submission_cleanup(struct vn_instance_submission *submit,
418
const struct vn_cs_encoder *cs)
419
{
420
if (submit->cs_data != submit->local_cs_data &&
421
submit->cs_data != cs->buffers[0].base)
422
free(submit->cs_data);
423
}
424
425
static VkResult
426
vn_instance_submission_prepare(struct vn_instance_submission *submit,
427
const struct vn_cs_encoder *cs,
428
struct vn_ring *ring,
429
struct vn_renderer_shmem *extra_shmem,
430
bool direct)
431
{
432
if (direct) {
433
submit->cs_data =
434
vn_instance_submission_direct_cs(submit, cs, &submit->cs_size);
435
} else {
436
submit->cs_data =
437
vn_instance_submission_indirect_cs(submit, cs, &submit->cs_size);
438
}
439
if (!submit->cs_data)
440
return VK_ERROR_OUT_OF_HOST_MEMORY;
441
442
submit->submit =
443
vn_instance_submission_get_ring_submit(ring, cs, extra_shmem, direct);
444
if (!submit->submit) {
445
vn_instance_submission_cleanup(submit, cs);
446
return VK_ERROR_OUT_OF_HOST_MEMORY;
447
}
448
449
return VK_SUCCESS;
450
}
451
452
static bool
453
vn_instance_submission_can_direct(const struct vn_cs_encoder *cs)
454
{
455
struct vn_instance_submission submit;
456
return vn_cs_encoder_get_len(cs) <= sizeof(submit.local_cs_data);
457
}
458
459
/* Copy a local command stream into the instance's indirect upload encoder so
 * it resides in renderer-visible shmem.  Caller must hold ring.mutex.
 * Returns the upload encoder, or NULL when space cannot be reserved.
 */
static struct vn_cs_encoder *
vn_instance_ring_cs_upload_locked(struct vn_instance *instance,
                                  const struct vn_cs_encoder *cs)
{
   /* only local, single-buffer streams are uploaded this way */
   assert(!cs->indirect && cs->buffer_count == 1);
   const void *cs_data = cs->buffers[0].base;
   const size_t cs_size = cs->total_committed_size;
   assert(cs_size == vn_cs_encoder_get_len(cs));

   struct vn_cs_encoder *upload = &instance->ring.upload;
   vn_cs_encoder_reset(upload);

   if (!vn_cs_encoder_reserve(upload, cs_size))
      return NULL;

   vn_cs_encoder_write(upload, cs_size, cs_data, cs_size);
   vn_cs_encoder_commit(upload);
   /* make sure the renderer is done with the upload buffer before reuse —
    * presumably a roundtrip was recorded when the buffer was recycled;
    * confirm in the encoder implementation
    */
   vn_instance_wait_roundtrip(instance, upload->current_buffer_roundtrip);

   return upload;
}
480
481
/* Submit a command stream to the ring.  Caller must hold ring.mutex.
 *
 * Streams small enough for inline storage are copied into the ring directly;
 * larger local streams are first uploaded to renderer-visible shmem and
 * submitted indirectly.  When vn_ring_submit reports the ring needs a kick,
 * the renderer is notified via vkNotifyRingMESA.  The ring seqno of the
 * submission is returned through ring_seqno when non-NULL.
 */
static VkResult
vn_instance_ring_submit_locked(struct vn_instance *instance,
                               const struct vn_cs_encoder *cs,
                               struct vn_renderer_shmem *extra_shmem,
                               uint32_t *ring_seqno)
{
   struct vn_ring *ring = &instance->ring.ring;

   const bool direct = vn_instance_submission_can_direct(cs);
   if (!direct && !cs->indirect) {
      /* too large for direct submission: move it into shmem first */
      cs = vn_instance_ring_cs_upload_locked(instance, cs);
      if (!cs)
         return VK_ERROR_OUT_OF_HOST_MEMORY;
      assert(cs->indirect);
   }

   struct vn_instance_submission submit;
   VkResult result =
      vn_instance_submission_prepare(&submit, cs, ring, extra_shmem, direct);
   if (result != VK_SUCCESS)
      return result;

   uint32_t seqno;
   const bool notify = vn_ring_submit(ring, submit.submit, submit.cs_data,
                                      submit.cs_size, &seqno);
   if (notify) {
      /* wake the renderer-side ring thread */
      uint32_t notify_ring_data[8];
      struct vn_cs_encoder local_enc = VN_CS_ENCODER_INITIALIZER_LOCAL(
         notify_ring_data, sizeof(notify_ring_data));
      vn_encode_vkNotifyRingMESA(&local_enc, 0, instance->ring.id, seqno, 0);
      vn_renderer_submit_simple(instance->renderer, notify_ring_data,
                                vn_cs_encoder_get_len(&local_enc));
   }

   vn_instance_submission_cleanup(&submit, cs);

   if (ring_seqno)
      *ring_seqno = seqno;

   return VK_SUCCESS;
}
522
523
VkResult
524
vn_instance_ring_submit(struct vn_instance *instance,
525
const struct vn_cs_encoder *cs)
526
{
527
mtx_lock(&instance->ring.mutex);
528
VkResult result = vn_instance_ring_submit_locked(instance, cs, NULL, NULL);
529
mtx_unlock(&instance->ring.mutex);
530
531
return result;
532
}
533
534
static bool
535
vn_instance_grow_reply_shmem_locked(struct vn_instance *instance, size_t size)
536
{
537
const size_t min_shmem_size = 1 << 20;
538
539
size_t shmem_size =
540
instance->reply.size ? instance->reply.size : min_shmem_size;
541
while (shmem_size < size) {
542
shmem_size <<= 1;
543
if (!shmem_size)
544
return false;
545
}
546
547
struct vn_renderer_shmem *shmem =
548
vn_renderer_shmem_create(instance->renderer, shmem_size);
549
if (!shmem)
550
return false;
551
552
if (instance->reply.shmem)
553
vn_renderer_shmem_unref(instance->renderer, instance->reply.shmem);
554
instance->reply.shmem = shmem;
555
instance->reply.size = shmem_size;
556
instance->reply.used = 0;
557
instance->reply.ptr = shmem->mmap_ptr;
558
559
return true;
560
}
561
562
/* Reserve `size` bytes in the reply shmem and return a reference to it, with
 * *ptr pointing at the reserved region.  Grows the shmem (and tells the
 * renderer about the new reply stream) when the current one is too small.
 * Caller must hold ring.mutex.  Returns NULL when growing fails.
 */
static struct vn_renderer_shmem *
vn_instance_get_reply_shmem_locked(struct vn_instance *instance,
                                   size_t size,
                                   void **ptr)
{
   if (unlikely(instance->reply.used + size > instance->reply.size)) {
      if (!vn_instance_grow_reply_shmem_locked(instance, size))
         return NULL;

      /* point the renderer at the new reply command stream */
      uint32_t set_reply_command_stream_data[16];
      struct vn_cs_encoder local_enc = VN_CS_ENCODER_INITIALIZER_LOCAL(
         set_reply_command_stream_data,
         sizeof(set_reply_command_stream_data));
      const struct VkCommandStreamDescriptionMESA stream = {
         .resourceId = instance->reply.shmem->res_id,
         .size = instance->reply.size,
      };
      vn_encode_vkSetReplyCommandStreamMESA(&local_enc, 0, &stream);
      vn_cs_encoder_commit(&local_enc);

      /* NOTE(review): the roundtrip presumably ensures the renderer is done
       * with the old reply shmem before switching — confirm
       */
      vn_instance_roundtrip(instance);
      vn_instance_ring_submit_locked(instance, &local_enc, NULL, NULL);
   }

   /* TODO avoid this seek command and go lock-free? */
   uint32_t seek_reply_command_stream_data[8];
   struct vn_cs_encoder local_enc = VN_CS_ENCODER_INITIALIZER_LOCAL(
      seek_reply_command_stream_data, sizeof(seek_reply_command_stream_data));
   const size_t offset = instance->reply.used;
   vn_encode_vkSeekReplyCommandStreamMESA(&local_enc, 0, offset);
   vn_cs_encoder_commit(&local_enc);
   vn_instance_ring_submit_locked(instance, &local_enc, NULL, NULL);

   *ptr = instance->reply.ptr + offset;
   instance->reply.used += size;

   return vn_renderer_shmem_ref(instance->renderer, instance->reply.shmem);
}
600
601
/* Submit an encoded command and, when submit->reply_size is non-zero, block
 * until the renderer has produced the reply.
 *
 * On failure (empty encoder or reply shmem exhaustion) the command is
 * dropped and ring.command_dropped is incremented; submit->reply_shmem is
 * left NULL so the caller can detect the drop.  The reply decoder is always
 * initialized (with a NULL pointer when no reply was requested).
 */
void
vn_instance_submit_command(struct vn_instance *instance,
                           struct vn_instance_submit_command *submit)
{
   void *reply_ptr = NULL;
   submit->reply_shmem = NULL;

   mtx_lock(&instance->ring.mutex);

   if (vn_cs_encoder_is_empty(&submit->command))
      goto fail;
   vn_cs_encoder_commit(&submit->command);

   if (submit->reply_size) {
      /* reserve reply space before submitting so the renderer can write
       * the reply into it
       */
      submit->reply_shmem = vn_instance_get_reply_shmem_locked(
         instance, submit->reply_size, &reply_ptr);
      if (!submit->reply_shmem)
         goto fail;
   }

   uint32_t ring_seqno;
   VkResult result = vn_instance_ring_submit_locked(
      instance, &submit->command, submit->reply_shmem, &ring_seqno);

   mtx_unlock(&instance->ring.mutex);

   submit->reply = VN_CS_DECODER_INITIALIZER(reply_ptr, submit->reply_size);

   /* wait for the renderer to process the command before decoding */
   if (submit->reply_size && result == VK_SUCCESS)
      vn_ring_wait(&instance->ring.ring, ring_seqno);

   return;

fail:
   instance->ring.command_dropped++;
   mtx_unlock(&instance->ring.mutex);
}
638
639
static struct vn_physical_device *
640
vn_instance_find_physical_device(struct vn_instance *instance,
641
vn_object_id id)
642
{
643
for (uint32_t i = 0; i < instance->physical_device_count; i++) {
644
if (instance->physical_devices[i].base.id == id)
645
return &instance->physical_devices[i];
646
}
647
return NULL;
648
}
649
650
/* Query the renderer's physical-device features.
 *
 * On a Vulkan 1.2+ renderer, the core VulkanXXFeatures structs are chained
 * onto features.pNext and filled directly by the renderer.  On a 1.1
 * renderer, the individual promoted-extension feature structs are queried
 * instead (via the local_feats chain) and then manually folded into
 * vulkan_1_1_features/vulkan_1_2_features, honoring which extensions the
 * renderer actually exposes.
 */
static void
vn_physical_device_init_features(struct vn_physical_device *physical_dev)
{
   struct vn_instance *instance = physical_dev->instance;
   /* scratch structs for querying promoted-extension features on a 1.1
    * renderer; their values are copied into the core structs below
    */
   struct {
      /* Vulkan 1.1 */
      VkPhysicalDevice16BitStorageFeatures sixteen_bit_storage;
      VkPhysicalDeviceMultiviewFeatures multiview;
      VkPhysicalDeviceVariablePointersFeatures variable_pointers;
      VkPhysicalDeviceProtectedMemoryFeatures protected_memory;
      VkPhysicalDeviceSamplerYcbcrConversionFeatures sampler_ycbcr_conversion;
      VkPhysicalDeviceShaderDrawParametersFeatures shader_draw_parameters;

      /* Vulkan 1.2 */
      VkPhysicalDevice8BitStorageFeatures eight_bit_storage;
      VkPhysicalDeviceShaderAtomicInt64Features shader_atomic_int64;
      VkPhysicalDeviceShaderFloat16Int8Features shader_float16_int8;
      VkPhysicalDeviceDescriptorIndexingFeatures descriptor_indexing;
      VkPhysicalDeviceScalarBlockLayoutFeatures scalar_block_layout;
      VkPhysicalDeviceImagelessFramebufferFeatures imageless_framebuffer;
      VkPhysicalDeviceUniformBufferStandardLayoutFeatures
         uniform_buffer_standard_layout;
      VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures
         shader_subgroup_extended_types;
      VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures
         separate_depth_stencil_layouts;
      VkPhysicalDeviceHostQueryResetFeatures host_query_reset;
      VkPhysicalDeviceTimelineSemaphoreFeatures timeline_semaphore;
      VkPhysicalDeviceBufferDeviceAddressFeatures buffer_device_address;
      VkPhysicalDeviceVulkanMemoryModelFeatures vulkan_memory_model;
   } local_feats;

   physical_dev->features.sType =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
   if (physical_dev->renderer_version >= VK_API_VERSION_1_2) {
      /* chain the core 1.1/1.2 feature structs; the renderer fills them */
      physical_dev->features.pNext = &physical_dev->vulkan_1_1_features;

      physical_dev->vulkan_1_1_features.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES;
      physical_dev->vulkan_1_1_features.pNext =
         &physical_dev->vulkan_1_2_features;
      physical_dev->vulkan_1_2_features.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES;
      physical_dev->vulkan_1_2_features.pNext = NULL;
   } else {
      /* 1.1 renderer: chain every promoted-extension struct instead */
      physical_dev->features.pNext = &local_feats.sixteen_bit_storage;

      local_feats.sixteen_bit_storage.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES;
      local_feats.sixteen_bit_storage.pNext = &local_feats.multiview;
      local_feats.multiview.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES;
      local_feats.multiview.pNext = &local_feats.variable_pointers;
      local_feats.variable_pointers.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES;
      local_feats.variable_pointers.pNext = &local_feats.protected_memory;
      local_feats.protected_memory.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES;
      local_feats.protected_memory.pNext =
         &local_feats.sampler_ycbcr_conversion;
      local_feats.sampler_ycbcr_conversion.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES;
      local_feats.sampler_ycbcr_conversion.pNext =
         &local_feats.shader_draw_parameters;
      local_feats.shader_draw_parameters.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES;
      local_feats.shader_draw_parameters.pNext =
         &local_feats.eight_bit_storage;

      local_feats.eight_bit_storage.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES;
      local_feats.eight_bit_storage.pNext = &local_feats.shader_atomic_int64;
      local_feats.shader_atomic_int64.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES;
      local_feats.shader_atomic_int64.pNext =
         &local_feats.shader_float16_int8;
      local_feats.shader_float16_int8.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES;
      local_feats.shader_float16_int8.pNext =
         &local_feats.descriptor_indexing;
      local_feats.descriptor_indexing.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES;
      local_feats.descriptor_indexing.pNext =
         &local_feats.scalar_block_layout;
      local_feats.scalar_block_layout.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES;
      local_feats.scalar_block_layout.pNext =
         &local_feats.imageless_framebuffer;
      local_feats.imageless_framebuffer.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES;
      local_feats.imageless_framebuffer.pNext =
         &local_feats.uniform_buffer_standard_layout;
      local_feats.uniform_buffer_standard_layout.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES;
      local_feats.uniform_buffer_standard_layout.pNext =
         &local_feats.shader_subgroup_extended_types;
      local_feats.shader_subgroup_extended_types.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES;
      local_feats.shader_subgroup_extended_types.pNext =
         &local_feats.separate_depth_stencil_layouts;
      local_feats.separate_depth_stencil_layouts.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES;
      local_feats.separate_depth_stencil_layouts.pNext =
         &local_feats.host_query_reset;
      local_feats.host_query_reset.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES;
      local_feats.host_query_reset.pNext = &local_feats.timeline_semaphore;
      local_feats.timeline_semaphore.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES;
      local_feats.timeline_semaphore.pNext =
         &local_feats.buffer_device_address;
      local_feats.buffer_device_address.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES;
      local_feats.buffer_device_address.pNext =
         &local_feats.vulkan_memory_model;
      local_feats.vulkan_memory_model.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES;
      local_feats.vulkan_memory_model.pNext = NULL;
   }

   if (physical_dev->renderer_extensions.EXT_transform_feedback) {
      /* prepend to whichever chain was built above */
      physical_dev->transform_feedback_features.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT;
      physical_dev->transform_feedback_features.pNext =
         physical_dev->features.pNext;
      physical_dev->features.pNext =
         &physical_dev->transform_feedback_features;
   }

   vn_call_vkGetPhysicalDeviceFeatures2(
      instance, vn_physical_device_to_handle(physical_dev),
      &physical_dev->features);

   const struct vk_device_extension_table *exts =
      &physical_dev->renderer_extensions;
   struct VkPhysicalDeviceVulkan11Features *vk11_feats =
      &physical_dev->vulkan_1_1_features;
   struct VkPhysicalDeviceVulkan12Features *vk12_feats =
      &physical_dev->vulkan_1_2_features;

   /* on a 1.1 renderer, fold the queried extension features into the core
    * structs, gated on the extensions the renderer exposes
    */
   if (physical_dev->renderer_version < VK_API_VERSION_1_2) {
      vk11_feats->storageBuffer16BitAccess =
         local_feats.sixteen_bit_storage.storageBuffer16BitAccess;
      vk11_feats->uniformAndStorageBuffer16BitAccess =
         local_feats.sixteen_bit_storage.uniformAndStorageBuffer16BitAccess;
      vk11_feats->storagePushConstant16 =
         local_feats.sixteen_bit_storage.storagePushConstant16;
      vk11_feats->storageInputOutput16 =
         local_feats.sixteen_bit_storage.storageInputOutput16;

      vk11_feats->multiview = local_feats.multiview.multiview;
      vk11_feats->multiviewGeometryShader =
         local_feats.multiview.multiviewGeometryShader;
      vk11_feats->multiviewTessellationShader =
         local_feats.multiview.multiviewTessellationShader;

      vk11_feats->variablePointersStorageBuffer =
         local_feats.variable_pointers.variablePointersStorageBuffer;
      vk11_feats->variablePointers =
         local_feats.variable_pointers.variablePointers;

      vk11_feats->protectedMemory =
         local_feats.protected_memory.protectedMemory;

      vk11_feats->samplerYcbcrConversion =
         local_feats.sampler_ycbcr_conversion.samplerYcbcrConversion;

      vk11_feats->shaderDrawParameters =
         local_feats.shader_draw_parameters.shaderDrawParameters;

      vk12_feats->samplerMirrorClampToEdge =
         exts->KHR_sampler_mirror_clamp_to_edge;
      vk12_feats->drawIndirectCount = exts->KHR_draw_indirect_count;

      if (exts->KHR_8bit_storage) {
         vk12_feats->storageBuffer8BitAccess =
            local_feats.eight_bit_storage.storageBuffer8BitAccess;
         vk12_feats->uniformAndStorageBuffer8BitAccess =
            local_feats.eight_bit_storage.uniformAndStorageBuffer8BitAccess;
         vk12_feats->storagePushConstant8 =
            local_feats.eight_bit_storage.storagePushConstant8;
      }
      if (exts->KHR_shader_atomic_int64) {
         vk12_feats->shaderBufferInt64Atomics =
            local_feats.shader_atomic_int64.shaderBufferInt64Atomics;
         vk12_feats->shaderSharedInt64Atomics =
            local_feats.shader_atomic_int64.shaderSharedInt64Atomics;
      }
      if (exts->KHR_shader_float16_int8) {
         vk12_feats->shaderFloat16 =
            local_feats.shader_float16_int8.shaderFloat16;
         vk12_feats->shaderInt8 = local_feats.shader_float16_int8.shaderInt8;
      }
      if (exts->EXT_descriptor_indexing) {
         vk12_feats->descriptorIndexing = true;
         vk12_feats->shaderInputAttachmentArrayDynamicIndexing =
            local_feats.descriptor_indexing
               .shaderInputAttachmentArrayDynamicIndexing;
         vk12_feats->shaderUniformTexelBufferArrayDynamicIndexing =
            local_feats.descriptor_indexing
               .shaderUniformTexelBufferArrayDynamicIndexing;
         vk12_feats->shaderStorageTexelBufferArrayDynamicIndexing =
            local_feats.descriptor_indexing
               .shaderStorageTexelBufferArrayDynamicIndexing;
         vk12_feats->shaderUniformBufferArrayNonUniformIndexing =
            local_feats.descriptor_indexing
               .shaderUniformBufferArrayNonUniformIndexing;
         vk12_feats->shaderSampledImageArrayNonUniformIndexing =
            local_feats.descriptor_indexing
               .shaderSampledImageArrayNonUniformIndexing;
         vk12_feats->shaderStorageBufferArrayNonUniformIndexing =
            local_feats.descriptor_indexing
               .shaderStorageBufferArrayNonUniformIndexing;
         vk12_feats->shaderStorageImageArrayNonUniformIndexing =
            local_feats.descriptor_indexing
               .shaderStorageImageArrayNonUniformIndexing;
         vk12_feats->shaderInputAttachmentArrayNonUniformIndexing =
            local_feats.descriptor_indexing
               .shaderInputAttachmentArrayNonUniformIndexing;
         vk12_feats->shaderUniformTexelBufferArrayNonUniformIndexing =
            local_feats.descriptor_indexing
               .shaderUniformTexelBufferArrayNonUniformIndexing;
         vk12_feats->shaderStorageTexelBufferArrayNonUniformIndexing =
            local_feats.descriptor_indexing
               .shaderStorageTexelBufferArrayNonUniformIndexing;
         vk12_feats->descriptorBindingUniformBufferUpdateAfterBind =
            local_feats.descriptor_indexing
               .descriptorBindingUniformBufferUpdateAfterBind;
         vk12_feats->descriptorBindingSampledImageUpdateAfterBind =
            local_feats.descriptor_indexing
               .descriptorBindingSampledImageUpdateAfterBind;
         vk12_feats->descriptorBindingStorageImageUpdateAfterBind =
            local_feats.descriptor_indexing
               .descriptorBindingStorageImageUpdateAfterBind;
         vk12_feats->descriptorBindingStorageBufferUpdateAfterBind =
            local_feats.descriptor_indexing
               .descriptorBindingStorageBufferUpdateAfterBind;
         vk12_feats->descriptorBindingUniformTexelBufferUpdateAfterBind =
            local_feats.descriptor_indexing
               .descriptorBindingUniformTexelBufferUpdateAfterBind;
         vk12_feats->descriptorBindingStorageTexelBufferUpdateAfterBind =
            local_feats.descriptor_indexing
               .descriptorBindingStorageTexelBufferUpdateAfterBind;
         vk12_feats->descriptorBindingUpdateUnusedWhilePending =
            local_feats.descriptor_indexing
               .descriptorBindingUpdateUnusedWhilePending;
         vk12_feats->descriptorBindingPartiallyBound =
            local_feats.descriptor_indexing.descriptorBindingPartiallyBound;
         vk12_feats->descriptorBindingVariableDescriptorCount =
            local_feats.descriptor_indexing
               .descriptorBindingVariableDescriptorCount;
         vk12_feats->runtimeDescriptorArray =
            local_feats.descriptor_indexing.runtimeDescriptorArray;
      }

      vk12_feats->samplerFilterMinmax = exts->EXT_sampler_filter_minmax;

      if (exts->EXT_scalar_block_layout) {
         vk12_feats->scalarBlockLayout =
            local_feats.scalar_block_layout.scalarBlockLayout;
      }
      if (exts->KHR_imageless_framebuffer) {
         vk12_feats->imagelessFramebuffer =
            local_feats.imageless_framebuffer.imagelessFramebuffer;
      }
      if (exts->KHR_uniform_buffer_standard_layout) {
         vk12_feats->uniformBufferStandardLayout =
            local_feats.uniform_buffer_standard_layout
               .uniformBufferStandardLayout;
      }
      if (exts->KHR_shader_subgroup_extended_types) {
         vk12_feats->shaderSubgroupExtendedTypes =
            local_feats.shader_subgroup_extended_types
               .shaderSubgroupExtendedTypes;
      }
      if (exts->KHR_separate_depth_stencil_layouts) {
         vk12_feats->separateDepthStencilLayouts =
            local_feats.separate_depth_stencil_layouts
               .separateDepthStencilLayouts;
      }
      if (exts->EXT_host_query_reset) {
         vk12_feats->hostQueryReset =
            local_feats.host_query_reset.hostQueryReset;
      }
      if (exts->KHR_timeline_semaphore) {
         vk12_feats->timelineSemaphore =
            local_feats.timeline_semaphore.timelineSemaphore;
      }
      if (exts->KHR_buffer_device_address) {
         vk12_feats->bufferDeviceAddress =
            local_feats.buffer_device_address.bufferDeviceAddress;
         vk12_feats->bufferDeviceAddressCaptureReplay =
            local_feats.buffer_device_address.bufferDeviceAddressCaptureReplay;
         vk12_feats->bufferDeviceAddressMultiDevice =
            local_feats.buffer_device_address.bufferDeviceAddressMultiDevice;
      }
      if (exts->KHR_vulkan_memory_model) {
         vk12_feats->vulkanMemoryModel =
            local_feats.vulkan_memory_model.vulkanMemoryModel;
         vk12_feats->vulkanMemoryModelDeviceScope =
            local_feats.vulkan_memory_model.vulkanMemoryModelDeviceScope;
         vk12_feats->vulkanMemoryModelAvailabilityVisibilityChains =
            local_feats.vulkan_memory_model
               .vulkanMemoryModelAvailabilityVisibilityChains;
      }

      vk12_feats->shaderOutputViewportIndex =
         exts->EXT_shader_viewport_index_layer;
      vk12_feats->shaderOutputLayer = exts->EXT_shader_viewport_index_layer;
      vk12_feats->subgroupBroadcastDynamicId = false;
   }
}
962
963
static void
964
vn_physical_device_init_uuids(struct vn_physical_device *physical_dev)
965
{
966
struct VkPhysicalDeviceProperties *props =
967
&physical_dev->properties.properties;
968
struct VkPhysicalDeviceVulkan11Properties *vk11_props =
969
&physical_dev->vulkan_1_1_properties;
970
struct VkPhysicalDeviceVulkan12Properties *vk12_props =
971
&physical_dev->vulkan_1_2_properties;
972
struct mesa_sha1 sha1_ctx;
973
uint8_t sha1[SHA1_DIGEST_LENGTH];
974
975
static_assert(VK_UUID_SIZE <= SHA1_DIGEST_LENGTH, "");
976
977
_mesa_sha1_init(&sha1_ctx);
978
_mesa_sha1_update(&sha1_ctx, &props->pipelineCacheUUID,
979
sizeof(props->pipelineCacheUUID));
980
_mesa_sha1_final(&sha1_ctx, sha1);
981
982
memcpy(props->pipelineCacheUUID, sha1, VK_UUID_SIZE);
983
984
_mesa_sha1_init(&sha1_ctx);
985
_mesa_sha1_update(&sha1_ctx, &props->vendorID, sizeof(props->vendorID));
986
_mesa_sha1_update(&sha1_ctx, &props->deviceID, sizeof(props->deviceID));
987
_mesa_sha1_final(&sha1_ctx, sha1);
988
989
memcpy(vk11_props->deviceUUID, sha1, VK_UUID_SIZE);
990
991
_mesa_sha1_init(&sha1_ctx);
992
_mesa_sha1_update(&sha1_ctx, vk12_props->driverName,
993
strlen(vk12_props->driverName));
994
_mesa_sha1_update(&sha1_ctx, vk12_props->driverInfo,
995
strlen(vk12_props->driverInfo));
996
_mesa_sha1_final(&sha1_ctx, sha1);
997
998
memcpy(vk11_props->driverUUID, sha1, VK_UUID_SIZE);
999
1000
memset(vk11_props->deviceLUID, 0, VK_LUID_SIZE);
1001
vk11_props->deviceNodeMask = 0;
1002
vk11_props->deviceLUIDValid = false;
1003
}
1004
1005
/* Query the renderer's physical-device properties and normalize them into
 * the cached VkPhysicalDeviceProperties2 / Vulkan11 / Vulkan12 property
 * structs, then apply Venus-specific overrides (api version cap, device
 * name, driver identity, UUIDs).
 */
static void
vn_physical_device_init_properties(struct vn_physical_device *physical_dev)
{
   struct vn_instance *instance = physical_dev->instance;
   /* Scratch structs used only when the renderer is pre-1.2 and cannot fill
    * VkPhysicalDeviceVulkan11/12Properties directly; their contents are
    * copied into the cached vk11/vk12 structs below.
    */
   struct {
      /* Vulkan 1.1 */
      VkPhysicalDeviceIDProperties id;
      VkPhysicalDeviceSubgroupProperties subgroup;
      VkPhysicalDevicePointClippingProperties point_clipping;
      VkPhysicalDeviceMultiviewProperties multiview;
      VkPhysicalDeviceProtectedMemoryProperties protected_memory;
      VkPhysicalDeviceMaintenance3Properties maintenance_3;

      /* Vulkan 1.2 */
      VkPhysicalDeviceDriverProperties driver;
      VkPhysicalDeviceFloatControlsProperties float_controls;
      VkPhysicalDeviceDescriptorIndexingProperties descriptor_indexing;
      VkPhysicalDeviceDepthStencilResolveProperties depth_stencil_resolve;
      VkPhysicalDeviceSamplerFilterMinmaxProperties sampler_filter_minmax;
      VkPhysicalDeviceTimelineSemaphoreProperties timeline_semaphore;
   } local_props;

   /* build the pNext chain handed to the renderer */
   physical_dev->properties.sType =
      VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
   if (physical_dev->renderer_version >= VK_API_VERSION_1_2) {
      /* 1.2+ renderer: chain the cached vk11/vk12 structs directly */
      physical_dev->properties.pNext = &physical_dev->vulkan_1_1_properties;

      physical_dev->vulkan_1_1_properties.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES;
      physical_dev->vulkan_1_1_properties.pNext =
         &physical_dev->vulkan_1_2_properties;
      physical_dev->vulkan_1_2_properties.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES;
      physical_dev->vulkan_1_2_properties.pNext = NULL;
   } else {
      /* pre-1.2 renderer: chain the individual per-extension structs */
      physical_dev->properties.pNext = &local_props.id;

      local_props.id.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES;
      local_props.id.pNext = &local_props.subgroup;
      local_props.subgroup.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES;
      local_props.subgroup.pNext = &local_props.point_clipping;
      local_props.point_clipping.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES;
      local_props.point_clipping.pNext = &local_props.multiview;
      local_props.multiview.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES;
      local_props.multiview.pNext = &local_props.protected_memory;
      local_props.protected_memory.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES;
      local_props.protected_memory.pNext = &local_props.maintenance_3;
      local_props.maintenance_3.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES;
      local_props.maintenance_3.pNext = &local_props.driver;

      local_props.driver.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES;
      local_props.driver.pNext = &local_props.float_controls;
      local_props.float_controls.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES;
      local_props.float_controls.pNext = &local_props.descriptor_indexing;
      local_props.descriptor_indexing.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES;
      local_props.descriptor_indexing.pNext =
         &local_props.depth_stencil_resolve;
      local_props.depth_stencil_resolve.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES;
      local_props.depth_stencil_resolve.pNext =
         &local_props.sampler_filter_minmax;
      local_props.sampler_filter_minmax.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES;
      local_props.sampler_filter_minmax.pNext =
         &local_props.timeline_semaphore;
      local_props.timeline_semaphore.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES;
      local_props.timeline_semaphore.pNext = NULL;
   }

   /* prepend the transform feedback struct when the renderer supports it */
   if (physical_dev->renderer_extensions.EXT_transform_feedback) {
      physical_dev->transform_feedback_properties.sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT;
      physical_dev->transform_feedback_properties.pNext =
         physical_dev->properties.pNext;
      physical_dev->properties.pNext =
         &physical_dev->transform_feedback_properties;
   }

   vn_call_vkGetPhysicalDeviceProperties2(
      instance, vn_physical_device_to_handle(physical_dev),
      &physical_dev->properties);

   const struct vk_device_extension_table *exts =
      &physical_dev->renderer_extensions;
   struct VkPhysicalDeviceProperties *props =
      &physical_dev->properties.properties;
   struct VkPhysicalDeviceVulkan11Properties *vk11_props =
      &physical_dev->vulkan_1_1_properties;
   struct VkPhysicalDeviceVulkan12Properties *vk12_props =
      &physical_dev->vulkan_1_2_properties;

   /* Pre-1.2 renderer: copy the values returned in local_props into the
    * cached vk11/vk12 structs.  Extension-guarded fields are only copied
    * when the renderer actually supports the extension; otherwise they
    * stay as left by the renderer (presumably zero-initialized elsewhere —
    * TODO confirm against the caller).
    */
   if (physical_dev->renderer_version < VK_API_VERSION_1_2) {
      memcpy(vk11_props->deviceUUID, local_props.id.deviceUUID,
             sizeof(vk11_props->deviceUUID));
      memcpy(vk11_props->driverUUID, local_props.id.driverUUID,
             sizeof(vk11_props->driverUUID));
      memcpy(vk11_props->deviceLUID, local_props.id.deviceLUID,
             sizeof(vk11_props->deviceLUID));
      vk11_props->deviceNodeMask = local_props.id.deviceNodeMask;
      vk11_props->deviceLUIDValid = local_props.id.deviceLUIDValid;

      vk11_props->subgroupSize = local_props.subgroup.subgroupSize;
      vk11_props->subgroupSupportedStages =
         local_props.subgroup.supportedStages;
      vk11_props->subgroupSupportedOperations =
         local_props.subgroup.supportedOperations;
      vk11_props->subgroupQuadOperationsInAllStages =
         local_props.subgroup.quadOperationsInAllStages;

      vk11_props->pointClippingBehavior =
         local_props.point_clipping.pointClippingBehavior;

      vk11_props->maxMultiviewViewCount =
         local_props.multiview.maxMultiviewViewCount;
      vk11_props->maxMultiviewInstanceIndex =
         local_props.multiview.maxMultiviewInstanceIndex;

      vk11_props->protectedNoFault =
         local_props.protected_memory.protectedNoFault;

      vk11_props->maxPerSetDescriptors =
         local_props.maintenance_3.maxPerSetDescriptors;
      vk11_props->maxMemoryAllocationSize =
         local_props.maintenance_3.maxMemoryAllocationSize;

      /* NOTE: the driver identity copied here is overwritten
       * unconditionally near the end of this function.
       */
      if (exts->KHR_driver_properties) {
         vk12_props->driverID = local_props.driver.driverID;
         memcpy(vk12_props->driverName, local_props.driver.driverName,
                VK_MAX_DRIVER_NAME_SIZE);
         memcpy(vk12_props->driverInfo, local_props.driver.driverInfo,
                VK_MAX_DRIVER_INFO_SIZE);
         vk12_props->conformanceVersion =
            local_props.driver.conformanceVersion;
      }
      if (exts->KHR_shader_float_controls) {
         vk12_props->denormBehaviorIndependence =
            local_props.float_controls.denormBehaviorIndependence;
         vk12_props->roundingModeIndependence =
            local_props.float_controls.roundingModeIndependence;
         vk12_props->shaderSignedZeroInfNanPreserveFloat16 =
            local_props.float_controls.shaderSignedZeroInfNanPreserveFloat16;
         vk12_props->shaderSignedZeroInfNanPreserveFloat32 =
            local_props.float_controls.shaderSignedZeroInfNanPreserveFloat32;
         vk12_props->shaderSignedZeroInfNanPreserveFloat64 =
            local_props.float_controls.shaderSignedZeroInfNanPreserveFloat64;
         vk12_props->shaderDenormPreserveFloat16 =
            local_props.float_controls.shaderDenormPreserveFloat16;
         vk12_props->shaderDenormPreserveFloat32 =
            local_props.float_controls.shaderDenormPreserveFloat32;
         vk12_props->shaderDenormPreserveFloat64 =
            local_props.float_controls.shaderDenormPreserveFloat64;
         vk12_props->shaderDenormFlushToZeroFloat16 =
            local_props.float_controls.shaderDenormFlushToZeroFloat16;
         vk12_props->shaderDenormFlushToZeroFloat32 =
            local_props.float_controls.shaderDenormFlushToZeroFloat32;
         vk12_props->shaderDenormFlushToZeroFloat64 =
            local_props.float_controls.shaderDenormFlushToZeroFloat64;
         vk12_props->shaderRoundingModeRTEFloat16 =
            local_props.float_controls.shaderRoundingModeRTEFloat16;
         vk12_props->shaderRoundingModeRTEFloat32 =
            local_props.float_controls.shaderRoundingModeRTEFloat32;
         vk12_props->shaderRoundingModeRTEFloat64 =
            local_props.float_controls.shaderRoundingModeRTEFloat64;
         vk12_props->shaderRoundingModeRTZFloat16 =
            local_props.float_controls.shaderRoundingModeRTZFloat16;
         vk12_props->shaderRoundingModeRTZFloat32 =
            local_props.float_controls.shaderRoundingModeRTZFloat32;
         vk12_props->shaderRoundingModeRTZFloat64 =
            local_props.float_controls.shaderRoundingModeRTZFloat64;
      }
      if (exts->EXT_descriptor_indexing) {
         vk12_props->maxUpdateAfterBindDescriptorsInAllPools =
            local_props.descriptor_indexing
               .maxUpdateAfterBindDescriptorsInAllPools;
         vk12_props->shaderUniformBufferArrayNonUniformIndexingNative =
            local_props.descriptor_indexing
               .shaderUniformBufferArrayNonUniformIndexingNative;
         vk12_props->shaderSampledImageArrayNonUniformIndexingNative =
            local_props.descriptor_indexing
               .shaderSampledImageArrayNonUniformIndexingNative;
         vk12_props->shaderStorageBufferArrayNonUniformIndexingNative =
            local_props.descriptor_indexing
               .shaderStorageBufferArrayNonUniformIndexingNative;
         vk12_props->shaderStorageImageArrayNonUniformIndexingNative =
            local_props.descriptor_indexing
               .shaderStorageImageArrayNonUniformIndexingNative;
         vk12_props->shaderInputAttachmentArrayNonUniformIndexingNative =
            local_props.descriptor_indexing
               .shaderInputAttachmentArrayNonUniformIndexingNative;
         vk12_props->robustBufferAccessUpdateAfterBind =
            local_props.descriptor_indexing.robustBufferAccessUpdateAfterBind;
         vk12_props->quadDivergentImplicitLod =
            local_props.descriptor_indexing.quadDivergentImplicitLod;
         vk12_props->maxPerStageDescriptorUpdateAfterBindSamplers =
            local_props.descriptor_indexing
               .maxPerStageDescriptorUpdateAfterBindSamplers;
         vk12_props->maxPerStageDescriptorUpdateAfterBindUniformBuffers =
            local_props.descriptor_indexing
               .maxPerStageDescriptorUpdateAfterBindUniformBuffers;
         vk12_props->maxPerStageDescriptorUpdateAfterBindStorageBuffers =
            local_props.descriptor_indexing
               .maxPerStageDescriptorUpdateAfterBindStorageBuffers;
         vk12_props->maxPerStageDescriptorUpdateAfterBindSampledImages =
            local_props.descriptor_indexing
               .maxPerStageDescriptorUpdateAfterBindSampledImages;
         vk12_props->maxPerStageDescriptorUpdateAfterBindStorageImages =
            local_props.descriptor_indexing
               .maxPerStageDescriptorUpdateAfterBindStorageImages;
         vk12_props->maxPerStageDescriptorUpdateAfterBindInputAttachments =
            local_props.descriptor_indexing
               .maxPerStageDescriptorUpdateAfterBindInputAttachments;
         vk12_props->maxPerStageUpdateAfterBindResources =
            local_props.descriptor_indexing
               .maxPerStageUpdateAfterBindResources;
         vk12_props->maxDescriptorSetUpdateAfterBindSamplers =
            local_props.descriptor_indexing
               .maxDescriptorSetUpdateAfterBindSamplers;
         vk12_props->maxDescriptorSetUpdateAfterBindUniformBuffers =
            local_props.descriptor_indexing
               .maxDescriptorSetUpdateAfterBindUniformBuffers;
         vk12_props->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
            local_props.descriptor_indexing
               .maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
         vk12_props->maxDescriptorSetUpdateAfterBindStorageBuffers =
            local_props.descriptor_indexing
               .maxDescriptorSetUpdateAfterBindStorageBuffers;
         vk12_props->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
            local_props.descriptor_indexing
               .maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
         vk12_props->maxDescriptorSetUpdateAfterBindSampledImages =
            local_props.descriptor_indexing
               .maxDescriptorSetUpdateAfterBindSampledImages;
         vk12_props->maxDescriptorSetUpdateAfterBindStorageImages =
            local_props.descriptor_indexing
               .maxDescriptorSetUpdateAfterBindStorageImages;
         vk12_props->maxDescriptorSetUpdateAfterBindInputAttachments =
            local_props.descriptor_indexing
               .maxDescriptorSetUpdateAfterBindInputAttachments;
      }
      if (exts->KHR_depth_stencil_resolve) {
         vk12_props->supportedDepthResolveModes =
            local_props.depth_stencil_resolve.supportedDepthResolveModes;
         vk12_props->supportedStencilResolveModes =
            local_props.depth_stencil_resolve.supportedStencilResolveModes;
         vk12_props->independentResolveNone =
            local_props.depth_stencil_resolve.independentResolveNone;
         vk12_props->independentResolve =
            local_props.depth_stencil_resolve.independentResolve;
      }
      if (exts->EXT_sampler_filter_minmax) {
         vk12_props->filterMinmaxSingleComponentFormats =
            local_props.sampler_filter_minmax
               .filterMinmaxSingleComponentFormats;
         vk12_props->filterMinmaxImageComponentMapping =
            local_props.sampler_filter_minmax
               .filterMinmaxImageComponentMapping;
      }
      if (exts->KHR_timeline_semaphore) {
         vk12_props->maxTimelineSemaphoreValueDifference =
            local_props.timeline_semaphore.maxTimelineSemaphoreValueDifference;
      }

      vk12_props->framebufferIntegerColorSampleCounts = VK_SAMPLE_COUNT_1_BIT;
   }

   const uint32_t version_override = vk_get_version_override();
   if (version_override) {
      props->apiVersion = version_override;
   } else {
      /* cap the advertised api version */
      uint32_t version = MIN3(props->apiVersion, VN_MAX_API_VERSION,
                              instance->renderer_info.vk_xml_version);
      /* never advertise a patch version newer than the renderer's */
      if (VK_VERSION_PATCH(version) > VK_VERSION_PATCH(props->apiVersion)) {
         version = version - VK_VERSION_PATCH(version) +
                   VK_VERSION_PATCH(props->apiVersion);
      }
      props->apiVersion = version;
   }

   props->driverVersion = vk_get_driver_version();

   /* prefix the renderer device name; on truncation, replace the tail with
    * "...)" (snprintf already NUL-terminated at the last byte)
    */
   char device_name[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
   int device_name_len = snprintf(device_name, sizeof(device_name),
                                  "Virtio-GPU Venus (%s)", props->deviceName);
   if (device_name_len >= VK_MAX_PHYSICAL_DEVICE_NAME_SIZE) {
      memcpy(device_name + VK_MAX_PHYSICAL_DEVICE_NAME_SIZE - 5, "...)", 4);
      device_name_len = VK_MAX_PHYSICAL_DEVICE_NAME_SIZE - 1;
   }
   memcpy(props->deviceName, device_name, device_name_len + 1);

   /* Venus advertises its own driver identity regardless of what the
    * renderer reported above.
    */
   vk12_props->driverID = 0;
   snprintf(vk12_props->driverName, sizeof(vk12_props->driverName), "venus");
   snprintf(vk12_props->driverInfo, sizeof(vk12_props->driverInfo),
            "Mesa " PACKAGE_VERSION MESA_GIT_SHA1);
   vk12_props->conformanceVersion = (VkConformanceVersionKHR){
      .major = 0,
      .minor = 0,
      .subminor = 0,
      .patch = 0,
   };

   /* depends on the driver name/info strings set just above */
   vn_physical_device_init_uuids(physical_dev);
}
1317
1318
static VkResult
1319
vn_physical_device_init_queue_family_properties(
1320
struct vn_physical_device *physical_dev)
1321
{
1322
struct vn_instance *instance = physical_dev->instance;
1323
const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1324
uint32_t count;
1325
1326
vn_call_vkGetPhysicalDeviceQueueFamilyProperties2(
1327
instance, vn_physical_device_to_handle(physical_dev), &count, NULL);
1328
1329
VkQueueFamilyProperties2 *props =
1330
vk_alloc(alloc, sizeof(*props) * count, VN_DEFAULT_ALIGN,
1331
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1332
if (!props)
1333
return VK_ERROR_OUT_OF_HOST_MEMORY;
1334
1335
for (uint32_t i = 0; i < count; i++) {
1336
props[i].sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2;
1337
props[i].pNext = NULL;
1338
}
1339
vn_call_vkGetPhysicalDeviceQueueFamilyProperties2(
1340
instance, vn_physical_device_to_handle(physical_dev), &count, props);
1341
1342
physical_dev->queue_family_properties = props;
1343
physical_dev->queue_family_count = count;
1344
1345
return VK_SUCCESS;
1346
}
1347
1348
static void
1349
vn_physical_device_init_memory_properties(
1350
struct vn_physical_device *physical_dev)
1351
{
1352
struct vn_instance *instance = physical_dev->instance;
1353
1354
physical_dev->memory_properties.sType =
1355
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2;
1356
1357
vn_call_vkGetPhysicalDeviceMemoryProperties2(
1358
instance, vn_physical_device_to_handle(physical_dev),
1359
&physical_dev->memory_properties);
1360
1361
if (!instance->renderer_info.has_cache_management) {
1362
VkPhysicalDeviceMemoryProperties *props =
1363
&physical_dev->memory_properties.memoryProperties;
1364
const uint32_t host_flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1365
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
1366
VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
1367
1368
for (uint32_t i = 0; i < props->memoryTypeCount; i++) {
1369
const bool coherent = props->memoryTypes[i].propertyFlags &
1370
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
1371
if (!coherent)
1372
props->memoryTypes[i].propertyFlags &= ~host_flags;
1373
}
1374
}
1375
}
1376
1377
static void
1378
vn_physical_device_init_external_memory(
1379
struct vn_physical_device *physical_dev)
1380
{
1381
/* When a renderer VkDeviceMemory is exportable, we can create a
1382
* vn_renderer_bo from it. The vn_renderer_bo can be freely exported as an
1383
* opaque fd or a dma-buf.
1384
*
1385
* However, to know if a rendender VkDeviceMemory is exportable, we have to
1386
* start from VkPhysicalDeviceExternalImageFormatInfo (or
1387
* vkGetPhysicalDeviceExternalBufferProperties). That means we need to
1388
* know the handle type that the renderer will use to make those queries.
1389
*
1390
* XXX We also assume that a vn_renderer_bo can be created as long as the
1391
* renderer VkDeviceMemory has a mappable memory type. That is plain
1392
* wrong. It is impossible to fix though until some new extension is
1393
* created and supported by the driver, and that the renderer switches to
1394
* the extension.
1395
*/
1396
1397
if (!physical_dev->instance->renderer_info.has_dma_buf_import)
1398
return;
1399
1400
/* TODO We assume the renderer uses dma-bufs here. This should be
1401
* negotiated by adding a new function to VK_MESA_venus_protocol.
1402
*/
1403
if (physical_dev->renderer_extensions.EXT_external_memory_dma_buf) {
1404
physical_dev->external_memory.renderer_handle_type =
1405
VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
1406
1407
#ifdef ANDROID
1408
physical_dev->external_memory.supported_handle_types =
1409
VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
1410
#else
1411
physical_dev->external_memory.supported_handle_types =
1412
VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT |
1413
VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
1414
#endif
1415
}
1416
}
1417
1418
static void
1419
vn_physical_device_init_external_fence_handles(
1420
struct vn_physical_device *physical_dev)
1421
{
1422
/* The current code manipulates the host-side VkFence directly.
1423
* vkWaitForFences is translated to repeated vkGetFenceStatus.
1424
*
1425
* External fence is not possible currently. At best, we could cheat by
1426
* translating vkGetFenceFdKHR to vkWaitForFences and returning -1, when
1427
* the handle type is sync file.
1428
*
1429
* We would like to create a vn_renderer_sync from a host-side VkFence,
1430
* similar to how a vn_renderer_bo is created from a host-side
1431
* VkDeviceMemory. That would require kernel support and tons of works on
1432
* the host side. If we had that, and we kept both the vn_renderer_sync
1433
* and the host-side VkFence in sync, we would have the freedom to use
1434
* either of them depending on the occasions, and support external fences
1435
* and idle waiting.
1436
*/
1437
physical_dev->external_fence_handles = 0;
1438
1439
#ifdef ANDROID
1440
if (physical_dev->instance->experimental.globalFencing) {
1441
physical_dev->external_fence_handles =
1442
VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
1443
}
1444
#endif
1445
}
1446
1447
static void
1448
vn_physical_device_init_external_semaphore_handles(
1449
struct vn_physical_device *physical_dev)
1450
{
1451
/* The current code manipulates the host-side VkSemaphore directly. It
1452
* works very well for binary semaphores because there is no CPU operation.
1453
* But for timeline semaphores, the situation is similar to that of fences.
1454
* vkWaitSemaphores is translated to repeated vkGetSemaphoreCounterValue.
1455
*
1456
* External semaphore is not possible currently. We could cheat when the
1457
* semaphore is binary and the handle type is sync file, but that would
1458
* require associating a fence with the semaphore and doing vkWaitForFences
1459
* in vkGetSemaphoreFdKHR.
1460
*
1461
* We would like to create a vn_renderer_sync from a host-side VkSemaphore,
1462
* similar to how a vn_renderer_bo is created from a host-side
1463
* VkDeviceMemory. The reasoning is the same as that for fences.
1464
* Additionally, we would like the sync file exported from the
1465
* vn_renderer_sync to carry the necessary information to identify the
1466
* host-side VkSemaphore. That would allow the consumers to wait on the
1467
* host side rather than the guest side.
1468
*/
1469
physical_dev->external_binary_semaphore_handles = 0;
1470
physical_dev->external_timeline_semaphore_handles = 0;
1471
1472
#ifdef ANDROID
1473
if (physical_dev->instance->experimental.globalFencing) {
1474
physical_dev->external_binary_semaphore_handles =
1475
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
1476
}
1477
#endif
1478
}
1479
1480
static void
1481
vn_physical_device_get_native_extensions(
1482
const struct vn_physical_device *physical_dev,
1483
struct vk_device_extension_table *exts)
1484
{
1485
const struct vn_instance *instance = physical_dev->instance;
1486
const struct vn_renderer_info *renderer_info = &instance->renderer_info;
1487
const struct vk_device_extension_table *renderer_exts =
1488
&physical_dev->renderer_extensions;
1489
1490
memset(exts, 0, sizeof(*exts));
1491
1492
/* see vn_physical_device_init_external_memory */
1493
const bool can_external_mem = renderer_exts->EXT_external_memory_dma_buf &&
1494
renderer_info->has_dma_buf_import;
1495
1496
#ifdef ANDROID
1497
if (can_external_mem && renderer_exts->EXT_image_drm_format_modifier &&
1498
renderer_exts->EXT_queue_family_foreign &&
1499
instance->experimental.memoryResourceAllocationSize == VK_TRUE) {
1500
exts->ANDROID_external_memory_android_hardware_buffer = true;
1501
exts->ANDROID_native_buffer = true;
1502
}
1503
1504
/* we have a very poor implementation */
1505
if (instance->experimental.globalFencing) {
1506
exts->KHR_external_fence_fd = true;
1507
exts->KHR_external_semaphore_fd = true;
1508
}
1509
#else /* ANDROID */
1510
if (can_external_mem) {
1511
exts->KHR_external_memory_fd = true;
1512
exts->EXT_external_memory_dma_buf = true;
1513
}
1514
1515
#ifdef VN_USE_WSI_PLATFORM
1516
/* XXX we should check for EXT_queue_family_foreign */
1517
exts->KHR_incremental_present = true;
1518
exts->KHR_swapchain = true;
1519
exts->KHR_swapchain_mutable_format = true;
1520
#endif
1521
#endif /* ANDROID */
1522
}
1523
1524
/* Collect the device extensions that can simply be passed through to the
 * renderer into *exts.  A passthrough extension is only advertised when the
 * renderer also supports it (see
 * vn_physical_device_init_supported_extensions).
 */
static void
vn_physical_device_get_passthrough_extensions(
   const struct vn_physical_device *physical_dev,
   struct vk_device_extension_table *exts)
{
   *exts = (struct vk_device_extension_table){
      /* promoted to VK_VERSION_1_1 */
      .KHR_16bit_storage = true,
      .KHR_bind_memory2 = true,
      .KHR_dedicated_allocation = true,
      .KHR_descriptor_update_template = true,
      .KHR_device_group = true,
      .KHR_external_fence = true,
      .KHR_external_memory = true,
      .KHR_external_semaphore = true,
      .KHR_get_memory_requirements2 = true,
      .KHR_maintenance1 = true,
      .KHR_maintenance2 = true,
      .KHR_maintenance3 = true,
      .KHR_multiview = true,
      .KHR_relaxed_block_layout = true,
      .KHR_sampler_ycbcr_conversion = true,
      .KHR_shader_draw_parameters = true,
      .KHR_storage_buffer_storage_class = true,
      .KHR_variable_pointers = true,

      /* promoted to VK_VERSION_1_2 */
      .KHR_8bit_storage = true,
      .KHR_buffer_device_address = true,
      .KHR_create_renderpass2 = true,
      .KHR_depth_stencil_resolve = true,
      .KHR_draw_indirect_count = true,
#ifndef ANDROID
      /* xxx remove the #ifndef after venus has a driver id */
      .KHR_driver_properties = true,
#endif
      .KHR_image_format_list = true,
      .KHR_imageless_framebuffer = true,
      .KHR_sampler_mirror_clamp_to_edge = true,
      .KHR_separate_depth_stencil_layouts = true,
      .KHR_shader_atomic_int64 = true,
      .KHR_shader_float16_int8 = true,
      .KHR_shader_float_controls = true,
      .KHR_shader_subgroup_extended_types = true,
      .KHR_spirv_1_4 = true,
      .KHR_timeline_semaphore = true,
      .KHR_uniform_buffer_standard_layout = true,
      .KHR_vulkan_memory_model = true,
      .EXT_descriptor_indexing = true,
      .EXT_host_query_reset = true,
      .EXT_sampler_filter_minmax = true,
      .EXT_scalar_block_layout = true,
      .EXT_separate_stencil_usage = true,
      .EXT_shader_viewport_index_layer = true,

      /* EXT */
#ifndef ANDROID
      .EXT_image_drm_format_modifier = true,
#endif
      .EXT_queue_family_foreign = true,
      .EXT_transform_feedback = true,
   };
}
1587
1588
static void
1589
vn_physical_device_init_supported_extensions(
1590
struct vn_physical_device *physical_dev)
1591
{
1592
struct vk_device_extension_table native;
1593
struct vk_device_extension_table passthrough;
1594
vn_physical_device_get_native_extensions(physical_dev, &native);
1595
vn_physical_device_get_passthrough_extensions(physical_dev, &passthrough);
1596
1597
for (uint32_t i = 0; i < VK_DEVICE_EXTENSION_COUNT; i++) {
1598
const VkExtensionProperties *props = &vk_device_extensions[i];
1599
1600
#ifdef ANDROID
1601
if (!vk_android_allowed_device_extensions.extensions[i])
1602
continue;
1603
#endif
1604
1605
if (native.extensions[i]) {
1606
physical_dev->base.base.supported_extensions.extensions[i] = true;
1607
physical_dev->extension_spec_versions[i] = props->specVersion;
1608
} else if (passthrough.extensions[i] &&
1609
physical_dev->renderer_extensions.extensions[i]) {
1610
physical_dev->base.base.supported_extensions.extensions[i] = true;
1611
physical_dev->extension_spec_versions[i] = MIN2(
1612
physical_dev->extension_spec_versions[i], props->specVersion);
1613
}
1614
}
1615
1616
/* override VK_ANDROID_native_buffer spec version */
1617
if (native.ANDROID_native_buffer) {
1618
const uint32_t index =
1619
VN_EXTENSION_TABLE_INDEX(native, ANDROID_native_buffer);
1620
physical_dev->extension_spec_versions[index] =
1621
VN_ANDROID_NATIVE_BUFFER_SPEC_VERSION;
1622
}
1623
}
1624
1625
static VkResult
1626
vn_physical_device_init_renderer_extensions(
1627
struct vn_physical_device *physical_dev)
1628
{
1629
struct vn_instance *instance = physical_dev->instance;
1630
const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1631
1632
/* get renderer extensions */
1633
uint32_t count;
1634
VkResult result = vn_call_vkEnumerateDeviceExtensionProperties(
1635
instance, vn_physical_device_to_handle(physical_dev), NULL, &count,
1636
NULL);
1637
if (result != VK_SUCCESS)
1638
return result;
1639
1640
VkExtensionProperties *exts = NULL;
1641
if (count) {
1642
exts = vk_alloc(alloc, sizeof(*exts) * count, VN_DEFAULT_ALIGN,
1643
VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
1644
if (!exts)
1645
return VK_ERROR_OUT_OF_HOST_MEMORY;
1646
1647
result = vn_call_vkEnumerateDeviceExtensionProperties(
1648
instance, vn_physical_device_to_handle(physical_dev), NULL, &count,
1649
exts);
1650
if (result < VK_SUCCESS) {
1651
vk_free(alloc, exts);
1652
return result;
1653
}
1654
}
1655
1656
physical_dev->extension_spec_versions =
1657
vk_zalloc(alloc,
1658
sizeof(*physical_dev->extension_spec_versions) *
1659
VK_DEVICE_EXTENSION_COUNT,
1660
VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1661
if (!physical_dev->extension_spec_versions) {
1662
vk_free(alloc, exts);
1663
return VK_ERROR_OUT_OF_HOST_MEMORY;
1664
}
1665
1666
for (uint32_t i = 0; i < VK_DEVICE_EXTENSION_COUNT; i++) {
1667
const VkExtensionProperties *props = &vk_device_extensions[i];
1668
for (uint32_t j = 0; j < count; j++) {
1669
if (strcmp(props->extensionName, exts[j].extensionName))
1670
continue;
1671
1672
/* check encoder support */
1673
const uint32_t spec_version =
1674
vn_info_extension_spec_version(props->extensionName);
1675
if (!spec_version)
1676
continue;
1677
1678
physical_dev->renderer_extensions.extensions[i] = true;
1679
physical_dev->extension_spec_versions[i] =
1680
MIN2(exts[j].specVersion, spec_version);
1681
1682
break;
1683
}
1684
}
1685
1686
vk_free(alloc, exts);
1687
1688
return VK_SUCCESS;
1689
}
1690
1691
static VkResult
1692
vn_physical_device_init_renderer_version(
1693
struct vn_physical_device *physical_dev)
1694
{
1695
struct vn_instance *instance = physical_dev->instance;
1696
1697
/*
1698
* We either check and enable VK_KHR_get_physical_device_properties2, or we
1699
* must use vkGetPhysicalDeviceProperties to get the device-level version.
1700
*/
1701
VkPhysicalDeviceProperties props;
1702
vn_call_vkGetPhysicalDeviceProperties(
1703
instance, vn_physical_device_to_handle(physical_dev), &props);
1704
if (props.apiVersion < VN_MIN_RENDERER_VERSION) {
1705
if (VN_DEBUG(INIT)) {
1706
vn_log(instance, "%s has unsupported renderer device version %d.%d",
1707
props.deviceName, VK_VERSION_MAJOR(props.apiVersion),
1708
VK_VERSION_MINOR(props.apiVersion));
1709
}
1710
return VK_ERROR_INITIALIZATION_FAILED;
1711
}
1712
1713
/* device version for internal use is capped */
1714
physical_dev->renderer_version =
1715
MIN3(props.apiVersion, instance->renderer_api_version,
1716
instance->renderer_info.vk_xml_version);
1717
1718
return VK_SUCCESS;
1719
}
1720
1721
static VkResult
1722
vn_physical_device_init(struct vn_physical_device *physical_dev)
1723
{
1724
struct vn_instance *instance = physical_dev->instance;
1725
const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1726
1727
VkResult result = vn_physical_device_init_renderer_version(physical_dev);
1728
if (result != VK_SUCCESS)
1729
return result;
1730
1731
result = vn_physical_device_init_renderer_extensions(physical_dev);
1732
if (result != VK_SUCCESS)
1733
return result;
1734
1735
vn_physical_device_init_supported_extensions(physical_dev);
1736
1737
/* TODO query all caps with minimal round trips */
1738
vn_physical_device_init_features(physical_dev);
1739
vn_physical_device_init_properties(physical_dev);
1740
1741
result = vn_physical_device_init_queue_family_properties(physical_dev);
1742
if (result != VK_SUCCESS)
1743
goto fail;
1744
1745
vn_physical_device_init_memory_properties(physical_dev);
1746
1747
vn_physical_device_init_external_memory(physical_dev);
1748
vn_physical_device_init_external_fence_handles(physical_dev);
1749
vn_physical_device_init_external_semaphore_handles(physical_dev);
1750
1751
result = vn_wsi_init(physical_dev);
1752
if (result != VK_SUCCESS)
1753
goto fail;
1754
1755
return VK_SUCCESS;
1756
1757
fail:
1758
vk_free(alloc, physical_dev->extension_spec_versions);
1759
vk_free(alloc, physical_dev->queue_family_properties);
1760
return result;
1761
}
1762
1763
static void
1764
vn_physical_device_fini(struct vn_physical_device *physical_dev)
1765
{
1766
struct vn_instance *instance = physical_dev->instance;
1767
const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1768
1769
vn_wsi_fini(physical_dev);
1770
vk_free(alloc, physical_dev->extension_spec_versions);
1771
vk_free(alloc, physical_dev->queue_family_properties);
1772
1773
vn_physical_device_base_fini(&physical_dev->base);
1774
}
1775
1776
static VkResult
1777
vn_instance_enumerate_physical_devices(struct vn_instance *instance)
1778
{
1779
/* TODO cache device group info here as well */
1780
const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
1781
struct vn_physical_device *physical_devs = NULL;
1782
VkResult result;
1783
1784
mtx_lock(&instance->physical_device_mutex);
1785
1786
if (instance->physical_devices) {
1787
result = VK_SUCCESS;
1788
goto out;
1789
}
1790
1791
uint32_t count;
1792
result = vn_call_vkEnumeratePhysicalDevices(
1793
instance, vn_instance_to_handle(instance), &count, NULL);
1794
if (result != VK_SUCCESS || !count)
1795
goto out;
1796
1797
physical_devs =
1798
vk_zalloc(alloc, sizeof(*physical_devs) * count, VN_DEFAULT_ALIGN,
1799
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
1800
if (!physical_devs) {
1801
result = VK_ERROR_OUT_OF_HOST_MEMORY;
1802
goto out;
1803
}
1804
1805
VkPhysicalDevice *handles =
1806
vk_alloc(alloc, sizeof(*handles) * count, VN_DEFAULT_ALIGN,
1807
VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
1808
if (!handles) {
1809
result = VK_ERROR_OUT_OF_HOST_MEMORY;
1810
goto out;
1811
}
1812
1813
for (uint32_t i = 0; i < count; i++) {
1814
struct vn_physical_device *physical_dev = &physical_devs[i];
1815
1816
struct vk_physical_device_dispatch_table dispatch_table;
1817
vk_physical_device_dispatch_table_from_entrypoints(
1818
&dispatch_table, &vn_physical_device_entrypoints, true);
1819
result = vn_physical_device_base_init(
1820
&physical_dev->base, &instance->base, NULL, &dispatch_table);
1821
if (result != VK_SUCCESS) {
1822
count = i;
1823
goto out;
1824
}
1825
1826
physical_dev->instance = instance;
1827
1828
handles[i] = vn_physical_device_to_handle(physical_dev);
1829
}
1830
1831
result = vn_call_vkEnumeratePhysicalDevices(
1832
instance, vn_instance_to_handle(instance), &count, handles);
1833
vk_free(alloc, handles);
1834
1835
if (result != VK_SUCCESS)
1836
goto out;
1837
1838
uint32_t i = 0;
1839
while (i < count) {
1840
struct vn_physical_device *physical_dev = &physical_devs[i];
1841
1842
result = vn_physical_device_init(physical_dev);
1843
if (result != VK_SUCCESS) {
1844
vn_physical_device_base_fini(&physical_devs[i].base);
1845
memmove(&physical_devs[i], &physical_devs[i + 1],
1846
sizeof(*physical_devs) * (count - i - 1));
1847
count--;
1848
continue;
1849
}
1850
1851
i++;
1852
}
1853
1854
if (count) {
1855
instance->physical_devices = physical_devs;
1856
instance->physical_device_count = count;
1857
result = VK_SUCCESS;
1858
}
1859
1860
out:
1861
if (result != VK_SUCCESS && physical_devs) {
1862
for (uint32_t i = 0; i < count; i++)
1863
vn_physical_device_base_fini(&physical_devs[i].base);
1864
vk_free(alloc, physical_devs);
1865
}
1866
1867
mtx_unlock(&instance->physical_device_mutex);
1868
return result;
1869
}
1870
1871
/* instance commands */
1872
1873
VkResult
1874
vn_EnumerateInstanceVersion(uint32_t *pApiVersion)
1875
{
1876
*pApiVersion = VN_MAX_API_VERSION;
1877
return VK_SUCCESS;
1878
}
1879
1880
VkResult
1881
vn_EnumerateInstanceExtensionProperties(const char *pLayerName,
1882
uint32_t *pPropertyCount,
1883
VkExtensionProperties *pProperties)
1884
{
1885
if (pLayerName)
1886
return vn_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1887
1888
return vk_enumerate_instance_extension_properties(
1889
&vn_instance_supported_extensions, pPropertyCount, pProperties);
1890
}
1891
1892
VkResult
1893
vn_EnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
1894
VkLayerProperties *pProperties)
1895
{
1896
*pPropertyCount = 0;
1897
return VK_SUCCESS;
1898
}
1899
1900
/* Create a venus instance: allocate the object, bring up the renderer
 * connection and its ring, then forward vkCreateInstance to the renderer
 * (with extensions stripped and the app's apiVersion raised to at least the
 * renderer's).  The fail path tears down exactly what succeeded, keyed off
 * which fields are non-NULL; its steps must stay in sync with the init
 * order above it.
 */
VkResult
vn_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
                  const VkAllocationCallbacks *pAllocator,
                  VkInstance *pInstance)
{
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : vk_default_allocator();
   struct vn_instance *instance;
   VkResult result;

   vn_debug_init();

   instance = vk_zalloc(alloc, sizeof(*instance), VN_DEFAULT_ALIGN,
                        VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!instance)
      return vn_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);

   struct vk_instance_dispatch_table dispatch_table;
   vk_instance_dispatch_table_from_entrypoints(
      &dispatch_table, &vn_instance_entrypoints, true);
   result = vn_instance_base_init(&instance->base,
                                  &vn_instance_supported_extensions,
                                  &dispatch_table, pCreateInfo, alloc);
   if (result != VK_SUCCESS) {
      /* base_init failed: nothing else to tear down */
      vk_free(alloc, instance);
      return vn_error(NULL, result);
   }

   mtx_init(&instance->physical_device_mutex, mtx_plain);

   if (!vn_icd_supports_api_version(
          instance->base.base.app_info.api_version)) {
      result = VK_ERROR_INCOMPATIBLE_DRIVER;
      goto fail;
   }

   /* layers are not supported */
   if (pCreateInfo->enabledLayerCount) {
      result = VK_ERROR_LAYER_NOT_PRESENT;
      goto fail;
   }

   result = vn_instance_init_renderer(instance);
   if (result != VK_SUCCESS)
      goto fail;

   result = vn_instance_init_ring(instance);
   if (result != VK_SUCCESS)
      goto fail;

   vn_instance_init_experimental_features(instance);

   result = vn_instance_init_renderer_versions(instance);
   if (result != VK_SUCCESS)
      goto fail;

   /* the renderer is not given our instance extensions; strip them from the
    * create info forwarded below
    */
   VkInstanceCreateInfo local_create_info = *pCreateInfo;
   local_create_info.ppEnabledExtensionNames = NULL;
   local_create_info.enabledExtensionCount = 0;
   pCreateInfo = &local_create_info;

   /* bump the forwarded apiVersion up to the renderer's so the renderer
    * instance is created at a version it supports
    */
   VkApplicationInfo local_app_info;
   if (instance->base.base.app_info.api_version <
       instance->renderer_api_version) {
      if (pCreateInfo->pApplicationInfo) {
         local_app_info = *pCreateInfo->pApplicationInfo;
         local_app_info.apiVersion = instance->renderer_api_version;
      } else {
         local_app_info = (const VkApplicationInfo){
            .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
            .apiVersion = instance->renderer_api_version,
         };
      }
      local_create_info.pApplicationInfo = &local_app_info;
   }

   VkInstance instance_handle = vn_instance_to_handle(instance);
   result =
      vn_call_vkCreateInstance(instance, pCreateInfo, NULL, &instance_handle);
   if (result != VK_SUCCESS)
      goto fail;

   driParseOptionInfo(&instance->available_dri_options, vn_dri_options,
                      ARRAY_SIZE(vn_dri_options));
   driParseConfigFiles(&instance->dri_options,
                       &instance->available_dri_options, 0, "venus", NULL,
                       instance->base.base.app_info.app_name,
                       instance->base.base.app_info.app_version,
                       instance->base.base.app_info.engine_name,
                       instance->base.base.app_info.engine_version);

   *pInstance = instance_handle;

   return VK_SUCCESS;

fail:
   /* reply shmem exists once the ring reply area was set up */
   if (instance->reply.shmem)
      vn_renderer_shmem_unref(instance->renderer, instance->reply.shmem);

   /* ring shmem exists once vn_instance_init_ring succeeded; tell the
    * renderer to destroy its side before freeing ours
    */
   if (instance->ring.shmem) {
      uint32_t destroy_ring_data[4];
      struct vn_cs_encoder local_enc = VN_CS_ENCODER_INITIALIZER_LOCAL(
         destroy_ring_data, sizeof(destroy_ring_data));
      vn_encode_vkDestroyRingMESA(&local_enc, 0, instance->ring.id);
      vn_renderer_submit_simple(instance->renderer, destroy_ring_data,
                                vn_cs_encoder_get_len(&local_enc));

      vn_cs_encoder_fini(&instance->ring.upload);
      vn_renderer_shmem_unref(instance->renderer, instance->ring.shmem);
      vn_ring_fini(&instance->ring.ring);
      mtx_destroy(&instance->ring.mutex);
   }

   if (instance->renderer) {
      mtx_destroy(&instance->roundtrip_mutex);
      vn_renderer_destroy(instance->renderer, alloc);
   }

   mtx_destroy(&instance->physical_device_mutex);

   vn_instance_base_fini(&instance->base);
   vk_free(alloc, instance);

   return vn_error(NULL, result);
}
2024
2025
void
2026
vn_DestroyInstance(VkInstance _instance,
2027
const VkAllocationCallbacks *pAllocator)
2028
{
2029
struct vn_instance *instance = vn_instance_from_handle(_instance);
2030
const VkAllocationCallbacks *alloc =
2031
pAllocator ? pAllocator : &instance->base.base.alloc;
2032
2033
if (!instance)
2034
return;
2035
2036
if (instance->physical_devices) {
2037
for (uint32_t i = 0; i < instance->physical_device_count; i++)
2038
vn_physical_device_fini(&instance->physical_devices[i]);
2039
vk_free(alloc, instance->physical_devices);
2040
}
2041
2042
vn_call_vkDestroyInstance(instance, _instance, NULL);
2043
2044
vn_renderer_shmem_unref(instance->renderer, instance->reply.shmem);
2045
2046
uint32_t destroy_ring_data[4];
2047
struct vn_cs_encoder local_enc = VN_CS_ENCODER_INITIALIZER_LOCAL(
2048
destroy_ring_data, sizeof(destroy_ring_data));
2049
vn_encode_vkDestroyRingMESA(&local_enc, 0, instance->ring.id);
2050
vn_renderer_submit_simple(instance->renderer, destroy_ring_data,
2051
vn_cs_encoder_get_len(&local_enc));
2052
2053
vn_cs_encoder_fini(&instance->ring.upload);
2054
vn_ring_fini(&instance->ring.ring);
2055
mtx_destroy(&instance->ring.mutex);
2056
vn_renderer_shmem_unref(instance->renderer, instance->ring.shmem);
2057
2058
mtx_destroy(&instance->roundtrip_mutex);
2059
vn_renderer_destroy(instance->renderer, alloc);
2060
2061
mtx_destroy(&instance->physical_device_mutex);
2062
2063
driDestroyOptionCache(&instance->dri_options);
2064
driDestroyOptionInfo(&instance->available_dri_options);
2065
2066
vn_instance_base_fini(&instance->base);
2067
vk_free(alloc, instance);
2068
}
2069
2070
PFN_vkVoidFunction
2071
vn_GetInstanceProcAddr(VkInstance _instance, const char *pName)
2072
{
2073
struct vn_instance *instance = vn_instance_from_handle(_instance);
2074
return vk_instance_get_proc_addr(&instance->base.base,
2075
&vn_instance_entrypoints, pName);
2076
}
2077
2078
/* physical device commands */
2079
2080
VkResult
2081
vn_EnumeratePhysicalDevices(VkInstance _instance,
2082
uint32_t *pPhysicalDeviceCount,
2083
VkPhysicalDevice *pPhysicalDevices)
2084
{
2085
struct vn_instance *instance = vn_instance_from_handle(_instance);
2086
2087
VkResult result = vn_instance_enumerate_physical_devices(instance);
2088
if (result != VK_SUCCESS)
2089
return vn_error(instance, result);
2090
2091
VK_OUTARRAY_MAKE(out, pPhysicalDevices, pPhysicalDeviceCount);
2092
for (uint32_t i = 0; i < instance->physical_device_count; i++) {
2093
vk_outarray_append(&out, physical_dev) {
2094
*physical_dev =
2095
vn_physical_device_to_handle(&instance->physical_devices[i]);
2096
}
2097
}
2098
2099
return vk_outarray_status(&out);
2100
}
2101
2102
/* Enumerate physical device groups via the renderer.  The encoder treats
 * the VkPhysicalDevice handles inside VkPhysicalDeviceGroupProperties as
 * inputs, so temporary "dummy" objects are planted in every slot before the
 * call; afterwards the renderer-assigned object ids stored in the dummies
 * are mapped back to our real vn_physical_device handles.
 */
VkResult
vn_EnumeratePhysicalDeviceGroups(
   VkInstance _instance,
   uint32_t *pPhysicalDeviceGroupCount,
   VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
{
   struct vn_instance *instance = vn_instance_from_handle(_instance);
   const VkAllocationCallbacks *alloc = &instance->base.base.alloc;
   struct vn_physical_device_base *dummy = NULL;
   VkResult result;

   result = vn_instance_enumerate_physical_devices(instance);
   if (result != VK_SUCCESS)
      return vn_error(instance, result);

   /* zero-sized output array: report incomplete if there are any devices */
   if (pPhysicalDeviceGroupProperties && *pPhysicalDeviceGroupCount == 0)
      return instance->physical_device_count ? VK_INCOMPLETE : VK_SUCCESS;

   /* make sure VkPhysicalDevice point to objects, as they are considered
    * inputs by the encoder
    */
   if (pPhysicalDeviceGroupProperties) {
      const uint32_t count = *pPhysicalDeviceGroupCount;
      const size_t size = sizeof(*dummy) * VK_MAX_DEVICE_GROUP_SIZE * count;

      dummy = vk_zalloc(alloc, size, VN_DEFAULT_ALIGN,
                        VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
      if (!dummy)
         return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);

      /* fill every slot of every group with a typed dummy object */
      for (uint32_t i = 0; i < count; i++) {
         VkPhysicalDeviceGroupProperties *props =
            &pPhysicalDeviceGroupProperties[i];

         for (uint32_t j = 0; j < VK_MAX_DEVICE_GROUP_SIZE; j++) {
            struct vn_physical_device_base *obj =
               &dummy[VK_MAX_DEVICE_GROUP_SIZE * i + j];
            obj->base.base.type = VK_OBJECT_TYPE_PHYSICAL_DEVICE;
            props->physicalDevices[j] = (VkPhysicalDevice)obj;
         }
      }
   }

   result = vn_call_vkEnumeratePhysicalDeviceGroups(
      instance, _instance, pPhysicalDeviceGroupCount,
      pPhysicalDeviceGroupProperties);
   if (result != VK_SUCCESS) {
      if (dummy)
         vk_free(alloc, dummy);
      return vn_error(instance, result);
   }

   /* replace the dummy handles with the real ones, looked up by the object
    * id the renderer wrote into each dummy
    */
   if (pPhysicalDeviceGroupProperties) {
      for (uint32_t i = 0; i < *pPhysicalDeviceGroupCount; i++) {
         VkPhysicalDeviceGroupProperties *props =
            &pPhysicalDeviceGroupProperties[i];
         for (uint32_t j = 0; j < props->physicalDeviceCount; j++) {
            const vn_object_id id =
               dummy[VK_MAX_DEVICE_GROUP_SIZE * i + j].id;
            struct vn_physical_device *physical_dev =
               vn_instance_find_physical_device(instance, id);
            props->physicalDevices[j] =
               vn_physical_device_to_handle(physical_dev);
         }
      }
   }

   if (dummy)
      vk_free(alloc, dummy);

   return VK_SUCCESS;
}
2174
2175
void
2176
vn_GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
2177
VkPhysicalDeviceFeatures *pFeatures)
2178
{
2179
struct vn_physical_device *physical_dev =
2180
vn_physical_device_from_handle(physicalDevice);
2181
2182
*pFeatures = physical_dev->features.features;
2183
}
2184
2185
void
2186
vn_GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
2187
VkPhysicalDeviceProperties *pProperties)
2188
{
2189
struct vn_physical_device *physical_dev =
2190
vn_physical_device_from_handle(physicalDevice);
2191
2192
*pProperties = physical_dev->properties.properties;
2193
}
2194
2195
void
2196
vn_GetPhysicalDeviceQueueFamilyProperties(
2197
VkPhysicalDevice physicalDevice,
2198
uint32_t *pQueueFamilyPropertyCount,
2199
VkQueueFamilyProperties *pQueueFamilyProperties)
2200
{
2201
struct vn_physical_device *physical_dev =
2202
vn_physical_device_from_handle(physicalDevice);
2203
2204
VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pQueueFamilyPropertyCount);
2205
for (uint32_t i = 0; i < physical_dev->queue_family_count; i++) {
2206
vk_outarray_append(&out, props) {
2207
*props =
2208
physical_dev->queue_family_properties[i].queueFamilyProperties;
2209
}
2210
}
2211
}
2212
2213
void
2214
vn_GetPhysicalDeviceMemoryProperties(
2215
VkPhysicalDevice physicalDevice,
2216
VkPhysicalDeviceMemoryProperties *pMemoryProperties)
2217
{
2218
struct vn_physical_device *physical_dev =
2219
vn_physical_device_from_handle(physicalDevice);
2220
2221
*pMemoryProperties = physical_dev->memory_properties.memoryProperties;
2222
}
2223
2224
void
2225
vn_GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice,
2226
VkFormat format,
2227
VkFormatProperties *pFormatProperties)
2228
{
2229
struct vn_physical_device *physical_dev =
2230
vn_physical_device_from_handle(physicalDevice);
2231
2232
/* TODO query all formats during init */
2233
vn_call_vkGetPhysicalDeviceFormatProperties(
2234
physical_dev->instance, physicalDevice, format, pFormatProperties);
2235
}
2236
2237
VkResult
2238
vn_GetPhysicalDeviceImageFormatProperties(
2239
VkPhysicalDevice physicalDevice,
2240
VkFormat format,
2241
VkImageType type,
2242
VkImageTiling tiling,
2243
VkImageUsageFlags usage,
2244
VkImageCreateFlags flags,
2245
VkImageFormatProperties *pImageFormatProperties)
2246
{
2247
struct vn_physical_device *physical_dev =
2248
vn_physical_device_from_handle(physicalDevice);
2249
2250
/* TODO per-device cache */
2251
VkResult result = vn_call_vkGetPhysicalDeviceImageFormatProperties(
2252
physical_dev->instance, physicalDevice, format, type, tiling, usage,
2253
flags, pImageFormatProperties);
2254
2255
return vn_result(physical_dev->instance, result);
2256
}
2257
2258
void
2259
vn_GetPhysicalDeviceSparseImageFormatProperties(
2260
VkPhysicalDevice physicalDevice,
2261
VkFormat format,
2262
VkImageType type,
2263
uint32_t samples,
2264
VkImageUsageFlags usage,
2265
VkImageTiling tiling,
2266
uint32_t *pPropertyCount,
2267
VkSparseImageFormatProperties *pProperties)
2268
{
2269
struct vn_physical_device *physical_dev =
2270
vn_physical_device_from_handle(physicalDevice);
2271
2272
/* TODO per-device cache */
2273
vn_call_vkGetPhysicalDeviceSparseImageFormatProperties(
2274
physical_dev->instance, physicalDevice, format, type, samples, usage,
2275
tiling, pPropertyCount, pProperties);
2276
}
2277
2278
/* Fill pFeatures and its pNext chain from the feature structs cached on the
 * physical device.  Core and Vulkan 1.1/1.2 aggregate structs are memcpy'd
 * whole; per-extension structs promoted into 1.1/1.2 are filled field by
 * field from the aggregates.  Unknown sTypes are left untouched.
 */
void
vn_GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                              VkPhysicalDeviceFeatures2 *pFeatures)
{
   struct vn_physical_device *physical_dev =
      vn_physical_device_from_handle(physicalDevice);
   const struct VkPhysicalDeviceVulkan11Features *vk11_feats =
      &physical_dev->vulkan_1_1_features;
   const struct VkPhysicalDeviceVulkan12Features *vk12_feats =
      &physical_dev->vulkan_1_2_features;
   /* view the current chain element as whichever struct its sType says */
   union {
      VkBaseOutStructure *pnext;

      /* Vulkan 1.1 */
      VkPhysicalDevice16BitStorageFeatures *sixteen_bit_storage;
      VkPhysicalDeviceMultiviewFeatures *multiview;
      VkPhysicalDeviceVariablePointersFeatures *variable_pointers;
      VkPhysicalDeviceProtectedMemoryFeatures *protected_memory;
      VkPhysicalDeviceSamplerYcbcrConversionFeatures *sampler_ycbcr_conversion;
      VkPhysicalDeviceShaderDrawParametersFeatures *shader_draw_parameters;

      /* Vulkan 1.2 */
      VkPhysicalDevice8BitStorageFeatures *eight_bit_storage;
      VkPhysicalDeviceShaderAtomicInt64Features *shader_atomic_int64;
      VkPhysicalDeviceShaderFloat16Int8Features *shader_float16_int8;
      VkPhysicalDeviceDescriptorIndexingFeatures *descriptor_indexing;
      VkPhysicalDeviceScalarBlockLayoutFeatures *scalar_block_layout;
      VkPhysicalDeviceImagelessFramebufferFeatures *imageless_framebuffer;
      VkPhysicalDeviceUniformBufferStandardLayoutFeatures
         *uniform_buffer_standard_layout;
      VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures
         *shader_subgroup_extended_types;
      VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures
         *separate_depth_stencil_layouts;
      VkPhysicalDeviceHostQueryResetFeatures *host_query_reset;
      VkPhysicalDeviceTimelineSemaphoreFeatures *timeline_semaphore;
      VkPhysicalDeviceBufferDeviceAddressFeatures *buffer_device_address;
      VkPhysicalDeviceVulkanMemoryModelFeatures *vulkan_memory_model;

      VkPhysicalDeviceTransformFeedbackFeaturesEXT *transform_feedback;
   } u;

   u.pnext = (VkBaseOutStructure *)pFeatures;
   while (u.pnext) {
      /* the memcpy cases clobber pNext; save it so the chain survives */
      void *saved = u.pnext->pNext;
      switch (u.pnext->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2:
         memcpy(u.pnext, &physical_dev->features,
                sizeof(physical_dev->features));
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES:
         memcpy(u.pnext, vk11_feats, sizeof(*vk11_feats));
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES:
         memcpy(u.pnext, vk12_feats, sizeof(*vk12_feats));
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES:
         u.sixteen_bit_storage->storageBuffer16BitAccess =
            vk11_feats->storageBuffer16BitAccess;
         u.sixteen_bit_storage->uniformAndStorageBuffer16BitAccess =
            vk11_feats->uniformAndStorageBuffer16BitAccess;
         u.sixteen_bit_storage->storagePushConstant16 =
            vk11_feats->storagePushConstant16;
         u.sixteen_bit_storage->storageInputOutput16 =
            vk11_feats->storageInputOutput16;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES:
         u.multiview->multiview = vk11_feats->multiview;
         u.multiview->multiviewGeometryShader =
            vk11_feats->multiviewGeometryShader;
         u.multiview->multiviewTessellationShader =
            vk11_feats->multiviewTessellationShader;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES:
         u.variable_pointers->variablePointersStorageBuffer =
            vk11_feats->variablePointersStorageBuffer;
         u.variable_pointers->variablePointers = vk11_feats->variablePointers;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES:
         u.protected_memory->protectedMemory = vk11_feats->protectedMemory;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES:
         u.sampler_ycbcr_conversion->samplerYcbcrConversion =
            vk11_feats->samplerYcbcrConversion;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES:
         u.shader_draw_parameters->shaderDrawParameters =
            vk11_feats->shaderDrawParameters;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES:
         u.eight_bit_storage->storageBuffer8BitAccess =
            vk12_feats->storageBuffer8BitAccess;
         u.eight_bit_storage->uniformAndStorageBuffer8BitAccess =
            vk12_feats->uniformAndStorageBuffer8BitAccess;
         u.eight_bit_storage->storagePushConstant8 =
            vk12_feats->storagePushConstant8;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES:
         u.shader_atomic_int64->shaderBufferInt64Atomics =
            vk12_feats->shaderBufferInt64Atomics;
         u.shader_atomic_int64->shaderSharedInt64Atomics =
            vk12_feats->shaderSharedInt64Atomics;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES:
         u.shader_float16_int8->shaderFloat16 = vk12_feats->shaderFloat16;
         u.shader_float16_int8->shaderInt8 = vk12_feats->shaderInt8;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES:
         u.descriptor_indexing->shaderInputAttachmentArrayDynamicIndexing =
            vk12_feats->shaderInputAttachmentArrayDynamicIndexing;
         u.descriptor_indexing->shaderUniformTexelBufferArrayDynamicIndexing =
            vk12_feats->shaderUniformTexelBufferArrayDynamicIndexing;
         u.descriptor_indexing->shaderStorageTexelBufferArrayDynamicIndexing =
            vk12_feats->shaderStorageTexelBufferArrayDynamicIndexing;
         u.descriptor_indexing->shaderUniformBufferArrayNonUniformIndexing =
            vk12_feats->shaderUniformBufferArrayNonUniformIndexing;
         u.descriptor_indexing->shaderSampledImageArrayNonUniformIndexing =
            vk12_feats->shaderSampledImageArrayNonUniformIndexing;
         u.descriptor_indexing->shaderStorageBufferArrayNonUniformIndexing =
            vk12_feats->shaderStorageBufferArrayNonUniformIndexing;
         u.descriptor_indexing->shaderStorageImageArrayNonUniformIndexing =
            vk12_feats->shaderStorageImageArrayNonUniformIndexing;
         u.descriptor_indexing->shaderInputAttachmentArrayNonUniformIndexing =
            vk12_feats->shaderInputAttachmentArrayNonUniformIndexing;
         u.descriptor_indexing
            ->shaderUniformTexelBufferArrayNonUniformIndexing =
            vk12_feats->shaderUniformTexelBufferArrayNonUniformIndexing;
         u.descriptor_indexing
            ->shaderStorageTexelBufferArrayNonUniformIndexing =
            vk12_feats->shaderStorageTexelBufferArrayNonUniformIndexing;
         u.descriptor_indexing->descriptorBindingUniformBufferUpdateAfterBind =
            vk12_feats->descriptorBindingUniformBufferUpdateAfterBind;
         u.descriptor_indexing->descriptorBindingSampledImageUpdateAfterBind =
            vk12_feats->descriptorBindingSampledImageUpdateAfterBind;
         u.descriptor_indexing->descriptorBindingStorageImageUpdateAfterBind =
            vk12_feats->descriptorBindingStorageImageUpdateAfterBind;
         u.descriptor_indexing->descriptorBindingStorageBufferUpdateAfterBind =
            vk12_feats->descriptorBindingStorageBufferUpdateAfterBind;
         u.descriptor_indexing
            ->descriptorBindingUniformTexelBufferUpdateAfterBind =
            vk12_feats->descriptorBindingUniformTexelBufferUpdateAfterBind;
         u.descriptor_indexing
            ->descriptorBindingStorageTexelBufferUpdateAfterBind =
            vk12_feats->descriptorBindingStorageTexelBufferUpdateAfterBind;
         u.descriptor_indexing->descriptorBindingUpdateUnusedWhilePending =
            vk12_feats->descriptorBindingUpdateUnusedWhilePending;
         u.descriptor_indexing->descriptorBindingPartiallyBound =
            vk12_feats->descriptorBindingPartiallyBound;
         u.descriptor_indexing->descriptorBindingVariableDescriptorCount =
            vk12_feats->descriptorBindingVariableDescriptorCount;
         u.descriptor_indexing->runtimeDescriptorArray =
            vk12_feats->runtimeDescriptorArray;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES:
         u.scalar_block_layout->scalarBlockLayout =
            vk12_feats->scalarBlockLayout;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES:
         u.imageless_framebuffer->imagelessFramebuffer =
            vk12_feats->imagelessFramebuffer;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES:
         u.uniform_buffer_standard_layout->uniformBufferStandardLayout =
            vk12_feats->uniformBufferStandardLayout;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES:
         u.shader_subgroup_extended_types->shaderSubgroupExtendedTypes =
            vk12_feats->shaderSubgroupExtendedTypes;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES:
         u.separate_depth_stencil_layouts->separateDepthStencilLayouts =
            vk12_feats->separateDepthStencilLayouts;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES:
         u.host_query_reset->hostQueryReset = vk12_feats->hostQueryReset;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES:
         u.timeline_semaphore->timelineSemaphore =
            vk12_feats->timelineSemaphore;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES:
         u.buffer_device_address->bufferDeviceAddress =
            vk12_feats->bufferDeviceAddress;
         u.buffer_device_address->bufferDeviceAddressCaptureReplay =
            vk12_feats->bufferDeviceAddressCaptureReplay;
         u.buffer_device_address->bufferDeviceAddressMultiDevice =
            vk12_feats->bufferDeviceAddressMultiDevice;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES:
         u.vulkan_memory_model->vulkanMemoryModel =
            vk12_feats->vulkanMemoryModel;
         u.vulkan_memory_model->vulkanMemoryModelDeviceScope =
            vk12_feats->vulkanMemoryModelDeviceScope;
         u.vulkan_memory_model->vulkanMemoryModelAvailabilityVisibilityChains =
            vk12_feats->vulkanMemoryModelAvailabilityVisibilityChains;
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT:
         memcpy(u.transform_feedback,
                &physical_dev->transform_feedback_features,
                sizeof(physical_dev->transform_feedback_features));
         break;
      default:
         break;
      }
      /* restore the caller's chain link, then advance */
      u.pnext->pNext = saved;

      u.pnext = u.pnext->pNext;
   }
}
2487
2488
void
2489
vn_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
2490
VkPhysicalDeviceProperties2 *pProperties)
2491
{
2492
struct vn_physical_device *physical_dev =
2493
vn_physical_device_from_handle(physicalDevice);
2494
const struct VkPhysicalDeviceVulkan11Properties *vk11_props =
2495
&physical_dev->vulkan_1_1_properties;
2496
const struct VkPhysicalDeviceVulkan12Properties *vk12_props =
2497
&physical_dev->vulkan_1_2_properties;
2498
union {
2499
VkBaseOutStructure *pnext;
2500
2501
/* Vulkan 1.1 */
2502
VkPhysicalDeviceIDProperties *id;
2503
VkPhysicalDeviceSubgroupProperties *subgroup;
2504
VkPhysicalDevicePointClippingProperties *point_clipping;
2505
VkPhysicalDeviceMultiviewProperties *multiview;
2506
VkPhysicalDeviceProtectedMemoryProperties *protected_memory;
2507
VkPhysicalDeviceMaintenance3Properties *maintenance_3;
2508
2509
/* Vulkan 1.2 */
2510
VkPhysicalDeviceDriverProperties *driver;
2511
VkPhysicalDeviceFloatControlsProperties *float_controls;
2512
VkPhysicalDeviceDescriptorIndexingProperties *descriptor_indexing;
2513
VkPhysicalDeviceDepthStencilResolveProperties *depth_stencil_resolve;
2514
VkPhysicalDeviceSamplerFilterMinmaxProperties *sampler_filter_minmax;
2515
VkPhysicalDeviceTimelineSemaphoreProperties *timeline_semaphore;
2516
2517
VkPhysicalDevicePCIBusInfoPropertiesEXT *pci_bus_info;
2518
VkPhysicalDeviceTransformFeedbackPropertiesEXT *transform_feedback;
2519
VkPhysicalDevicePresentationPropertiesANDROID *presentation_properties;
2520
} u;
2521
2522
u.pnext = (VkBaseOutStructure *)pProperties;
2523
while (u.pnext) {
2524
void *saved = u.pnext->pNext;
2525
switch ((int32_t)u.pnext->sType) {
2526
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2:
2527
memcpy(u.pnext, &physical_dev->properties,
2528
sizeof(physical_dev->properties));
2529
break;
2530
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES:
2531
memcpy(u.pnext, vk11_props, sizeof(*vk11_props));
2532
break;
2533
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES:
2534
memcpy(u.pnext, vk12_props, sizeof(*vk12_props));
2535
break;
2536
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES:
2537
memcpy(u.id->deviceUUID, vk11_props->deviceUUID,
2538
sizeof(vk11_props->deviceUUID));
2539
memcpy(u.id->driverUUID, vk11_props->driverUUID,
2540
sizeof(vk11_props->driverUUID));
2541
memcpy(u.id->deviceLUID, vk11_props->deviceLUID,
2542
sizeof(vk11_props->deviceLUID));
2543
u.id->deviceNodeMask = vk11_props->deviceNodeMask;
2544
u.id->deviceLUIDValid = vk11_props->deviceLUIDValid;
2545
break;
2546
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES:
2547
u.subgroup->subgroupSize = vk11_props->subgroupSize;
2548
u.subgroup->supportedStages = vk11_props->subgroupSupportedStages;
2549
u.subgroup->supportedOperations =
2550
vk11_props->subgroupSupportedOperations;
2551
u.subgroup->quadOperationsInAllStages =
2552
vk11_props->subgroupQuadOperationsInAllStages;
2553
break;
2554
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES:
2555
u.point_clipping->pointClippingBehavior =
2556
vk11_props->pointClippingBehavior;
2557
break;
2558
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES:
2559
u.multiview->maxMultiviewViewCount =
2560
vk11_props->maxMultiviewViewCount;
2561
u.multiview->maxMultiviewInstanceIndex =
2562
vk11_props->maxMultiviewInstanceIndex;
2563
break;
2564
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES:
2565
u.protected_memory->protectedNoFault = vk11_props->protectedNoFault;
2566
break;
2567
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES:
2568
u.maintenance_3->maxPerSetDescriptors =
2569
vk11_props->maxPerSetDescriptors;
2570
u.maintenance_3->maxMemoryAllocationSize =
2571
vk11_props->maxMemoryAllocationSize;
2572
break;
2573
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES:
2574
u.driver->driverID = vk12_props->driverID;
2575
memcpy(u.driver->driverName, vk12_props->driverName,
2576
sizeof(vk12_props->driverName));
2577
memcpy(u.driver->driverInfo, vk12_props->driverInfo,
2578
sizeof(vk12_props->driverInfo));
2579
u.driver->conformanceVersion = vk12_props->conformanceVersion;
2580
break;
2581
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES:
2582
u.float_controls->denormBehaviorIndependence =
2583
vk12_props->denormBehaviorIndependence;
2584
u.float_controls->roundingModeIndependence =
2585
vk12_props->roundingModeIndependence;
2586
u.float_controls->shaderSignedZeroInfNanPreserveFloat16 =
2587
vk12_props->shaderSignedZeroInfNanPreserveFloat16;
2588
u.float_controls->shaderSignedZeroInfNanPreserveFloat32 =
2589
vk12_props->shaderSignedZeroInfNanPreserveFloat32;
2590
u.float_controls->shaderSignedZeroInfNanPreserveFloat64 =
2591
vk12_props->shaderSignedZeroInfNanPreserveFloat64;
2592
u.float_controls->shaderDenormPreserveFloat16 =
2593
vk12_props->shaderDenormPreserveFloat16;
2594
u.float_controls->shaderDenormPreserveFloat32 =
2595
vk12_props->shaderDenormPreserveFloat32;
2596
u.float_controls->shaderDenormPreserveFloat64 =
2597
vk12_props->shaderDenormPreserveFloat64;
2598
u.float_controls->shaderDenormFlushToZeroFloat16 =
2599
vk12_props->shaderDenormFlushToZeroFloat16;
2600
u.float_controls->shaderDenormFlushToZeroFloat32 =
2601
vk12_props->shaderDenormFlushToZeroFloat32;
2602
u.float_controls->shaderDenormFlushToZeroFloat64 =
2603
vk12_props->shaderDenormFlushToZeroFloat64;
2604
u.float_controls->shaderRoundingModeRTEFloat16 =
2605
vk12_props->shaderRoundingModeRTEFloat16;
2606
u.float_controls->shaderRoundingModeRTEFloat32 =
2607
vk12_props->shaderRoundingModeRTEFloat32;
2608
u.float_controls->shaderRoundingModeRTEFloat64 =
2609
vk12_props->shaderRoundingModeRTEFloat64;
2610
u.float_controls->shaderRoundingModeRTZFloat16 =
2611
vk12_props->shaderRoundingModeRTZFloat16;
2612
u.float_controls->shaderRoundingModeRTZFloat32 =
2613
vk12_props->shaderRoundingModeRTZFloat32;
2614
u.float_controls->shaderRoundingModeRTZFloat64 =
2615
vk12_props->shaderRoundingModeRTZFloat64;
2616
break;
2617
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES:
2618
u.descriptor_indexing->maxUpdateAfterBindDescriptorsInAllPools =
2619
vk12_props->maxUpdateAfterBindDescriptorsInAllPools;
2620
u.descriptor_indexing
2621
->shaderUniformBufferArrayNonUniformIndexingNative =
2622
vk12_props->shaderUniformBufferArrayNonUniformIndexingNative;
2623
u.descriptor_indexing
2624
->shaderSampledImageArrayNonUniformIndexingNative =
2625
vk12_props->shaderSampledImageArrayNonUniformIndexingNative;
2626
u.descriptor_indexing
2627
->shaderStorageBufferArrayNonUniformIndexingNative =
2628
vk12_props->shaderStorageBufferArrayNonUniformIndexingNative;
2629
u.descriptor_indexing
2630
->shaderStorageImageArrayNonUniformIndexingNative =
2631
vk12_props->shaderStorageImageArrayNonUniformIndexingNative;
2632
u.descriptor_indexing
2633
->shaderInputAttachmentArrayNonUniformIndexingNative =
2634
vk12_props->shaderInputAttachmentArrayNonUniformIndexingNative;
2635
u.descriptor_indexing->robustBufferAccessUpdateAfterBind =
2636
vk12_props->robustBufferAccessUpdateAfterBind;
2637
u.descriptor_indexing->quadDivergentImplicitLod =
2638
vk12_props->quadDivergentImplicitLod;
2639
u.descriptor_indexing->maxPerStageDescriptorUpdateAfterBindSamplers =
2640
vk12_props->maxPerStageDescriptorUpdateAfterBindSamplers;
2641
u.descriptor_indexing
2642
->maxPerStageDescriptorUpdateAfterBindUniformBuffers =
2643
vk12_props->maxPerStageDescriptorUpdateAfterBindUniformBuffers;
2644
u.descriptor_indexing
2645
->maxPerStageDescriptorUpdateAfterBindStorageBuffers =
2646
vk12_props->maxPerStageDescriptorUpdateAfterBindStorageBuffers;
2647
u.descriptor_indexing
2648
->maxPerStageDescriptorUpdateAfterBindSampledImages =
2649
vk12_props->maxPerStageDescriptorUpdateAfterBindSampledImages;
2650
u.descriptor_indexing
2651
->maxPerStageDescriptorUpdateAfterBindStorageImages =
2652
vk12_props->maxPerStageDescriptorUpdateAfterBindStorageImages;
2653
u.descriptor_indexing
2654
->maxPerStageDescriptorUpdateAfterBindInputAttachments =
2655
vk12_props->maxPerStageDescriptorUpdateAfterBindInputAttachments;
2656
u.descriptor_indexing->maxPerStageUpdateAfterBindResources =
2657
vk12_props->maxPerStageUpdateAfterBindResources;
2658
u.descriptor_indexing->maxDescriptorSetUpdateAfterBindSamplers =
2659
vk12_props->maxDescriptorSetUpdateAfterBindSamplers;
2660
u.descriptor_indexing->maxDescriptorSetUpdateAfterBindUniformBuffers =
2661
vk12_props->maxDescriptorSetUpdateAfterBindUniformBuffers;
2662
u.descriptor_indexing
2663
->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic =
2664
vk12_props->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
2665
u.descriptor_indexing->maxDescriptorSetUpdateAfterBindStorageBuffers =
2666
vk12_props->maxDescriptorSetUpdateAfterBindStorageBuffers;
2667
u.descriptor_indexing
2668
->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic =
2669
vk12_props->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
2670
u.descriptor_indexing->maxDescriptorSetUpdateAfterBindSampledImages =
2671
vk12_props->maxDescriptorSetUpdateAfterBindSampledImages;
2672
u.descriptor_indexing->maxDescriptorSetUpdateAfterBindStorageImages =
2673
vk12_props->maxDescriptorSetUpdateAfterBindStorageImages;
2674
u.descriptor_indexing
2675
->maxDescriptorSetUpdateAfterBindInputAttachments =
2676
vk12_props->maxDescriptorSetUpdateAfterBindInputAttachments;
2677
break;
2678
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES:
2679
u.depth_stencil_resolve->supportedDepthResolveModes =
2680
vk12_props->supportedDepthResolveModes;
2681
u.depth_stencil_resolve->supportedStencilResolveModes =
2682
vk12_props->supportedStencilResolveModes;
2683
u.depth_stencil_resolve->independentResolveNone =
2684
vk12_props->independentResolveNone;
2685
u.depth_stencil_resolve->independentResolve =
2686
vk12_props->independentResolve;
2687
break;
2688
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES:
2689
u.sampler_filter_minmax->filterMinmaxSingleComponentFormats =
2690
vk12_props->filterMinmaxSingleComponentFormats;
2691
u.sampler_filter_minmax->filterMinmaxImageComponentMapping =
2692
vk12_props->filterMinmaxImageComponentMapping;
2693
break;
2694
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES:
2695
u.timeline_semaphore->maxTimelineSemaphoreValueDifference =
2696
vk12_props->maxTimelineSemaphoreValueDifference;
2697
break;
2698
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
2699
/* this is used by WSI */
2700
if (physical_dev->instance->renderer_info.pci.has_bus_info) {
2701
u.pci_bus_info->pciDomain =
2702
physical_dev->instance->renderer_info.pci.domain;
2703
u.pci_bus_info->pciBus =
2704
physical_dev->instance->renderer_info.pci.bus;
2705
u.pci_bus_info->pciDevice =
2706
physical_dev->instance->renderer_info.pci.device;
2707
u.pci_bus_info->pciFunction =
2708
physical_dev->instance->renderer_info.pci.function;
2709
}
2710
break;
2711
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT:
2712
memcpy(u.transform_feedback,
2713
&physical_dev->transform_feedback_properties,
2714
sizeof(physical_dev->transform_feedback_properties));
2715
break;
2716
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID:
2717
u.presentation_properties->sharedImage = VK_FALSE;
2718
break;
2719
default:
2720
break;
2721
}
2722
u.pnext->pNext = saved;
2723
2724
u.pnext = u.pnext->pNext;
2725
}
2726
}
2727
2728
void
2729
vn_GetPhysicalDeviceQueueFamilyProperties2(
2730
VkPhysicalDevice physicalDevice,
2731
uint32_t *pQueueFamilyPropertyCount,
2732
VkQueueFamilyProperties2 *pQueueFamilyProperties)
2733
{
2734
struct vn_physical_device *physical_dev =
2735
vn_physical_device_from_handle(physicalDevice);
2736
2737
VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pQueueFamilyPropertyCount);
2738
for (uint32_t i = 0; i < physical_dev->queue_family_count; i++) {
2739
vk_outarray_append(&out, props) {
2740
*props = physical_dev->queue_family_properties[i];
2741
}
2742
}
2743
}
2744
2745
void
2746
vn_GetPhysicalDeviceMemoryProperties2(
2747
VkPhysicalDevice physicalDevice,
2748
VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
2749
{
2750
struct vn_physical_device *physical_dev =
2751
vn_physical_device_from_handle(physicalDevice);
2752
2753
pMemoryProperties->memoryProperties =
2754
physical_dev->memory_properties.memoryProperties;
2755
}
2756
2757
void
2758
vn_GetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice,
2759
VkFormat format,
2760
VkFormatProperties2 *pFormatProperties)
2761
{
2762
struct vn_physical_device *physical_dev =
2763
vn_physical_device_from_handle(physicalDevice);
2764
2765
/* TODO query all formats during init */
2766
vn_call_vkGetPhysicalDeviceFormatProperties2(
2767
physical_dev->instance, physicalDevice, format, pFormatProperties);
2768
}
2769
2770
struct vn_physical_device_image_format_info {
2771
VkPhysicalDeviceImageFormatInfo2 format;
2772
VkPhysicalDeviceExternalImageFormatInfo external;
2773
VkImageFormatListCreateInfo list;
2774
VkImageStencilUsageCreateInfo stencil_usage;
2775
VkPhysicalDeviceImageDrmFormatModifierInfoEXT modifier;
2776
};
2777
2778
static const VkPhysicalDeviceImageFormatInfo2 *
2779
vn_physical_device_fix_image_format_info(
2780
struct vn_physical_device *physical_dev,
2781
const VkPhysicalDeviceImageFormatInfo2 *info,
2782
struct vn_physical_device_image_format_info *local_info)
2783
{
2784
local_info->format = *info;
2785
VkBaseOutStructure *dst = (void *)&local_info->format;
2786
2787
bool use_modifier = false;
2788
/* we should generate deep copy functions... */
2789
vk_foreach_struct_const(src, info->pNext) {
2790
void *pnext = NULL;
2791
switch (src->sType) {
2792
case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO:
2793
memcpy(&local_info->external, src, sizeof(local_info->external));
2794
use_modifier =
2795
local_info->external.handleType ==
2796
VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2797
local_info->external.handleType =
2798
physical_dev->external_memory.renderer_handle_type;
2799
pnext = &local_info->external;
2800
break;
2801
case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
2802
memcpy(&local_info->list, src, sizeof(local_info->list));
2803
pnext = &local_info->list;
2804
break;
2805
case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT:
2806
memcpy(&local_info->stencil_usage, src,
2807
sizeof(local_info->stencil_usage));
2808
pnext = &local_info->stencil_usage;
2809
break;
2810
default:
2811
break;
2812
}
2813
2814
if (pnext) {
2815
dst->pNext = pnext;
2816
dst = pnext;
2817
}
2818
}
2819
2820
if (use_modifier) {
2821
local_info->format.tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;
2822
if (!vn_android_get_drm_format_modifier_info(&local_info->format,
2823
&local_info->modifier))
2824
return NULL;
2825
}
2826
2827
dst->pNext = use_modifier ? (void *)&local_info->modifier : NULL;
2828
2829
return &local_info->format;
2830
}
2831
2832
VkResult
2833
vn_GetPhysicalDeviceImageFormatProperties2(
2834
VkPhysicalDevice physicalDevice,
2835
const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
2836
VkImageFormatProperties2 *pImageFormatProperties)
2837
{
2838
struct vn_physical_device *physical_dev =
2839
vn_physical_device_from_handle(physicalDevice);
2840
const VkExternalMemoryHandleTypeFlagBits renderer_handle_type =
2841
physical_dev->external_memory.renderer_handle_type;
2842
const VkExternalMemoryHandleTypeFlags supported_handle_types =
2843
physical_dev->external_memory.supported_handle_types;
2844
2845
const VkPhysicalDeviceExternalImageFormatInfo *external_info =
2846
vk_find_struct_const(pImageFormatInfo->pNext,
2847
PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO);
2848
if (external_info && !external_info->handleType)
2849
external_info = NULL;
2850
2851
struct vn_physical_device_image_format_info local_info;
2852
if (external_info) {
2853
if (!(external_info->handleType & supported_handle_types)) {
2854
return vn_error(physical_dev->instance,
2855
VK_ERROR_FORMAT_NOT_SUPPORTED);
2856
}
2857
2858
if (external_info->handleType != renderer_handle_type) {
2859
pImageFormatInfo = vn_physical_device_fix_image_format_info(
2860
physical_dev, pImageFormatInfo, &local_info);
2861
if (!pImageFormatInfo) {
2862
return vn_error(physical_dev->instance,
2863
VK_ERROR_FORMAT_NOT_SUPPORTED);
2864
}
2865
}
2866
}
2867
2868
VkResult result;
2869
/* TODO per-device cache */
2870
result = vn_call_vkGetPhysicalDeviceImageFormatProperties2(
2871
physical_dev->instance, physicalDevice, pImageFormatInfo,
2872
pImageFormatProperties);
2873
if (result != VK_SUCCESS || !external_info)
2874
return vn_result(physical_dev->instance, result);
2875
2876
VkExternalImageFormatProperties *img_props = vk_find_struct(
2877
pImageFormatProperties->pNext, EXTERNAL_IMAGE_FORMAT_PROPERTIES);
2878
VkExternalMemoryProperties *mem_props =
2879
&img_props->externalMemoryProperties;
2880
2881
if (external_info->handleType ==
2882
VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) {
2883
/* AHB backed image requires renderer to support import bit */
2884
if (!(mem_props->externalMemoryFeatures &
2885
VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT))
2886
return vn_error(physical_dev->instance,
2887
VK_ERROR_FORMAT_NOT_SUPPORTED);
2888
2889
mem_props->externalMemoryFeatures =
2890
VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT |
2891
VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT |
2892
VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
2893
mem_props->exportFromImportedHandleTypes =
2894
VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2895
mem_props->compatibleHandleTypes =
2896
VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2897
2898
VkAndroidHardwareBufferUsageANDROID *ahb_usage =
2899
vk_find_struct(pImageFormatProperties->pNext,
2900
ANDROID_HARDWARE_BUFFER_USAGE_ANDROID);
2901
if (ahb_usage) {
2902
ahb_usage->androidHardwareBufferUsage = vn_android_get_ahb_usage(
2903
pImageFormatInfo->usage, pImageFormatInfo->flags);
2904
}
2905
} else {
2906
mem_props->compatibleHandleTypes = supported_handle_types;
2907
mem_props->exportFromImportedHandleTypes =
2908
(mem_props->exportFromImportedHandleTypes & renderer_handle_type)
2909
? supported_handle_types
2910
: 0;
2911
}
2912
2913
return vn_result(physical_dev->instance, result);
2914
}
2915
2916
void
2917
vn_GetPhysicalDeviceSparseImageFormatProperties2(
2918
VkPhysicalDevice physicalDevice,
2919
const VkPhysicalDeviceSparseImageFormatInfo2 *pFormatInfo,
2920
uint32_t *pPropertyCount,
2921
VkSparseImageFormatProperties2 *pProperties)
2922
{
2923
struct vn_physical_device *physical_dev =
2924
vn_physical_device_from_handle(physicalDevice);
2925
2926
/* TODO per-device cache */
2927
vn_call_vkGetPhysicalDeviceSparseImageFormatProperties2(
2928
physical_dev->instance, physicalDevice, pFormatInfo, pPropertyCount,
2929
pProperties);
2930
}
2931
2932
void
2933
vn_GetPhysicalDeviceExternalBufferProperties(
2934
VkPhysicalDevice physicalDevice,
2935
const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo,
2936
VkExternalBufferProperties *pExternalBufferProperties)
2937
{
2938
struct vn_physical_device *physical_dev =
2939
vn_physical_device_from_handle(physicalDevice);
2940
const VkExternalMemoryHandleTypeFlagBits renderer_handle_type =
2941
physical_dev->external_memory.renderer_handle_type;
2942
const VkExternalMemoryHandleTypeFlags supported_handle_types =
2943
physical_dev->external_memory.supported_handle_types;
2944
2945
VkExternalMemoryProperties *props =
2946
&pExternalBufferProperties->externalMemoryProperties;
2947
if (!(pExternalBufferInfo->handleType & supported_handle_types)) {
2948
props->compatibleHandleTypes = pExternalBufferInfo->handleType;
2949
props->exportFromImportedHandleTypes = 0;
2950
props->externalMemoryFeatures = 0;
2951
return;
2952
}
2953
2954
VkPhysicalDeviceExternalBufferInfo local_info;
2955
if (pExternalBufferInfo->handleType != renderer_handle_type) {
2956
local_info = *pExternalBufferInfo;
2957
local_info.handleType = renderer_handle_type;
2958
pExternalBufferInfo = &local_info;
2959
}
2960
2961
/* TODO per-device cache */
2962
vn_call_vkGetPhysicalDeviceExternalBufferProperties(
2963
physical_dev->instance, physicalDevice, pExternalBufferInfo,
2964
pExternalBufferProperties);
2965
2966
if (pExternalBufferInfo->handleType ==
2967
VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID) {
2968
props->compatibleHandleTypes =
2969
VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2970
/* AHB backed buffer requires renderer to support import bit while it
2971
* also requires the renderer to must not advertise dedicated only bit
2972
*/
2973
if (!(props->externalMemoryFeatures &
2974
VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT) ||
2975
(props->externalMemoryFeatures &
2976
VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT)) {
2977
props->externalMemoryFeatures = 0;
2978
props->exportFromImportedHandleTypes = 0;
2979
return;
2980
}
2981
props->externalMemoryFeatures =
2982
VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT |
2983
VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT;
2984
props->exportFromImportedHandleTypes =
2985
VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2986
} else {
2987
props->compatibleHandleTypes = supported_handle_types;
2988
props->exportFromImportedHandleTypes =
2989
(props->exportFromImportedHandleTypes & renderer_handle_type)
2990
? supported_handle_types
2991
: 0;
2992
}
2993
}
2994
2995
void
2996
vn_GetPhysicalDeviceExternalFenceProperties(
2997
VkPhysicalDevice physicalDevice,
2998
const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo,
2999
VkExternalFenceProperties *pExternalFenceProperties)
3000
{
3001
struct vn_physical_device *physical_dev =
3002
vn_physical_device_from_handle(physicalDevice);
3003
3004
if (pExternalFenceInfo->handleType &
3005
physical_dev->external_fence_handles) {
3006
pExternalFenceProperties->compatibleHandleTypes =
3007
physical_dev->external_fence_handles;
3008
pExternalFenceProperties->exportFromImportedHandleTypes =
3009
physical_dev->external_fence_handles;
3010
pExternalFenceProperties->externalFenceFeatures =
3011
VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT |
3012
VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT;
3013
} else {
3014
pExternalFenceProperties->compatibleHandleTypes =
3015
pExternalFenceInfo->handleType;
3016
pExternalFenceProperties->exportFromImportedHandleTypes = 0;
3017
pExternalFenceProperties->externalFenceFeatures = 0;
3018
}
3019
}
3020
3021
void
3022
vn_GetPhysicalDeviceExternalSemaphoreProperties(
3023
VkPhysicalDevice physicalDevice,
3024
const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo,
3025
VkExternalSemaphoreProperties *pExternalSemaphoreProperties)
3026
{
3027
struct vn_physical_device *physical_dev =
3028
vn_physical_device_from_handle(physicalDevice);
3029
3030
const VkSemaphoreTypeCreateInfoKHR *type_info = vk_find_struct_const(
3031
pExternalSemaphoreInfo->pNext, SEMAPHORE_TYPE_CREATE_INFO_KHR);
3032
const VkSemaphoreType sem_type =
3033
type_info ? type_info->semaphoreType : VK_SEMAPHORE_TYPE_BINARY;
3034
const VkExternalSemaphoreHandleTypeFlags valid_handles =
3035
sem_type == VK_SEMAPHORE_TYPE_BINARY
3036
? physical_dev->external_binary_semaphore_handles
3037
: physical_dev->external_timeline_semaphore_handles;
3038
if (pExternalSemaphoreInfo->handleType & valid_handles) {
3039
pExternalSemaphoreProperties->compatibleHandleTypes = valid_handles;
3040
pExternalSemaphoreProperties->exportFromImportedHandleTypes =
3041
valid_handles;
3042
pExternalSemaphoreProperties->externalSemaphoreFeatures =
3043
VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
3044
VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
3045
} else {
3046
pExternalSemaphoreProperties->compatibleHandleTypes =
3047
pExternalSemaphoreInfo->handleType;
3048
pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
3049
pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
3050
}
3051
}
3052
3053
/* device commands */

VkResult
3056
vn_EnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
3057
const char *pLayerName,
3058
uint32_t *pPropertyCount,
3059
VkExtensionProperties *pProperties)
3060
{
3061
struct vn_physical_device *physical_dev =
3062
vn_physical_device_from_handle(physicalDevice);
3063
3064
if (pLayerName)
3065
return vn_error(physical_dev->instance, VK_ERROR_LAYER_NOT_PRESENT);
3066
3067
VK_OUTARRAY_MAKE(out, pProperties, pPropertyCount);
3068
for (uint32_t i = 0; i < VK_DEVICE_EXTENSION_COUNT; i++) {
3069
if (physical_dev->base.base.supported_extensions.extensions[i]) {
3070
vk_outarray_append(&out, prop) {
3071
*prop = vk_device_extensions[i];
3072
prop->specVersion = physical_dev->extension_spec_versions[i];
3073
}
3074
}
3075
}
3076
3077
return vk_outarray_status(&out);
3078
}
3079
3080
VkResult
3081
vn_EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
3082
uint32_t *pPropertyCount,
3083
VkLayerProperties *pProperties)
3084
{
3085
*pPropertyCount = 0;
3086
return VK_SUCCESS;
3087
}
3088
3089
static void
3090
vn_queue_fini(struct vn_queue *queue)
3091
{
3092
if (queue->wait_fence != VK_NULL_HANDLE) {
3093
vn_DestroyFence(vn_device_to_handle(queue->device), queue->wait_fence,
3094
NULL);
3095
}
3096
vn_object_base_fini(&queue->base);
3097
}
3098
3099
static VkResult
3100
vn_queue_init(struct vn_device *dev,
3101
struct vn_queue *queue,
3102
const VkDeviceQueueCreateInfo *queue_info,
3103
uint32_t queue_index)
3104
{
3105
vn_object_base_init(&queue->base, VK_OBJECT_TYPE_QUEUE, &dev->base);
3106
3107
VkQueue queue_handle = vn_queue_to_handle(queue);
3108
vn_async_vkGetDeviceQueue2(
3109
dev->instance, vn_device_to_handle(dev),
3110
&(VkDeviceQueueInfo2){
3111
.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
3112
.flags = queue_info->flags,
3113
.queueFamilyIndex = queue_info->queueFamilyIndex,
3114
.queueIndex = queue_index,
3115
},
3116
&queue_handle);
3117
3118
queue->device = dev;
3119
queue->family = queue_info->queueFamilyIndex;
3120
queue->index = queue_index;
3121
queue->flags = queue_info->flags;
3122
3123
const VkExportFenceCreateInfo export_fence_info = {
3124
.sType = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
3125
.pNext = NULL,
3126
.handleTypes = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
3127
};
3128
const VkFenceCreateInfo fence_info = {
3129
.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
3130
.pNext = dev->instance->experimental.globalFencing == VK_TRUE
3131
? &export_fence_info
3132
: NULL,
3133
.flags = 0,
3134
};
3135
VkResult result = vn_CreateFence(vn_device_to_handle(dev), &fence_info,
3136
NULL, &queue->wait_fence);
3137
if (result != VK_SUCCESS)
3138
return result;
3139
3140
return VK_SUCCESS;
3141
}
3142
3143
static VkResult
3144
vn_device_init_queues(struct vn_device *dev,
3145
const VkDeviceCreateInfo *create_info)
3146
{
3147
const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
3148
3149
uint32_t count = 0;
3150
for (uint32_t i = 0; i < create_info->queueCreateInfoCount; i++)
3151
count += create_info->pQueueCreateInfos[i].queueCount;
3152
3153
struct vn_queue *queues =
3154
vk_zalloc(alloc, sizeof(*queues) * count, VN_DEFAULT_ALIGN,
3155
VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
3156
if (!queues)
3157
return VK_ERROR_OUT_OF_HOST_MEMORY;
3158
3159
VkResult result = VK_SUCCESS;
3160
count = 0;
3161
for (uint32_t i = 0; i < create_info->queueCreateInfoCount; i++) {
3162
const VkDeviceQueueCreateInfo *queue_info =
3163
&create_info->pQueueCreateInfos[i];
3164
for (uint32_t j = 0; j < queue_info->queueCount; j++) {
3165
result = vn_queue_init(dev, &queues[count], queue_info, j);
3166
if (result != VK_SUCCESS)
3167
break;
3168
3169
count++;
3170
}
3171
}
3172
3173
if (result != VK_SUCCESS) {
3174
for (uint32_t i = 0; i < count; i++)
3175
vn_queue_fini(&queues[i]);
3176
vk_free(alloc, queues);
3177
3178
return result;
3179
}
3180
3181
dev->queues = queues;
3182
dev->queue_count = count;
3183
3184
return VK_SUCCESS;
3185
}
3186
3187
/* Returns true when `name` matches (strcmp) one of the first `ext_count`
 * entries of `exts`.  Fix: removed interleaved line-number artifacts.
 */
static bool
find_extension_names(const char *const *exts,
                     uint32_t ext_count,
                     const char *name)
{
   for (uint32_t i = 0; i < ext_count; i++) {
      if (!strcmp(exts[i], name))
         return true;
   }
   return false;
}

static bool
3200
merge_extension_names(const char *const *exts,
3201
uint32_t ext_count,
3202
const char *const *extra_exts,
3203
uint32_t extra_count,
3204
const char *const *block_exts,
3205
uint32_t block_count,
3206
const VkAllocationCallbacks *alloc,
3207
const char *const **out_exts,
3208
uint32_t *out_count)
3209
{
3210
const char **merged =
3211
vk_alloc(alloc, sizeof(*merged) * (ext_count + extra_count),
3212
VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
3213
if (!merged)
3214
return false;
3215
3216
uint32_t count = 0;
3217
for (uint32_t i = 0; i < ext_count; i++) {
3218
if (!find_extension_names(block_exts, block_count, exts[i]))
3219
merged[count++] = exts[i];
3220
}
3221
for (uint32_t i = 0; i < extra_count; i++) {
3222
if (!find_extension_names(exts, ext_count, extra_exts[i]))
3223
merged[count++] = extra_exts[i];
3224
}
3225
3226
*out_exts = merged;
3227
*out_count = count;
3228
return true;
3229
}
3230
3231
static const VkDeviceCreateInfo *
3232
vn_device_fix_create_info(const struct vn_device *dev,
3233
const VkDeviceCreateInfo *dev_info,
3234
const VkAllocationCallbacks *alloc,
3235
VkDeviceCreateInfo *local_info)
3236
{
3237
const struct vn_physical_device *physical_dev = dev->physical_device;
3238
const struct vk_device_extension_table *app_exts =
3239
&dev->base.base.enabled_extensions;
3240
/* extra_exts and block_exts must not overlap */
3241
const char *extra_exts[16];
3242
const char *block_exts[16];
3243
uint32_t extra_count = 0;
3244
uint32_t block_count = 0;
3245
3246
/* fix for WSI (treat AHB as WSI extension for simplicity) */
3247
const bool has_wsi =
3248
app_exts->KHR_swapchain || app_exts->ANDROID_native_buffer ||
3249
app_exts->ANDROID_external_memory_android_hardware_buffer;
3250
if (has_wsi) {
3251
/* KHR_swapchain may be advertised without the renderer support for
3252
* EXT_image_drm_format_modifier
3253
*/
3254
if (!app_exts->EXT_image_drm_format_modifier &&
3255
physical_dev->renderer_extensions.EXT_image_drm_format_modifier) {
3256
extra_exts[extra_count++] =
3257
VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME;
3258
3259
if (physical_dev->renderer_version < VK_API_VERSION_1_2 &&
3260
!app_exts->KHR_image_format_list) {
3261
extra_exts[extra_count++] =
3262
VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME;
3263
}
3264
}
3265
3266
/* XXX KHR_swapchain may be advertised without the renderer support for
3267
* EXT_queue_family_foreign
3268
*/
3269
if (!app_exts->EXT_queue_family_foreign &&
3270
physical_dev->renderer_extensions.EXT_queue_family_foreign) {
3271
extra_exts[extra_count++] =
3272
VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME;
3273
}
3274
3275
if (app_exts->KHR_swapchain) {
3276
/* see vn_physical_device_get_native_extensions */
3277
block_exts[block_count++] = VK_KHR_SWAPCHAIN_EXTENSION_NAME;
3278
block_exts[block_count++] =
3279
VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME;
3280
block_exts[block_count++] =
3281
VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME;
3282
}
3283
3284
if (app_exts->ANDROID_native_buffer)
3285
block_exts[block_count++] = VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME;
3286
3287
if (app_exts->ANDROID_external_memory_android_hardware_buffer) {
3288
block_exts[block_count++] =
3289
VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME;
3290
}
3291
}
3292
3293
if (app_exts->KHR_external_memory_fd ||
3294
app_exts->EXT_external_memory_dma_buf || has_wsi) {
3295
switch (physical_dev->external_memory.renderer_handle_type) {
3296
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
3297
if (!app_exts->EXT_external_memory_dma_buf) {
3298
extra_exts[extra_count++] =
3299
VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME;
3300
}
3301
FALLTHROUGH;
3302
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT:
3303
if (!app_exts->KHR_external_memory_fd) {
3304
extra_exts[extra_count++] =
3305
VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME;
3306
}
3307
break;
3308
default:
3309
/* TODO other handle types */
3310
break;
3311
}
3312
}
3313
3314
assert(extra_count <= ARRAY_SIZE(extra_exts));
3315
assert(block_count <= ARRAY_SIZE(block_exts));
3316
3317
if (!extra_count && (!block_count || !dev_info->enabledExtensionCount))
3318
return dev_info;
3319
3320
*local_info = *dev_info;
3321
if (!merge_extension_names(dev_info->ppEnabledExtensionNames,
3322
dev_info->enabledExtensionCount, extra_exts,
3323
extra_count, block_exts, block_count, alloc,
3324
&local_info->ppEnabledExtensionNames,
3325
&local_info->enabledExtensionCount))
3326
return NULL;
3327
3328
return local_info;
3329
}
3330
3331
VkResult
3332
vn_CreateDevice(VkPhysicalDevice physicalDevice,
3333
const VkDeviceCreateInfo *pCreateInfo,
3334
const VkAllocationCallbacks *pAllocator,
3335
VkDevice *pDevice)
3336
{
3337
struct vn_physical_device *physical_dev =
3338
vn_physical_device_from_handle(physicalDevice);
3339
struct vn_instance *instance = physical_dev->instance;
3340
const VkAllocationCallbacks *alloc =
3341
pAllocator ? pAllocator : &instance->base.base.alloc;
3342
struct vn_device *dev;
3343
VkResult result;
3344
3345
dev = vk_zalloc(alloc, sizeof(*dev), VN_DEFAULT_ALIGN,
3346
VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
3347
if (!dev)
3348
return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
3349
3350
struct vk_device_dispatch_table dispatch_table;
3351
vk_device_dispatch_table_from_entrypoints(&dispatch_table,
3352
&vn_device_entrypoints, true);
3353
result = vn_device_base_init(&dev->base, &physical_dev->base,
3354
&dispatch_table, pCreateInfo, alloc);
3355
if (result != VK_SUCCESS) {
3356
vk_free(alloc, dev);
3357
return vn_error(instance, result);
3358
}
3359
3360
dev->instance = instance;
3361
dev->physical_device = physical_dev;
3362
dev->renderer = instance->renderer;
3363
3364
VkDeviceCreateInfo local_create_info;
3365
pCreateInfo =
3366
vn_device_fix_create_info(dev, pCreateInfo, alloc, &local_create_info);
3367
if (!pCreateInfo) {
3368
result = VK_ERROR_OUT_OF_HOST_MEMORY;
3369
goto fail;
3370
}
3371
3372
VkDevice dev_handle = vn_device_to_handle(dev);
3373
result = vn_call_vkCreateDevice(instance, physicalDevice, pCreateInfo,
3374
NULL, &dev_handle);
3375
if (result != VK_SUCCESS)
3376
goto fail;
3377
3378
result = vn_device_init_queues(dev, pCreateInfo);
3379
if (result != VK_SUCCESS) {
3380
vn_call_vkDestroyDevice(instance, dev_handle, NULL);
3381
goto fail;
3382
}
3383
3384
for (uint32_t i = 0; i < ARRAY_SIZE(dev->memory_pools); i++) {
3385
struct vn_device_memory_pool *pool = &dev->memory_pools[i];
3386
mtx_init(&pool->mutex, mtx_plain);
3387
}
3388
3389
if (dev->base.base.enabled_extensions
3390
.ANDROID_external_memory_android_hardware_buffer) {
3391
result = vn_android_init_ahb_buffer_memory_type_bits(dev);
3392
if (result != VK_SUCCESS) {
3393
vn_call_vkDestroyDevice(instance, dev_handle, NULL);
3394
goto fail;
3395
}
3396
}
3397
3398
*pDevice = dev_handle;
3399
3400
if (pCreateInfo == &local_create_info)
3401
vk_free(alloc, (void *)pCreateInfo->ppEnabledExtensionNames);
3402
3403
return VK_SUCCESS;
3404
3405
fail:
3406
if (pCreateInfo == &local_create_info)
3407
vk_free(alloc, (void *)pCreateInfo->ppEnabledExtensionNames);
3408
vn_device_base_fini(&dev->base);
3409
vk_free(alloc, dev);
3410
return vn_error(instance, result);
3411
}
3412
3413
void
3414
vn_DestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator)
3415
{
3416
struct vn_device *dev = vn_device_from_handle(device);
3417
const VkAllocationCallbacks *alloc =
3418
pAllocator ? pAllocator : &dev->base.base.alloc;
3419
3420
if (!dev)
3421
return;
3422
3423
for (uint32_t i = 0; i < ARRAY_SIZE(dev->memory_pools); i++)
3424
vn_device_memory_pool_fini(dev, i);
3425
3426
for (uint32_t i = 0; i < dev->queue_count; i++)
3427
vn_queue_fini(&dev->queues[i]);
3428
3429
/* We must emit vkDestroyDevice before freeing dev->queues. Otherwise,
3430
* another thread might reuse their object ids while they still refer to
3431
* the queues in the renderer.
3432
*/
3433
vn_async_vkDestroyDevice(dev->instance, device, NULL);
3434
3435
vk_free(alloc, dev->queues);
3436
3437
vn_device_base_fini(&dev->base);
3438
vk_free(alloc, dev);
3439
}
3440
3441
PFN_vkVoidFunction
3442
vn_GetDeviceProcAddr(VkDevice device, const char *pName)
3443
{
3444
struct vn_device *dev = vn_device_from_handle(device);
3445
return vk_device_get_proc_addr(&dev->base.base, pName);
3446
}
3447
3448
void
3449
vn_GetDeviceGroupPeerMemoryFeatures(
3450
VkDevice device,
3451
uint32_t heapIndex,
3452
uint32_t localDeviceIndex,
3453
uint32_t remoteDeviceIndex,
3454
VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
3455
{
3456
struct vn_device *dev = vn_device_from_handle(device);
3457
3458
/* TODO get and cache the values in vkCreateDevice */
3459
vn_call_vkGetDeviceGroupPeerMemoryFeatures(
3460
dev->instance, device, heapIndex, localDeviceIndex, remoteDeviceIndex,
3461
pPeerMemoryFeatures);
3462
}
3463
3464
VkResult
3465
vn_DeviceWaitIdle(VkDevice device)
3466
{
3467
struct vn_device *dev = vn_device_from_handle(device);
3468
3469
for (uint32_t i = 0; i < dev->queue_count; i++) {
3470
struct vn_queue *queue = &dev->queues[i];
3471
VkResult result = vn_QueueWaitIdle(vn_queue_to_handle(queue));
3472
if (result != VK_SUCCESS)
3473
return vn_error(dev->instance, result);
3474
}
3475
3476
return VK_SUCCESS;
3477
}
3478
3479