Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
PojavLauncherTeam
GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/panfrost/vulkan/panvk_device.c
4560 views
1
/*
2
* Copyright © 2021 Collabora Ltd.
3
*
4
* Derived from tu_device.c which is:
5
* Copyright © 2016 Red Hat.
6
* Copyright © 2016 Bas Nieuwenhuizen
7
* Copyright © 2015 Intel Corporation
8
*
9
* Permission is hereby granted, free of charge, to any person obtaining a
10
* copy of this software and associated documentation files (the "Software"),
11
* to deal in the Software without restriction, including without limitation
12
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
13
* and/or sell copies of the Software, and to permit persons to whom the
14
* Software is furnished to do so, subject to the following conditions:
15
*
16
* The above copyright notice and this permission notice (including the next
17
* paragraph) shall be included in all copies or substantial portions of the
18
* Software.
19
*
20
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
21
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
22
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
23
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
24
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
25
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
26
* DEALINGS IN THE SOFTWARE.
27
*/
28
29
#include "panvk_private.h"
30
31
#include "panfrost-quirks.h"
32
#include "pan_bo.h"
33
#include "pan_encoder.h"
34
#include "pan_util.h"
35
#include "decode.h"
36
37
#include <fcntl.h>
38
#include <libsync.h>
39
#include <stdbool.h>
40
#include <string.h>
41
#include <sys/mman.h>
42
#include <sys/sysinfo.h>
43
#include <unistd.h>
44
#include <xf86drm.h>
45
46
#include "drm-uapi/panfrost_drm.h"
47
48
#include "util/debug.h"
49
#include "util/strtod.h"
50
#include "vk_format.h"
51
#include "vk_util.h"
52
53
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
54
#include <wayland-client.h>
55
#include "wayland-drm-client-protocol.h"
56
#endif
57
58
#include "panvk_cs.h"
59
60
VkResult
61
_panvk_device_set_lost(struct panvk_device *device,
62
const char *file, int line,
63
const char *msg, ...)
64
{
65
/* Set the flag indicating that waits should return in finite time even
66
* after device loss.
67
*/
68
p_atomic_inc(&device->_lost);
69
70
/* TODO: Report the log message through VkDebugReportCallbackEXT instead */
71
fprintf(stderr, "%s:%d: ", file, line);
72
va_list ap;
73
va_start(ap, msg);
74
vfprintf(stderr, msg, ap);
75
va_end(ap);
76
77
if (env_var_as_boolean("PANVK_ABORT_ON_DEVICE_LOSS", false))
78
abort();
79
80
return VK_ERROR_DEVICE_LOST;
81
}
82
83
static int
84
panvk_device_get_cache_uuid(uint16_t family, void *uuid)
85
{
86
uint32_t mesa_timestamp;
87
uint16_t f = family;
88
memset(uuid, 0, VK_UUID_SIZE);
89
memcpy(uuid, &mesa_timestamp, 4);
90
memcpy((char *) uuid + 4, &f, 2);
91
snprintf((char *) uuid + 6, VK_UUID_SIZE - 10, "pan");
92
return 0;
93
}
94
95
static void
96
panvk_get_driver_uuid(void *uuid)
97
{
98
memset(uuid, 0, VK_UUID_SIZE);
99
snprintf(uuid, VK_UUID_SIZE, "panfrost");
100
}
101
102
/* Fill @uuid with the device identifier. There is no per-device source of
 * identity wired in yet, so this is currently all zeroes. */
static void
panvk_get_device_uuid(void *uuid)
{
   memset(uuid, 0, VK_UUID_SIZE);
}
107
108
/* Flags understood in the PANVK_DEBUG environment variable (comma-separated
 * list), parsed with parse_debug_string() during instance creation. */
static const struct debug_control panvk_debug_options[] = {
   { "startup", PANVK_DEBUG_STARTUP },
   { "nir", PANVK_DEBUG_NIR },
   { "trace", PANVK_DEBUG_TRACE },
   { "sync", PANVK_DEBUG_SYNC },
   { "afbc", PANVK_DEBUG_AFBC },
   { "linear", PANVK_DEBUG_LINEAR },
   { NULL, 0 }
};
117
118
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
119
#define PANVK_USE_WSI_PLATFORM
120
#endif
121
122
#define PANVK_API_VERSION VK_MAKE_VERSION(1, 1, VK_HEADER_VERSION)
123
124
/* Report the instance-level API version this driver supports. */
VkResult
panvk_EnumerateInstanceVersion(uint32_t *pApiVersion)
{
    *pApiVersion = PANVK_API_VERSION;
    return VK_SUCCESS;
}
130
131
/* Instance-level extensions panvk advertises; only surface/WSI support,
 * gated on the platforms the build was configured with. */
static const struct vk_instance_extension_table panvk_instance_extensions = {
#ifdef PANVK_USE_WSI_PLATFORM
   .KHR_surface = true,
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
   .KHR_wayland_surface = true,
#endif
};
139
140
static void
141
panvk_get_device_extensions(const struct panvk_physical_device *device,
142
struct vk_device_extension_table *ext)
143
{
144
*ext = (struct vk_device_extension_table) {
145
#ifdef PANVK_USE_WSI_PLATFORM
146
.KHR_swapchain = true,
147
#endif
148
.EXT_custom_border_color = true,
149
};
150
}
151
152
VkResult
153
panvk_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
154
const VkAllocationCallbacks *pAllocator,
155
VkInstance *pInstance)
156
{
157
struct panvk_instance *instance;
158
VkResult result;
159
160
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
161
162
pAllocator = pAllocator ? : vk_default_allocator();
163
instance = vk_zalloc(pAllocator, sizeof(*instance), 8,
164
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
165
if (!instance)
166
return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
167
168
struct vk_instance_dispatch_table dispatch_table;
169
170
vk_instance_dispatch_table_from_entrypoints(&dispatch_table,
171
&panvk_instance_entrypoints,
172
true);
173
result = vk_instance_init(&instance->vk,
174
&panvk_instance_extensions,
175
&dispatch_table,
176
pCreateInfo,
177
pAllocator);
178
if (result != VK_SUCCESS) {
179
vk_free(pAllocator, instance);
180
return vk_error(NULL, result);
181
}
182
183
instance->physical_device_count = -1;
184
instance->debug_flags = parse_debug_string(getenv("PANVK_DEBUG"),
185
panvk_debug_options);
186
187
if (instance->debug_flags & PANVK_DEBUG_STARTUP)
188
panvk_logi("Created an instance");
189
190
VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));
191
192
*pInstance = panvk_instance_to_handle(instance);
193
194
return VK_SUCCESS;
195
}
196
197
/* Tear down a physical device in reverse order of panvk_physical_device_init:
 * WSI first, then meta state and the panfrost device (which owns the render
 * fd), then the DRM master fd (only opened when VK_KHR_display is enabled),
 * and finally the common vk_physical_device state. */
static void
panvk_physical_device_finish(struct panvk_physical_device *device)
{
   panvk_wsi_finish(device);

   panvk_meta_cleanup(device);
   panfrost_close_device(&device->pdev);
   if (device->master_fd != -1)
      close(device->master_fd);

   vk_physical_device_finish(&device->vk);
}
209
210
void
211
panvk_DestroyInstance(VkInstance _instance,
212
const VkAllocationCallbacks *pAllocator)
213
{
214
VK_FROM_HANDLE(panvk_instance, instance, _instance);
215
216
if (!instance)
217
return;
218
219
for (int i = 0; i < instance->physical_device_count; ++i) {
220
panvk_physical_device_finish(instance->physical_devices + i);
221
}
222
223
vk_instance_finish(&instance->vk);
224
vk_free(&instance->vk.alloc, instance);
225
}
226
227
static VkResult
228
panvk_physical_device_init(struct panvk_physical_device *device,
229
struct panvk_instance *instance,
230
drmDevicePtr drm_device)
231
{
232
const char *path = drm_device->nodes[DRM_NODE_RENDER];
233
VkResult result = VK_SUCCESS;
234
drmVersionPtr version;
235
int fd;
236
int master_fd = -1;
237
238
if (!getenv("PAN_I_WANT_A_BROKEN_VULKAN_DRIVER")) {
239
return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
240
"WARNING: panvk is not a conformant vulkan implementation, "
241
"pass PAN_I_WANT_A_BROKEN_VULKAN_DRIVER=1 if you know what you're doing.");
242
}
243
244
fd = open(path, O_RDWR | O_CLOEXEC);
245
if (fd < 0) {
246
return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
247
"failed to open device %s", path);
248
}
249
250
version = drmGetVersion(fd);
251
if (!version) {
252
close(fd);
253
return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
254
"failed to query kernel driver version for device %s",
255
path);
256
}
257
258
if (strcmp(version->name, "panfrost")) {
259
drmFreeVersion(version);
260
close(fd);
261
return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
262
"device %s does not use the panfrost kernel driver", path);
263
}
264
265
drmFreeVersion(version);
266
267
if (instance->debug_flags & PANVK_DEBUG_STARTUP)
268
panvk_logi("Found compatible device '%s'.", path);
269
270
struct vk_device_extension_table supported_extensions;
271
panvk_get_device_extensions(device, &supported_extensions);
272
273
struct vk_physical_device_dispatch_table dispatch_table;
274
vk_physical_device_dispatch_table_from_entrypoints(&dispatch_table,
275
&panvk_physical_device_entrypoints,
276
true);
277
278
result = vk_physical_device_init(&device->vk, &instance->vk,
279
&supported_extensions,
280
&dispatch_table);
281
282
if (result != VK_SUCCESS) {
283
vk_error(instance, result);
284
goto fail;
285
}
286
287
device->instance = instance;
288
assert(strlen(path) < ARRAY_SIZE(device->path));
289
strncpy(device->path, path, ARRAY_SIZE(device->path));
290
291
if (instance->vk.enabled_extensions.KHR_display) {
292
master_fd = open(drm_device->nodes[DRM_NODE_PRIMARY], O_RDWR | O_CLOEXEC);
293
if (master_fd >= 0) {
294
/* TODO: free master_fd is accel is not working? */
295
}
296
}
297
298
device->master_fd = master_fd;
299
device->pdev.debug = PAN_DBG_TRACE;
300
panfrost_open_device(NULL, fd, &device->pdev);
301
302
if (device->pdev.quirks & MIDGARD_SFBD) {
303
result = vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
304
"%s not supported",
305
panfrost_model_name(device->pdev.gpu_id));
306
goto fail;
307
}
308
309
panvk_meta_init(device);
310
311
memset(device->name, 0, sizeof(device->name));
312
sprintf(device->name, "%s", panfrost_model_name(device->pdev.gpu_id));
313
314
if (panvk_device_get_cache_uuid(device->pdev.gpu_id, device->cache_uuid)) {
315
result = vk_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
316
"cannot generate UUID");
317
goto fail;
318
}
319
320
fprintf(stderr, "WARNING: panvk is not a conformant vulkan implementation, "
321
"testing use only.\n");
322
323
panvk_get_driver_uuid(&device->device_uuid);
324
panvk_get_device_uuid(&device->device_uuid);
325
326
result = panvk_wsi_init(device);
327
if (result != VK_SUCCESS) {
328
vk_error(instance, result);
329
goto fail;
330
}
331
332
return VK_SUCCESS;
333
334
fail:
335
close(fd);
336
if (master_fd != -1)
337
close(master_fd);
338
return result;
339
}
340
341
static VkResult
342
panvk_enumerate_devices(struct panvk_instance *instance)
343
{
344
/* TODO: Check for more devices ? */
345
drmDevicePtr devices[8];
346
VkResult result = VK_ERROR_INCOMPATIBLE_DRIVER;
347
int max_devices;
348
349
instance->physical_device_count = 0;
350
351
max_devices = drmGetDevices2(0, devices, ARRAY_SIZE(devices));
352
353
if (instance->debug_flags & PANVK_DEBUG_STARTUP)
354
panvk_logi("Found %d drm nodes", max_devices);
355
356
if (max_devices < 1)
357
return vk_error(instance, VK_ERROR_INCOMPATIBLE_DRIVER);
358
359
for (unsigned i = 0; i < (unsigned) max_devices; i++) {
360
if ((devices[i]->available_nodes & (1 << DRM_NODE_RENDER)) &&
361
devices[i]->bustype == DRM_BUS_PLATFORM) {
362
363
result = panvk_physical_device_init(instance->physical_devices +
364
instance->physical_device_count,
365
instance, devices[i]);
366
if (result == VK_SUCCESS)
367
++instance->physical_device_count;
368
else if (result != VK_ERROR_INCOMPATIBLE_DRIVER)
369
break;
370
}
371
}
372
drmFreeDevices(devices, max_devices);
373
374
return result;
375
}
376
377
/* Report the physical devices to the application. Enumeration happens
 * lazily on the first call (physical_device_count == -1 means "not
 * enumerated yet"); VK_ERROR_INCOMPATIBLE_DRIVER from enumeration just
 * means no device was found and is reported as zero devices. */
VkResult
panvk_EnumeratePhysicalDevices(VkInstance _instance,
                               uint32_t *pPhysicalDeviceCount,
                               VkPhysicalDevice *pPhysicalDevices)
{
   VK_FROM_HANDLE(panvk_instance, instance, _instance);
   VK_OUTARRAY_MAKE(out, pPhysicalDevices, pPhysicalDeviceCount);

   VkResult result;

   if (instance->physical_device_count < 0) {
      result = panvk_enumerate_devices(instance);
      if (result != VK_SUCCESS && result != VK_ERROR_INCOMPATIBLE_DRIVER)
         return result;
   }

   for (uint32_t i = 0; i < instance->physical_device_count; ++i) {
      vk_outarray_append(&out, p)
      {
         *p = panvk_physical_device_to_handle(instance->physical_devices + i);
      }
   }

   /* VK_INCOMPLETE when the caller's array was too small. */
   return vk_outarray_status(&out);
}
402
403
VkResult
404
panvk_EnumeratePhysicalDeviceGroups(VkInstance _instance,
405
uint32_t *pPhysicalDeviceGroupCount,
406
VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
407
{
408
VK_FROM_HANDLE(panvk_instance, instance, _instance);
409
VK_OUTARRAY_MAKE(out, pPhysicalDeviceGroupProperties,
410
pPhysicalDeviceGroupCount);
411
VkResult result;
412
413
if (instance->physical_device_count < 0) {
414
result = panvk_enumerate_devices(instance);
415
if (result != VK_SUCCESS && result != VK_ERROR_INCOMPATIBLE_DRIVER)
416
return result;
417
}
418
419
for (uint32_t i = 0; i < instance->physical_device_count; ++i) {
420
vk_outarray_append(&out, p)
421
{
422
p->physicalDeviceCount = 1;
423
p->physicalDevices[0] =
424
panvk_physical_device_to_handle(instance->physical_devices + i);
425
p->subsetAllocation = false;
426
}
427
}
428
429
return VK_SUCCESS;
430
}
431
432
/* Fill in the core feature struct and every recognized pNext feature
 * extension struct. Almost everything is reported unsupported; the few
 * features enabled here (variable pointers, uint8 index type, vertex
 * attribute divisors, private data, depth clip enable, 4444 formats,
 * custom border colors) are what this driver currently implements.
 * Unrecognized structs are left untouched, as the spec requires. */
void
panvk_GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                 VkPhysicalDeviceFeatures2 *pFeatures)
{
   vk_foreach_struct(ext, pFeatures->pNext)
   {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES: {
         VkPhysicalDeviceVulkan11Features *features = (void *) ext;
         features->storageBuffer16BitAccess = false;
         features->uniformAndStorageBuffer16BitAccess = false;
         features->storagePushConstant16 = false;
         features->storageInputOutput16 = false;
         features->multiview = false;
         features->multiviewGeometryShader = false;
         features->multiviewTessellationShader = false;
         features->variablePointersStorageBuffer = true;
         features->variablePointers = true;
         features->protectedMemory = false;
         features->samplerYcbcrConversion = false;
         features->shaderDrawParameters = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES: {
         VkPhysicalDeviceVulkan12Features *features = (void *) ext;
         features->samplerMirrorClampToEdge = false;
         features->drawIndirectCount = false;
         features->storageBuffer8BitAccess = false;
         features->uniformAndStorageBuffer8BitAccess = false;
         features->storagePushConstant8 = false;
         features->shaderBufferInt64Atomics = false;
         features->shaderSharedInt64Atomics = false;
         features->shaderFloat16 = false;
         features->shaderInt8 = false;

         /* No descriptor-indexing support at all. */
         features->descriptorIndexing = false;
         features->shaderInputAttachmentArrayDynamicIndexing = false;
         features->shaderUniformTexelBufferArrayDynamicIndexing = false;
         features->shaderStorageTexelBufferArrayDynamicIndexing = false;
         features->shaderUniformBufferArrayNonUniformIndexing = false;
         features->shaderSampledImageArrayNonUniformIndexing = false;
         features->shaderStorageBufferArrayNonUniformIndexing = false;
         features->shaderStorageImageArrayNonUniformIndexing = false;
         features->shaderInputAttachmentArrayNonUniformIndexing = false;
         features->shaderUniformTexelBufferArrayNonUniformIndexing = false;
         features->shaderStorageTexelBufferArrayNonUniformIndexing = false;
         features->descriptorBindingUniformBufferUpdateAfterBind = false;
         features->descriptorBindingSampledImageUpdateAfterBind = false;
         features->descriptorBindingStorageImageUpdateAfterBind = false;
         features->descriptorBindingStorageBufferUpdateAfterBind = false;
         features->descriptorBindingUniformTexelBufferUpdateAfterBind = false;
         features->descriptorBindingStorageTexelBufferUpdateAfterBind = false;
         features->descriptorBindingUpdateUnusedWhilePending = false;
         features->descriptorBindingPartiallyBound = false;
         features->descriptorBindingVariableDescriptorCount = false;
         features->runtimeDescriptorArray = false;

         features->samplerFilterMinmax = false;
         features->scalarBlockLayout = false;
         features->imagelessFramebuffer = false;
         features->uniformBufferStandardLayout = false;
         features->shaderSubgroupExtendedTypes = false;
         features->separateDepthStencilLayouts = false;
         features->hostQueryReset = false;
         features->timelineSemaphore = false;
         features->bufferDeviceAddress = false;
         features->bufferDeviceAddressCaptureReplay = false;
         features->bufferDeviceAddressMultiDevice = false;
         features->vulkanMemoryModel = false;
         features->vulkanMemoryModelDeviceScope = false;
         features->vulkanMemoryModelAvailabilityVisibilityChains = false;
         features->shaderOutputViewportIndex = false;
         features->shaderOutputLayer = false;
         features->subgroupBroadcastDynamicId = false;
         break;
      }
      /* The standalone feature structs below must stay consistent with the
       * aggregated Vulkan11/12 answers above. */
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: {
         VkPhysicalDeviceVariablePointersFeatures *features = (void *) ext;
         features->variablePointersStorageBuffer = true;
         features->variablePointers = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: {
         VkPhysicalDeviceMultiviewFeatures *features =
            (VkPhysicalDeviceMultiviewFeatures *) ext;
         features->multiview = false;
         features->multiviewGeometryShader = false;
         features->multiviewTessellationShader = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: {
         VkPhysicalDeviceShaderDrawParametersFeatures *features =
            (VkPhysicalDeviceShaderDrawParametersFeatures *) ext;
         features->shaderDrawParameters = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: {
         VkPhysicalDeviceProtectedMemoryFeatures *features =
            (VkPhysicalDeviceProtectedMemoryFeatures *) ext;
         features->protectedMemory = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: {
         VkPhysicalDevice16BitStorageFeatures *features =
            (VkPhysicalDevice16BitStorageFeatures *) ext;
         features->storageBuffer16BitAccess = false;
         features->uniformAndStorageBuffer16BitAccess = false;
         features->storagePushConstant16 = false;
         features->storageInputOutput16 = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
         VkPhysicalDeviceSamplerYcbcrConversionFeatures *features =
            (VkPhysicalDeviceSamplerYcbcrConversionFeatures *) ext;
         features->samplerYcbcrConversion = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT: {
         VkPhysicalDeviceDescriptorIndexingFeaturesEXT *features =
            (VkPhysicalDeviceDescriptorIndexingFeaturesEXT *) ext;
         features->shaderInputAttachmentArrayDynamicIndexing = false;
         features->shaderUniformTexelBufferArrayDynamicIndexing = false;
         features->shaderStorageTexelBufferArrayDynamicIndexing = false;
         features->shaderUniformBufferArrayNonUniformIndexing = false;
         features->shaderSampledImageArrayNonUniformIndexing = false;
         features->shaderStorageBufferArrayNonUniformIndexing = false;
         features->shaderStorageImageArrayNonUniformIndexing = false;
         features->shaderInputAttachmentArrayNonUniformIndexing = false;
         features->shaderUniformTexelBufferArrayNonUniformIndexing = false;
         features->shaderStorageTexelBufferArrayNonUniformIndexing = false;
         features->descriptorBindingUniformBufferUpdateAfterBind = false;
         features->descriptorBindingSampledImageUpdateAfterBind = false;
         features->descriptorBindingStorageImageUpdateAfterBind = false;
         features->descriptorBindingStorageBufferUpdateAfterBind = false;
         features->descriptorBindingUniformTexelBufferUpdateAfterBind = false;
         features->descriptorBindingStorageTexelBufferUpdateAfterBind = false;
         features->descriptorBindingUpdateUnusedWhilePending = false;
         features->descriptorBindingPartiallyBound = false;
         features->descriptorBindingVariableDescriptorCount = false;
         features->runtimeDescriptorArray = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: {
         VkPhysicalDeviceConditionalRenderingFeaturesEXT *features =
            (VkPhysicalDeviceConditionalRenderingFeaturesEXT *) ext;
         features->conditionalRendering = false;
         features->inheritedConditionalRendering = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: {
         VkPhysicalDeviceTransformFeedbackFeaturesEXT *features =
            (VkPhysicalDeviceTransformFeedbackFeaturesEXT *) ext;
         features->transformFeedback = false;
         features->geometryStreams = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: {
         VkPhysicalDeviceIndexTypeUint8FeaturesEXT *features =
            (VkPhysicalDeviceIndexTypeUint8FeaturesEXT *)ext;
         features->indexTypeUint8 = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: {
         VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *features =
            (VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *)ext;
         features->vertexAttributeInstanceRateDivisor = true;
         features->vertexAttributeInstanceRateZeroDivisor = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT: {
         VkPhysicalDevicePrivateDataFeaturesEXT *features =
            (VkPhysicalDevicePrivateDataFeaturesEXT *)ext;
         features->privateData = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: {
         VkPhysicalDeviceDepthClipEnableFeaturesEXT *features =
            (VkPhysicalDeviceDepthClipEnableFeaturesEXT *)ext;
         features->depthClipEnable = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: {
         VkPhysicalDevice4444FormatsFeaturesEXT *features = (void *)ext;
         features->formatA4R4G4B4 = true;
         features->formatA4B4G4R4 = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: {
         VkPhysicalDeviceCustomBorderColorFeaturesEXT *features = (void *) ext;
         features->customBorderColors = true;
         features->customBorderColorWithoutFormat = true;
         break;
      }
      default:
         /* Unknown struct: leave it untouched. */
         break;
      }
   }

   /* Core Vulkan 1.0 features; everything not listed defaults to false. */
   pFeatures->features = (VkPhysicalDeviceFeatures) {
      .fullDrawIndexUint32 = true,
      .independentBlend = true,
      .wideLines = true,
      .largePoints = true,
      .textureCompressionETC2 = true,
      .textureCompressionASTC_LDR = true,
      .shaderUniformBufferArrayDynamicIndexing = true,
      .shaderSampledImageArrayDynamicIndexing = true,
      .shaderStorageBufferArrayDynamicIndexing = true,
      .shaderStorageImageArrayDynamicIndexing = true,
   };
}
643
644
/* Fill in the core physical-device properties, the limits table and every
 * recognized pNext property extension struct. Unknown structs are left
 * untouched, as the spec requires. */
void
panvk_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
                                   VkPhysicalDeviceProperties2 *pProperties)
{
   VK_FROM_HANDLE(panvk_physical_device, pdevice, physicalDevice);

   vk_foreach_struct(ext, pProperties->pNext)
   {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: {
         VkPhysicalDevicePushDescriptorPropertiesKHR *properties = (VkPhysicalDevicePushDescriptorPropertiesKHR *)ext;
         properties->maxPushDescriptors = MAX_PUSH_DESCRIPTORS;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: {
         VkPhysicalDeviceIDProperties *properties = (VkPhysicalDeviceIDProperties *)ext;
         /* NOTE(review): this reports pdevice->driver_uuid, which is only
          * meaningful if physical-device init filled it in -- verify. */
         memcpy(properties->driverUUID, pdevice->driver_uuid, VK_UUID_SIZE);
         memcpy(properties->deviceUUID, pdevice->device_uuid, VK_UUID_SIZE);
         properties->deviceLUIDValid = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: {
         VkPhysicalDeviceMultiviewProperties *properties = (VkPhysicalDeviceMultiviewProperties *)ext;
         /* Multiview is not supported (see GetPhysicalDeviceFeatures2). */
         properties->maxMultiviewViewCount = 0;
         properties->maxMultiviewInstanceIndex = 0;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: {
         VkPhysicalDevicePointClippingProperties *properties = (VkPhysicalDevicePointClippingProperties *)ext;
         properties->pointClippingBehavior =
            VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: {
         VkPhysicalDeviceMaintenance3Properties *properties = (VkPhysicalDeviceMaintenance3Properties *)ext;
         /* Make sure everything is addressable by a signed 32-bit int, and
          * our largest descriptors are 96 bytes. */
         properties->maxPerSetDescriptors = (1ull << 31) / 96;
         /* Our buffer size fields allow only this much */
         properties->maxMemoryAllocationSize = 0xFFFFFFFFull;
         break;
      }
      default:
         break;
      }
   }

   /* 1x/2x/4x/8x sample counts advertised everywhere MSAA is allowed. */
   VkSampleCountFlags sample_counts = 0xf;

   /* make sure that the entire descriptor set is addressable with a signed
    * 32-bit int. So the sum of all limits scaled by descriptor size has to
    * be at most 2 GiB. the combined image & samples object count as one of
    * both. This limit is for the pipeline layout, not for the set layout, but
    * there is no set limit, so we just set a pipeline limit. I don't think
    * any app is going to hit this soon. */
   size_t max_descriptor_set_size =
      ((1ull << 31) - 16 * MAX_DYNAMIC_BUFFERS) /
      (32 /* uniform buffer, 32 due to potential space wasted on alignment */ +
       32 /* storage buffer, 32 due to potential space wasted on alignment */ +
       32 /* sampler, largest when combined with image */ +
       64 /* sampled image */ + 64 /* storage image */);

   VkPhysicalDeviceLimits limits = {
      .maxImageDimension1D = (1 << 14),
      .maxImageDimension2D = (1 << 14),
      .maxImageDimension3D = (1 << 11),
      .maxImageDimensionCube = (1 << 14),
      .maxImageArrayLayers = (1 << 11),
      .maxTexelBufferElements = 128 * 1024 * 1024,
      .maxUniformBufferRange = UINT32_MAX,
      .maxStorageBufferRange = UINT32_MAX,
      .maxPushConstantsSize = MAX_PUSH_CONSTANTS_SIZE,
      .maxMemoryAllocationCount = UINT32_MAX,
      .maxSamplerAllocationCount = 64 * 1024,
      .bufferImageGranularity = 64, /* A cache line */
      .sparseAddressSpaceSize = 0xffffffffu, /* buffer max size */
      .maxBoundDescriptorSets = MAX_SETS,
      .maxPerStageDescriptorSamplers = max_descriptor_set_size,
      .maxPerStageDescriptorUniformBuffers = max_descriptor_set_size,
      .maxPerStageDescriptorStorageBuffers = max_descriptor_set_size,
      .maxPerStageDescriptorSampledImages = max_descriptor_set_size,
      .maxPerStageDescriptorStorageImages = max_descriptor_set_size,
      .maxPerStageDescriptorInputAttachments = max_descriptor_set_size,
      .maxPerStageResources = max_descriptor_set_size,
      .maxDescriptorSetSamplers = max_descriptor_set_size,
      .maxDescriptorSetUniformBuffers = max_descriptor_set_size,
      .maxDescriptorSetUniformBuffersDynamic = MAX_DYNAMIC_UNIFORM_BUFFERS,
      .maxDescriptorSetStorageBuffers = max_descriptor_set_size,
      .maxDescriptorSetStorageBuffersDynamic = MAX_DYNAMIC_STORAGE_BUFFERS,
      .maxDescriptorSetSampledImages = max_descriptor_set_size,
      .maxDescriptorSetStorageImages = max_descriptor_set_size,
      .maxDescriptorSetInputAttachments = max_descriptor_set_size,
      .maxVertexInputAttributes = 32,
      .maxVertexInputBindings = 32,
      .maxVertexInputAttributeOffset = 2047,
      .maxVertexInputBindingStride = 2048,
      .maxVertexOutputComponents = 128,
      .maxTessellationGenerationLevel = 64,
      .maxTessellationPatchSize = 32,
      .maxTessellationControlPerVertexInputComponents = 128,
      .maxTessellationControlPerVertexOutputComponents = 128,
      .maxTessellationControlPerPatchOutputComponents = 120,
      .maxTessellationControlTotalOutputComponents = 4096,
      .maxTessellationEvaluationInputComponents = 128,
      .maxTessellationEvaluationOutputComponents = 128,
      .maxGeometryShaderInvocations = 127,
      .maxGeometryInputComponents = 64,
      .maxGeometryOutputComponents = 128,
      .maxGeometryOutputVertices = 256,
      .maxGeometryTotalOutputComponents = 1024,
      .maxFragmentInputComponents = 128,
      .maxFragmentOutputAttachments = 8,
      .maxFragmentDualSrcAttachments = 1,
      .maxFragmentCombinedOutputResources = 8,
      .maxComputeSharedMemorySize = 32768,
      .maxComputeWorkGroupCount = { 65535, 65535, 65535 },
      .maxComputeWorkGroupInvocations = 2048,
      .maxComputeWorkGroupSize = { 2048, 2048, 2048 },
      .subPixelPrecisionBits = 4 /* FIXME */,
      .subTexelPrecisionBits = 4 /* FIXME */,
      .mipmapPrecisionBits = 4 /* FIXME */,
      .maxDrawIndexedIndexValue = UINT32_MAX,
      .maxDrawIndirectCount = UINT32_MAX,
      .maxSamplerLodBias = 16,
      .maxSamplerAnisotropy = 16,
      .maxViewports = MAX_VIEWPORTS,
      .maxViewportDimensions = { (1 << 14), (1 << 14) },
      .viewportBoundsRange = { INT16_MIN, INT16_MAX },
      .viewportSubPixelBits = 8,
      .minMemoryMapAlignment = 4096, /* A page */
      .minTexelBufferOffsetAlignment = 1,
      .minUniformBufferOffsetAlignment = 4,
      .minStorageBufferOffsetAlignment = 4,
      .minTexelOffset = -32,
      .maxTexelOffset = 31,
      .minTexelGatherOffset = -32,
      .maxTexelGatherOffset = 31,
      .minInterpolationOffset = -2,
      .maxInterpolationOffset = 2,
      .subPixelInterpolationOffsetBits = 8,
      .maxFramebufferWidth = (1 << 14),
      .maxFramebufferHeight = (1 << 14),
      .maxFramebufferLayers = (1 << 10),
      .framebufferColorSampleCounts = sample_counts,
      .framebufferDepthSampleCounts = sample_counts,
      .framebufferStencilSampleCounts = sample_counts,
      .framebufferNoAttachmentsSampleCounts = sample_counts,
      .maxColorAttachments = MAX_RTS,
      .sampledImageColorSampleCounts = sample_counts,
      .sampledImageIntegerSampleCounts = VK_SAMPLE_COUNT_1_BIT,
      .sampledImageDepthSampleCounts = sample_counts,
      .sampledImageStencilSampleCounts = sample_counts,
      .storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT,
      .maxSampleMaskWords = 1,
      .timestampComputeAndGraphics = true,
      .timestampPeriod = 1,
      .maxClipDistances = 8,
      .maxCullDistances = 8,
      .maxCombinedClipAndCullDistances = 8,
      .discreteQueuePriorities = 1,
      .pointSizeRange = { 0.125, 255.875 },
      .lineWidthRange = { 0.0, 7.9921875 },
      .pointSizeGranularity = (1.0 / 8.0),
      .lineWidthGranularity = (1.0 / 128.0),
      .strictLines = false, /* FINISHME */
      .standardSampleLocations = true,
      .optimalBufferCopyOffsetAlignment = 128,
      .optimalBufferCopyRowPitchAlignment = 128,
      .nonCoherentAtomSize = 64,
   };

   pProperties->properties = (VkPhysicalDeviceProperties) {
      .apiVersion = PANVK_API_VERSION,
      .driverVersion = vk_get_driver_version(),
      .vendorID = 0, /* TODO */
      .deviceID = 0,
      .deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
      .limits = limits,
      .sparseProperties = { 0 },
   };

   strcpy(pProperties->properties.deviceName, pdevice->name);
   memcpy(pProperties->properties.pipelineCacheUUID, pdevice->cache_uuid, VK_UUID_SIZE);
}
828
829
/* panvk exposes exactly one queue family with a single queue that handles
 * graphics, compute and transfer work. */
static const VkQueueFamilyProperties panvk_queue_family_properties = {
   .queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT | VK_QUEUE_TRANSFER_BIT,
   .queueCount = 1,
   .timestampValidBits = 64,
   .minImageTransferGranularity = { 1, 1, 1 },
};
835
836
/* Report the single queue family (legacy, non-2 entry point). The outarray
 * macros handle both the count query and the fill-in call. */
void
panvk_GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                             uint32_t *pQueueFamilyPropertyCount,
                                             VkQueueFamilyProperties *pQueueFamilyProperties)
{
   VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pQueueFamilyPropertyCount);

   vk_outarray_append(&out, p) { *p = panvk_queue_family_properties; }
}
845
846
/* Report the single queue family (the "2" entry point used by
 * Vulkan 1.1+ / VK_KHR_get_physical_device_properties2). */
void
panvk_GetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
                                              uint32_t *pQueueFamilyPropertyCount,
                                              VkQueueFamilyProperties2 *pQueueFamilyProperties)
{
   VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pQueueFamilyPropertyCount);

   vk_outarray_append(&out, p)
   {
      p->queueFamilyProperties = panvk_queue_family_properties;
   }
}
858
859
/* Return the size advertised for the single system-memory heap.
 *
 * FIX: the declarator was "()", which in C declares a function taking
 * unspecified arguments; "(void)" is the correct prototype. */
static uint64_t
panvk_get_system_heap_size(void)
{
   struct sysinfo info;
   sysinfo(&info);

   uint64_t total_ram = (uint64_t)info.totalram * info.mem_unit;

   /* We don't want to burn too much ram with the GPU. If the user has 4GiB
    * or less, we use at most half. If they have more than 4GiB, we use 3/4.
    */
   uint64_t available_ram;
   if (total_ram <= 4ull * 1024 * 1024 * 1024)
      available_ram = total_ram / 2;
   else
      available_ram = total_ram * 3 / 4;

   return available_ram;
}
878
879
/* vkGetPhysicalDeviceMemoryProperties2: one heap (sized from system RAM)
 * and one memory type that is simultaneously DEVICE_LOCAL, HOST_VISIBLE
 * and HOST_COHERENT — UMA hardware, so all memory is all three.
 */
void
panvk_GetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,
                                         VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
{
   pMemoryProperties->memoryProperties = (VkPhysicalDeviceMemoryProperties) {
      .memoryHeapCount = 1,
      .memoryHeaps[0].size = panvk_get_system_heap_size(),
      .memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
      .memoryTypeCount = 1,
      .memoryTypes[0].propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
                                      VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
                                      VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
      .memoryTypes[0].heapIndex = 0,
   };
}
894
895
/*
 * Initialize one panvk_queue: base Vulkan object, bookkeeping fields, and
 * a DRM syncobj used as the queue's out-fence. The syncobj is created
 * already-signaled so that waiting on a queue that never submitted
 * anything succeeds immediately.
 *
 * Returns VK_ERROR_OUT_OF_HOST_MEMORY if the syncobj cannot be created.
 * NOTE(review): on that error path the vk_object_base stays initialized
 * and is never finished — confirm whether that needs cleanup.
 */
static VkResult
panvk_queue_init(struct panvk_device *device,
                 struct panvk_queue *queue,
                 uint32_t queue_family_index,
                 int idx,
                 VkDeviceQueueCreateFlags flags)
{
   const struct panfrost_device *pdev = &device->physical_device->pdev;

   vk_object_base_init(&device->vk, &queue->base, VK_OBJECT_TYPE_QUEUE);
   queue->device = device;
   queue->queue_family_index = queue_family_index;
   queue->flags = flags;

   struct drm_syncobj_create create = {
      .flags = DRM_SYNCOBJ_CREATE_SIGNALED,
   };

   int ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_CREATE, &create);
   if (ret)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   queue->sync = create.handle;
   return VK_SUCCESS;
}
920
921
static void
922
panvk_queue_finish(struct panvk_queue *queue)
923
{
924
}
925
926
/*
 * vkCreateDevice: validate the requested features against what the
 * physical device supports, allocate and init the panvk_device, then
 * create every queue requested in pQueueCreateInfos.
 *
 * On failure after queue allocation has started, the fail path finishes
 * and frees whatever queues were set up before returning the error.
 */
VkResult
panvk_CreateDevice(VkPhysicalDevice physicalDevice,
                   const VkDeviceCreateInfo *pCreateInfo,
                   const VkAllocationCallbacks *pAllocator,
                   VkDevice *pDevice)
{
   VK_FROM_HANDLE(panvk_physical_device, physical_device, physicalDevice);
   VkResult result;
   struct panvk_device *device;

   /* Check enabled features */
   if (pCreateInfo->pEnabledFeatures) {
      VkPhysicalDeviceFeatures2 supported_features = {
         .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
      };
      panvk_GetPhysicalDeviceFeatures2(physicalDevice, &supported_features);
      /* VkPhysicalDeviceFeatures is a flat struct of VkBool32 members, so
       * the enabled set can be checked against the supported set
       * member-by-member through a VkBool32 array view. */
      VkBool32 *supported_feature = (VkBool32 *) &supported_features.features;
      VkBool32 *enabled_feature = (VkBool32 *) pCreateInfo->pEnabledFeatures;
      unsigned num_features =
         sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
      for (uint32_t i = 0; i < num_features; i++) {
         if (enabled_feature[i] && !supported_feature[i])
            return vk_error(physical_device->instance,
                            VK_ERROR_FEATURE_NOT_PRESENT);
      }
   }

   device = vk_zalloc2(&physical_device->instance->vk.alloc, pAllocator,
                       sizeof(*device), 8, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
   if (!device)
      return vk_error(physical_device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   struct vk_device_dispatch_table dispatch_table;
   vk_device_dispatch_table_from_entrypoints(&dispatch_table,
                                             &panvk_device_entrypoints,
                                             true);
   result = vk_device_init(&device->vk, &physical_device->vk, &dispatch_table,
                           pCreateInfo, pAllocator);
   if (result != VK_SUCCESS) {
      vk_free(&device->vk.alloc, device);
      return vk_errorf(physical_device->instance, result, "vk_device_init failed");
   }

   device->instance = physical_device->instance;
   device->physical_device = physical_device;

   for (unsigned i = 0; i < pCreateInfo->queueCreateInfoCount; i++) {
      const VkDeviceQueueCreateInfo *queue_create =
         &pCreateInfo->pQueueCreateInfos[i];
      uint32_t qfi = queue_create->queueFamilyIndex;
      device->queues[qfi] =
         vk_alloc(&device->vk.alloc,
                  queue_create->queueCount * sizeof(struct panvk_queue),
                  8, VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
      if (!device->queues[qfi]) {
         result = VK_ERROR_OUT_OF_HOST_MEMORY;
         goto fail;
      }

      memset(device->queues[qfi], 0,
             queue_create->queueCount * sizeof(struct panvk_queue));

      /* Record the count before initializing the queues so the fail path
       * knows how many entries to finish. */
      device->queue_count[qfi] = queue_create->queueCount;

      for (unsigned q = 0; q < queue_create->queueCount; q++) {
         result = panvk_queue_init(device, &device->queues[qfi][q], qfi, q,
                                   queue_create->flags);
         if (result != VK_SUCCESS)
            goto fail;
      }
   }

   *pDevice = panvk_device_to_handle(device);
   return VK_SUCCESS;

fail:
   for (unsigned i = 0; i < PANVK_MAX_QUEUE_FAMILIES; i++) {
      for (unsigned q = 0; q < device->queue_count[i]; q++)
         panvk_queue_finish(&device->queues[i][q]);
      if (device->queue_count[i])
         vk_object_free(&device->vk, NULL, device->queues[i]);
   }

   /* NOTE(review): vk_device_init() succeeded on this path but is not
    * balanced with vk_device_finish() before freeing — confirm. */
   vk_free(&device->vk.alloc, device);
   return result;
}
1012
1013
void
1014
panvk_DestroyDevice(VkDevice _device, const VkAllocationCallbacks *pAllocator)
1015
{
1016
VK_FROM_HANDLE(panvk_device, device, _device);
1017
1018
if (!device)
1019
return;
1020
1021
for (unsigned i = 0; i < PANVK_MAX_QUEUE_FAMILIES; i++) {
1022
for (unsigned q = 0; q < device->queue_count[i]; q++)
1023
panvk_queue_finish(&device->queues[i][q]);
1024
if (device->queue_count[i])
1025
vk_object_free(&device->vk, NULL, device->queues[i]);
1026
}
1027
1028
vk_free(&device->vk.alloc, device);
1029
}
1030
1031
/* vkEnumerateInstanceLayerProperties: panvk exposes no layers, so always
 * report a count of zero. */
VkResult
panvk_EnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
                                       VkLayerProperties *pProperties)
{
   *pPropertyCount = 0;
   return VK_SUCCESS;
}
1038
1039
void
1040
panvk_GetDeviceQueue2(VkDevice _device,
1041
const VkDeviceQueueInfo2 *pQueueInfo,
1042
VkQueue *pQueue)
1043
{
1044
VK_FROM_HANDLE(panvk_device, device, _device);
1045
struct panvk_queue *queue;
1046
1047
queue = &device->queues[pQueueInfo->queueFamilyIndex][pQueueInfo->queueIndex];
1048
if (pQueueInfo->flags != queue->flags) {
1049
/* From the Vulkan 1.1.70 spec:
1050
*
1051
* "The queue returned by vkGetDeviceQueue2 must have the same
1052
* flags value from this structure as that used at device
1053
* creation time in a VkDeviceQueueCreateInfo instance. If no
1054
* matching flags were specified at device creation time then
1055
* pQueue will return VK_NULL_HANDLE."
1056
*/
1057
*pQueue = VK_NULL_HANDLE;
1058
return;
1059
}
1060
1061
*pQueue = panvk_queue_to_handle(queue);
1062
}
1063
1064
void
1065
panvk_GetDeviceQueue(VkDevice _device,
1066
uint32_t queueFamilyIndex,
1067
uint32_t queueIndex,
1068
VkQueue *pQueue)
1069
{
1070
const VkDeviceQueueInfo2 info = (VkDeviceQueueInfo2) {
1071
.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
1072
.queueFamilyIndex = queueFamilyIndex,
1073
.queueIndex = queueIndex
1074
};
1075
1076
panvk_GetDeviceQueue2(_device, &info, pQueue);
1077
}
1078
1079
/*
 * Submit one batch to the kernel. A batch holds up to two kernel job
 * chains: the vertex/tiler/compute chain (scoreboard.first_job) and an
 * optional fragment job, which is made to wait on the former through the
 * queue's out-fence syncobj.
 *
 * bos/nr_bos: GEM handles the kernel must pin for this submission.
 * in_fences/nr_in_fences: syncobjs the first submitted job waits on.
 */
static void
panvk_queue_submit_batch(struct panvk_queue *queue,
                         struct panvk_batch *batch,
                         uint32_t *bos, unsigned nr_bos,
                         uint32_t *in_fences,
                         unsigned nr_in_fences)
{
   const struct panvk_device *dev = queue->device;
   unsigned debug = dev->physical_device->instance->debug_flags;
   const struct panfrost_device *pdev = &dev->physical_device->pdev;
   int ret;

   /* Reset the batch if it's already been issued */
   if (batch->issued) {
      /* Zero the first 16 bytes (the header) of every job descriptor so
       * the hardware treats them as not-yet-run. */
      util_dynarray_foreach(&batch->jobs, void *, job)
         memset((*job), 0, 4 * 4);

      /* Reset the tiler before re-issuing the batch */
      if (pan_is_bifrost(pdev) && batch->tiler.bifrost_descs.cpu) {
         memcpy(batch->tiler.bifrost_descs.cpu, &batch->tiler.templ.bifrost,
                sizeof(batch->tiler.templ.bifrost));
      } else if (!pan_is_bifrost(pdev) && batch->fb.desc.cpu) {
         void *tiler = pan_section_ptr(batch->fb.desc.cpu, MULTI_TARGET_FRAMEBUFFER, TILER);
         memcpy(tiler, &batch->tiler.templ.midgard, sizeof(batch->tiler.templ.midgard));
         /* All weights set to 0, nothing to do here */
         pan_section_pack(batch->fb.desc.cpu, MULTI_TARGET_FRAMEBUFFER, TILER_WEIGHTS, w);
      }
   }

   if (batch->scoreboard.first_job) {
      struct drm_panfrost_submit submit = {
         .bo_handles = (uintptr_t)bos,
         .bo_handle_count = nr_bos,
         .in_syncs = (uintptr_t)in_fences,
         .in_sync_count = nr_in_fences,
         .out_sync = queue->sync,
         .jc = batch->scoreboard.first_job,
      };

      ret = drmIoctl(pdev->fd, DRM_IOCTL_PANFROST_SUBMIT, &submit);
      assert(!ret);

      /* TRACE/SYNC debugging requires the job to have finished before we
       * decode or validate it. */
      if (debug & (PANVK_DEBUG_TRACE | PANVK_DEBUG_SYNC)) {
         ret = drmSyncobjWait(pdev->fd, &submit.out_sync, 1, INT64_MAX, 0, NULL);
         assert(!ret);
      }

      if (debug & PANVK_DEBUG_TRACE)
         pandecode_jc(batch->scoreboard.first_job, pan_is_bifrost(pdev), pdev->gpu_id);
   }

   if (batch->fragment_job) {
      struct drm_panfrost_submit submit = {
         .bo_handles = (uintptr_t)bos,
         .bo_handle_count = nr_bos,
         .out_sync = queue->sync,
         .jc = batch->fragment_job,
         .requirements = PANFROST_JD_REQ_FS,
      };

      /* If a vertex/tiler chain was submitted above, the fragment job
       * waits on its out-fence; otherwise it inherits the caller's wait
       * fences directly. */
      if (batch->scoreboard.first_job) {
         submit.in_syncs = (uintptr_t)(&queue->sync);
         submit.in_sync_count = 1;
      } else {
         submit.in_syncs = (uintptr_t)in_fences;
         submit.in_sync_count = nr_in_fences;
      }

      ret = drmIoctl(pdev->fd, DRM_IOCTL_PANFROST_SUBMIT, &submit);
      assert(!ret);
      if (debug & (PANVK_DEBUG_TRACE | PANVK_DEBUG_SYNC)) {
         ret = drmSyncobjWait(pdev->fd, &submit.out_sync, 1, INT64_MAX, 0, NULL);
         assert(!ret);
      }

      if (debug & PANVK_DEBUG_TRACE)
         pandecode_jc(batch->fragment_job, pan_is_bifrost(pdev), pdev->gpu_id);
   }

   if (debug & PANVK_DEBUG_TRACE)
      pandecode_next_frame();

   batch->issued = true;
}
1163
1164
/*
 * Copy the queue's current out-fence payload into another syncobj
 * (semaphore, fence or event) by exporting it as a sync-file FD and
 * importing that FD into the destination syncobj.
 */
static void
panvk_queue_transfer_sync(struct panvk_queue *queue, uint32_t syncobj)
{
   const struct panfrost_device *pdev = &queue->device->physical_device->pdev;
   int ret;

   struct drm_syncobj_handle handle = {
      .handle = queue->sync,
      .flags = DRM_SYNCOBJ_HANDLE_TO_FD_FLAGS_EXPORT_SYNC_FILE,
      .fd = -1,
   };

   ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_HANDLE_TO_FD, &handle);
   assert(!ret);
   assert(handle.fd >= 0);

   /* Re-use the same struct for the import: only the handle changes. */
   handle.handle = syncobj;
   ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_FD_TO_HANDLE, &handle);
   assert(!ret);

   close(handle.fd);
}
1186
1187
static void
1188
panvk_add_wait_event_syncobjs(struct panvk_batch *batch, uint32_t *in_fences, unsigned *nr_in_fences)
1189
{
1190
util_dynarray_foreach(&batch->event_ops, struct panvk_event_op, op) {
1191
switch (op->type) {
1192
case PANVK_EVENT_OP_SET:
1193
/* Nothing to do yet */
1194
break;
1195
case PANVK_EVENT_OP_RESET:
1196
/* Nothing to do yet */
1197
break;
1198
case PANVK_EVENT_OP_WAIT:
1199
in_fences[*nr_in_fences++] = op->event->syncobj;
1200
break;
1201
default:
1202
unreachable("bad panvk_event_op type\n");
1203
}
1204
}
1205
}
1206
1207
/*
 * Apply the batch's deferred event operations after it has been
 * submitted: SET transfers the queue out-fence into the event's syncobj,
 * RESET clears the event's syncobj, and WAIT was already consumed by
 * panvk_add_wait_event_syncobjs() before submission.
 */
static void
panvk_signal_event_syncobjs(struct panvk_queue *queue, struct panvk_batch *batch)
{
   const struct panfrost_device *pdev = &queue->device->physical_device->pdev;

   util_dynarray_foreach(&batch->event_ops, struct panvk_event_op, op) {
      switch (op->type) {
      case PANVK_EVENT_OP_SET: {
         panvk_queue_transfer_sync(queue, op->event->syncobj);
         break;
      }
      case PANVK_EVENT_OP_RESET: {
         struct panvk_event *event = op->event;

         struct drm_syncobj_array objs = {
            .handles = (uint64_t) (uintptr_t) &event->syncobj,
            .count_handles = 1
         };

         int ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_RESET, &objs);
         assert(!ret);
         break;
      }
      case PANVK_EVENT_OP_WAIT:
         /* Nothing left to do */
         break;
      default:
         unreachable("bad panvk_event_op type\n");
      }
   }
}
1238
1239
/*
 * vkQueueSubmit: flush every batch of every submitted command buffer.
 *
 * For each batch we gather the GEM handles the kernel must pin (command
 * buffer pools, FB attachments, blit src/dst, tiler heap, sample
 * positions) and the syncobjs to wait on (queue out-fence, wait
 * semaphores, event waits), submit, then process the batch's event ops.
 * Finally the queue out-fence is copied into signal semaphores and the
 * optional fence.
 */
VkResult
panvk_QueueSubmit(VkQueue _queue,
                  uint32_t submitCount,
                  const VkSubmitInfo *pSubmits,
                  VkFence _fence)
{
   VK_FROM_HANDLE(panvk_queue, queue, _queue);
   VK_FROM_HANDLE(panvk_fence, fence, _fence);
   const struct panfrost_device *pdev = &queue->device->physical_device->pdev;

   for (uint32_t i = 0; i < submitCount; ++i) {
      const VkSubmitInfo *submit = pSubmits + i;
      unsigned nr_semaphores = submit->waitSemaphoreCount + 1;
      uint32_t semaphores[nr_semaphores];

      /* Slot 0 waits on the queue's own out-fence so submissions stay
       * ordered. */
      semaphores[0] = queue->sync;
      /* NOTE(review): this inner 'i' shadows the outer submit index —
       * harmless as written, but worth renaming. */
      for (unsigned i = 0; i < submit->waitSemaphoreCount; i++) {
         VK_FROM_HANDLE(panvk_semaphore, sem, submit->pWaitSemaphores[i]);

         /* A temporary (imported) payload takes precedence over the
          * permanent one. */
         semaphores[i + 1] = sem->syncobj.temporary ? : sem->syncobj.permanent;
      }

      for (uint32_t j = 0; j < submit->commandBufferCount; ++j) {
         VK_FROM_HANDLE(panvk_cmd_buffer, cmdbuf, (submit->pCommandBuffers[j]));

         list_for_each_entry(struct panvk_batch, batch, &cmdbuf->batches, node) {
            /* FIXME: should be done at the batch level */
            unsigned nr_bos =
               panvk_pool_num_bos(&cmdbuf->desc_pool) +
               panvk_pool_num_bos(&cmdbuf->varying_pool) +
               panvk_pool_num_bos(&cmdbuf->tls_pool) +
               (batch->fb.info ? batch->fb.info->attachment_count : 0) +
               (batch->blit.src ? 1 : 0) +
               (batch->blit.dst ? 1 : 0) +
               (batch->scoreboard.first_tiler ? 1 : 0) + 1;
            unsigned bo_idx = 0;
            uint32_t bos[nr_bos];

            panvk_pool_get_bo_handles(&cmdbuf->desc_pool, &bos[bo_idx]);
            bo_idx += panvk_pool_num_bos(&cmdbuf->desc_pool);

            panvk_pool_get_bo_handles(&cmdbuf->varying_pool, &bos[bo_idx]);
            bo_idx += panvk_pool_num_bos(&cmdbuf->varying_pool);

            panvk_pool_get_bo_handles(&cmdbuf->tls_pool, &bos[bo_idx]);
            bo_idx += panvk_pool_num_bos(&cmdbuf->tls_pool);

            if (batch->fb.info) {
               for (unsigned i = 0; i < batch->fb.info->attachment_count; i++) {
                  bos[bo_idx++] = batch->fb.info->attachments[i].iview->pview.image->data.bo->gem_handle;
               }
            }

            if (batch->blit.src)
               bos[bo_idx++] = batch->blit.src->gem_handle;

            if (batch->blit.dst)
               bos[bo_idx++] = batch->blit.dst->gem_handle;

            if (batch->scoreboard.first_tiler)
               bos[bo_idx++] = pdev->tiler_heap->gem_handle;

            /* The unconditional "+ 1" in nr_bos: sample positions are
             * always referenced. */
            bos[bo_idx++] = pdev->sample_positions->gem_handle;
            assert(bo_idx == nr_bos);

            unsigned nr_in_fences = 0;
            unsigned max_wait_event_syncobjs =
               util_dynarray_num_elements(&batch->event_ops,
                                          struct panvk_event_op);
            uint32_t in_fences[nr_semaphores + max_wait_event_syncobjs];
            memcpy(in_fences, semaphores, nr_semaphores * sizeof(*in_fences));
            nr_in_fences += nr_semaphores;

            panvk_add_wait_event_syncobjs(batch, in_fences, &nr_in_fences);

            panvk_queue_submit_batch(queue, batch, bos, nr_bos, in_fences, nr_in_fences);

            panvk_signal_event_syncobjs(queue, batch);
         }
      }

      /* Transfer the out fence to signal semaphores */
      for (unsigned i = 0; i < submit->signalSemaphoreCount; i++) {
         VK_FROM_HANDLE(panvk_semaphore, sem, submit->pSignalSemaphores[i]);
         panvk_queue_transfer_sync(queue, sem->syncobj.temporary ? : sem->syncobj.permanent);
      }
   }

   if (fence) {
      /* Transfer the last out fence to the fence object */
      panvk_queue_transfer_sync(queue, fence->syncobj.temporary ? : fence->syncobj.permanent);
   }

   return VK_SUCCESS;
}
1334
1335
/*
 * vkQueueWaitIdle: block (with an infinite timeout) until the queue's
 * out-fence syncobj signals. Every submission routes through queue->sync,
 * so waiting on it drains the queue.
 */
VkResult
panvk_QueueWaitIdle(VkQueue _queue)
{
   VK_FROM_HANDLE(panvk_queue, queue, _queue);

   if (panvk_device_is_lost(queue->device))
      return VK_ERROR_DEVICE_LOST;

   const struct panfrost_device *pdev = &queue->device->physical_device->pdev;
   struct drm_syncobj_wait wait = {
      .handles = (uint64_t) (uintptr_t)(&queue->sync),
      .count_handles = 1,
      .timeout_nsec = INT64_MAX,
      .flags = DRM_SYNCOBJ_WAIT_FLAGS_WAIT_ALL,
   };
   int ret;

   ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_WAIT, &wait);
   assert(!ret);

   return VK_SUCCESS;
}
1357
1358
VkResult
1359
panvk_DeviceWaitIdle(VkDevice _device)
1360
{
1361
VK_FROM_HANDLE(panvk_device, device, _device);
1362
1363
if (panvk_device_is_lost(device))
1364
return VK_ERROR_DEVICE_LOST;
1365
1366
for (unsigned i = 0; i < PANVK_MAX_QUEUE_FAMILIES; i++) {
1367
for (unsigned q = 0; q < device->queue_count[i]; q++) {
1368
panvk_QueueWaitIdle(panvk_queue_to_handle(&device->queues[i][q]));
1369
}
1370
}
1371
return VK_SUCCESS;
1372
}
1373
1374
/*
 * vkEnumerateInstanceExtensionProperties: layers are not supported, so a
 * non-NULL pLayerName is an error; otherwise delegate to the common
 * runtime with panvk's instance extension table.
 */
VkResult
panvk_EnumerateInstanceExtensionProperties(const char *pLayerName,
                                           uint32_t *pPropertyCount,
                                           VkExtensionProperties *pProperties)
{
   if (pLayerName)
      return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);

   return vk_enumerate_instance_extension_properties(&panvk_instance_extensions,
                                                     pPropertyCount, pProperties);
}
1385
1386
/* vkGetInstanceProcAddr: resolved through the common vk_instance helper
 * against panvk's instance entrypoint table. */
PFN_vkVoidFunction
panvk_GetInstanceProcAddr(VkInstance _instance, const char *pName)
{
   VK_FROM_HANDLE(panvk_instance, instance, _instance);
   return vk_instance_get_proc_addr(&instance->vk,
                                    &panvk_instance_entrypoints,
                                    pName);
}
1394
1395
/* The loader wants us to expose a second GetInstanceProcAddr function
 * to work around certain LD_PRELOAD issues seen in apps.
 */
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName);

/* ICD entry point required by the Vulkan loader — simply forwards to
 * panvk_GetInstanceProcAddr(). */
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_icdGetInstanceProcAddr(VkInstance instance, const char *pName)
{
   return panvk_GetInstanceProcAddr(instance, pName);
}
1408
1409
VkResult
1410
panvk_AllocateMemory(VkDevice _device,
1411
const VkMemoryAllocateInfo *pAllocateInfo,
1412
const VkAllocationCallbacks *pAllocator,
1413
VkDeviceMemory *pMem)
1414
{
1415
VK_FROM_HANDLE(panvk_device, device, _device);
1416
struct panvk_device_memory *mem;
1417
1418
assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);
1419
1420
if (pAllocateInfo->allocationSize == 0) {
1421
/* Apparently, this is allowed */
1422
*pMem = VK_NULL_HANDLE;
1423
return VK_SUCCESS;
1424
}
1425
1426
mem = vk_object_alloc(&device->vk, pAllocator, sizeof(*mem),
1427
VK_OBJECT_TYPE_DEVICE_MEMORY);
1428
if (mem == NULL)
1429
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1430
1431
const VkImportMemoryFdInfoKHR *fd_info =
1432
vk_find_struct_const(pAllocateInfo->pNext,
1433
IMPORT_MEMORY_FD_INFO_KHR);
1434
1435
if (fd_info && !fd_info->handleType)
1436
fd_info = NULL;
1437
1438
if (fd_info) {
1439
assert(fd_info->handleType ==
1440
VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
1441
fd_info->handleType ==
1442
VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
1443
1444
/*
1445
* TODO Importing the same fd twice gives us the same handle without
1446
* reference counting. We need to maintain a per-instance handle-to-bo
1447
* table and add reference count to panvk_bo.
1448
*/
1449
mem->bo = panfrost_bo_import(&device->physical_device->pdev, fd_info->fd);
1450
/* take ownership and close the fd */
1451
close(fd_info->fd);
1452
} else {
1453
mem->bo = panfrost_bo_create(&device->physical_device->pdev,
1454
pAllocateInfo->allocationSize, 0,
1455
"User-requested memory");
1456
}
1457
1458
assert(mem->bo);
1459
1460
*pMem = panvk_device_memory_to_handle(mem);
1461
1462
return VK_SUCCESS;
1463
}
1464
1465
/* vkFreeMemory: drop our reference on the backing BO (it may outlive the
 * VkDeviceMemory if an image still references it) and free the object.
 * NULL handles are ignored per the Vulkan convention. */
void
panvk_FreeMemory(VkDevice _device,
                 VkDeviceMemory _mem,
                 const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_device_memory, mem, _mem);

   if (mem == NULL)
      return;

   panfrost_bo_unreference(mem->bo);
   vk_object_free(&device->vk, pAllocator, mem);
}
1479
1480
/*
 * vkMapMemory: lazily mmap the BO on first map and return the CPU pointer
 * at the requested offset. The mapping is kept for the BO's lifetime —
 * vkUnmapMemory is a no-op. The `size` and `flags` parameters are unused.
 */
VkResult
panvk_MapMemory(VkDevice _device,
                VkDeviceMemory _memory,
                VkDeviceSize offset,
                VkDeviceSize size,
                VkMemoryMapFlags flags,
                void **ppData)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_device_memory, mem, _memory);

   /* Mapping VK_NULL_HANDLE (zero-sized allocation) yields NULL. */
   if (mem == NULL) {
      *ppData = NULL;
      return VK_SUCCESS;
   }

   if (!mem->bo->ptr.cpu)
      panfrost_bo_mmap(mem->bo);

   *ppData = mem->bo->ptr.cpu;

   if (*ppData) {
      /* void* arithmetic is a GCC extension, used throughout Mesa. */
      *ppData += offset;
      return VK_SUCCESS;
   }

   return vk_error(device->instance, VK_ERROR_MEMORY_MAP_FAILED);
}
1508
1509
/* vkUnmapMemory: intentionally a no-op — the CPU mapping established by
 * panvk_MapMemory() is kept until the BO is destroyed. */
void
panvk_UnmapMemory(VkDevice _device, VkDeviceMemory _memory)
{
}
1513
1514
/* vkFlushMappedMemoryRanges: no-op — the only exposed memory type is
 * HOST_COHERENT (see panvk_GetPhysicalDeviceMemoryProperties2). */
VkResult
panvk_FlushMappedMemoryRanges(VkDevice _device,
                              uint32_t memoryRangeCount,
                              const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}
1521
1522
/* vkInvalidateMappedMemoryRanges: no-op — the only exposed memory type is
 * HOST_COHERENT (see panvk_GetPhysicalDeviceMemoryProperties2). */
VkResult
panvk_InvalidateMappedMemoryRanges(VkDevice _device,
                                   uint32_t memoryRangeCount,
                                   const VkMappedMemoryRange *pMemoryRanges)
{
   return VK_SUCCESS;
}
1529
1530
void
1531
panvk_GetBufferMemoryRequirements(VkDevice _device,
1532
VkBuffer _buffer,
1533
VkMemoryRequirements *pMemoryRequirements)
1534
{
1535
VK_FROM_HANDLE(panvk_buffer, buffer, _buffer);
1536
1537
pMemoryRequirements->memoryTypeBits = 1;
1538
pMemoryRequirements->alignment = 64;
1539
pMemoryRequirements->size =
1540
align64(buffer->size, pMemoryRequirements->alignment);
1541
}
1542
1543
/* vkGetBufferMemoryRequirements2: thin wrapper around the 1.0 entry
 * point; pNext extension structs are left untouched. */
void
panvk_GetBufferMemoryRequirements2(VkDevice device,
                                   const VkBufferMemoryRequirementsInfo2 *pInfo,
                                   VkMemoryRequirements2 *pMemoryRequirements)
{
   panvk_GetBufferMemoryRequirements(device, pInfo->buffer,
                                     &pMemoryRequirements->memoryRequirements);
}
1551
1552
void
1553
panvk_GetImageMemoryRequirements(VkDevice _device,
1554
VkImage _image,
1555
VkMemoryRequirements *pMemoryRequirements)
1556
{
1557
VK_FROM_HANDLE(panvk_image, image, _image);
1558
1559
pMemoryRequirements->memoryTypeBits = 1;
1560
pMemoryRequirements->size = panvk_image_get_total_size(image);
1561
pMemoryRequirements->alignment = 4096;
1562
}
1563
1564
/* vkGetImageMemoryRequirements2: thin wrapper around the 1.0 entry
 * point; pNext extension structs are left untouched. */
void
panvk_GetImageMemoryRequirements2(VkDevice device,
                                  const VkImageMemoryRequirementsInfo2 *pInfo,
                                  VkMemoryRequirements2 *pMemoryRequirements)
{
   panvk_GetImageMemoryRequirements(device, pInfo->image,
                                    &pMemoryRequirements->memoryRequirements);
}
1572
1573
void
1574
panvk_GetImageSparseMemoryRequirements(VkDevice device, VkImage image,
1575
uint32_t *pSparseMemoryRequirementCount,
1576
VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
1577
{
1578
panvk_stub();
1579
}
1580
1581
void
1582
panvk_GetImageSparseMemoryRequirements2(VkDevice device,
1583
const VkImageSparseMemoryRequirementsInfo2 *pInfo,
1584
uint32_t *pSparseMemoryRequirementCount,
1585
VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
1586
{
1587
panvk_stub();
1588
}
1589
1590
/* vkGetDeviceMemoryCommitment: lazily-allocated memory is not exposed,
 * so report zero committed bytes. */
void
panvk_GetDeviceMemoryCommitment(VkDevice device,
                                VkDeviceMemory memory,
                                VkDeviceSize *pCommittedMemoryInBytes)
{
   *pCommittedMemoryInBytes = 0;
}
1598
VkResult
1599
panvk_BindBufferMemory2(VkDevice device,
1600
uint32_t bindInfoCount,
1601
const VkBindBufferMemoryInfo *pBindInfos)
1602
{
1603
for (uint32_t i = 0; i < bindInfoCount; ++i) {
1604
VK_FROM_HANDLE(panvk_device_memory, mem, pBindInfos[i].memory);
1605
VK_FROM_HANDLE(panvk_buffer, buffer, pBindInfos[i].buffer);
1606
1607
if (mem) {
1608
buffer->bo = mem->bo;
1609
buffer->bo_offset = pBindInfos[i].memoryOffset;
1610
} else {
1611
buffer->bo = NULL;
1612
}
1613
}
1614
return VK_SUCCESS;
1615
}
1616
1617
/* vkBindBufferMemory: Vulkan 1.0 wrapper around vkBindBufferMemory2. */
VkResult
panvk_BindBufferMemory(VkDevice device,
                       VkBuffer buffer,
                       VkDeviceMemory memory,
                       VkDeviceSize memoryOffset)
{
   const VkBindBufferMemoryInfo info = {
      .sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
      .buffer = buffer,
      .memory = memory,
      .memoryOffset = memoryOffset
   };

   return panvk_BindBufferMemory2(device, 1, &info);
}
1632
1633
/*
 * vkBindImageMemory2: attach the memory BO to each image (taking a BO
 * reference, unlike the buffer path), or detach and drop the reference on
 * a NULL memory handle. For AFBC-modified images the compression headers
 * must be zeroed so the image starts out in a valid "all empty" state.
 */
VkResult
panvk_BindImageMemory2(VkDevice device,
                       uint32_t bindInfoCount,
                       const VkBindImageMemoryInfo *pBindInfos)
{
   for (uint32_t i = 0; i < bindInfoCount; ++i) {
      VK_FROM_HANDLE(panvk_image, image, pBindInfos[i].image);
      VK_FROM_HANDLE(panvk_device_memory, mem, pBindInfos[i].memory);

      if (mem) {
         panfrost_bo_reference(mem->bo);
         image->pimage.data.bo = mem->bo;
         image->pimage.data.offset = pBindInfos[i].memoryOffset;
         /* Reset the AFBC headers */
         if (drm_is_afbc(image->pimage.layout.modifier)) {
            /* NOTE(review): this reads bo->ptr.cpu directly — assumes the
             * BO is already CPU-mapped at bind time; confirm for memory
             * that was never vkMapMemory'd. */
            void *base = image->pimage.data.bo->ptr.cpu + image->pimage.data.offset;

            for (unsigned layer = 0; layer < image->pimage.layout.array_size; layer++) {
               for (unsigned level = 0; level < image->pimage.layout.nr_slices; level++) {
                  /* Zero the per-slice AFBC header region. */
                  void *header = base +
                                 (layer * image->pimage.layout.array_stride) +
                                 image->pimage.layout.slices[level].offset;
                  memset(header, 0, image->pimage.layout.slices[level].afbc.header_size);
               }
            }
         }
      } else {
         panfrost_bo_unreference(image->pimage.data.bo);
         image->pimage.data.bo = NULL;
         image->pimage.data.offset = pBindInfos[i].memoryOffset;
      }
   }

   return VK_SUCCESS;
}
1668
1669
VkResult
1670
panvk_BindImageMemory(VkDevice device,
1671
VkImage image,
1672
VkDeviceMemory memory,
1673
VkDeviceSize memoryOffset)
1674
{
1675
const VkBindImageMemoryInfo info = {
1676
.sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
1677
.image = image,
1678
.memory = memory,
1679
.memoryOffset = memoryOffset
1680
};
1681
1682
return panvk_BindImageMemory2(device, 1, &info);
1683
}
1684
1685
/* vkQueueBindSparse: sparse binding is not supported.
 * NOTE(review): this silently returns success without performing any
 * binding or signaling the fence — confirm callers never rely on it. */
VkResult
panvk_QueueBindSparse(VkQueue _queue,
                      uint32_t bindInfoCount,
                      const VkBindSparseInfo *pBindInfo,
                      VkFence _fence)
{
   return VK_SUCCESS;
}
1693
1694
VkResult
1695
panvk_CreateEvent(VkDevice _device,
1696
const VkEventCreateInfo *pCreateInfo,
1697
const VkAllocationCallbacks *pAllocator,
1698
VkEvent *pEvent)
1699
{
1700
VK_FROM_HANDLE(panvk_device, device, _device);
1701
const struct panfrost_device *pdev = &device->physical_device->pdev;
1702
struct panvk_event *event =
1703
vk_object_zalloc(&device->vk, pAllocator, sizeof(*event),
1704
VK_OBJECT_TYPE_EVENT);
1705
if (!event)
1706
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1707
1708
struct drm_syncobj_create create = {
1709
.flags = 0,
1710
};
1711
1712
int ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_CREATE, &create);
1713
if (ret)
1714
return VK_ERROR_OUT_OF_HOST_MEMORY;
1715
1716
event->syncobj = create.handle;
1717
*pEvent = panvk_event_to_handle(event);
1718
1719
return VK_SUCCESS;
1720
}
1721
1722
/* vkDestroyEvent: destroy the backing syncobj and free the object.
 * NULL handles are ignored per the Vulkan convention. */
void
panvk_DestroyEvent(VkDevice _device,
                   VkEvent _event,
                   const VkAllocationCallbacks *pAllocator)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_event, event, _event);
   const struct panfrost_device *pdev = &device->physical_device->pdev;

   if (!event)
      return;

   struct drm_syncobj_destroy destroy = { .handle = event->syncobj };
   drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_DESTROY, &destroy);

   vk_object_free(&device->vk, pAllocator, event);
}
1739
1740
/*
 * vkGetEventStatus: poll the event's syncobj with a zero timeout.
 * WAIT_FOR_SUBMIT makes a syncobj without a fence wait (and thus time
 * out) rather than fail; ETIME therefore means "not signaled yet", any
 * other ioctl error is treated as device loss.
 */
VkResult
panvk_GetEventStatus(VkDevice _device, VkEvent _event)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_event, event, _event);
   const struct panfrost_device *pdev = &device->physical_device->pdev;
   bool signaled;

   struct drm_syncobj_wait wait = {
      .handles = (uintptr_t) &event->syncobj,
      .count_handles = 1,
      .timeout_nsec = 0,
      .flags = DRM_SYNCOBJ_WAIT_FLAGS_WAIT_FOR_SUBMIT,
   };

   int ret = drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_WAIT, &wait);
   if (ret) {
      if (errno == ETIME)
         signaled = false;
      else {
         assert(0);
         return VK_ERROR_DEVICE_LOST; /* TODO */
      }
   } else
      signaled = true;

   return signaled ? VK_EVENT_SET : VK_EVENT_RESET;
}
1768
1769
/* vkSetEvent: signal the event's syncobj from the host. */
VkResult
panvk_SetEvent(VkDevice _device, VkEvent _event)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_event, event, _event);
   const struct panfrost_device *pdev = &device->physical_device->pdev;

   struct drm_syncobj_array objs = {
      .handles = (uint64_t) (uintptr_t) &event->syncobj,
      .count_handles = 1
   };

   /* This is going to just replace the fence for this syncobj with one that
    * is already in signaled state. This won't be a problem because the spec
    * mandates that the event will have been set before the vkCmdWaitEvents
    * command executes.
    * https://www.khronos.org/registry/vulkan/specs/1.2/html/chap6.html#commandbuffers-submission-progress
    */
   if (drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_SIGNAL, &objs))
      return VK_ERROR_DEVICE_LOST;

   return VK_SUCCESS;
}
1792
1793
/* vkResetEvent: clear the event's syncobj back to the unsignaled state. */
VkResult
panvk_ResetEvent(VkDevice _device, VkEvent _event)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   VK_FROM_HANDLE(panvk_event, event, _event);
   const struct panfrost_device *pdev = &device->physical_device->pdev;

   struct drm_syncobj_array objs = {
      .handles = (uint64_t) (uintptr_t) &event->syncobj,
      .count_handles = 1
   };

   if (drmIoctl(pdev->fd, DRM_IOCTL_SYNCOBJ_RESET, &objs))
      return VK_ERROR_DEVICE_LOST;

   return VK_SUCCESS;
}
1810
1811
/*
 * vkCreateBuffer: buffers are pure bookkeeping (size/usage/flags); the
 * backing BO is attached later by vkBindBufferMemory*.
 */
VkResult
panvk_CreateBuffer(VkDevice _device,
                   const VkBufferCreateInfo *pCreateInfo,
                   const VkAllocationCallbacks *pAllocator,
                   VkBuffer *pBuffer)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   struct panvk_buffer *buffer;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);

   buffer = vk_object_alloc(&device->vk, pAllocator, sizeof(*buffer),
                            VK_OBJECT_TYPE_BUFFER);
   if (buffer == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   buffer->size = pCreateInfo->size;
   buffer->usage = pCreateInfo->usage;
   buffer->flags = pCreateInfo->flags;

   *pBuffer = panvk_buffer_to_handle(buffer);

   return VK_SUCCESS;
}
1835
1836
void
1837
panvk_DestroyBuffer(VkDevice _device,
1838
VkBuffer _buffer,
1839
const VkAllocationCallbacks *pAllocator)
1840
{
1841
VK_FROM_HANDLE(panvk_device, device, _device);
1842
VK_FROM_HANDLE(panvk_buffer, buffer, _buffer);
1843
1844
if (!buffer)
1845
return;
1846
1847
vk_object_free(&device->vk, pAllocator, buffer);
1848
}
1849
1850
/*
 * vkCreateFramebuffer: record dimensions and the attachment image views.
 * The attachment array is allocated inline after the framebuffer struct.
 */
VkResult
panvk_CreateFramebuffer(VkDevice _device,
                        const VkFramebufferCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkFramebuffer *pFramebuffer)
{
   VK_FROM_HANDLE(panvk_device, device, _device);
   struct panvk_framebuffer *framebuffer;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);

   size_t size = sizeof(*framebuffer) + sizeof(struct panvk_attachment_info) *
                 pCreateInfo->attachmentCount;
   framebuffer = vk_object_alloc(&device->vk, pAllocator, size,
                                 VK_OBJECT_TYPE_FRAMEBUFFER);
   if (framebuffer == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   framebuffer->attachment_count = pCreateInfo->attachmentCount;
   framebuffer->width = pCreateInfo->width;
   framebuffer->height = pCreateInfo->height;
   framebuffer->layers = pCreateInfo->layers;
   for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
      VkImageView _iview = pCreateInfo->pAttachments[i];
      struct panvk_image_view *iview = panvk_image_view_from_handle(_iview);
      framebuffer->attachments[i].iview = iview;
   }

   *pFramebuffer = panvk_framebuffer_to_handle(framebuffer);
   return VK_SUCCESS;
}
1881
1882
void
1883
panvk_DestroyFramebuffer(VkDevice _device,
1884
VkFramebuffer _fb,
1885
const VkAllocationCallbacks *pAllocator)
1886
{
1887
VK_FROM_HANDLE(panvk_device, device, _device);
1888
VK_FROM_HANDLE(panvk_framebuffer, fb, _fb);
1889
1890
if (fb)
1891
vk_object_free(&device->vk, pAllocator, fb);
1892
}
1893
1894
static enum mali_mipmap_mode
1895
panvk_translate_sampler_mipmap_mode(VkSamplerMipmapMode mode)
1896
{
1897
switch (mode) {
1898
case VK_SAMPLER_MIPMAP_MODE_NEAREST: return MALI_MIPMAP_MODE_NEAREST;
1899
case VK_SAMPLER_MIPMAP_MODE_LINEAR: return MALI_MIPMAP_MODE_TRILINEAR;
1900
default: unreachable("Invalid mipmap mode");
1901
}
1902
}
1903
1904
static unsigned
1905
panvk_translate_sampler_address_mode(VkSamplerAddressMode mode)
1906
{
1907
switch (mode) {
1908
case VK_SAMPLER_ADDRESS_MODE_REPEAT: return MALI_WRAP_MODE_REPEAT;
1909
case VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT: return MALI_WRAP_MODE_MIRRORED_REPEAT;
1910
case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE: return MALI_WRAP_MODE_CLAMP_TO_EDGE;
1911
case VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER: return MALI_WRAP_MODE_CLAMP_TO_BORDER;
1912
case VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE: return MALI_WRAP_MODE_MIRRORED_CLAMP_TO_EDGE;
1913
default: unreachable("Invalid wrap");
1914
}
1915
}
1916
1917
static enum mali_func
1918
panvk_translate_sampler_compare_func(const VkSamplerCreateInfo *pCreateInfo)
1919
{
1920
if (!pCreateInfo->compareEnable)
1921
return MALI_FUNC_NEVER;
1922
1923
enum mali_func f = panvk_translate_compare_func(pCreateInfo->compareOp);
1924
return panfrost_flip_compare_func(f);
1925
}
1926
1927
static void
1928
panvk_init_midgard_sampler(struct panvk_sampler *sampler,
1929
const VkSamplerCreateInfo *pCreateInfo)
1930
{
1931
const VkSamplerCustomBorderColorCreateInfoEXT *pBorderColor =
1932
vk_find_struct_const(pCreateInfo->pNext, SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT);
1933
1934
pan_pack(&sampler->desc, MIDGARD_SAMPLER, cfg) {
1935
cfg.magnify_nearest = pCreateInfo->magFilter == VK_FILTER_NEAREST;
1936
cfg.minify_nearest = pCreateInfo->minFilter == VK_FILTER_NEAREST;
1937
cfg.mipmap_mode = panvk_translate_sampler_mipmap_mode(pCreateInfo->mipmapMode);
1938
cfg.normalized_coordinates = !pCreateInfo->unnormalizedCoordinates;
1939
cfg.lod_bias = FIXED_16(pCreateInfo->mipLodBias, true);
1940
cfg.minimum_lod = FIXED_16(pCreateInfo->minLod, false);
1941
cfg.maximum_lod = FIXED_16(pCreateInfo->maxLod, false);
1942
1943
cfg.wrap_mode_s = panvk_translate_sampler_address_mode(pCreateInfo->addressModeU);
1944
cfg.wrap_mode_t = panvk_translate_sampler_address_mode(pCreateInfo->addressModeV);
1945
cfg.wrap_mode_r = panvk_translate_sampler_address_mode(pCreateInfo->addressModeW);
1946
cfg.compare_function = panvk_translate_sampler_compare_func(pCreateInfo);
1947
1948
switch (pCreateInfo->borderColor) {
1949
case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
1950
case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK:
1951
cfg.border_color_r = fui(0.0);
1952
cfg.border_color_g = fui(0.0);
1953
cfg.border_color_b = fui(0.0);
1954
cfg.border_color_a =
1955
pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK ?
1956
fui(1.0) : fui(0.0);
1957
break;
1958
case VK_BORDER_COLOR_INT_OPAQUE_BLACK:
1959
case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
1960
cfg.border_color_r = 0;
1961
cfg.border_color_g = 0;
1962
cfg.border_color_b = 0;
1963
cfg.border_color_a =
1964
pCreateInfo->borderColor == VK_BORDER_COLOR_INT_OPAQUE_BLACK ?
1965
UINT_MAX : 0;
1966
break;
1967
case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE:
1968
cfg.border_color_r = fui(1.0);
1969
cfg.border_color_g = fui(1.0);
1970
cfg.border_color_b = fui(1.0);
1971
cfg.border_color_a = fui(1.0);
1972
break;
1973
case VK_BORDER_COLOR_INT_OPAQUE_WHITE:
1974
cfg.border_color_r = UINT_MAX;
1975
cfg.border_color_g = UINT_MAX;
1976
cfg.border_color_b = UINT_MAX;
1977
cfg.border_color_a = UINT_MAX;
1978
break;
1979
case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT:
1980
case VK_BORDER_COLOR_INT_CUSTOM_EXT:
1981
cfg.border_color_r = pBorderColor->customBorderColor.int32[0];
1982
cfg.border_color_g = pBorderColor->customBorderColor.int32[1];
1983
cfg.border_color_b = pBorderColor->customBorderColor.int32[2];
1984
cfg.border_color_a = pBorderColor->customBorderColor.int32[3];
1985
break;
1986
default:
1987
unreachable("Invalid border color");
1988
}
1989
}
1990
}
1991
1992
static void
1993
panvk_init_bifrost_sampler(struct panvk_sampler *sampler,
1994
const VkSamplerCreateInfo *pCreateInfo)
1995
{
1996
const VkSamplerCustomBorderColorCreateInfoEXT *pBorderColor =
1997
vk_find_struct_const(pCreateInfo->pNext, SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT);
1998
1999
pan_pack(&sampler->desc, BIFROST_SAMPLER, cfg) {
2000
cfg.point_sample_magnify = pCreateInfo->magFilter == VK_FILTER_LINEAR;
2001
cfg.point_sample_minify = pCreateInfo->minFilter == VK_FILTER_LINEAR;
2002
cfg.mipmap_mode = panvk_translate_sampler_mipmap_mode(pCreateInfo->mipmapMode);
2003
cfg.normalized_coordinates = !pCreateInfo->unnormalizedCoordinates;
2004
2005
cfg.lod_bias = FIXED_16(pCreateInfo->mipLodBias, true);
2006
cfg.minimum_lod = FIXED_16(pCreateInfo->minLod, false);
2007
cfg.maximum_lod = FIXED_16(pCreateInfo->maxLod, false);
2008
cfg.wrap_mode_s = panvk_translate_sampler_address_mode(pCreateInfo->addressModeU);
2009
cfg.wrap_mode_t = panvk_translate_sampler_address_mode(pCreateInfo->addressModeV);
2010
cfg.wrap_mode_r = panvk_translate_sampler_address_mode(pCreateInfo->addressModeW);
2011
cfg.compare_function = panvk_translate_sampler_compare_func(pCreateInfo);
2012
2013
switch (pCreateInfo->borderColor) {
2014
case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
2015
case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK:
2016
cfg.border_color_r = fui(0.0);
2017
cfg.border_color_g = fui(0.0);
2018
cfg.border_color_b = fui(0.0);
2019
cfg.border_color_a =
2020
pCreateInfo->borderColor == VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK ?
2021
fui(1.0) : fui(0.0);
2022
break;
2023
case VK_BORDER_COLOR_INT_OPAQUE_BLACK:
2024
case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
2025
cfg.border_color_r = 0;
2026
cfg.border_color_g = 0;
2027
cfg.border_color_b = 0;
2028
cfg.border_color_a =
2029
pCreateInfo->borderColor == VK_BORDER_COLOR_INT_OPAQUE_BLACK ?
2030
UINT_MAX : 0;
2031
break;
2032
case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE:
2033
cfg.border_color_r = fui(1.0);
2034
cfg.border_color_g = fui(1.0);
2035
cfg.border_color_b = fui(1.0);
2036
cfg.border_color_a = fui(1.0);
2037
break;
2038
case VK_BORDER_COLOR_INT_OPAQUE_WHITE:
2039
cfg.border_color_r = UINT_MAX;
2040
cfg.border_color_g = UINT_MAX;
2041
cfg.border_color_b = UINT_MAX;
2042
cfg.border_color_a = UINT_MAX;
2043
break;
2044
case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT:
2045
case VK_BORDER_COLOR_INT_CUSTOM_EXT:
2046
cfg.border_color_r = pBorderColor->customBorderColor.int32[0];
2047
cfg.border_color_g = pBorderColor->customBorderColor.int32[1];
2048
cfg.border_color_b = pBorderColor->customBorderColor.int32[2];
2049
cfg.border_color_a = pBorderColor->customBorderColor.int32[3];
2050
break;
2051
default:
2052
unreachable("Invalid border color");
2053
}
2054
}
2055
}
2056
2057
static void
2058
panvk_init_sampler(struct panvk_device *device,
2059
struct panvk_sampler *sampler,
2060
const VkSamplerCreateInfo *pCreateInfo)
2061
{
2062
if (pan_is_bifrost(&device->physical_device->pdev))
2063
panvk_init_bifrost_sampler(sampler, pCreateInfo);
2064
else
2065
panvk_init_midgard_sampler(sampler, pCreateInfo);
2066
}
2067
2068
VkResult
2069
panvk_CreateSampler(VkDevice _device,
2070
const VkSamplerCreateInfo *pCreateInfo,
2071
const VkAllocationCallbacks *pAllocator,
2072
VkSampler *pSampler)
2073
{
2074
VK_FROM_HANDLE(panvk_device, device, _device);
2075
struct panvk_sampler *sampler;
2076
2077
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);
2078
2079
sampler = vk_object_alloc(&device->vk, pAllocator, sizeof(*sampler),
2080
VK_OBJECT_TYPE_SAMPLER);
2081
if (!sampler)
2082
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
2083
2084
panvk_init_sampler(device, sampler, pCreateInfo);
2085
*pSampler = panvk_sampler_to_handle(sampler);
2086
2087
return VK_SUCCESS;
2088
}
2089
2090
void
2091
panvk_DestroySampler(VkDevice _device,
2092
VkSampler _sampler,
2093
const VkAllocationCallbacks *pAllocator)
2094
{
2095
VK_FROM_HANDLE(panvk_device, device, _device);
2096
VK_FROM_HANDLE(panvk_sampler, sampler, _sampler);
2097
2098
if (!sampler)
2099
return;
2100
2101
vk_object_free(&device->vk, pAllocator, sampler);
2102
}
2103
2104
/* vk_icd.h does not declare this function, so we declare it here to
2105
* suppress Wmissing-prototypes.
2106
*/
2107
PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
2108
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion);
2109
2110
PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
2111
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pSupportedVersion)
2112
{
2113
/* For the full details on loader interface versioning, see
2114
* <https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/blob/master/loader/LoaderAndLayerInterface.md>.
2115
* What follows is a condensed summary, to help you navigate the large and
2116
* confusing official doc.
2117
*
2118
* - Loader interface v0 is incompatible with later versions. We don't
2119
* support it.
2120
*
2121
* - In loader interface v1:
2122
* - The first ICD entrypoint called by the loader is
2123
* vk_icdGetInstanceProcAddr(). The ICD must statically expose this
2124
* entrypoint.
2125
* - The ICD must statically expose no other Vulkan symbol unless it
2126
* is linked with -Bsymbolic.
2127
* - Each dispatchable Vulkan handle created by the ICD must be
2128
* a pointer to a struct whose first member is VK_LOADER_DATA. The
2129
* ICD must initialize VK_LOADER_DATA.loadMagic to
2130
* ICD_LOADER_MAGIC.
2131
* - The loader implements vkCreate{PLATFORM}SurfaceKHR() and
2132
* vkDestroySurfaceKHR(). The ICD must be capable of working with
2133
* such loader-managed surfaces.
2134
*
2135
* - Loader interface v2 differs from v1 in:
2136
* - The first ICD entrypoint called by the loader is
2137
* vk_icdNegotiateLoaderICDInterfaceVersion(). The ICD must
2138
* statically expose this entrypoint.
2139
*
2140
* - Loader interface v3 differs from v2 in:
2141
* - The ICD must implement vkCreate{PLATFORM}SurfaceKHR(),
2142
* vkDestroySurfaceKHR(), and other API which uses VKSurfaceKHR,
2143
* because the loader no longer does so.
2144
*/
2145
*pSupportedVersion = MIN2(*pSupportedVersion, 3u);
2146
return VK_SUCCESS;
2147
}
2148
2149
VkResult
2150
panvk_GetMemoryFdKHR(VkDevice _device,
2151
const VkMemoryGetFdInfoKHR *pGetFdInfo,
2152
int *pFd)
2153
{
2154
VK_FROM_HANDLE(panvk_device, device, _device);
2155
VK_FROM_HANDLE(panvk_device_memory, memory, pGetFdInfo->memory);
2156
2157
assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);
2158
2159
/* At the moment, we support only the below handle types. */
2160
assert(pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
2161
pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
2162
2163
int prime_fd = panfrost_bo_export(memory->bo);
2164
if (prime_fd < 0)
2165
return vk_error(device->instance, VK_ERROR_OUT_OF_DEVICE_MEMORY);
2166
2167
*pFd = prime_fd;
2168
return VK_SUCCESS;
2169
}
2170
2171
VkResult
2172
panvk_GetMemoryFdPropertiesKHR(VkDevice _device,
2173
VkExternalMemoryHandleTypeFlagBits handleType,
2174
int fd,
2175
VkMemoryFdPropertiesKHR *pMemoryFdProperties)
2176
{
2177
assert(handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
2178
pMemoryFdProperties->memoryTypeBits = 1;
2179
return VK_SUCCESS;
2180
}
2181
2182
void
2183
panvk_GetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice,
2184
const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo,
2185
VkExternalSemaphoreProperties *pExternalSemaphoreProperties)
2186
{
2187
if ((pExternalSemaphoreInfo->handleType == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT ||
2188
pExternalSemaphoreInfo->handleType == VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT)) {
2189
pExternalSemaphoreProperties->exportFromImportedHandleTypes =
2190
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT |
2191
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
2192
pExternalSemaphoreProperties->compatibleHandleTypes =
2193
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT |
2194
VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
2195
pExternalSemaphoreProperties->externalSemaphoreFeatures =
2196
VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT |
2197
VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT;
2198
} else {
2199
pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
2200
pExternalSemaphoreProperties->compatibleHandleTypes = 0;
2201
pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
2202
}
2203
}
2204
2205
void
2206
panvk_GetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice,
2207
const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo,
2208
VkExternalFenceProperties *pExternalFenceProperties)
2209
{
2210
pExternalFenceProperties->exportFromImportedHandleTypes = 0;
2211
pExternalFenceProperties->compatibleHandleTypes = 0;
2212
pExternalFenceProperties->externalFenceFeatures = 0;
2213
}
2214
2215
void
2216
panvk_GetDeviceGroupPeerMemoryFeatures(VkDevice device,
2217
uint32_t heapIndex,
2218
uint32_t localDeviceIndex,
2219
uint32_t remoteDeviceIndex,
2220
VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
2221
{
2222
assert(localDeviceIndex == remoteDeviceIndex);
2223
2224
*pPeerMemoryFeatures = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT |
2225
VK_PEER_MEMORY_FEATURE_COPY_DST_BIT |
2226
VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT |
2227
VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT;
2228
}
2229
2230