Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
PojavLauncherTeam
GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/broadcom/vulkan/v3dv_device.c
4560 views
1
/*
2
* Copyright © 2019 Raspberry Pi
3
*
4
* Permission is hereby granted, free of charge, to any person obtaining a
5
* copy of this software and associated documentation files (the "Software"),
6
* to deal in the Software without restriction, including without limitation
7
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
8
* and/or sell copies of the Software, and to permit persons to whom the
9
* Software is furnished to do so, subject to the following conditions:
10
*
11
* The above copyright notice and this permission notice (including the next
12
* paragraph) shall be included in all copies or substantial portions of the
13
* Software.
14
*
15
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21
* IN THE SOFTWARE.
22
*/
23
24
#include <assert.h>
25
#include <fcntl.h>
26
#include <stdbool.h>
27
#include <string.h>
28
#include <sys/mman.h>
29
#include <sys/sysinfo.h>
30
#include <unistd.h>
31
#include <xf86drm.h>
32
33
#include "v3dv_private.h"
34
35
#include "common/v3d_debug.h"
36
37
#include "compiler/v3d_compiler.h"
38
39
#include "drm-uapi/v3d_drm.h"
40
#include "format/u_format.h"
41
#include "vk_util.h"
42
43
#include "util/build_id.h"
44
#include "util/debug.h"
45
#include "util/u_cpu_detect.h"
46
47
#ifdef VK_USE_PLATFORM_XCB_KHR
48
#include <xcb/xcb.h>
49
#include <xcb/dri3.h>
50
#include <X11/Xlib-xcb.h>
51
#endif
52
53
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
54
#include <wayland-client.h>
55
#include "wayland-drm-client-protocol.h"
56
#endif
57
58
#ifdef USE_V3D_SIMULATOR
59
#include "drm-uapi/i915_drm.h"
60
#endif
61
62
#define V3DV_API_VERSION VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION)
63
64
/* Reports the instance-level API version: Vulkan 1.0 with the current
 * header patch level.
 */
VKAPI_ATTR VkResult VKAPI_CALL
v3dv_EnumerateInstanceVersion(uint32_t *pApiVersion)
{
   *pApiVersion = V3DV_API_VERSION;
   return VK_SUCCESS;
}
70
71
/* True when at least one WSI platform is compiled in.
 *
 * NOTE(review): this macro is only ever tested with #ifdef (see the
 * extension tables below), which is always true because the macro is
 * unconditionally defined here — the OR-expression value is never
 * evaluated. Surface extensions would therefore be advertised even in a
 * build with no WSI platform; consider defining it conditionally.
 */
#define V3DV_HAS_SURFACE (VK_USE_PLATFORM_WIN32_KHR || \
                          VK_USE_PLATFORM_WAYLAND_KHR || \
                          VK_USE_PLATFORM_XCB_KHR || \
                          VK_USE_PLATFORM_XLIB_KHR || \
                          VK_USE_PLATFORM_DISPLAY_KHR)
76
77
/* Instance-level extensions we always support, plus the WSI surface
 * extensions for whichever platforms this build was configured with.
 */
static const struct vk_instance_extension_table instance_extensions = {
   .KHR_device_group_creation = true,
#ifdef VK_USE_PLATFORM_DISPLAY_KHR
   .KHR_display = true,
#endif
   .KHR_external_fence_capabilities = true,
   .KHR_external_memory_capabilities = true,
   .KHR_external_semaphore_capabilities = true,
   .KHR_get_display_properties2 = true,
   .KHR_get_physical_device_properties2 = true,
#ifdef V3DV_HAS_SURFACE
   .KHR_get_surface_capabilities2 = true,
   .KHR_surface = true,
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
   .KHR_wayland_surface = true,
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
   .KHR_xcb_surface = true,
#endif
#ifdef VK_USE_PLATFORM_XLIB_KHR
   .KHR_xlib_surface = true,
#endif
   .EXT_debug_report = true,
};
102
103
/* Fills *ext with the device-level extensions supported by this physical
 * device. The set is currently static (the device parameter is unused),
 * but the signature allows making it hardware-dependent later.
 */
static void
get_device_extensions(const struct v3dv_physical_device *device,
                      struct vk_device_extension_table *ext)
{
   *ext = (struct vk_device_extension_table) {
      .KHR_bind_memory2 = true,
      .KHR_copy_commands2 = true,
      .KHR_dedicated_allocation = true,
      .KHR_device_group = true,
      .KHR_descriptor_update_template = true,
      .KHR_external_fence = true,
      .KHR_external_fence_fd = true,
      .KHR_external_memory = true,
      .KHR_external_memory_fd = true,
      .KHR_external_semaphore = true,
      .KHR_external_semaphore_fd = true,
      .KHR_get_memory_requirements2 = true,
      .KHR_image_format_list = true,
      .KHR_relaxed_block_layout = true,
      .KHR_maintenance1 = true,
      .KHR_maintenance2 = true,
      .KHR_maintenance3 = true,
      .KHR_shader_non_semantic_info = true,
      .KHR_sampler_mirror_clamp_to_edge = true,
      .KHR_storage_buffer_storage_class = true,
      .KHR_uniform_buffer_standard_layout = true,
#ifdef V3DV_HAS_SURFACE
      .KHR_swapchain = true,
      .KHR_incremental_present = true,
#endif
      .KHR_variable_pointers = true,
      .EXT_external_memory_dma_buf = true,
      .EXT_index_type_uint8 = true,
      .EXT_private_data = true,
   };
}
139
140
VKAPI_ATTR VkResult VKAPI_CALL
141
v3dv_EnumerateInstanceExtensionProperties(const char *pLayerName,
142
uint32_t *pPropertyCount,
143
VkExtensionProperties *pProperties)
144
{
145
/* We don't support any layers */
146
if (pLayerName)
147
return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
148
149
return vk_enumerate_instance_extension_properties(
150
&instance_extensions, pPropertyCount, pProperties);
151
}
152
153
VKAPI_ATTR VkResult VKAPI_CALL
154
v3dv_CreateInstance(const VkInstanceCreateInfo *pCreateInfo,
155
const VkAllocationCallbacks *pAllocator,
156
VkInstance *pInstance)
157
{
158
struct v3dv_instance *instance;
159
VkResult result;
160
161
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
162
163
if (pAllocator == NULL)
164
pAllocator = vk_default_allocator();
165
166
instance = vk_alloc(pAllocator, sizeof(*instance), 8,
167
VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
168
if (!instance)
169
return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
170
171
struct vk_instance_dispatch_table dispatch_table;
172
vk_instance_dispatch_table_from_entrypoints(
173
&dispatch_table, &v3dv_instance_entrypoints, true);
174
175
result = vk_instance_init(&instance->vk,
176
&instance_extensions,
177
&dispatch_table,
178
pCreateInfo, pAllocator);
179
180
if (result != VK_SUCCESS) {
181
vk_free(pAllocator, instance);
182
return vk_error(instance, result);
183
}
184
185
v3d_process_debug_variable();
186
187
instance->physicalDeviceCount = -1;
188
189
/* We start with the default values for the pipeline_cache envvars */
190
instance->pipeline_cache_enabled = true;
191
instance->default_pipeline_cache_enabled = true;
192
const char *pipeline_cache_str = getenv("V3DV_ENABLE_PIPELINE_CACHE");
193
if (pipeline_cache_str != NULL) {
194
if (strncmp(pipeline_cache_str, "full", 4) == 0) {
195
/* nothing to do, just to filter correct values */
196
} else if (strncmp(pipeline_cache_str, "no-default-cache", 16) == 0) {
197
instance->default_pipeline_cache_enabled = false;
198
} else if (strncmp(pipeline_cache_str, "off", 3) == 0) {
199
instance->pipeline_cache_enabled = false;
200
instance->default_pipeline_cache_enabled = false;
201
} else {
202
fprintf(stderr, "Wrong value for envvar V3DV_ENABLE_PIPELINE_CACHE. "
203
"Allowed values are: full, no-default-cache, off\n");
204
}
205
}
206
207
if (instance->pipeline_cache_enabled == false) {
208
fprintf(stderr, "WARNING: v3dv pipeline cache is disabled. Performance "
209
"can be affected negatively\n");
210
} else {
211
if (instance->default_pipeline_cache_enabled == false) {
212
fprintf(stderr, "WARNING: default v3dv pipeline cache is disabled. "
213
"Performance can be affected negatively\n");
214
}
215
}
216
217
util_cpu_detect();
218
219
VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));
220
221
*pInstance = v3dv_instance_to_handle(instance);
222
223
return VK_SUCCESS;
224
}
225
226
static void
227
v3dv_physical_device_free_disk_cache(struct v3dv_physical_device *device)
228
{
229
#ifdef ENABLE_SHADER_CACHE
230
if (device->disk_cache)
231
disk_cache_destroy(device->disk_cache);
232
#else
233
assert(device->disk_cache == NULL);
234
#endif
235
}
236
237
/* Tears down a physical device created by physical_device_init().
 * Teardown roughly mirrors initialization in reverse: WSI, disk cache and
 * compiler first, then the DRM fds, the name string, the simulator state
 * and finally the base vk object and the mutex.
 */
static void
physical_device_finish(struct v3dv_physical_device *device)
{
   v3dv_wsi_finish(device);
   v3dv_physical_device_free_disk_cache(device);
   v3d_compiler_free(device->compiler);

   /* render_fd is always valid here; display/master fds are optional. */
   close(device->render_fd);
   if (device->display_fd >= 0)
      close(device->display_fd);
   if (device->master_fd >= 0)
      close(device->master_fd);

   free(device->name);

#if using_v3d_simulator
   v3d_simulator_destroy(device->sim_file);
#endif

   vk_physical_device_finish(&device->vk);
   mtx_destroy(&device->mutex);
}
259
260
VKAPI_ATTR void VKAPI_CALL
261
v3dv_DestroyInstance(VkInstance _instance,
262
const VkAllocationCallbacks *pAllocator)
263
{
264
V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
265
266
if (!instance)
267
return;
268
269
if (instance->physicalDeviceCount > 0) {
270
/* We support at most one physical device. */
271
assert(instance->physicalDeviceCount == 1);
272
physical_device_finish(&instance->physicalDevice);
273
}
274
275
VG(VALGRIND_DESTROY_MEMPOOL(instance));
276
277
vk_instance_finish(&instance->vk);
278
vk_free(&instance->vk.alloc, instance);
279
}
280
281
/* Returns the size in bytes of the single device-local heap we advertise.
 * We don't want to burn too much ram with the GPU: with 4 GiB of system
 * RAM or less we expose at most half of it; above that, three quarters.
 *
 * FIX: declared (void) — an empty parameter list in C leaves the argument
 * types unspecified.
 */
static uint64_t
compute_heap_size(void)
{
#if !using_v3d_simulator
   /* Query the total ram from the system */
   struct sysinfo info;
   sysinfo(&info);

   uint64_t total_ram = (uint64_t)info.totalram * (uint64_t)info.mem_unit;
#else
   uint64_t total_ram = (uint64_t) v3d_simulator_get_mem_size();
#endif

   uint64_t available_ram;
   if (total_ram <= 4ull * 1024ull * 1024ull * 1024ull)
      available_ram = total_ram / 2;
   else
      available_ram = total_ram * 3 / 4;

   return available_ram;
}
305
306
#if !using_v3d_simulator
307
#ifdef VK_USE_PLATFORM_XCB_KHR
308
/* Obtains a DRM device fd through the DRI3 extension of an X server. If
 * surface is NULL we open (and close before returning) our own connection
 * to the default display; otherwise we borrow the connection of the Xlib
 * or XCB surface.
 *
 * Returns the fd (with FD_CLOEXEC set) or -1 on failure; the caller owns
 * the returned fd.
 */
static int
create_display_fd_xcb(VkIcdSurfaceBase *surface)
{
   int fd = -1;

   xcb_connection_t *conn;
   xcb_dri3_open_reply_t *reply = NULL;
   if (surface) {
      if (surface->platform == VK_ICD_WSI_PLATFORM_XLIB)
         conn = XGetXCBConnection(((VkIcdSurfaceXlib *)surface)->dpy);
      else
         conn = ((VkIcdSurfaceXcb *)surface)->connection;
   } else {
      conn = xcb_connect(NULL, NULL);
   }

   if (xcb_connection_has_error(conn))
      goto finish;

   /* Issue DRI3Open against the first screen's root window with no
    * specific provider (None).
    */
   const xcb_setup_t *setup = xcb_get_setup(conn);
   xcb_screen_iterator_t iter = xcb_setup_roots_iterator(setup);
   xcb_screen_t *screen = iter.data;

   xcb_dri3_open_cookie_t cookie;
   cookie = xcb_dri3_open(conn, screen->root, None);
   reply = xcb_dri3_open_reply(conn, cookie, NULL);
   if (!reply)
      goto finish;

   if (reply->nfd != 1)
      goto finish;

   fd = xcb_dri3_open_reply_fds(conn, reply)[0];
   fcntl(fd, F_SETFD, fcntl(fd, F_GETFD) | FD_CLOEXEC);

finish:
   /* Only tear down the connection if we created it ourselves. */
   if (!surface)
      xcb_disconnect(conn);
   if (reply)
      free(reply);

   return fd;
}
351
#endif
352
353
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
354
/* State shared between the wl_registry/wl_drm listeners while we
 * authenticate against the compositor's DRM device.
 */
struct v3dv_wayland_info {
   struct wl_drm *wl_drm;  /* wl_drm global, bound in v3dv_registry_global() */
   int fd;                 /* fd of the compositor's DRM device node */
   bool is_set;            /* fd was opened successfully */
   bool authenticated;     /* compositor accepted our drm magic */
};

/* wl_drm "device" event: open the advertised DRM node and kick off the
 * magic-based authentication handshake.
 */
static void
v3dv_drm_handle_device(void *data, struct wl_drm *drm, const char *device)
{
   struct v3dv_wayland_info *info = data;
   info->fd = open(device, O_RDWR | O_CLOEXEC);
   info->is_set = info->fd != -1;
   if (!info->is_set) {
      fprintf(stderr, "v3dv_drm_handle_device: could not open %s (%s)\n",
              device, strerror(errno));
      return;
   }

   drm_magic_t magic;
   if (drmGetMagic(info->fd, &magic)) {
      fprintf(stderr, "v3dv_drm_handle_device: drmGetMagic failed\n");
      close(info->fd);
      info->fd = -1;
      info->is_set = false;
      return;
   }
   wl_drm_authenticate(info->wl_drm, magic);
}

/* wl_drm "format" event: unused, we don't allocate through wl_drm. */
static void
v3dv_drm_handle_format(void *data, struct wl_drm *drm, uint32_t format)
{
}

/* wl_drm "authenticated" event: the magic we sent was accepted. */
static void
v3dv_drm_handle_authenticated(void *data, struct wl_drm *drm)
{
   struct v3dv_wayland_info *info = data;
   info->authenticated = true;
}

/* wl_drm "capabilities" event: not needed for authentication. */
static void
v3dv_drm_handle_capabilities(void *data, struct wl_drm *drm, uint32_t value)
{
}

struct wl_drm_listener v3dv_drm_listener = {
   .device = v3dv_drm_handle_device,
   .format = v3dv_drm_handle_format,
   .authenticated = v3dv_drm_handle_authenticated,
   .capabilities = v3dv_drm_handle_capabilities
};
407
408
static void
409
v3dv_registry_global(void *data,
410
struct wl_registry *registry,
411
uint32_t name,
412
const char *interface,
413
uint32_t version)
414
{
415
struct v3dv_wayland_info *info = data;
416
if (strcmp(interface, "wl_drm") == 0) {
417
info->wl_drm = wl_registry_bind(registry, name, &wl_drm_interface,
418
MIN2(version, 2));
419
wl_drm_add_listener(info->wl_drm, &v3dv_drm_listener, data);
420
};
421
}
422
423
static void
424
v3dv_registry_global_remove_cb(void *data,
425
struct wl_registry *registry,
426
uint32_t name)
427
{
428
}
429
430
static int
431
create_display_fd_wayland(VkIcdSurfaceBase *surface)
432
{
433
struct wl_display *display;
434
struct wl_registry *registry = NULL;
435
436
struct v3dv_wayland_info info = {
437
.wl_drm = NULL,
438
.fd = -1,
439
.is_set = false,
440
.authenticated = false
441
};
442
443
if (surface)
444
display = ((VkIcdSurfaceWayland *) surface)->display;
445
else
446
display = wl_display_connect(NULL);
447
448
if (!display)
449
return -1;
450
451
registry = wl_display_get_registry(display);
452
if (!registry) {
453
if (!surface)
454
wl_display_disconnect(display);
455
return -1;
456
}
457
458
static const struct wl_registry_listener registry_listener = {
459
v3dv_registry_global,
460
v3dv_registry_global_remove_cb
461
};
462
wl_registry_add_listener(registry, &registry_listener, &info);
463
464
wl_display_roundtrip(display); /* For the registry advertisement */
465
wl_display_roundtrip(display); /* For the DRM device event */
466
wl_display_roundtrip(display); /* For the authentication event */
467
468
wl_drm_destroy(info.wl_drm);
469
wl_registry_destroy(registry);
470
471
if (!surface)
472
wl_display_disconnect(display);
473
474
if (!info.is_set)
475
return -1;
476
477
if (!info.authenticated)
478
return -1;
479
480
return info.fd;
481
}
482
#endif
483
484
/* Acquire an authenticated display fd without a surface reference. This is
 * the case where the application makes WSI allocations outside the Vulkan
 * swapchain context (only Zink, for now). Lacking surface information, we
 * try each compiled-in platform in turn and hope for the best.
 */
static void
acquire_display_device_no_surface(struct v3dv_instance *instance,
                                  struct v3dv_physical_device *pdevice)
{
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
   pdevice->display_fd = create_display_fd_wayland(NULL);
#endif

#ifdef VK_USE_PLATFORM_XCB_KHR
   if (pdevice->display_fd == -1)
      pdevice->display_fd = create_display_fd_xcb(NULL);
#endif

#ifdef VK_USE_PLATFORM_DISPLAY_KHR
   /* For KHR_display we already hold the master fd: just dup it. */
   if (pdevice->display_fd == -1 && pdevice->master_fd >= 0)
      pdevice->display_fd = dup(pdevice->master_fd);
#endif
}
508
509
/* Acquire an authenticated display fd from the surface. This is the regular
510
* case where the application is using swapchains to create WSI allocations.
511
* In this case we use the surface information to figure out the correct
512
* display and platform combination.
513
*/
514
static void
515
acquire_display_device_surface(struct v3dv_instance *instance,
516
struct v3dv_physical_device *pdevice,
517
VkIcdSurfaceBase *surface)
518
{
519
/* Mesa will set both of VK_USE_PLATFORM_{XCB,XLIB} when building with
520
* platform X11, so only check for XCB and rely on XCB to get an
521
* authenticated device also for Xlib.
522
*/
523
#ifdef VK_USE_PLATFORM_XCB_KHR
524
if (surface->platform == VK_ICD_WSI_PLATFORM_XCB ||
525
surface->platform == VK_ICD_WSI_PLATFORM_XLIB) {
526
pdevice->display_fd = create_display_fd_xcb(surface);
527
}
528
#endif
529
530
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
531
if (surface->platform == VK_ICD_WSI_PLATFORM_WAYLAND)
532
pdevice->display_fd = create_display_fd_wayland(surface);
533
#endif
534
535
#ifdef VK_USE_PLATFORM_DISPLAY_KHR
536
if (surface->platform == VK_ICD_WSI_PLATFORM_DISPLAY &&
537
pdevice->master_fd >= 0) {
538
pdevice->display_fd = dup(pdevice->master_fd);
539
}
540
#endif
541
}
542
#endif /* !using_v3d_simulator */
543
544
/* Attempts to get an authenticated display fd from the display server that
 * we can use to allocate BOs for presentable images.
 *
 * Serialized by pdevice->mutex; the fd is acquired only once and cached in
 * pdevice->display_fd for subsequent calls.
 */
VkResult
v3dv_physical_device_acquire_display(struct v3dv_instance *instance,
                                     struct v3dv_physical_device *pdevice,
                                     VkIcdSurfaceBase *surface)
{
   VkResult result = VK_SUCCESS;
   mtx_lock(&pdevice->mutex);

   /* Already acquired on a previous call. */
   if (pdevice->display_fd != -1)
      goto done;

   /* When running on the simulator we do everything on a single render node so
    * we don't need to get an authenticated display fd from the display server.
    */
#if !using_v3d_simulator
   if (surface)
      acquire_display_device_surface(instance, pdevice, surface);
   else
      acquire_display_device_no_surface(instance, pdevice);

   if (pdevice->display_fd == -1)
      result = VK_ERROR_INITIALIZATION_FAILED;
#endif

done:
   mtx_unlock(&pdevice->mutex);
   return result;
}
575
576
static bool
577
v3d_has_feature(struct v3dv_physical_device *device, enum drm_v3d_param feature)
578
{
579
struct drm_v3d_get_param p = {
580
.param = feature,
581
};
582
if (v3dv_ioctl(device->render_fd, DRM_IOCTL_V3D_GET_PARAM, &p) != 0)
583
return false;
584
return p.value;
585
}
586
587
static bool
588
device_has_expected_features(struct v3dv_physical_device *device)
589
{
590
return v3d_has_feature(device, DRM_V3D_PARAM_SUPPORTS_TFU) &&
591
v3d_has_feature(device, DRM_V3D_PARAM_SUPPORTS_CSD) &&
592
v3d_has_feature(device, DRM_V3D_PARAM_SUPPORTS_CACHE_FLUSH);
593
}
594
595
596
/* Derives the pipeline-cache, driver and device UUIDs from the driver
 * binary's build-id (a SHA-1, so at least 20 bytes) plus the vendor and
 * device ids. Also stashes the raw build sha1 for the disk cache key.
 */
static VkResult
init_uuids(struct v3dv_physical_device *device)
{
   const struct build_id_note *note =
      build_id_find_nhdr_for_addr(init_uuids);
   if (!note) {
      return vk_errorf((struct v3dv_instance*) device->vk.instance,
                       VK_ERROR_INITIALIZATION_FAILED,
                       "Failed to find build-id");
   }

   unsigned build_id_len = build_id_length(note);
   if (build_id_len < 20) {
      return vk_errorf((struct v3dv_instance*) device->vk.instance,
                       VK_ERROR_INITIALIZATION_FAILED,
                       "build-id too short. It needs to be a SHA");
   }

   memcpy(device->driver_build_sha1, build_id_data(note), 20);

   uint32_t vendor_id = v3dv_physical_device_vendor_id(device);
   uint32_t device_id = v3dv_physical_device_device_id(device);

   struct mesa_sha1 sha1_ctx;
   uint8_t sha1[20];
   STATIC_ASSERT(VK_UUID_SIZE <= sizeof(sha1));

   /* The pipeline cache UUID is used for determining when a pipeline cache is
    * invalid. It needs both a driver build and the PCI ID of the device.
    */
   _mesa_sha1_init(&sha1_ctx);
   _mesa_sha1_update(&sha1_ctx, build_id_data(note), build_id_len);
   _mesa_sha1_update(&sha1_ctx, &device_id, sizeof(device_id));
   _mesa_sha1_final(&sha1_ctx, sha1);
   memcpy(device->pipeline_cache_uuid, sha1, VK_UUID_SIZE);

   /* The driver UUID is used for determining sharability of images and memory
    * between two Vulkan instances in separate processes. People who want to
    * share memory need to also check the device UUID (below) so all this
    * needs to be is the build-id.
    */
   memcpy(device->driver_uuid, build_id_data(note), VK_UUID_SIZE);

   /* The device UUID uniquely identifies the given device within the machine.
    * Since we never have more than one device, this doesn't need to be a real
    * UUID.
    */
   _mesa_sha1_init(&sha1_ctx);
   _mesa_sha1_update(&sha1_ctx, &vendor_id, sizeof(vendor_id));
   _mesa_sha1_update(&sha1_ctx, &device_id, sizeof(device_id));
   _mesa_sha1_final(&sha1_ctx, sha1);
   memcpy(device->device_uuid, sha1, VK_UUID_SIZE);

   return VK_SUCCESS;
}
651
652
static void
653
v3dv_physical_device_init_disk_cache(struct v3dv_physical_device *device)
654
{
655
#ifdef ENABLE_SHADER_CACHE
656
char timestamp[41];
657
_mesa_sha1_format(timestamp, device->driver_build_sha1);
658
659
assert(device->name);
660
device->disk_cache = disk_cache_create(device->name, timestamp, 0);
661
#else
662
device->disk_cache = NULL;
663
#endif
664
}
665
666
/* Initializes a v3dv_physical_device from the v3d render DRM device and,
 * for KHR_display, the vc4 primary DRM device found during enumeration.
 * Requires V3D tech version 4.2+ and the kernel features checked in
 * device_has_expected_features().
 *
 * NOTE(review): on the failure paths, vk_physical_device_finish() runs
 * even when vk_physical_device_init() itself failed, and resources
 * acquired later (compiler, name, disk cache, simulator state) are not
 * released — worth confirming against the vk_* helper contracts.
 */
static VkResult
physical_device_init(struct v3dv_physical_device *device,
                     struct v3dv_instance *instance,
                     drmDevicePtr drm_render_device,
                     drmDevicePtr drm_primary_device)
{
   VkResult result = VK_SUCCESS;
   int32_t master_fd = -1;
   int32_t render_fd = -1;

   struct vk_physical_device_dispatch_table dispatch_table;
   vk_physical_device_dispatch_table_from_entrypoints
      (&dispatch_table, &v3dv_physical_device_entrypoints, true);

   result = vk_physical_device_init(&device->vk, &instance->vk, NULL,
                                    &dispatch_table);

   if (result != VK_SUCCESS)
      goto fail;

   assert(drm_render_device);
   const char *path = drm_render_device->nodes[DRM_NODE_RENDER];
   render_fd = open(path, O_RDWR | O_CLOEXEC);
   if (render_fd < 0) {
      fprintf(stderr, "Opening %s failed: %s\n", path, strerror(errno));
      result = VK_ERROR_INCOMPATIBLE_DRIVER;
      goto fail;
   }

   /* If we are running on VK_KHR_display we need to acquire the master
    * display device now for the v3dv_wsi_init() call below. For anything else
    * we postpone that until a swapchain is created.
    */

   if (instance->vk.enabled_extensions.KHR_display) {
#if !using_v3d_simulator
      /* Open the primary node on the vc4 display device */
      assert(drm_primary_device);
      const char *primary_path = drm_primary_device->nodes[DRM_NODE_PRIMARY];
      master_fd = open(primary_path, O_RDWR | O_CLOEXEC);
#else
      /* There is only one device with primary and render nodes.
       * Open its primary node.
       */
      const char *primary_path = drm_render_device->nodes[DRM_NODE_PRIMARY];
      master_fd = open(primary_path, O_RDWR | O_CLOEXEC);
#endif
   }

#if using_v3d_simulator
   device->sim_file = v3d_simulator_init(render_fd);
#endif

   device->render_fd = render_fd;   /* The v3d render node */
   device->display_fd = -1;         /* Authenticated vc4 primary node */
   device->master_fd = master_fd;   /* Master vc4 primary node */

   if (!v3d_get_device_info(device->render_fd, &device->devinfo, &v3dv_ioctl)) {
      result = VK_ERROR_INCOMPATIBLE_DRIVER;
      goto fail;
   }

   /* Reject anything older than V3D 4.2. */
   if (device->devinfo.ver < 42) {
      result = VK_ERROR_INCOMPATIBLE_DRIVER;
      goto fail;
   }

   if (!device_has_expected_features(device)) {
      result = VK_ERROR_INCOMPATIBLE_DRIVER;
      goto fail;
   }

   result = init_uuids(device);
   if (result != VK_SUCCESS)
      goto fail;

   device->compiler = v3d_compiler_init(&device->devinfo);
   device->next_program_id = 0;

   /* Device name such as "V3D 4.2" derived from the tech version. */
   ASSERTED int len =
      asprintf(&device->name, "V3D %d.%d",
               device->devinfo.ver / 10, device->devinfo.ver % 10);
   assert(len != -1);

   v3dv_physical_device_init_disk_cache(device);

   /* Setup available memory heaps and types */
   VkPhysicalDeviceMemoryProperties *mem = &device->memory;
   mem->memoryHeapCount = 1;
   mem->memoryHeaps[0].size = compute_heap_size();
   mem->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;

   /* This is the only combination required by the spec */
   mem->memoryTypeCount = 1;
   mem->memoryTypes[0].propertyFlags =
      VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
      VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
      VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
   mem->memoryTypes[0].heapIndex = 0;

   /* Job merging is on by default; V3DV_NO_MERGE_JOBS disables it. */
   device->options.merge_jobs = getenv("V3DV_NO_MERGE_JOBS") == NULL;

   result = v3dv_wsi_init(device);
   if (result != VK_SUCCESS) {
      vk_error(instance, result);
      goto fail;
   }

   get_device_extensions(device, &device->vk.supported_extensions);

   pthread_mutex_init(&device->mutex, NULL);

   return VK_SUCCESS;

fail:
   vk_physical_device_finish(&device->vk);

   if (render_fd >= 0)
      close(render_fd);
   if (master_fd >= 0)
      close(master_fd);

   return result;
}
790
791
/* Scans the system's DRM devices and initializes the single physical
 * device. On bare metal we need a v3d render node and a vc4 primary node;
 * on the simulator we instead look for an Intel PCI GPU to piggy-back on.
 * Returns VK_ERROR_INCOMPATIBLE_DRIVER when no suitable device is found.
 */
static VkResult
enumerate_devices(struct v3dv_instance *instance)
{
   /* TODO: Check for more devices? */
   drmDevicePtr devices[8];
   VkResult result = VK_ERROR_INCOMPATIBLE_DRIVER;
   int max_devices;

   instance->physicalDeviceCount = 0;

   max_devices = drmGetDevices2(0, devices, ARRAY_SIZE(devices));
   if (max_devices < 1)
      return VK_ERROR_INCOMPATIBLE_DRIVER;

#if !using_v3d_simulator
   int32_t v3d_idx = -1;
   int32_t vc4_idx = -1;
#endif
   for (unsigned i = 0; i < (unsigned)max_devices; i++) {
#if using_v3d_simulator
      /* In the simulator, we look for an Intel render node */
      const int required_nodes = (1 << DRM_NODE_RENDER) | (1 << DRM_NODE_PRIMARY);
      if ((devices[i]->available_nodes & required_nodes) == required_nodes &&
          devices[i]->bustype == DRM_BUS_PCI &&
          devices[i]->deviceinfo.pci->vendor_id == 0x8086) {
         result = physical_device_init(&instance->physicalDevice, instance,
                                       devices[i], NULL);
         if (result != VK_ERROR_INCOMPATIBLE_DRIVER)
            break;
      }
#else
      /* On actual hardware, we should have a render node (v3d)
       * and a primary node (vc4). We will need to use the primary
       * to allocate WSI buffers and share them with the render node
       * via prime, but that is a privileged operation so we need the
       * primary node to be authenticated, and for that we need the
       * display server to provide the device fd (with DRI3), so we
       * here we only check that the device is present but we don't
       * try to open it.
       */
      if (devices[i]->bustype != DRM_BUS_PLATFORM)
         continue;

      /* Match by device-tree compatible strings. */
      if (devices[i]->available_nodes & 1 << DRM_NODE_RENDER) {
         char **compat = devices[i]->deviceinfo.platform->compatible;
         while (*compat) {
            if (strncmp(*compat, "brcm,2711-v3d", 13) == 0) {
               v3d_idx = i;
               break;
            }
            compat++;
         }
      } else if (devices[i]->available_nodes & 1 << DRM_NODE_PRIMARY) {
         char **compat = devices[i]->deviceinfo.platform->compatible;
         while (*compat) {
            if (strncmp(*compat, "brcm,bcm2711-vc5", 16) == 0 ||
                strncmp(*compat, "brcm,bcm2835-vc4", 16) == 0 ) {
               vc4_idx = i;
               break;
            }
            compat++;
         }
      }
#endif
   }

#if !using_v3d_simulator
   if (v3d_idx == -1 || vc4_idx == -1)
      result = VK_ERROR_INCOMPATIBLE_DRIVER;
   else
      result = physical_device_init(&instance->physicalDevice, instance,
                                    devices[v3d_idx], devices[vc4_idx]);
#endif

   drmFreeDevices(devices, max_devices);

   if (result == VK_SUCCESS)
      instance->physicalDeviceCount = 1;

   return result;
}
872
873
static VkResult
874
instance_ensure_physical_device(struct v3dv_instance *instance)
875
{
876
if (instance->physicalDeviceCount < 0) {
877
VkResult result = enumerate_devices(instance);
878
if (result != VK_SUCCESS &&
879
result != VK_ERROR_INCOMPATIBLE_DRIVER)
880
return result;
881
}
882
883
return VK_SUCCESS;
884
}
885
886
/* vkEnumeratePhysicalDevices: reports our (at most one) physical device.
 * VK_OUTARRAY handles the count-query vs. fill semantics of the Vulkan
 * enumeration protocol.
 */
VKAPI_ATTR VkResult VKAPI_CALL
v3dv_EnumeratePhysicalDevices(VkInstance _instance,
                              uint32_t *pPhysicalDeviceCount,
                              VkPhysicalDevice *pPhysicalDevices)
{
   V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
   VK_OUTARRAY_MAKE(out, pPhysicalDevices, pPhysicalDeviceCount);

   /* Enumerate lazily on first call. */
   VkResult result = instance_ensure_physical_device(instance);
   if (result != VK_SUCCESS)
      return result;

   /* No compatible device: success with zero devices. */
   if (instance->physicalDeviceCount == 0)
      return VK_SUCCESS;

   assert(instance->physicalDeviceCount == 1);
   vk_outarray_append(&out, i) {
      *i = v3dv_physical_device_to_handle(&instance->physicalDevice);
   }

   return vk_outarray_status(&out);
}
908
909
/* vkEnumeratePhysicalDeviceGroups: exposes a single group containing our
 * one physical device, with no subset allocation support.
 */
VKAPI_ATTR VkResult VKAPI_CALL
v3dv_EnumeratePhysicalDeviceGroups(
    VkInstance _instance,
    uint32_t *pPhysicalDeviceGroupCount,
    VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties)
{
   V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
   VK_OUTARRAY_MAKE(out, pPhysicalDeviceGroupProperties,
                    pPhysicalDeviceGroupCount);

   VkResult result = instance_ensure_physical_device(instance);
   if (result != VK_SUCCESS)
      return result;

   assert(instance->physicalDeviceCount == 1);

   vk_outarray_append(&out, p) {
      p->physicalDeviceCount = 1;
      memset(p->physicalDevices, 0, sizeof(p->physicalDevices));
      p->physicalDevices[0] =
         v3dv_physical_device_to_handle(&instance->physicalDevice);
      p->subsetAllocation = false;

      vk_foreach_struct(ext, p->pNext)
         v3dv_debug_ignored_stype(ext->sType);
   }

   return vk_outarray_status(&out);
}
938
939
/* vkGetPhysicalDeviceFeatures: the core Vulkan 1.0 feature set this driver
 * advertises. Anything not set true here is unsupported by the hardware or
 * not yet implemented.
 */
VKAPI_ATTR void VKAPI_CALL
v3dv_GetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice,
                               VkPhysicalDeviceFeatures *pFeatures)
{
   memset(pFeatures, 0, sizeof(*pFeatures));

   *pFeatures = (VkPhysicalDeviceFeatures) {
      .robustBufferAccess = true, /* This feature is mandatory */
      .fullDrawIndexUint32 = false, /* Only available since V3D 4.4.9.1 */
      .imageCubeArray = true,
      .independentBlend = true,
      .geometryShader = true,
      .tessellationShader = false,
      .sampleRateShading = true,
      .dualSrcBlend = false,
      .logicOp = true,
      .multiDrawIndirect = false,
      .drawIndirectFirstInstance = true,
      .depthClamp = false,
      .depthBiasClamp = true,
      .fillModeNonSolid = true,
      .depthBounds = false, /* Only available since V3D 4.3.16.2 */
      .wideLines = true,
      .largePoints = true,
      .alphaToOne = true,
      .multiViewport = false,
      .samplerAnisotropy = true,
      .textureCompressionETC2 = true,
      .textureCompressionASTC_LDR = true,
      /* Note that textureCompressionBC requires that the driver support all
       * the BC formats. V3D 4.2 only support the BC1-3, so we can't claim
       * that we support it.
       */
      .textureCompressionBC = false,
      .occlusionQueryPrecise = true,
      .pipelineStatisticsQuery = false,
      .vertexPipelineStoresAndAtomics = true,
      .fragmentStoresAndAtomics = true,
      .shaderTessellationAndGeometryPointSize = true,
      .shaderImageGatherExtended = false,
      .shaderStorageImageExtendedFormats = true,
      .shaderStorageImageMultisample = false,
      .shaderStorageImageReadWithoutFormat = false,
      .shaderStorageImageWriteWithoutFormat = false,
      .shaderUniformBufferArrayDynamicIndexing = false,
      .shaderSampledImageArrayDynamicIndexing = false,
      .shaderStorageBufferArrayDynamicIndexing = false,
      .shaderStorageImageArrayDynamicIndexing = false,
      .shaderClipDistance = true,
      .shaderCullDistance = false,
      .shaderFloat64 = false,
      .shaderInt64 = false,
      .shaderInt16 = false,
      .shaderResourceResidency = false,
      .shaderResourceMinLod = false,
      .sparseBinding = false,
      .sparseResidencyBuffer = false,
      .sparseResidencyImage2D = false,
      .sparseResidencyImage3D = false,
      .sparseResidency2Samples = false,
      .sparseResidency4Samples = false,
      .sparseResidency8Samples = false,
      .sparseResidency16Samples = false,
      .sparseResidencyAliased = false,
      .variableMultisampleRate = false,
      .inheritedQueries = true,
   };
}
1007
1008
/* vkGetPhysicalDeviceFeatures2: fills the core features and walks the
 * pNext chain answering the extension feature queries we recognize. The
 * local vk11 struct is the single source of truth for the Vulkan 1.1
 * features; the per-extension structs below mirror its fields.
 */
VKAPI_ATTR void VKAPI_CALL
v3dv_GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice,
                                VkPhysicalDeviceFeatures2 *pFeatures)
{
   v3dv_GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);

   VkPhysicalDeviceVulkan11Features vk11 = {
      .storageBuffer16BitAccess = false,
      .uniformAndStorageBuffer16BitAccess = false,
      .storagePushConstant16 = false,
      .storageInputOutput16 = false,
      .multiview = false,
      .multiviewGeometryShader = false,
      .multiviewTessellationShader = false,
      .variablePointersStorageBuffer = true,
      /* FIXME: this needs support for non-constant index on UBO/SSBO */
      .variablePointers = false,
      .protectedMemory = false,
      .samplerYcbcrConversion = false,
      .shaderDrawParameters = false,
   };

   vk_foreach_struct(ext, pFeatures->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR: {
         VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *features =
            (VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR *)ext;
         features->uniformBufferStandardLayout = true;
         break;
      }

      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT: {
         VkPhysicalDevicePrivateDataFeaturesEXT *features =
            (VkPhysicalDevicePrivateDataFeaturesEXT *)ext;
         features->privateData = true;
         break;
      }

      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: {
         VkPhysicalDeviceIndexTypeUint8FeaturesEXT *features =
            (VkPhysicalDeviceIndexTypeUint8FeaturesEXT *)ext;
         features->indexTypeUint8 = true;
         break;
      }

      /* Vulkan 1.1 */
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES: {
         VkPhysicalDeviceVulkan11Features *features =
            (VkPhysicalDeviceVulkan11Features *)ext;
         memcpy(features, &vk11, sizeof(VkPhysicalDeviceVulkan11Features));
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: {
         VkPhysicalDevice16BitStorageFeatures *features = (void *) ext;
         features->storageBuffer16BitAccess = vk11.storageBuffer16BitAccess;
         features->uniformAndStorageBuffer16BitAccess =
            vk11.uniformAndStorageBuffer16BitAccess;
         features->storagePushConstant16 = vk11.storagePushConstant16;
         features->storageInputOutput16 = vk11.storageInputOutput16;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: {
         VkPhysicalDeviceMultiviewFeatures *features = (void *) ext;
         features->multiview = vk11.multiview;
         features->multiviewGeometryShader = vk11.multiviewGeometryShader;
         features->multiviewTessellationShader = vk11.multiviewTessellationShader;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: {
         VkPhysicalDeviceProtectedMemoryFeatures *features = (void *) ext;
         features->protectedMemory = vk11.protectedMemory;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
         VkPhysicalDeviceSamplerYcbcrConversionFeatures *features = (void *) ext;
         features->samplerYcbcrConversion = vk11.samplerYcbcrConversion;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: {
         VkPhysicalDeviceShaderDrawParametersFeatures *features = (void *) ext;
         features->shaderDrawParameters = vk11.shaderDrawParameters;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: {
         VkPhysicalDeviceVariablePointersFeatures *features = (void *) ext;
         features->variablePointersStorageBuffer =
            vk11.variablePointersStorageBuffer;
         features->variablePointers = vk11.variablePointers;
         break;
      }

      default:
         /* Unknown chained structs are ignored per the Vulkan spec. */
         v3dv_debug_ignored_stype(ext->sType);
         break;
      }
   }
}
1105
1106
VKAPI_ATTR void VKAPI_CALL
1107
v3dv_GetDeviceGroupPeerMemoryFeatures(VkDevice device,
1108
uint32_t heapIndex,
1109
uint32_t localDeviceIndex,
1110
uint32_t remoteDeviceIndex,
1111
VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
1112
{
1113
assert(localDeviceIndex == 0 && remoteDeviceIndex == 0);
1114
*pPeerMemoryFeatures = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT |
1115
VK_PEER_MEMORY_FEATURE_COPY_DST_BIT |
1116
VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT |
1117
VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT;
1118
}
1119
1120
uint32_t
1121
v3dv_physical_device_vendor_id(struct v3dv_physical_device *dev)
1122
{
1123
return 0x14E4; /* Broadcom */
1124
}
1125
1126
1127
#if using_v3d_simulator
1128
static bool
1129
get_i915_param(int fd, uint32_t param, int *value)
1130
{
1131
int tmp;
1132
1133
struct drm_i915_getparam gp = {
1134
.param = param,
1135
.value = &tmp,
1136
};
1137
1138
int ret = drmIoctl(fd, DRM_IOCTL_I915_GETPARAM, &gp);
1139
if (ret != 0)
1140
return false;
1141
1142
*value = tmp;
1143
return true;
1144
}
1145
#endif
1146
1147
/* Device ID reported in VkPhysicalDeviceProperties. On simulator builds
 * this is the chipset id of the underlying i915 device; on real hardware
 * we report the V3D hardware version instead.
 */
uint32_t
v3dv_physical_device_device_id(struct v3dv_physical_device *dev)
{
#if using_v3d_simulator
   int devid = 0;

   /* On failure we warn and return 0 rather than aborting. */
   if (!get_i915_param(dev->render_fd, I915_PARAM_CHIPSET_ID, &devid))
      fprintf(stderr, "Error getting device_id\n");

   return devid;
#else
   return dev->devinfo.ver;
#endif
}
1161
1162
VKAPI_ATTR void VKAPI_CALL
1163
v3dv_GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
1164
VkPhysicalDeviceProperties *pProperties)
1165
{
1166
V3DV_FROM_HANDLE(v3dv_physical_device, pdevice, physicalDevice);
1167
1168
STATIC_ASSERT(MAX_SAMPLED_IMAGES + MAX_STORAGE_IMAGES + MAX_INPUT_ATTACHMENTS
1169
<= V3D_MAX_TEXTURE_SAMPLERS);
1170
STATIC_ASSERT(MAX_UNIFORM_BUFFERS >= MAX_DYNAMIC_UNIFORM_BUFFERS);
1171
STATIC_ASSERT(MAX_STORAGE_BUFFERS >= MAX_DYNAMIC_STORAGE_BUFFERS);
1172
1173
const uint32_t page_size = 4096;
1174
const uint32_t mem_size = compute_heap_size();
1175
1176
const uint32_t max_varying_components = 16 * 4;
1177
1178
const uint32_t v3d_coord_shift = 6;
1179
1180
const uint32_t v3d_point_line_granularity = 2.0f / (1 << v3d_coord_shift);
1181
const uint32_t max_fb_size = 4096;
1182
1183
const VkSampleCountFlags supported_sample_counts =
1184
VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
1185
1186
struct timespec clock_res;
1187
clock_getres(CLOCK_MONOTONIC, &clock_res);
1188
const float timestamp_period =
1189
clock_res.tv_sec * 1000000000.0f + clock_res.tv_nsec;
1190
1191
/* FIXME: this will probably require an in-depth review */
1192
VkPhysicalDeviceLimits limits = {
1193
.maxImageDimension1D = 4096,
1194
.maxImageDimension2D = 4096,
1195
.maxImageDimension3D = 4096,
1196
.maxImageDimensionCube = 4096,
1197
.maxImageArrayLayers = 2048,
1198
.maxTexelBufferElements = (1ul << 28),
1199
.maxUniformBufferRange = V3D_MAX_BUFFER_RANGE,
1200
.maxStorageBufferRange = V3D_MAX_BUFFER_RANGE,
1201
.maxPushConstantsSize = MAX_PUSH_CONSTANTS_SIZE,
1202
.maxMemoryAllocationCount = mem_size / page_size,
1203
.maxSamplerAllocationCount = 64 * 1024,
1204
.bufferImageGranularity = 256, /* A cache line */
1205
.sparseAddressSpaceSize = 0,
1206
.maxBoundDescriptorSets = MAX_SETS,
1207
.maxPerStageDescriptorSamplers = V3D_MAX_TEXTURE_SAMPLERS,
1208
.maxPerStageDescriptorUniformBuffers = MAX_UNIFORM_BUFFERS,
1209
.maxPerStageDescriptorStorageBuffers = MAX_STORAGE_BUFFERS,
1210
.maxPerStageDescriptorSampledImages = MAX_SAMPLED_IMAGES,
1211
.maxPerStageDescriptorStorageImages = MAX_STORAGE_IMAGES,
1212
.maxPerStageDescriptorInputAttachments = MAX_INPUT_ATTACHMENTS,
1213
.maxPerStageResources = 128,
1214
1215
/* Some of these limits are multiplied by 6 because they need to
1216
* include all possible shader stages (even if not supported). See
1217
* 'Required Limits' table in the Vulkan spec.
1218
*/
1219
.maxDescriptorSetSamplers = 6 * V3D_MAX_TEXTURE_SAMPLERS,
1220
.maxDescriptorSetUniformBuffers = 6 * MAX_UNIFORM_BUFFERS,
1221
.maxDescriptorSetUniformBuffersDynamic = MAX_DYNAMIC_UNIFORM_BUFFERS,
1222
.maxDescriptorSetStorageBuffers = 6 * MAX_STORAGE_BUFFERS,
1223
.maxDescriptorSetStorageBuffersDynamic = MAX_DYNAMIC_STORAGE_BUFFERS,
1224
.maxDescriptorSetSampledImages = 6 * MAX_SAMPLED_IMAGES,
1225
.maxDescriptorSetStorageImages = 6 * MAX_STORAGE_IMAGES,
1226
.maxDescriptorSetInputAttachments = MAX_INPUT_ATTACHMENTS,
1227
1228
/* Vertex limits */
1229
.maxVertexInputAttributes = MAX_VERTEX_ATTRIBS,
1230
.maxVertexInputBindings = MAX_VBS,
1231
.maxVertexInputAttributeOffset = 0xffffffff,
1232
.maxVertexInputBindingStride = 0xffffffff,
1233
.maxVertexOutputComponents = max_varying_components,
1234
1235
/* Tessellation limits */
1236
.maxTessellationGenerationLevel = 0,
1237
.maxTessellationPatchSize = 0,
1238
.maxTessellationControlPerVertexInputComponents = 0,
1239
.maxTessellationControlPerVertexOutputComponents = 0,
1240
.maxTessellationControlPerPatchOutputComponents = 0,
1241
.maxTessellationControlTotalOutputComponents = 0,
1242
.maxTessellationEvaluationInputComponents = 0,
1243
.maxTessellationEvaluationOutputComponents = 0,
1244
1245
/* Geometry limits */
1246
.maxGeometryShaderInvocations = 32,
1247
.maxGeometryInputComponents = 64,
1248
.maxGeometryOutputComponents = 64,
1249
.maxGeometryOutputVertices = 256,
1250
.maxGeometryTotalOutputComponents = 1024,
1251
1252
/* Fragment limits */
1253
.maxFragmentInputComponents = max_varying_components,
1254
.maxFragmentOutputAttachments = 4,
1255
.maxFragmentDualSrcAttachments = 0,
1256
.maxFragmentCombinedOutputResources = MAX_RENDER_TARGETS +
1257
MAX_STORAGE_BUFFERS +
1258
MAX_STORAGE_IMAGES,
1259
1260
/* Compute limits */
1261
.maxComputeSharedMemorySize = 16384,
1262
.maxComputeWorkGroupCount = { 65535, 65535, 65535 },
1263
.maxComputeWorkGroupInvocations = 256,
1264
.maxComputeWorkGroupSize = { 256, 256, 256 },
1265
1266
.subPixelPrecisionBits = v3d_coord_shift,
1267
.subTexelPrecisionBits = 8,
1268
.mipmapPrecisionBits = 8,
1269
.maxDrawIndexedIndexValue = 0x00ffffff,
1270
.maxDrawIndirectCount = 0x7fffffff,
1271
.maxSamplerLodBias = 14.0f,
1272
.maxSamplerAnisotropy = 16.0f,
1273
.maxViewports = MAX_VIEWPORTS,
1274
.maxViewportDimensions = { max_fb_size, max_fb_size },
1275
.viewportBoundsRange = { -2.0 * max_fb_size,
1276
2.0 * max_fb_size - 1 },
1277
.viewportSubPixelBits = 0,
1278
.minMemoryMapAlignment = page_size,
1279
.minTexelBufferOffsetAlignment = V3D_UIFBLOCK_SIZE,
1280
.minUniformBufferOffsetAlignment = 32,
1281
.minStorageBufferOffsetAlignment = 32,
1282
.minTexelOffset = -8,
1283
.maxTexelOffset = 7,
1284
.minTexelGatherOffset = -8,
1285
.maxTexelGatherOffset = 7,
1286
.minInterpolationOffset = -0.5,
1287
.maxInterpolationOffset = 0.5,
1288
.subPixelInterpolationOffsetBits = v3d_coord_shift,
1289
.maxFramebufferWidth = max_fb_size,
1290
.maxFramebufferHeight = max_fb_size,
1291
.maxFramebufferLayers = 256,
1292
.framebufferColorSampleCounts = supported_sample_counts,
1293
.framebufferDepthSampleCounts = supported_sample_counts,
1294
.framebufferStencilSampleCounts = supported_sample_counts,
1295
.framebufferNoAttachmentsSampleCounts = supported_sample_counts,
1296
.maxColorAttachments = MAX_RENDER_TARGETS,
1297
.sampledImageColorSampleCounts = supported_sample_counts,
1298
.sampledImageIntegerSampleCounts = supported_sample_counts,
1299
.sampledImageDepthSampleCounts = supported_sample_counts,
1300
.sampledImageStencilSampleCounts = supported_sample_counts,
1301
.storageImageSampleCounts = VK_SAMPLE_COUNT_1_BIT,
1302
.maxSampleMaskWords = 1,
1303
.timestampComputeAndGraphics = true,
1304
.timestampPeriod = timestamp_period,
1305
.maxClipDistances = 8,
1306
.maxCullDistances = 0,
1307
.maxCombinedClipAndCullDistances = 8,
1308
.discreteQueuePriorities = 2,
1309
.pointSizeRange = { v3d_point_line_granularity,
1310
V3D_MAX_POINT_SIZE },
1311
.lineWidthRange = { 1.0f, V3D_MAX_LINE_WIDTH },
1312
.pointSizeGranularity = v3d_point_line_granularity,
1313
.lineWidthGranularity = v3d_point_line_granularity,
1314
.strictLines = true,
1315
.standardSampleLocations = false,
1316
.optimalBufferCopyOffsetAlignment = 32,
1317
.optimalBufferCopyRowPitchAlignment = 32,
1318
.nonCoherentAtomSize = 256,
1319
};
1320
1321
*pProperties = (VkPhysicalDeviceProperties) {
1322
.apiVersion = V3DV_API_VERSION,
1323
.driverVersion = vk_get_driver_version(),
1324
.vendorID = v3dv_physical_device_vendor_id(pdevice),
1325
.deviceID = v3dv_physical_device_device_id(pdevice),
1326
.deviceType = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
1327
.limits = limits,
1328
.sparseProperties = { 0 },
1329
};
1330
1331
snprintf(pProperties->deviceName, sizeof(pProperties->deviceName),
1332
"%s", pdevice->name);
1333
memcpy(pProperties->pipelineCacheUUID,
1334
pdevice->pipeline_cache_uuid, VK_UUID_SIZE);
1335
}
1336
1337
/* Vulkan 1.1+ entry point: fill core properties, then walk the pNext
 * chain and fill in every extension property struct we recognize.
 */
VKAPI_ATTR void VKAPI_CALL
v3dv_GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice,
                                  VkPhysicalDeviceProperties2 *pProperties)
{
   V3DV_FROM_HANDLE(v3dv_physical_device, pdevice, physicalDevice);

   v3dv_GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);

   vk_foreach_struct(ext, pProperties->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES: {
         VkPhysicalDeviceIDProperties *id_props =
            (VkPhysicalDeviceIDProperties *)ext;
         memcpy(id_props->deviceUUID, pdevice->device_uuid, VK_UUID_SIZE);
         memcpy(id_props->driverUUID, pdevice->driver_uuid, VK_UUID_SIZE);
         /* The LUID is for Windows. */
         id_props->deviceLUIDValid = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES: {
         VkPhysicalDeviceMaintenance3Properties *props =
            (VkPhysicalDeviceMaintenance3Properties *)ext;
         /* We don't really have special restrictions for the maximum
          * descriptors per set, other than maybe not exceeding the limits
          * of addressable memory in a single allocation on either the host
          * or the GPU. This will be a much larger limit than any of the
          * per-stage limits already available in Vulkan though, so in practice,
          * it is not expected to limit anything beyond what is already
          * constrained through per-stage limits.
          */
         uint32_t max_host_descriptors =
            (UINT32_MAX - sizeof(struct v3dv_descriptor_set)) /
            sizeof(struct v3dv_descriptor);
         uint32_t max_gpu_descriptors =
            (UINT32_MAX / v3dv_X(pdevice, max_descriptor_bo_size)());
         props->maxPerSetDescriptors =
            MIN2(max_host_descriptors, max_gpu_descriptors);

         /* Minimum required by the spec */
         props->maxMemoryAllocationSize = MAX_MEMORY_ALLOCATION_SIZE;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES: {
         VkPhysicalDeviceMultiviewProperties *props =
            (VkPhysicalDeviceMultiviewProperties *)ext;
         props->maxMultiviewViewCount = 1;
         /* This assumes that the multiview implementation uses instancing */
         props->maxMultiviewInstanceIndex =
            (UINT32_MAX / props->maxMultiviewViewCount) - 1;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT:
         /* Do nothing, not even logging. This is a non-PCI device, so we will
          * never provide this extension.
          */
         break;
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: {
         VkPhysicalDevicePointClippingProperties *props =
            (VkPhysicalDevicePointClippingProperties *)ext;
         props->pointClippingBehavior =
            VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES: {
         VkPhysicalDeviceProtectedMemoryProperties *props =
            (VkPhysicalDeviceProtectedMemoryProperties *)ext;
         props->protectedNoFault = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES: {
         VkPhysicalDeviceSubgroupProperties *props =
            (VkPhysicalDeviceSubgroupProperties *)ext;
         /* Subgroup operations are only exposed for compute, with basic
          * support and the hardware SIMD width as the subgroup size.
          */
         props->subgroupSize = V3D_CHANNELS;
         props->supportedStages = VK_SHADER_STAGE_COMPUTE_BIT;
         props->supportedOperations = VK_SUBGROUP_FEATURE_BASIC_BIT;
         props->quadOperationsInAllStages = false;
         break;
      }
      default:
         v3dv_debug_ignored_stype(ext->sType);
         break;
      }
   }
}
1421
1422
/* We support exactly one queue family, with a single queue that can do
 * graphics, compute and transfer work.
 */
static const VkQueueFamilyProperties
v3dv_queue_family_properties = {
   .queueFlags = VK_QUEUE_GRAPHICS_BIT |
                 VK_QUEUE_COMPUTE_BIT |
                 VK_QUEUE_TRANSFER_BIT,
   .queueCount = 1,
   .timestampValidBits = 64,
   .minImageTransferGranularity = { 1, 1, 1 },
};
1432
1433
VKAPI_ATTR void VKAPI_CALL
v3dv_GetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
                                            uint32_t *pCount,
                                            VkQueueFamilyProperties *pQueueFamilyProperties)
{
   /* Standard two-call idiom handled by VK_OUTARRAY: report a count of 1
    * or copy out the single queue family.
    */
   VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pCount);

   vk_outarray_append(&out, p) {
      *p = v3dv_queue_family_properties;
   }
}
1444
1445
VKAPI_ATTR void VKAPI_CALL
v3dv_GetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
                                             uint32_t *pQueueFamilyPropertyCount,
                                             VkQueueFamilyProperties2 *pQueueFamilyProperties)
{
   VK_OUTARRAY_MAKE(out, pQueueFamilyProperties, pQueueFamilyPropertyCount);

   vk_outarray_append(&out, p) {
      p->queueFamilyProperties = v3dv_queue_family_properties;

      /* We don't implement any queue-family extension structs. */
      vk_foreach_struct(s, p->pNext) {
         v3dv_debug_ignored_stype(s->sType);
      }
   }
}
1460
1461
VKAPI_ATTR void VKAPI_CALL
v3dv_GetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice,
                                       VkPhysicalDeviceMemoryProperties *pMemoryProperties)
{
   /* The memory heap/type table is computed once at physical device
    * creation; just copy it out.
    */
   V3DV_FROM_HANDLE(v3dv_physical_device, device, physicalDevice);
   *pMemoryProperties = device->memory;
}
1468
1469
VKAPI_ATTR void VKAPI_CALL
v3dv_GetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,
                                        VkPhysicalDeviceMemoryProperties2 *pMemoryProperties)
{
   v3dv_GetPhysicalDeviceMemoryProperties(physicalDevice,
                                          &pMemoryProperties->memoryProperties);

   /* No memory-property extension structs are supported yet. */
   vk_foreach_struct(ext, pMemoryProperties->pNext) {
      switch (ext->sType) {
      default:
         v3dv_debug_ignored_stype(ext->sType);
         break;
      }
   }
}
1484
1485
/* Resolve an instance-level entry point by name; delegates to the common
 * runtime dispatch helper with our instance entry point table.
 */
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
v3dv_GetInstanceProcAddr(VkInstance _instance,
                         const char *pName)
{
   V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);
   return vk_instance_get_proc_addr(&instance->vk,
                                    &v3dv_instance_entrypoints,
                                    pName);
}
1494
1495
/* With version 1+ of the loader interface the ICD should expose
 * vk_icdGetInstanceProcAddr to work around certain LD_PRELOAD issues seen in apps.
 */
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction
VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance,
                                     const char *pName);

PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_icdGetInstanceProcAddr(VkInstance instance,
                          const char* pName)
{
   /* Simple forwarder to the driver's own GetInstanceProcAddr. */
   return v3dv_GetInstanceProcAddr(instance, pName);
}
1510
1511
/* With version 4+ of the loader interface the ICD should expose
 * vk_icdGetPhysicalDeviceProcAddr()
 */
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL
vk_icdGetPhysicalDeviceProcAddr(VkInstance _instance,
                                const char* pName);

PFN_vkVoidFunction
vk_icdGetPhysicalDeviceProcAddr(VkInstance _instance,
                                const char* pName)
{
   V3DV_FROM_HANDLE(v3dv_instance, instance, _instance);

   /* Physical-device-level dispatch is handled by the common runtime. */
   return vk_instance_get_physical_device_proc_addr(&instance->vk, pName);
}
1527
1528
/* The driver implements no layers: a count query reports zero; asking for
 * actual properties is therefore an error per the Vulkan spec.
 */
VKAPI_ATTR VkResult VKAPI_CALL
v3dv_EnumerateInstanceLayerProperties(uint32_t *pPropertyCount,
                                      VkLayerProperties *pProperties)
{
   if (pProperties == NULL) {
      *pPropertyCount = 0;
      return VK_SUCCESS;
   }

   return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
}
1539
1540
/* Device layers mirror instance layers: none are implemented. */
VKAPI_ATTR VkResult VKAPI_CALL
v3dv_EnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice,
                                    uint32_t *pPropertyCount,
                                    VkLayerProperties *pProperties)
{
   V3DV_FROM_HANDLE(v3dv_physical_device, physical_device, physicalDevice);

   if (pProperties == NULL) {
      *pPropertyCount = 0;
      return VK_SUCCESS;
   }

   return vk_error((struct v3dv_instance*) physical_device->vk.instance,
                   VK_ERROR_LAYER_NOT_PRESENT);
}
1555
1556
/* Initialize the device's single queue: base Vulkan object, empty deferred
 * submit list and its protecting mutex.
 */
static VkResult
queue_init(struct v3dv_device *device, struct v3dv_queue *queue)
{
   vk_object_base_init(&device->vk, &queue->base, VK_OBJECT_TYPE_QUEUE);
   queue->device = device;
   queue->flags = 0;
   queue->noop_job = NULL;
   list_inithead(&queue->submit_wait_list);
   pthread_mutex_init(&queue->mutex, NULL);
   return VK_SUCCESS;
}
1567
1568
/* Tear down a queue created by queue_init(). All deferred submits must
 * have completed by now (enforced by the assert).
 */
static void
queue_finish(struct v3dv_queue *queue)
{
   vk_object_base_finish(&queue->base);
   assert(list_is_empty(&queue->submit_wait_list));
   /* The lazily-created no-op job, if any, belongs to the queue. */
   if (queue->noop_job)
      v3dv_job_destroy(queue->noop_job);
   pthread_mutex_destroy(&queue->mutex);
}
1577
1578
/* Initialize the meta-operation caches (clear, blit, texel-buffer copy)
 * and the mutex that guards them.
 */
static void
init_device_meta(struct v3dv_device *device)
{
   mtx_init(&device->meta.mtx, mtx_plain);
   v3dv_meta_clear_init(device);
   v3dv_meta_blit_init(device);
   v3dv_meta_texel_buffer_copy_init(device);
}
1586
1587
/* Counterpart of init_device_meta().
 *
 * NOTE(review): the mutex is destroyed before the meta caches are torn
 * down — presumably safe because device destruction is single-threaded at
 * this point, but worth confirming the finish functions never take the
 * lock.
 */
static void
destroy_device_meta(struct v3dv_device *device)
{
   mtx_destroy(&device->meta.mtx);
   v3dv_meta_clear_finish(device);
   v3dv_meta_blit_finish(device);
   v3dv_meta_texel_buffer_copy_finish(device);
}
1595
1596
/* Create the logical device: validate requested features and queues,
 * allocate and initialize the device object, its single queue, the
 * last-job syncobj, the meta caches, the BO cache and the default
 * pipeline cache.
 */
VKAPI_ATTR VkResult VKAPI_CALL
v3dv_CreateDevice(VkPhysicalDevice physicalDevice,
                  const VkDeviceCreateInfo *pCreateInfo,
                  const VkAllocationCallbacks *pAllocator,
                  VkDevice *pDevice)
{
   V3DV_FROM_HANDLE(v3dv_physical_device, physical_device, physicalDevice);
   struct v3dv_instance *instance = (struct v3dv_instance*) physical_device->vk.instance;
   VkResult result;
   struct v3dv_device *device;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);

   /* Check enabled features: every enabled feature must be supported.
    * This relies on VkPhysicalDeviceFeatures being a flat array of
    * VkBool32 fields.
    */
   if (pCreateInfo->pEnabledFeatures) {
      VkPhysicalDeviceFeatures supported_features;
      v3dv_GetPhysicalDeviceFeatures(physicalDevice, &supported_features);
      VkBool32 *supported_feature = (VkBool32 *)&supported_features;
      VkBool32 *enabled_feature = (VkBool32 *)pCreateInfo->pEnabledFeatures;
      unsigned num_features = sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32);
      for (uint32_t i = 0; i < num_features; i++) {
         if (enabled_feature[i] && !supported_feature[i])
            return vk_error(instance, VK_ERROR_FEATURE_NOT_PRESENT);
      }
   }

   /* Check requested queues (we only expose one queue ) */
   assert(pCreateInfo->queueCreateInfoCount == 1);
   for (uint32_t i = 0; i < pCreateInfo->queueCreateInfoCount; i++) {
      assert(pCreateInfo->pQueueCreateInfos[i].queueFamilyIndex == 0);
      assert(pCreateInfo->pQueueCreateInfos[i].queueCount == 1);
      if (pCreateInfo->pQueueCreateInfos[i].flags != 0)
         return vk_error(instance, VK_ERROR_INITIALIZATION_FAILED);
   }

   device = vk_zalloc2(&physical_device->vk.instance->alloc, pAllocator,
                       sizeof(*device), 8,
                       VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
   if (!device)
      return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   struct vk_device_dispatch_table dispatch_table;
   vk_device_dispatch_table_from_entrypoints(&dispatch_table,
                                             &v3dv_device_entrypoints, true);
   result = vk_device_init(&device->vk, &physical_device->vk,
                           &dispatch_table, pCreateInfo, pAllocator);
   if (result != VK_SUCCESS) {
      vk_free(&device->vk.alloc, device);
      return vk_error(instance, result);
   }

   device->instance = instance;
   device->pdevice = physical_device;

   /* Use the caller's allocator if provided, else the instance's. */
   if (pAllocator)
      device->vk.alloc = *pAllocator;
   else
      device->vk.alloc = physical_device->vk.instance->alloc;

   pthread_mutex_init(&device->mutex, NULL);

   result = queue_init(device, &device->queue);
   if (result != VK_SUCCESS)
      goto fail;

   device->devinfo = physical_device->devinfo;

   if (pCreateInfo->pEnabledFeatures) {
      memcpy(&device->features, pCreateInfo->pEnabledFeatures,
             sizeof(device->features));

      if (device->features.robustBufferAccess)
         perf_debug("Device created with Robust Buffer Access enabled.\n");
   }

   /* Syncobj used to track the last submitted job on this device; created
    * signaled so waits before any submission succeed immediately.
    */
   int ret = drmSyncobjCreate(physical_device->render_fd,
                              DRM_SYNCOBJ_CREATE_SIGNALED,
                              &device->last_job_sync);
   if (ret) {
      result = VK_ERROR_INITIALIZATION_FAILED;
      goto fail;
   }

#ifdef DEBUG
   v3dv_X(device, device_check_prepacked_sizes)();
#endif
   init_device_meta(device);
   v3dv_bo_cache_init(device);
   v3dv_pipeline_cache_init(&device->default_pipeline_cache, device,
                            device->instance->default_pipeline_cache_enabled);
   device->default_attribute_float =
      v3dv_pipeline_create_default_attribute_values(device, NULL);

   *pDevice = v3dv_device_to_handle(device);

   return VK_SUCCESS;

fail:
   vk_device_finish(&device->vk);
   vk_free(&device->vk.alloc, device);

   return result;
}
1699
1700
/* Destroy the logical device: drain the GPU first, then tear everything
 * down in reverse creation order (BO cache last, since other objects may
 * free private BOs into it).
 */
VKAPI_ATTR void VKAPI_CALL
v3dv_DestroyDevice(VkDevice _device,
                   const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);

   v3dv_DeviceWaitIdle(_device);
   queue_finish(&device->queue);
   pthread_mutex_destroy(&device->mutex);
   drmSyncobjDestroy(device->pdevice->render_fd, device->last_job_sync);
   destroy_device_meta(device);
   v3dv_pipeline_cache_finish(&device->default_pipeline_cache);

   if (device->default_attribute_float) {
      v3dv_bo_free(device, device->default_attribute_float);
      device->default_attribute_float = NULL;
   }

   /* Bo cache should be removed the last, as any other object could be
    * freeing their private bos
    */
   v3dv_bo_cache_destroy(device);

   vk_device_finish(&device->vk);
   vk_free2(&device->vk.alloc, pAllocator, device);
}
1726
1727
/* Return the device's only queue; indices other than 0/0 are invalid
 * since we advertise a single family with a single queue.
 */
VKAPI_ATTR void VKAPI_CALL
v3dv_GetDeviceQueue(VkDevice _device,
                    uint32_t queueFamilyIndex,
                    uint32_t queueIndex,
                    VkQueue *pQueue)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);

   assert(queueIndex == 0);
   assert(queueFamilyIndex == 0);

   *pQueue = v3dv_queue_to_handle(&device->queue);
}
1740
1741
/* With a single queue, device-idle is just queue-idle. */
VKAPI_ATTR VkResult VKAPI_CALL
v3dv_DeviceWaitIdle(VkDevice _device)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   return v3dv_QueueWaitIdle(v3dv_queue_to_handle(&device->queue));
}
1747
1748
/* Back a VkDeviceMemory allocation with a freshly allocated BO on the
 * render node.
 */
static VkResult
device_alloc(struct v3dv_device *device,
             struct v3dv_device_memory *mem,
             VkDeviceSize size)
{
   /* Our kernel interface is 32-bit */
   assert(size <= UINT32_MAX);

   mem->bo = v3dv_bo_alloc(device, size, "device_alloc", false);
   if (!mem->bo)
      return VK_ERROR_OUT_OF_DEVICE_MEMORY;

   return VK_SUCCESS;
}
1762
1763
/* Destroy a dumb buffer previously created on the display device for WSI.
 * A negative handle means there is nothing to free. Failure is only
 * reported to stderr — there is no way to recover here.
 */
static void
device_free_wsi_dumb(int32_t display_fd, int32_t dumb_handle)
{
   assert(display_fd != -1);
   if (dumb_handle < 0)
      return;

   struct drm_mode_destroy_dumb destroy_dumb = {
      .handle = dumb_handle,
   };
   if (v3dv_ioctl(display_fd, DRM_IOCTL_MODE_DESTROY_DUMB, &destroy_dumb)) {
      fprintf(stderr, "destroy dumb object %d: %s\n", dumb_handle, strerror(errno));
   }
}
1777
1778
/* Release the BO backing a VkDeviceMemory. Imported BOs (no ownership)
 * only free the wrapper struct; the GEM object belongs to the exporter.
 */
static void
device_free(struct v3dv_device *device, struct v3dv_device_memory *mem)
{
   /* If this memory allocation was for WSI, then we need to use the
    * display device to free the allocated dumb BO.
    */
   if (mem->is_for_wsi) {
      assert(mem->has_bo_ownership);
      device_free_wsi_dumb(device->instance->physicalDevice.display_fd,
                           mem->bo->dumb_handle);
   }

   if (mem->has_bo_ownership)
      v3dv_bo_free(device, mem->bo);
   else if (mem->bo)
      vk_free(&device->vk.alloc, mem->bo);
}
1795
1796
/* Unmap the BO backing a VkDeviceMemory; it must currently be mapped. */
static void
device_unmap(struct v3dv_device *device, struct v3dv_device_memory *mem)
{
   assert(mem && mem->bo->map && mem->bo->map_size > 0);
   v3dv_bo_unmap(device, mem->bo);
}
1802
1803
/* Map the whole BO backing a VkDeviceMemory. If the driver already mapped
 * it internally, reuse that mapping.
 */
static VkResult
device_map(struct v3dv_device *device, struct v3dv_device_memory *mem)
{
   assert(mem && mem->bo);

   /* From the spec:
    *
    *   "After a successful call to vkMapMemory the memory object memory is
    *    considered to be currently host mapped. It is an application error to
    *    call vkMapMemory on a memory object that is already host mapped."
    *
    * We are not concerned with this ourselves (validation layers should
    * catch these errors and warn users), however, the driver may internally
    * map things (for example for debug CLIF dumps or some CPU-side operations)
    * so by the time the user calls here the buffer might already been mapped
    * internally by the driver.
    */
   if (mem->bo->map) {
      assert(mem->bo->map_size == mem->bo->size);
      return VK_SUCCESS;
   }

   bool ok = v3dv_bo_map(device, mem->bo, mem->bo->size);
   if (!ok)
      return VK_ERROR_MEMORY_MAP_FAILED;

   return VK_SUCCESS;
}
1831
1832
/* Import an external dma-buf/prime fd as a v3dv BO on the render node.
 * On success *bo is a freshly allocated wrapper (caller owns the struct,
 * not the GEM object); on failure *bo is NULL. The fd itself is not
 * consumed — the caller decides whether to close it.
 */
static VkResult
device_import_bo(struct v3dv_device *device,
                 const VkAllocationCallbacks *pAllocator,
                 int fd, uint64_t size,
                 struct v3dv_bo **bo)
{
   VkResult result;

   *bo = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(struct v3dv_bo), 8,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (*bo == NULL) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   /* Sanity-check that the fd actually backs at least `size` bytes. */
   off_t real_size = lseek(fd, 0, SEEK_END);
   lseek(fd, 0, SEEK_SET);
   if (real_size < 0 || (uint64_t) real_size < size) {
      result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
      goto fail;
   }

   int render_fd = device->pdevice->render_fd;
   assert(render_fd >= 0);

   int ret;
   uint32_t handle;
   ret = drmPrimeFDToHandle(render_fd, fd, &handle);
   if (ret) {
      result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
      goto fail;
   }

   /* NOTE(review): if the GET_BO_OFFSET ioctl below fails, the GEM handle
    * obtained above is not closed — presumably a small leak on an error
    * path; worth confirming against GEM handle lifetime rules.
    */
   struct drm_v3d_get_bo_offset get_offset = {
      .handle = handle,
   };
   ret = v3dv_ioctl(render_fd, DRM_IOCTL_V3D_GET_BO_OFFSET, &get_offset);
   if (ret) {
      result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
      goto fail;
   }
   assert(get_offset.offset != 0);

   v3dv_bo_init(*bo, handle, size, get_offset.offset, "import", false);

   return VK_SUCCESS;

fail:
   if (*bo) {
      vk_free2(&device->vk.alloc, pAllocator, *bo);
      *bo = NULL;
   }
   return result;
}
1886
1887
/* Allocate memory destined for window-system images. On real hardware
 * scanout buffers must come from the display (vc4) device as dumb BOs,
 * which we then export and import into the v3d render node.
 */
static VkResult
device_alloc_for_wsi(struct v3dv_device *device,
                     const VkAllocationCallbacks *pAllocator,
                     struct v3dv_device_memory *mem,
                     VkDeviceSize size)
{
   /* In the simulator we can get away with a regular allocation since both
    * allocation and rendering happen in the same DRM render node. On actual
    * hardware we need to allocate our winsys BOs on the vc4 display device
    * and import them into v3d.
    */
#if using_v3d_simulator
   return device_alloc(device, mem, size);
#else
   /* If we are allocating for WSI we should have a swapchain and thus,
    * we should've initialized the display device. However, Zink doesn't
    * use swapchains, so in that case we can get here without acquiring the
    * display device and we need to do it now.
    */
   VkResult result;
   struct v3dv_instance *instance = device->instance;
   struct v3dv_physical_device *pdevice = &device->instance->physicalDevice;
   if (unlikely(pdevice->display_fd < 0)) {
      result = v3dv_physical_device_acquire_display(instance, pdevice, NULL);
      if (result != VK_SUCCESS)
         return result;
   }
   assert(pdevice->display_fd != -1);

   mem->is_for_wsi = true;

   /* Express the size as a 1024-wide, 32bpp dumb buffer: each row is one
    * 4096-byte page, and the height is the page count.
    */
   int display_fd = pdevice->display_fd;
   struct drm_mode_create_dumb create_dumb = {
      .width = 1024, /* one page */
      .height = align(size, 4096) / 4096,
      .bpp = util_format_get_blocksizebits(PIPE_FORMAT_RGBA8888_UNORM),
   };

   int err;
   err = v3dv_ioctl(display_fd, DRM_IOCTL_MODE_CREATE_DUMB, &create_dumb);
   if (err < 0)
      goto fail_create;

   int fd;
   err =
      drmPrimeHandleToFD(display_fd, create_dumb.handle, O_CLOEXEC, &fd);
   if (err < 0)
      goto fail_export;

   /* The exported fd is only needed for the import; close it either way. */
   result = device_import_bo(device, pAllocator, fd, size, &mem->bo);
   close(fd);
   if (result != VK_SUCCESS)
      goto fail_import;

   mem->bo->dumb_handle = create_dumb.handle;
   return VK_SUCCESS;

fail_import:
fail_export:
   device_free_wsi_dumb(display_fd, create_dumb.handle);

fail_create:
   return VK_ERROR_OUT_OF_DEVICE_MEMORY;
#endif
}
1952
1953
/* vkAllocateMemory: allocate, import, or WSI-allocate device memory
 * depending on the structs chained on pNext.
 */
VKAPI_ATTR VkResult VKAPI_CALL
v3dv_AllocateMemory(VkDevice _device,
                    const VkMemoryAllocateInfo *pAllocateInfo,
                    const VkAllocationCallbacks *pAllocator,
                    VkDeviceMemory *pMem)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   struct v3dv_device_memory *mem;
   struct v3dv_physical_device *pdevice = &device->instance->physicalDevice;

   assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);

   /* The Vulkan 1.0.33 spec says "allocationSize must be greater than 0". */
   assert(pAllocateInfo->allocationSize > 0);

   mem = vk_object_zalloc(&device->vk, pAllocator, sizeof(*mem),
                          VK_OBJECT_TYPE_DEVICE_MEMORY);
   if (mem == NULL)
      return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);

   assert(pAllocateInfo->memoryTypeIndex < pdevice->memory.memoryTypeCount);
   mem->type = &pdevice->memory.memoryTypes[pAllocateInfo->memoryTypeIndex];
   mem->has_bo_ownership = true;
   mem->is_for_wsi = false;

   /* Scan the pNext chain for the structs that change how we allocate.
    * The cast silences -Wswitch for the Mesa-internal WSI sType.
    */
   const struct wsi_memory_allocate_info *wsi_info = NULL;
   const VkImportMemoryFdInfoKHR *fd_info = NULL;
   vk_foreach_struct_const(ext, pAllocateInfo->pNext) {
      switch ((unsigned)ext->sType) {
      case VK_STRUCTURE_TYPE_WSI_MEMORY_ALLOCATE_INFO_MESA:
         wsi_info = (void *)ext;
         break;
      case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
         fd_info = (void *)ext;
         break;
      case VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO:
         /* We don't support VK_KHR_buffer_device_address or multiple
          * devices per device group, so we can ignore this.
          */
         break;
      case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR:
         /* We don't have particular optimizations associated with memory
          * allocations that won't be suballocated to multiple resources.
          */
         break;
      case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR:
         /* The mask of handle types specified here must be supported
          * according to VkExternalImageFormatProperties, so it must be
          * fd or dmabuf, which don't have special requirements for us.
          */
         break;
      default:
         v3dv_debug_ignored_stype(ext->sType);
         break;
      }
   }

   VkResult result = VK_SUCCESS;

   /* We always allocate device memory in multiples of a page, so round up
    * requested size to that.
    */
   VkDeviceSize alloc_size = ALIGN(pAllocateInfo->allocationSize, 4096);

   if (unlikely(alloc_size > MAX_MEMORY_ALLOCATION_SIZE)) {
      result = VK_ERROR_OUT_OF_DEVICE_MEMORY;
   } else {
      if (wsi_info) {
         result = device_alloc_for_wsi(device, pAllocator, mem, alloc_size);
      } else if (fd_info && fd_info->handleType) {
         assert(fd_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
                fd_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
         result = device_import_bo(device, pAllocator,
                                   fd_info->fd, alloc_size, &mem->bo);
         mem->has_bo_ownership = false;
         /* Per the spec, a successful import takes ownership of the fd. */
         if (result == VK_SUCCESS)
            close(fd_info->fd);
      } else {
         result = device_alloc(device, mem, alloc_size);
      }
   }

   if (result != VK_SUCCESS) {
      vk_object_free(&device->vk, pAllocator, mem);
      return vk_error(device->instance, result);
   }

   *pMem = v3dv_device_memory_to_handle(mem);
   return result;
}
2043
2044
/* vkFreeMemory: unmap if still mapped, release the BO, free the wrapper.
 * Freeing VK_NULL_HANDLE is a no-op per the spec.
 */
VKAPI_ATTR void VKAPI_CALL
v3dv_FreeMemory(VkDevice _device,
                VkDeviceMemory _mem,
                const VkAllocationCallbacks *pAllocator)
{
   V3DV_FROM_HANDLE(v3dv_device, device, _device);
   V3DV_FROM_HANDLE(v3dv_device_memory, mem, _mem);

   if (mem == NULL)
      return;

   if (mem->bo->map)
      v3dv_UnmapMemory(_device, _mem);

   device_free(device, mem);

   vk_object_free(&device->vk, pAllocator, mem);
}
2062
2063
VKAPI_ATTR VkResult VKAPI_CALL
2064
v3dv_MapMemory(VkDevice _device,
2065
VkDeviceMemory _memory,
2066
VkDeviceSize offset,
2067
VkDeviceSize size,
2068
VkMemoryMapFlags flags,
2069
void **ppData)
2070
{
2071
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2072
V3DV_FROM_HANDLE(v3dv_device_memory, mem, _memory);
2073
2074
if (mem == NULL) {
2075
*ppData = NULL;
2076
return VK_SUCCESS;
2077
}
2078
2079
assert(offset < mem->bo->size);
2080
2081
/* Since the driver can map BOs internally as well and the mapped range
2082
* required by the user or the driver might not be the same, we always map
2083
* the entire BO and then add the requested offset to the start address
2084
* of the mapped region.
2085
*/
2086
VkResult result = device_map(device, mem);
2087
if (result != VK_SUCCESS)
2088
return vk_error(device->instance, result);
2089
2090
*ppData = ((uint8_t *) mem->bo->map) + offset;
2091
return VK_SUCCESS;
2092
}
2093
2094
VKAPI_ATTR void VKAPI_CALL
2095
v3dv_UnmapMemory(VkDevice _device,
2096
VkDeviceMemory _memory)
2097
{
2098
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2099
V3DV_FROM_HANDLE(v3dv_device_memory, mem, _memory);
2100
2101
if (mem == NULL)
2102
return;
2103
2104
device_unmap(device, mem);
2105
}
2106
2107
VKAPI_ATTR VkResult VKAPI_CALL
2108
v3dv_FlushMappedMemoryRanges(VkDevice _device,
2109
uint32_t memoryRangeCount,
2110
const VkMappedMemoryRange *pMemoryRanges)
2111
{
2112
return VK_SUCCESS;
2113
}
2114
2115
VKAPI_ATTR VkResult VKAPI_CALL
2116
v3dv_InvalidateMappedMemoryRanges(VkDevice _device,
2117
uint32_t memoryRangeCount,
2118
const VkMappedMemoryRange *pMemoryRanges)
2119
{
2120
return VK_SUCCESS;
2121
}
2122
2123
VKAPI_ATTR void VKAPI_CALL
2124
v3dv_GetImageMemoryRequirements2(VkDevice device,
2125
const VkImageMemoryRequirementsInfo2 *pInfo,
2126
VkMemoryRequirements2 *pMemoryRequirements)
2127
{
2128
V3DV_FROM_HANDLE(v3dv_image, image, pInfo->image);
2129
2130
pMemoryRequirements->memoryRequirements = (VkMemoryRequirements) {
2131
.memoryTypeBits = 0x1,
2132
.alignment = image->alignment,
2133
.size = image->size
2134
};
2135
2136
vk_foreach_struct(ext, pMemoryRequirements->pNext) {
2137
switch (ext->sType) {
2138
case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
2139
VkMemoryDedicatedRequirements *req =
2140
(VkMemoryDedicatedRequirements *) ext;
2141
req->requiresDedicatedAllocation = image->external;
2142
req->prefersDedicatedAllocation = image->external;
2143
break;
2144
}
2145
default:
2146
v3dv_debug_ignored_stype(ext->sType);
2147
break;
2148
}
2149
}
2150
}
2151
2152
static void
2153
bind_image_memory(const VkBindImageMemoryInfo *info)
2154
{
2155
V3DV_FROM_HANDLE(v3dv_image, image, info->image);
2156
V3DV_FROM_HANDLE(v3dv_device_memory, mem, info->memory);
2157
2158
/* Valid usage:
2159
*
2160
* "memoryOffset must be an integer multiple of the alignment member of
2161
* the VkMemoryRequirements structure returned from a call to
2162
* vkGetImageMemoryRequirements with image"
2163
*/
2164
assert(info->memoryOffset % image->alignment == 0);
2165
assert(info->memoryOffset < mem->bo->size);
2166
2167
image->mem = mem;
2168
image->mem_offset = info->memoryOffset;
2169
}
2170
2171
VKAPI_ATTR VkResult VKAPI_CALL
2172
v3dv_BindImageMemory2(VkDevice _device,
2173
uint32_t bindInfoCount,
2174
const VkBindImageMemoryInfo *pBindInfos)
2175
{
2176
for (uint32_t i = 0; i < bindInfoCount; i++) {
2177
const VkBindImageMemorySwapchainInfoKHR *swapchain_info =
2178
vk_find_struct_const(pBindInfos->pNext,
2179
BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR);
2180
if (swapchain_info && swapchain_info->swapchain) {
2181
struct v3dv_image *swapchain_image =
2182
v3dv_wsi_get_image_from_swapchain(swapchain_info->swapchain,
2183
swapchain_info->imageIndex);
2184
VkBindImageMemoryInfo swapchain_bind = {
2185
.sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
2186
.image = pBindInfos[i].image,
2187
.memory = v3dv_device_memory_to_handle(swapchain_image->mem),
2188
.memoryOffset = swapchain_image->mem_offset,
2189
};
2190
bind_image_memory(&swapchain_bind);
2191
} else {
2192
bind_image_memory(&pBindInfos[i]);
2193
}
2194
}
2195
2196
return VK_SUCCESS;
2197
}
2198
2199
VKAPI_ATTR void VKAPI_CALL
2200
v3dv_GetBufferMemoryRequirements2(VkDevice device,
2201
const VkBufferMemoryRequirementsInfo2 *pInfo,
2202
VkMemoryRequirements2 *pMemoryRequirements)
2203
{
2204
V3DV_FROM_HANDLE(v3dv_buffer, buffer, pInfo->buffer);
2205
2206
pMemoryRequirements->memoryRequirements = (VkMemoryRequirements) {
2207
.memoryTypeBits = 0x1,
2208
.alignment = buffer->alignment,
2209
.size = align64(buffer->size, buffer->alignment),
2210
};
2211
2212
vk_foreach_struct(ext, pMemoryRequirements->pNext) {
2213
switch (ext->sType) {
2214
case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
2215
VkMemoryDedicatedRequirements *req =
2216
(VkMemoryDedicatedRequirements *) ext;
2217
req->requiresDedicatedAllocation = false;
2218
req->prefersDedicatedAllocation = false;
2219
break;
2220
}
2221
default:
2222
v3dv_debug_ignored_stype(ext->sType);
2223
break;
2224
}
2225
}
2226
}
2227
2228
static void
2229
bind_buffer_memory(const VkBindBufferMemoryInfo *info)
2230
{
2231
V3DV_FROM_HANDLE(v3dv_buffer, buffer, info->buffer);
2232
V3DV_FROM_HANDLE(v3dv_device_memory, mem, info->memory);
2233
2234
/* Valid usage:
2235
*
2236
* "memoryOffset must be an integer multiple of the alignment member of
2237
* the VkMemoryRequirements structure returned from a call to
2238
* vkGetBufferMemoryRequirements with buffer"
2239
*/
2240
assert(info->memoryOffset % buffer->alignment == 0);
2241
assert(info->memoryOffset < mem->bo->size);
2242
2243
buffer->mem = mem;
2244
buffer->mem_offset = info->memoryOffset;
2245
}
2246
2247
2248
VKAPI_ATTR VkResult VKAPI_CALL
2249
v3dv_BindBufferMemory2(VkDevice device,
2250
uint32_t bindInfoCount,
2251
const VkBindBufferMemoryInfo *pBindInfos)
2252
{
2253
for (uint32_t i = 0; i < bindInfoCount; i++)
2254
bind_buffer_memory(&pBindInfos[i]);
2255
2256
return VK_SUCCESS;
2257
}
2258
2259
VKAPI_ATTR VkResult VKAPI_CALL
2260
v3dv_CreateBuffer(VkDevice _device,
2261
const VkBufferCreateInfo *pCreateInfo,
2262
const VkAllocationCallbacks *pAllocator,
2263
VkBuffer *pBuffer)
2264
{
2265
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2266
struct v3dv_buffer *buffer;
2267
2268
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO);
2269
assert(pCreateInfo->usage != 0);
2270
2271
/* We don't support any flags for now */
2272
assert(pCreateInfo->flags == 0);
2273
2274
buffer = vk_object_zalloc(&device->vk, pAllocator, sizeof(*buffer),
2275
VK_OBJECT_TYPE_BUFFER);
2276
if (buffer == NULL)
2277
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
2278
2279
buffer->size = pCreateInfo->size;
2280
buffer->usage = pCreateInfo->usage;
2281
buffer->alignment = 256; /* nonCoherentAtomSize */
2282
2283
/* Limit allocations to 32-bit */
2284
const VkDeviceSize aligned_size = align64(buffer->size, buffer->alignment);
2285
if (aligned_size > UINT32_MAX || aligned_size < buffer->size)
2286
return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2287
2288
*pBuffer = v3dv_buffer_to_handle(buffer);
2289
2290
return VK_SUCCESS;
2291
}
2292
2293
VKAPI_ATTR void VKAPI_CALL
2294
v3dv_DestroyBuffer(VkDevice _device,
2295
VkBuffer _buffer,
2296
const VkAllocationCallbacks *pAllocator)
2297
{
2298
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2299
V3DV_FROM_HANDLE(v3dv_buffer, buffer, _buffer);
2300
2301
if (!buffer)
2302
return;
2303
2304
vk_object_free(&device->vk, pAllocator, buffer);
2305
}
2306
2307
VKAPI_ATTR VkResult VKAPI_CALL
2308
v3dv_CreateFramebuffer(VkDevice _device,
2309
const VkFramebufferCreateInfo *pCreateInfo,
2310
const VkAllocationCallbacks *pAllocator,
2311
VkFramebuffer *pFramebuffer)
2312
{
2313
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2314
struct v3dv_framebuffer *framebuffer;
2315
2316
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
2317
2318
size_t size = sizeof(*framebuffer) +
2319
sizeof(struct v3dv_image_view *) * pCreateInfo->attachmentCount;
2320
framebuffer = vk_object_zalloc(&device->vk, pAllocator, size,
2321
VK_OBJECT_TYPE_FRAMEBUFFER);
2322
if (framebuffer == NULL)
2323
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
2324
2325
framebuffer->width = pCreateInfo->width;
2326
framebuffer->height = pCreateInfo->height;
2327
framebuffer->layers = pCreateInfo->layers;
2328
framebuffer->has_edge_padding = true;
2329
2330
framebuffer->attachment_count = pCreateInfo->attachmentCount;
2331
framebuffer->color_attachment_count = 0;
2332
for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
2333
framebuffer->attachments[i] =
2334
v3dv_image_view_from_handle(pCreateInfo->pAttachments[i]);
2335
if (framebuffer->attachments[i]->aspects & VK_IMAGE_ASPECT_COLOR_BIT)
2336
framebuffer->color_attachment_count++;
2337
}
2338
2339
*pFramebuffer = v3dv_framebuffer_to_handle(framebuffer);
2340
2341
return VK_SUCCESS;
2342
}
2343
2344
VKAPI_ATTR void VKAPI_CALL
2345
v3dv_DestroyFramebuffer(VkDevice _device,
2346
VkFramebuffer _fb,
2347
const VkAllocationCallbacks *pAllocator)
2348
{
2349
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2350
V3DV_FROM_HANDLE(v3dv_framebuffer, fb, _fb);
2351
2352
if (!fb)
2353
return;
2354
2355
vk_object_free(&device->vk, pAllocator, fb);
2356
}
2357
2358
VKAPI_ATTR VkResult VKAPI_CALL
2359
v3dv_GetMemoryFdPropertiesKHR(VkDevice _device,
2360
VkExternalMemoryHandleTypeFlagBits handleType,
2361
int fd,
2362
VkMemoryFdPropertiesKHR *pMemoryFdProperties)
2363
{
2364
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2365
struct v3dv_physical_device *pdevice = &device->instance->physicalDevice;
2366
2367
switch (handleType) {
2368
case VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT:
2369
pMemoryFdProperties->memoryTypeBits =
2370
(1 << pdevice->memory.memoryTypeCount) - 1;
2371
return VK_SUCCESS;
2372
default:
2373
return vk_error(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
2374
}
2375
}
2376
2377
VKAPI_ATTR VkResult VKAPI_CALL
2378
v3dv_GetMemoryFdKHR(VkDevice _device,
2379
const VkMemoryGetFdInfoKHR *pGetFdInfo,
2380
int *pFd)
2381
{
2382
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2383
V3DV_FROM_HANDLE(v3dv_device_memory, mem, pGetFdInfo->memory);
2384
2385
assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);
2386
assert(pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT ||
2387
pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
2388
2389
int fd, ret;
2390
ret = drmPrimeHandleToFD(device->pdevice->render_fd,
2391
mem->bo->handle,
2392
DRM_CLOEXEC, &fd);
2393
if (ret)
2394
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
2395
2396
*pFd = fd;
2397
2398
return VK_SUCCESS;
2399
}
2400
2401
VKAPI_ATTR VkResult VKAPI_CALL
2402
v3dv_CreateEvent(VkDevice _device,
2403
const VkEventCreateInfo *pCreateInfo,
2404
const VkAllocationCallbacks *pAllocator,
2405
VkEvent *pEvent)
2406
{
2407
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2408
struct v3dv_event *event =
2409
vk_object_zalloc(&device->vk, pAllocator, sizeof(*event),
2410
VK_OBJECT_TYPE_EVENT);
2411
if (!event)
2412
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
2413
2414
/* Events are created in the unsignaled state */
2415
event->state = false;
2416
*pEvent = v3dv_event_to_handle(event);
2417
2418
return VK_SUCCESS;
2419
}
2420
2421
VKAPI_ATTR void VKAPI_CALL
2422
v3dv_DestroyEvent(VkDevice _device,
2423
VkEvent _event,
2424
const VkAllocationCallbacks *pAllocator)
2425
{
2426
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2427
V3DV_FROM_HANDLE(v3dv_event, event, _event);
2428
2429
if (!event)
2430
return;
2431
2432
vk_object_free(&device->vk, pAllocator, event);
2433
}
2434
2435
VKAPI_ATTR VkResult VKAPI_CALL
2436
v3dv_GetEventStatus(VkDevice _device, VkEvent _event)
2437
{
2438
V3DV_FROM_HANDLE(v3dv_event, event, _event);
2439
return p_atomic_read(&event->state) ? VK_EVENT_SET : VK_EVENT_RESET;
2440
}
2441
2442
VKAPI_ATTR VkResult VKAPI_CALL
2443
v3dv_SetEvent(VkDevice _device, VkEvent _event)
2444
{
2445
V3DV_FROM_HANDLE(v3dv_event, event, _event);
2446
p_atomic_set(&event->state, 1);
2447
return VK_SUCCESS;
2448
}
2449
2450
VKAPI_ATTR VkResult VKAPI_CALL
2451
v3dv_ResetEvent(VkDevice _device, VkEvent _event)
2452
{
2453
V3DV_FROM_HANDLE(v3dv_event, event, _event);
2454
p_atomic_set(&event->state, 0);
2455
return VK_SUCCESS;
2456
}
2457
2458
VKAPI_ATTR VkResult VKAPI_CALL
2459
v3dv_CreateSampler(VkDevice _device,
2460
const VkSamplerCreateInfo *pCreateInfo,
2461
const VkAllocationCallbacks *pAllocator,
2462
VkSampler *pSampler)
2463
{
2464
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2465
struct v3dv_sampler *sampler;
2466
2467
assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);
2468
2469
sampler = vk_object_zalloc(&device->vk, pAllocator, sizeof(*sampler),
2470
VK_OBJECT_TYPE_SAMPLER);
2471
if (!sampler)
2472
return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
2473
2474
sampler->compare_enable = pCreateInfo->compareEnable;
2475
sampler->unnormalized_coordinates = pCreateInfo->unnormalizedCoordinates;
2476
v3dv_X(device, pack_sampler_state)(sampler, pCreateInfo);
2477
2478
*pSampler = v3dv_sampler_to_handle(sampler);
2479
2480
return VK_SUCCESS;
2481
}
2482
2483
VKAPI_ATTR void VKAPI_CALL
2484
v3dv_DestroySampler(VkDevice _device,
2485
VkSampler _sampler,
2486
const VkAllocationCallbacks *pAllocator)
2487
{
2488
V3DV_FROM_HANDLE(v3dv_device, device, _device);
2489
V3DV_FROM_HANDLE(v3dv_sampler, sampler, _sampler);
2490
2491
if (!sampler)
2492
return;
2493
2494
vk_object_free(&device->vk, pAllocator, sampler);
2495
}
2496
2497
VKAPI_ATTR void VKAPI_CALL
2498
v3dv_GetDeviceMemoryCommitment(VkDevice device,
2499
VkDeviceMemory memory,
2500
VkDeviceSize *pCommittedMemoryInBytes)
2501
{
2502
*pCommittedMemoryInBytes = 0;
2503
}
2504
2505
VKAPI_ATTR void VKAPI_CALL
2506
v3dv_GetImageSparseMemoryRequirements(
2507
VkDevice device,
2508
VkImage image,
2509
uint32_t *pSparseMemoryRequirementCount,
2510
VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
2511
{
2512
*pSparseMemoryRequirementCount = 0;
2513
}
2514
2515
VKAPI_ATTR void VKAPI_CALL
2516
v3dv_GetImageSparseMemoryRequirements2(
2517
VkDevice device,
2518
const VkImageSparseMemoryRequirementsInfo2 *pInfo,
2519
uint32_t *pSparseMemoryRequirementCount,
2520
VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
2521
{
2522
*pSparseMemoryRequirementCount = 0;
2523
}
2524
2525
/* vk_icd.h does not declare this function, so we declare it here to
2526
* suppress Wmissing-prototypes.
2527
*/
2528
PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
2529
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion);
2530
2531
PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
2532
vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion)
2533
{
2534
/* For the full details on loader interface versioning, see
2535
* <https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/blob/master/loader/LoaderAndLayerInterface.md>.
2536
* What follows is a condensed summary, to help you navigate the large and
2537
* confusing official doc.
2538
*
2539
* - Loader interface v0 is incompatible with later versions. We don't
2540
* support it.
2541
*
2542
* - In loader interface v1:
2543
* - The first ICD entrypoint called by the loader is
2544
* vk_icdGetInstanceProcAddr(). The ICD must statically expose this
2545
* entrypoint.
2546
* - The ICD must statically expose no other Vulkan symbol unless it is
2547
* linked with -Bsymbolic.
2548
* - Each dispatchable Vulkan handle created by the ICD must be
2549
* a pointer to a struct whose first member is VK_LOADER_DATA. The
2550
* ICD must initialize VK_LOADER_DATA.loadMagic to ICD_LOADER_MAGIC.
2551
* - The loader implements vkCreate{PLATFORM}SurfaceKHR() and
2552
* vkDestroySurfaceKHR(). The ICD must be capable of working with
2553
* such loader-managed surfaces.
2554
*
2555
* - Loader interface v2 differs from v1 in:
2556
* - The first ICD entrypoint called by the loader is
2557
* vk_icdNegotiateLoaderICDInterfaceVersion(). The ICD must
2558
* statically expose this entrypoint.
2559
*
2560
* - Loader interface v3 differs from v2 in:
2561
* - The ICD must implement vkCreate{PLATFORM}SurfaceKHR(),
2562
* vkDestroySurfaceKHR(), and other API which uses VKSurfaceKHR,
2563
* because the loader no longer does so.
2564
*
2565
* - Loader interface v4 differs from v3 in:
2566
* - The ICD must implement vk_icdGetPhysicalDeviceProcAddr().
2567
*/
2568
*pSupportedVersion = MIN2(*pSupportedVersion, 3u);
2569
return VK_SUCCESS;
2570
}
2571
2572