GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/vulkan/wsi/wsi_common_wayland.c
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <wayland-client.h>

#include <assert.h>
#include <stdlib.h>
#include <stdio.h>
#include <unistd.h>
#include <errno.h>
#include <string.h>
#include <pthread.h>
#include <poll.h>
#include <sys/mman.h>

#include "drm-uapi/drm_fourcc.h"

#include "vk_util.h"
#include "wsi_common_private.h"
#include "wsi_common_wayland.h"
#include "wayland-drm-client-protocol.h"
#include "linux-dmabuf-unstable-v1-client-protocol.h"

#include <util/compiler.h>
#include <util/hash_table.h>
#include <util/timespec.h>
#include <util/u_vector.h>
#include <util/anon_file.h>
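
/* Wayland WSI backend: implements the surface queries (formats, present
 * modes, capabilities) and swapchain creation/presentation that Vulkan
 * drivers in Mesa use on Wayland.  Buffers are shared with the compositor
 * through zwp_linux_dmabuf_v1 when available, the legacy wl_drm protocol
 * otherwise, or wl_shm when the driver runs in software (sw) mode.
 */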

struct wsi_wayland;

struct wsi_wl_display_swrast {
   struct wl_shm *wl_shm;
   struct u_vector formats;
};

struct wsi_wl_display_drm {
   struct wl_drm *wl_drm;
   struct u_vector formats;
   uint32_t capabilities;
};

struct wsi_wl_display_dmabuf {
   struct zwp_linux_dmabuf_v1 *wl_dmabuf;
   struct u_vector formats;
   struct {
      struct u_vector argb8888;
      struct u_vector xrgb8888;
   } modifiers;
};

struct wsi_wl_display {
   /* The real wl_display */
   struct wl_display *wl_display;
   /* Actually a proxy wrapper around the event queue */
   struct wl_display *wl_display_wrapper;
   struct wl_event_queue *queue;

   struct wsi_wl_display_swrast swrast;
   struct wsi_wl_display_drm drm;
   struct wsi_wl_display_dmabuf dmabuf;

   struct wsi_wayland *wsi_wl;

   /* Points to formats in wsi_wl_display_drm or wsi_wl_display_dmabuf */
   struct u_vector *formats;

   /* Only used for displays created by wsi_wl_display_create */
   uint32_t refcount;

   bool sw;
};

struct wsi_wayland {
   struct wsi_interface base;

   struct wsi_device *wsi;

   const VkAllocationCallbacks *alloc;
   VkPhysicalDevice physical_device;
};

static void
wsi_wl_display_add_vk_format(struct wsi_wl_display *display,
                             struct u_vector *formats, VkFormat format)
{
   /* Don't add a format that's already in the list */
   VkFormat *f;
   u_vector_foreach(f, formats)
      if (*f == format)
         return;

   /* Don't add formats that aren't renderable. */
   VkFormatProperties props;

   display->wsi_wl->wsi->GetPhysicalDeviceFormatProperties(display->wsi_wl->physical_device,
                                                           format, &props);
   if (!(props.optimalTilingFeatures & VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT))
      return;

   f = u_vector_add(formats);
   if (f)
      *f = format;
}

static void
wsi_wl_display_add_wl_format(struct wsi_wl_display *display,
                             struct u_vector *formats, uint32_t wl_format)
{
   switch (wl_format) {
#if 0
   /* TODO: These are only available when VK_EXT_4444_formats is enabled, so
    * we probably need to make their use conditional on this extension. */
   case WL_DRM_FORMAT_ARGB4444:
   case WL_DRM_FORMAT_XRGB4444:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT);
      break;
   case WL_DRM_FORMAT_ABGR4444:
   case WL_DRM_FORMAT_XBGR4444:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT);
      break;
#endif

   /* Vulkan _PACKN formats have the same component order as DRM formats
    * on little endian systems, on big endian there exists no analog. */
#if MESA_LITTLE_ENDIAN
   case WL_DRM_FORMAT_RGBA4444:
   case WL_DRM_FORMAT_RGBX4444:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R4G4B4A4_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_BGRA4444:
   case WL_DRM_FORMAT_BGRX4444:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B4G4R4A4_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_RGB565:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R5G6B5_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_BGR565:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B5G6R5_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_ARGB1555:
   case WL_DRM_FORMAT_XRGB1555:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_A1R5G5B5_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_RGBA5551:
   case WL_DRM_FORMAT_RGBX5551:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R5G5B5A1_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_BGRA5551:
   case WL_DRM_FORMAT_BGRX5551:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B5G5R5A1_UNORM_PACK16);
      break;
   case WL_DRM_FORMAT_ARGB2101010:
   case WL_DRM_FORMAT_XRGB2101010:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_A2R10G10B10_UNORM_PACK32);
      break;
   case WL_DRM_FORMAT_ABGR2101010:
   case WL_DRM_FORMAT_XBGR2101010:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_A2B10G10R10_UNORM_PACK32);
      break;
#endif

   /* Non-packed 8-bit formats have an inverted channel order compared to the
    * little endian DRM formats, because the DRM channel ordering is high->low
    * but the vulkan channel ordering is in memory byte order
    *
    * For all UNORM formats which have a SRGB variant, we must support both if
    * we can. SRGB in this context means that rendering to it will result in a
    * linear -> nonlinear SRGB colorspace conversion before the data is stored.
    * The inverse function is applied when sampling from SRGB images.
    * From Wayland's perspective nothing changes, the difference is just how
    * Vulkan interprets the pixel data. */
   case WL_DRM_FORMAT_XBGR8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8_SRGB);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8_UNORM);
      FALLTHROUGH;
   case WL_DRM_FORMAT_ABGR8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8A8_SRGB);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8A8_UNORM);
      break;
   case WL_DRM_FORMAT_XRGB8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8_SRGB);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8_UNORM);
      FALLTHROUGH;
   case WL_DRM_FORMAT_ARGB8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8A8_SRGB);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8A8_UNORM);
      break;
   }
}
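
/* Example of the mapping above: DRM fourccs name channels from high to low
 * bit within a little-endian 32-bit word, so WL_DRM_FORMAT_ARGB8888 is laid
 * out in memory as B, G, R, A bytes.  That byte order is exactly what the
 * non-packed VK_FORMAT_B8G8R8A8_UNORM/_SRGB formats describe, which is why
 * ARGB8888 maps to the B8G8R8A8 Vulkan formats rather than to an
 * "A8R8G8B8"-style name.
 */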

static void
wsi_wl_display_add_wl_shm_format(struct wsi_wl_display *display,
                                 struct u_vector *formats,
                                 uint32_t wl_shm_format)
{
   switch (wl_shm_format) {
   case WL_SHM_FORMAT_XBGR8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8_SRGB);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8_UNORM);
      FALLTHROUGH;
   case WL_SHM_FORMAT_ABGR8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8A8_SRGB);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_R8G8B8A8_UNORM);
      break;
   case WL_SHM_FORMAT_XRGB8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8_SRGB);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8_UNORM);
      FALLTHROUGH;
   case WL_SHM_FORMAT_ARGB8888:
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8A8_SRGB);
      wsi_wl_display_add_vk_format(display, formats,
                                   VK_FORMAT_B8G8R8A8_UNORM);
      break;
   }
}


static void
drm_handle_device(void *data, struct wl_drm *drm, const char *name)
{
}

static uint32_t
wl_drm_format_for_vk_format(VkFormat vk_format, bool alpha)
{
   switch (vk_format) {
#if 0
   case VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT:
      return alpha ? WL_DRM_FORMAT_ARGB4444 : WL_DRM_FORMAT_XRGB4444;
   case VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT:
      return alpha ? WL_DRM_FORMAT_ABGR4444 : WL_DRM_FORMAT_XBGR4444;
#endif
#if MESA_LITTLE_ENDIAN
   case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
      return alpha ? WL_DRM_FORMAT_RGBA4444 : WL_DRM_FORMAT_RGBX4444;
   case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
      return alpha ? WL_DRM_FORMAT_BGRA4444 : WL_DRM_FORMAT_BGRX4444;
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
      return WL_DRM_FORMAT_RGB565;
   case VK_FORMAT_B5G6R5_UNORM_PACK16:
      return WL_DRM_FORMAT_BGR565;
   case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
      return alpha ? WL_DRM_FORMAT_ARGB1555 : WL_DRM_FORMAT_XRGB1555;
   case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
      return alpha ? WL_DRM_FORMAT_RGBA5551 : WL_DRM_FORMAT_RGBX5551;
   case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
      return alpha ? WL_DRM_FORMAT_BGRA5551 : WL_DRM_FORMAT_BGRX5551;
   case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
      return alpha ? WL_DRM_FORMAT_ARGB2101010 : WL_DRM_FORMAT_XRGB2101010;
   case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
      return alpha ? WL_DRM_FORMAT_ABGR2101010 : WL_DRM_FORMAT_XBGR2101010;
#endif
   case VK_FORMAT_R8G8B8_UNORM:
   case VK_FORMAT_R8G8B8_SRGB:
      return WL_DRM_FORMAT_XBGR8888;
   case VK_FORMAT_R8G8B8A8_UNORM:
   case VK_FORMAT_R8G8B8A8_SRGB:
      return alpha ? WL_DRM_FORMAT_ABGR8888 : WL_DRM_FORMAT_XBGR8888;
   case VK_FORMAT_B8G8R8_UNORM:
   case VK_FORMAT_B8G8R8_SRGB:
      return WL_DRM_FORMAT_BGRX8888;
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_B8G8R8A8_SRGB:
      return alpha ? WL_DRM_FORMAT_ARGB8888 : WL_DRM_FORMAT_XRGB8888;

   default:
      assert(!"Unsupported Vulkan format");
      return 0;
   }
}

static uint32_t
wl_shm_format_for_vk_format(VkFormat vk_format, bool alpha)
{
   switch (vk_format) {
   case VK_FORMAT_R8G8B8A8_UNORM:
   case VK_FORMAT_R8G8B8A8_SRGB:
      return alpha ? WL_SHM_FORMAT_ABGR8888 : WL_SHM_FORMAT_XBGR8888;
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_B8G8R8A8_SRGB:
      return alpha ? WL_SHM_FORMAT_ARGB8888 : WL_SHM_FORMAT_XRGB8888;

   default:
      assert(!"Unsupported Vulkan format");
      return 0;
   }
}
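
/* Note: the "alpha" flag in the two helpers above selects between the A and
 * X fourcc variants.  It is derived at swapchain creation time from the
 * requested composite alpha (see wsi_wl_surface_create_swapchain below), so
 * opaque swapchains hand X-variant buffers to the compositor.
 */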

static void
drm_handle_format(void *data, struct wl_drm *drm, uint32_t wl_format)
{
   struct wsi_wl_display *display = data;
   if (display->drm.formats.element_size == 0)
      return;

   wsi_wl_display_add_wl_format(display, &display->drm.formats, wl_format);
}

static void
drm_handle_authenticated(void *data, struct wl_drm *drm)
{
}

static void
drm_handle_capabilities(void *data, struct wl_drm *drm, uint32_t capabilities)
{
   struct wsi_wl_display *display = data;

   display->drm.capabilities = capabilities;
}

static const struct wl_drm_listener drm_listener = {
   drm_handle_device,
   drm_handle_format,
   drm_handle_authenticated,
   drm_handle_capabilities,
};

static void
dmabuf_handle_format(void *data, struct zwp_linux_dmabuf_v1 *dmabuf,
                     uint32_t format)
{
   /* Formats are implicitly advertised by the modifier event, so we ignore
    * them here. */
}

static void
dmabuf_handle_modifier(void *data, struct zwp_linux_dmabuf_v1 *dmabuf,
                       uint32_t format, uint32_t modifier_hi,
                       uint32_t modifier_lo)
{
   struct wsi_wl_display *display = data;
   struct u_vector *modifiers;
   uint64_t *mod = NULL;

   /* If we're not fetching formats, don't fetch modifiers either. */
   if (display->dmabuf.formats.element_size == 0)
      return;

   switch (format) {
   case WL_DRM_FORMAT_ARGB8888:
      modifiers = &display->dmabuf.modifiers.argb8888;
      break;
   case WL_DRM_FORMAT_XRGB8888:
      modifiers = &display->dmabuf.modifiers.xrgb8888;
      break;
   default:
      return; /* Unsupported format */
   }

   wsi_wl_display_add_wl_format(display, &display->dmabuf.formats, format);

   if (modifier_hi == (DRM_FORMAT_MOD_INVALID >> 32) &&
       modifier_lo == (DRM_FORMAT_MOD_INVALID & 0xffffffff))
      return;

   mod = u_vector_add(modifiers);
   if (!mod)
      return;

   *mod = (uint64_t) modifier_hi << 32;
   *mod |= (uint64_t) (modifier_lo & 0xffffffff);
}

static const struct zwp_linux_dmabuf_v1_listener dmabuf_listener = {
   dmabuf_handle_format,
   dmabuf_handle_modifier,
};
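
/* The protocol splits each 64-bit DRM format modifier into two 32-bit
 * halves; dmabuf_handle_modifier() above reassembles them as
 * (hi << 32) | lo.  A hi/lo pair equal to DRM_FORMAT_MOD_INVALID means the
 * compositor only supports implicit modifiers for that format, so nothing
 * is appended to the modifier list in that case.
 */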

static void
shm_handle_format(void *data, struct wl_shm *shm, uint32_t format)
{
   struct wsi_wl_display *display = data;
   if (display->swrast.formats.element_size == 0)
      return;

   wsi_wl_display_add_wl_shm_format(display, &display->swrast.formats, format);
}

static const struct wl_shm_listener shm_listener = {
   .format = shm_handle_format
};

static void
registry_handle_global(void *data, struct wl_registry *registry,
                       uint32_t name, const char *interface, uint32_t version)
{
   struct wsi_wl_display *display = data;

   if (display->sw) {
      if (strcmp(interface, "wl_shm") == 0) {
         display->swrast.wl_shm = wl_registry_bind(registry, name, &wl_shm_interface, 1);
         wl_shm_add_listener(display->swrast.wl_shm, &shm_listener, display);
      }
      return;
   }

   if (strcmp(interface, "wl_drm") == 0) {
      assert(display->drm.wl_drm == NULL);

      assert(version >= 2);
      display->drm.wl_drm =
         wl_registry_bind(registry, name, &wl_drm_interface, 2);
      wl_drm_add_listener(display->drm.wl_drm, &drm_listener, display);
   } else if (strcmp(interface, "zwp_linux_dmabuf_v1") == 0 && version >= 3) {
      display->dmabuf.wl_dmabuf =
         wl_registry_bind(registry, name, &zwp_linux_dmabuf_v1_interface, 3);
      zwp_linux_dmabuf_v1_add_listener(display->dmabuf.wl_dmabuf,
                                       &dmabuf_listener, display);
   }
}

static void
registry_handle_global_remove(void *data, struct wl_registry *registry,
                              uint32_t name)
{ /* No-op */ }

static const struct wl_registry_listener registry_listener = {
   registry_handle_global,
   registry_handle_global_remove
};

static void
wsi_wl_display_finish(struct wsi_wl_display *display)
{
   assert(display->refcount == 0);

   u_vector_finish(&display->swrast.formats);
   u_vector_finish(&display->drm.formats);
   u_vector_finish(&display->dmabuf.formats);
   u_vector_finish(&display->dmabuf.modifiers.argb8888);
   u_vector_finish(&display->dmabuf.modifiers.xrgb8888);
   if (display->swrast.wl_shm)
      wl_shm_destroy(display->swrast.wl_shm);
   if (display->drm.wl_drm)
      wl_drm_destroy(display->drm.wl_drm);
   if (display->dmabuf.wl_dmabuf)
      zwp_linux_dmabuf_v1_destroy(display->dmabuf.wl_dmabuf);
   if (display->wl_display_wrapper)
      wl_proxy_wrapper_destroy(display->wl_display_wrapper);
   if (display->queue)
      wl_event_queue_destroy(display->queue);
}

static VkResult
wsi_wl_display_init(struct wsi_wayland *wsi_wl,
                    struct wsi_wl_display *display,
                    struct wl_display *wl_display,
                    bool get_format_list, bool sw)
{
   VkResult result = VK_SUCCESS;
   memset(display, 0, sizeof(*display));

   display->wsi_wl = wsi_wl;
   display->wl_display = wl_display;
   display->sw = sw;

   if (get_format_list) {
      if (!u_vector_init(&display->swrast.formats, sizeof(VkFormat), 8) ||
          !u_vector_init(&display->drm.formats, sizeof(VkFormat), 8) ||
          !u_vector_init(&display->dmabuf.formats, sizeof(VkFormat), 8) ||
          !u_vector_init(&display->dmabuf.modifiers.argb8888,
                         sizeof(uint64_t), 32) ||
          !u_vector_init(&display->dmabuf.modifiers.xrgb8888,
                         sizeof(uint64_t), 32)) {
         result = VK_ERROR_OUT_OF_HOST_MEMORY;
         goto fail;
      }
   }

   display->queue = wl_display_create_queue(wl_display);
   if (!display->queue) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   display->wl_display_wrapper = wl_proxy_create_wrapper(wl_display);
   if (!display->wl_display_wrapper) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wl_proxy_set_queue((struct wl_proxy *) display->wl_display_wrapper,
                      display->queue);

   struct wl_registry *registry =
      wl_display_get_registry(display->wl_display_wrapper);
   if (!registry) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wl_registry_add_listener(registry, &registry_listener, display);

   /* Round-trip to get wl_drms and zwp_linux_dmabuf_v1 globals */
   wl_display_roundtrip_queue(display->wl_display, display->queue);

   /* Round-trip again to get formats, modifiers and capabilities */
   if (display->drm.wl_drm || display->dmabuf.wl_dmabuf || display->swrast.wl_shm)
      wl_display_roundtrip_queue(display->wl_display, display->queue);

   if (wsi_wl->wsi->force_bgra8_unorm_first) {
      /* Find BGRA8_UNORM in the list and swap it to the first position if we
       * can find it. Some apps get confused if SRGB is first in the list.
       */
      VkFormat *first_fmt = u_vector_head(display->formats);
      VkFormat *iter_fmt;
      u_vector_foreach(iter_fmt, display->formats) {
         if (*iter_fmt == VK_FORMAT_B8G8R8A8_UNORM) {
            *iter_fmt = *first_fmt;
            *first_fmt = VK_FORMAT_B8G8R8A8_UNORM;
            break;
         }
      }
   }

   /* Prefer the linux-dmabuf protocol if available */
   if (display->sw)
      display->formats = &display->swrast.formats;
   else if (display->dmabuf.wl_dmabuf) {
      display->formats = &display->dmabuf.formats;
   } else if (display->drm.wl_drm &&
              (display->drm.capabilities & WL_DRM_CAPABILITY_PRIME)) {
      /* We need prime support for wl_drm */
      display->formats = &display->drm.formats;
   }

   if (!display->formats) {
      result = VK_ERROR_SURFACE_LOST_KHR;
      goto fail_registry;
   }

   /* We don't need this anymore */
   wl_registry_destroy(registry);

   display->refcount = 0;

   return VK_SUCCESS;

fail_registry:
   if (registry)
      wl_registry_destroy(registry);

fail:
   wsi_wl_display_finish(display);
   return result;
}

static VkResult
wsi_wl_display_create(struct wsi_wayland *wsi, struct wl_display *wl_display,
                      bool sw,
                      struct wsi_wl_display **display_out)
{
   struct wsi_wl_display *display =
      vk_alloc(wsi->alloc, sizeof(*display), 8,
               VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!display)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   VkResult result = wsi_wl_display_init(wsi, display, wl_display, true,
                                         sw);
   if (result != VK_SUCCESS) {
      vk_free(wsi->alloc, display);
      return result;
   }

   display->refcount++;
   *display_out = display;

   return result;
}

static struct wsi_wl_display *
wsi_wl_display_ref(struct wsi_wl_display *display)
{
   display->refcount++;
   return display;
}

static void
wsi_wl_display_unref(struct wsi_wl_display *display)
{
   if (display->refcount-- > 1)
      return;

   struct wsi_wayland *wsi = display->wsi_wl;
   wsi_wl_display_finish(display);
   vk_free(wsi->alloc, display);
}
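
/* Displays come in two flavours: heap-allocated ones from
 * wsi_wl_display_create(), which are reference-counted and shared between
 * swapchains via wsi_wl_display_ref()/unref(), and short-lived stack
 * instances initialized directly with wsi_wl_display_init() for one-off
 * surface queries, which are torn down with wsi_wl_display_finish() and
 * never touch the refcount.
 */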

VkBool32
wsi_wl_get_presentation_support(struct wsi_device *wsi_device,
                                struct wl_display *wl_display)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   VkResult ret = wsi_wl_display_init(wsi, &display, wl_display, false,
                                      wsi_device->sw);
   if (ret == VK_SUCCESS)
      wsi_wl_display_finish(&display);

   return ret == VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_support(VkIcdSurfaceBase *surface,
                           struct wsi_device *wsi_device,
                           uint32_t queueFamilyIndex,
                           VkBool32* pSupported)
{
   *pSupported = true;

   return VK_SUCCESS;
}

static const VkPresentModeKHR present_modes[] = {
   VK_PRESENT_MODE_MAILBOX_KHR,
   VK_PRESENT_MODE_FIFO_KHR,
};
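
/* Only MAILBOX and FIFO are advertised.  MAILBOX is implemented by simply
 * not throttling on frame callbacks, while FIFO waits for the compositor's
 * frame callback before the next present (see queue_present below); the
 * core Wayland protocol used here offers no immediate/tearing path.
 */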

static VkResult
wsi_wl_surface_get_capabilities(VkIcdSurfaceBase *surface,
                                struct wsi_device *wsi_device,
                                VkSurfaceCapabilitiesKHR* caps)
{
   /* For true mailbox mode, we need at least 4 images:
    * 1) One to scan out from
    * 2) One to have queued for scan-out
    * 3) One to be currently held by the Wayland compositor
    * 4) One to render to
    */
   caps->minImageCount = 4;
   /* There is no real maximum */
   caps->maxImageCount = 0;

   caps->currentExtent = (VkExtent2D) { UINT32_MAX, UINT32_MAX };
   caps->minImageExtent = (VkExtent2D) { 1, 1 };
   caps->maxImageExtent = (VkExtent2D) {
      wsi_device->maxImageDimension2D,
      wsi_device->maxImageDimension2D,
   };

   caps->supportedTransforms = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->currentTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
   caps->maxImageArrayLayers = 1;

   caps->supportedCompositeAlpha =
      VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR |
      VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   caps->supportedUsageFlags =
      VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
      VK_IMAGE_USAGE_SAMPLED_BIT |
      VK_IMAGE_USAGE_TRANSFER_DST_BIT |
      VK_IMAGE_USAGE_STORAGE_BIT |
      VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_capabilities2(VkIcdSurfaceBase *surface,
                                 struct wsi_device *wsi_device,
                                 const void *info_next,
                                 VkSurfaceCapabilities2KHR* caps)
{
   assert(caps->sType == VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR);

   VkResult result =
      wsi_wl_surface_get_capabilities(surface, wsi_device,
                                      &caps->surfaceCapabilities);

   vk_foreach_struct(ext, caps->pNext) {
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR: {
         VkSurfaceProtectedCapabilitiesKHR *protected = (void *)ext;
         protected->supportsProtected = VK_FALSE;
         break;
      }

      default:
         /* Ignored */
         break;
      }
   }

   return result;
}

static VkResult
wsi_wl_surface_get_formats(VkIcdSurfaceBase *icd_surface,
                           struct wsi_device *wsi_device,
                           uint32_t* pSurfaceFormatCount,
                           VkSurfaceFormatKHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true,
                           wsi_device->sw))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE(out, pSurfaceFormats, pSurfaceFormatCount);

   VkFormat *disp_fmt;
   u_vector_foreach(disp_fmt, display.formats) {
      vk_outarray_append(&out, out_fmt) {
         out_fmt->format = *disp_fmt;
         out_fmt->colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}

static VkResult
wsi_wl_surface_get_formats2(VkIcdSurfaceBase *icd_surface,
                            struct wsi_device *wsi_device,
                            const void *info_next,
                            uint32_t* pSurfaceFormatCount,
                            VkSurfaceFormat2KHR* pSurfaceFormats)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];

   struct wsi_wl_display display;
   if (wsi_wl_display_init(wsi, &display, surface->display, true,
                           wsi_device->sw))
      return VK_ERROR_SURFACE_LOST_KHR;

   VK_OUTARRAY_MAKE(out, pSurfaceFormats, pSurfaceFormatCount);

   VkFormat *disp_fmt;
   u_vector_foreach(disp_fmt, display.formats) {
      vk_outarray_append(&out, out_fmt) {
         out_fmt->surfaceFormat.format = *disp_fmt;
         out_fmt->surfaceFormat.colorSpace = VK_COLORSPACE_SRGB_NONLINEAR_KHR;
      }
   }

   wsi_wl_display_finish(&display);

   return vk_outarray_status(&out);
}

static VkResult
wsi_wl_surface_get_present_modes(VkIcdSurfaceBase *surface,
                                 uint32_t* pPresentModeCount,
                                 VkPresentModeKHR* pPresentModes)
{
   if (pPresentModes == NULL) {
      *pPresentModeCount = ARRAY_SIZE(present_modes);
      return VK_SUCCESS;
   }

   *pPresentModeCount = MIN2(*pPresentModeCount, ARRAY_SIZE(present_modes));
   typed_memcpy(pPresentModes, present_modes, *pPresentModeCount);

   if (*pPresentModeCount < ARRAY_SIZE(present_modes))
      return VK_INCOMPLETE;
   else
      return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_get_present_rectangles(VkIcdSurfaceBase *surface,
                                      struct wsi_device *wsi_device,
                                      uint32_t* pRectCount,
                                      VkRect2D* pRects)
{
   VK_OUTARRAY_MAKE(out, pRects, pRectCount);

   vk_outarray_append(&out, rect) {
      /* We don't know a size so just return the usual "I don't know." */
      *rect = (VkRect2D) {
         .offset = { 0, 0 },
         .extent = { UINT32_MAX, UINT32_MAX },
      };
   }

   return vk_outarray_status(&out);
}

VkResult wsi_create_wl_surface(const VkAllocationCallbacks *pAllocator,
                               const VkWaylandSurfaceCreateInfoKHR *pCreateInfo,
                               VkSurfaceKHR *pSurface)
{
   VkIcdSurfaceWayland *surface;

   surface = vk_alloc(pAllocator, sizeof *surface, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (surface == NULL)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   surface->base.platform = VK_ICD_WSI_PLATFORM_WAYLAND;
   surface->display = pCreateInfo->display;
   surface->surface = pCreateInfo->surface;

   *pSurface = VkIcdSurfaceBase_to_handle(&surface->base);

   return VK_SUCCESS;
}

struct wsi_wl_image {
   struct wsi_image base;
   struct wl_buffer *buffer;
   bool busy;
   void *data_ptr;
   uint32_t data_size;
};

struct wsi_wl_swapchain {
   struct wsi_swapchain base;

   struct wsi_wl_display *display;

   struct wl_surface *surface;

   struct wl_callback *frame;

   VkExtent2D extent;
   VkFormat vk_format;
   uint32_t drm_format;
   uint32_t shm_format;

   uint32_t num_drm_modifiers;
   const uint64_t *drm_modifiers;

   VkPresentModeKHR present_mode;
   bool fifo_ready;

   struct wsi_wl_image images[0];
};
VK_DEFINE_NONDISP_HANDLE_CASTS(wsi_wl_swapchain, base.base, VkSwapchainKHR,
                               VK_OBJECT_TYPE_SWAPCHAIN_KHR)

static struct wsi_image *
wsi_wl_swapchain_get_wsi_image(struct wsi_swapchain *wsi_chain,
                               uint32_t image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
   return &chain->images[image_index].base;
}

static VkResult
wsi_wl_swapchain_acquire_next_image(struct wsi_swapchain *wsi_chain,
                                    const VkAcquireNextImageInfoKHR *info,
                                    uint32_t *image_index)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;
   struct timespec start_time, end_time;
   struct timespec rel_timeout;
   int wl_fd = wl_display_get_fd(chain->display->wl_display);

   timespec_from_nsec(&rel_timeout, info->timeout);

   clock_gettime(CLOCK_MONOTONIC, &start_time);
   timespec_add(&end_time, &rel_timeout, &start_time);

   while (1) {
      /* Try to dispatch potential events. */
      int ret = wl_display_dispatch_queue_pending(chain->display->wl_display,
                                                  chain->display->queue);
      if (ret < 0)
         return VK_ERROR_OUT_OF_DATE_KHR;

      /* Try to find a free image. */
      for (uint32_t i = 0; i < chain->base.image_count; i++) {
         if (!chain->images[i].busy) {
            /* We found a non-busy image */
            *image_index = i;
            chain->images[i].busy = true;
            return VK_SUCCESS;
         }
      }

      /* Check for timeout. */
      struct timespec current_time;
      clock_gettime(CLOCK_MONOTONIC, &current_time);
      if (timespec_after(&current_time, &end_time))
         return VK_NOT_READY;

      /* Try to read events from the server. */
      ret = wl_display_prepare_read_queue(chain->display->wl_display,
                                          chain->display->queue);
      if (ret < 0) {
         /* Another thread might have read events for our queue already. Go
          * back to dispatch them.
          */
         if (errno == EAGAIN)
            continue;
         return VK_ERROR_OUT_OF_DATE_KHR;
      }

      struct pollfd pollfd = {
         .fd = wl_fd,
         .events = POLLIN
      };
      timespec_sub(&rel_timeout, &end_time, &current_time);
      ret = ppoll(&pollfd, 1, &rel_timeout, NULL);
      if (ret <= 0) {
         int lerrno = errno;
         wl_display_cancel_read(chain->display->wl_display);
         if (ret < 0) {
            /* If ppoll() was interrupted, try again. */
            if (lerrno == EINTR || lerrno == EAGAIN)
               continue;
            return VK_ERROR_OUT_OF_DATE_KHR;
         }
         assert(ret == 0);
         continue;
      }

      ret = wl_display_read_events(chain->display->wl_display);
      if (ret < 0)
         return VK_ERROR_OUT_OF_DATE_KHR;
   }
}
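
/* The loop above follows the standard libwayland pattern for reading events
 * from a shared connection: dispatch anything already queued, then
 * wl_display_prepare_read_queue() -> ppoll() on the display fd ->
 * wl_display_read_events(), cancelling the read when the poll fails.  This
 * keeps multiple threads (and multiple swapchains) from stealing each
 * other's events while we wait for a wl_buffer release.
 */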

static void
frame_handle_done(void *data, struct wl_callback *callback, uint32_t serial)
{
   struct wsi_wl_swapchain *chain = data;

   chain->frame = NULL;
   chain->fifo_ready = true;

   wl_callback_destroy(callback);
}

static const struct wl_callback_listener frame_listener = {
   frame_handle_done,
};

static VkResult
wsi_wl_swapchain_queue_present(struct wsi_swapchain *wsi_chain,
                               uint32_t image_index,
                               const VkPresentRegionKHR *damage)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   if (chain->display->sw) {
      struct wsi_wl_image *image = &chain->images[image_index];
      void *dptr = image->data_ptr;
      void *sptr;
      chain->base.wsi->MapMemory(chain->base.device,
                                 image->base.memory,
                                 0, 0, 0, &sptr);

      for (unsigned r = 0; r < chain->extent.height; r++) {
         memcpy(dptr, sptr, image->base.row_pitches[0]);
         dptr += image->base.row_pitches[0];
         sptr += image->base.row_pitches[0];
      }
      chain->base.wsi->UnmapMemory(chain->base.device,
                                   image->base.memory);

   }
   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      while (!chain->fifo_ready) {
         int ret = wl_display_dispatch_queue(chain->display->wl_display,
                                             chain->display->queue);
         if (ret < 0)
            return VK_ERROR_OUT_OF_DATE_KHR;
      }
   }

   assert(image_index < chain->base.image_count);
   wl_surface_attach(chain->surface, chain->images[image_index].buffer, 0, 0);

   if (wl_surface_get_version(chain->surface) >= 4 && damage &&
       damage->pRectangles && damage->rectangleCount > 0) {
      for (unsigned i = 0; i < damage->rectangleCount; i++) {
         const VkRectLayerKHR *rect = &damage->pRectangles[i];
         assert(rect->layer == 0);
         wl_surface_damage_buffer(chain->surface,
                                  rect->offset.x, rect->offset.y,
                                  rect->extent.width, rect->extent.height);
      }
   } else {
      wl_surface_damage(chain->surface, 0, 0, INT32_MAX, INT32_MAX);
   }

   if (chain->base.present_mode == VK_PRESENT_MODE_FIFO_KHR) {
      chain->frame = wl_surface_frame(chain->surface);
      wl_callback_add_listener(chain->frame, &frame_listener, chain);
      chain->fifo_ready = false;
   }

   chain->images[image_index].busy = true;
   wl_surface_commit(chain->surface);
   wl_display_flush(chain->display->wl_display);

   return VK_SUCCESS;
}
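
/* FIFO presentation is throttled with wl_surface frame callbacks: before a
 * FIFO present we dispatch events until frame_handle_done() has fired for
 * the previous frame, and after attaching the new buffer we request the
 * next frame callback and clear fifo_ready again.  MAILBOX skips this wait
 * entirely and relies on wl_buffer release events alone.
 */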

static void
buffer_handle_release(void *data, struct wl_buffer *buffer)
{
   struct wsi_wl_image *image = data;

   assert(image->buffer == buffer);

   image->busy = false;
}

static const struct wl_buffer_listener buffer_listener = {
   buffer_handle_release,
};

static VkResult
wsi_wl_image_init(struct wsi_wl_swapchain *chain,
                  struct wsi_wl_image *image,
                  const VkSwapchainCreateInfoKHR *pCreateInfo,
                  const VkAllocationCallbacks* pAllocator)
{
   struct wsi_wl_display *display = chain->display;
   VkResult result;

   result = wsi_create_native_image(&chain->base, pCreateInfo,
                                    chain->num_drm_modifiers > 0 ? 1 : 0,
                                    &chain->num_drm_modifiers,
                                    &chain->drm_modifiers, &image->base);

   if (result != VK_SUCCESS)
      return result;

   if (display->sw) {
      int fd, stride;

      stride = image->base.row_pitches[0];
      image->data_size = stride * chain->extent.height;

      /* Create a shareable buffer */
      fd = os_create_anonymous_file(image->data_size, NULL);
      if (fd < 0)
         goto fail_image;

      image->data_ptr = mmap(NULL, image->data_size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
      if (image->data_ptr == MAP_FAILED) {
         close(fd);
         goto fail_image;
      }
      /* Share it in a wl_buffer */
      struct wl_shm_pool *pool = wl_shm_create_pool(display->swrast.wl_shm, fd, image->data_size);
      wl_proxy_set_queue((struct wl_proxy *)pool, display->queue);
      image->buffer = wl_shm_pool_create_buffer(pool, 0, chain->extent.width,
                                                chain->extent.height, stride,
                                                chain->shm_format);
      wl_shm_pool_destroy(pool);
      close(fd);
   } else if (display->dmabuf.wl_dmabuf) {
      struct zwp_linux_buffer_params_v1 *params =
         zwp_linux_dmabuf_v1_create_params(display->dmabuf.wl_dmabuf);
      if (!params)
         goto fail_image;

      for (int i = 0; i < image->base.num_planes; i++) {
         zwp_linux_buffer_params_v1_add(params,
                                        image->base.fds[i],
                                        i,
                                        image->base.offsets[i],
                                        image->base.row_pitches[i],
                                        image->base.drm_modifier >> 32,
                                        image->base.drm_modifier & 0xffffffff);
         close(image->base.fds[i]);
      }

      image->buffer =
         zwp_linux_buffer_params_v1_create_immed(params,
                                                 chain->extent.width,
                                                 chain->extent.height,
                                                 chain->drm_format,
                                                 0);
      zwp_linux_buffer_params_v1_destroy(params);
   } else {
      /* Without passing modifiers, we can't have multi-plane RGB images. */
      assert(image->base.num_planes == 1);
      assert(image->base.drm_modifier == DRM_FORMAT_MOD_INVALID);

      image->buffer =
         wl_drm_create_prime_buffer(display->drm.wl_drm,
                                    image->base.fds[0], /* name */
                                    chain->extent.width,
                                    chain->extent.height,
                                    chain->drm_format,
                                    image->base.offsets[0],
                                    image->base.row_pitches[0],
                                    0, 0, 0, 0 /* unused */);
      close(image->base.fds[0]);
   }

   if (!image->buffer)
      goto fail_image;

   wl_buffer_add_listener(image->buffer, &buffer_listener, image);

   return VK_SUCCESS;

fail_image:
   wsi_destroy_image(&chain->base, &image->base);

   return VK_ERROR_OUT_OF_HOST_MEMORY;
}
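
/* wsi_wl_image_init() wraps each swapchain image in a wl_buffer using one of
 * three paths: an anonymous-file-backed wl_shm pool for software rendering,
 * zwp_linux_dmabuf_v1 buffer params (with explicit modifiers when the driver
 * reports them) for the preferred dma-buf path, or a legacy wl_drm prime
 * buffer when only wl_drm with PRIME support is available.
 */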

static VkResult
wsi_wl_swapchain_destroy(struct wsi_swapchain *wsi_chain,
                         const VkAllocationCallbacks *pAllocator)
{
   struct wsi_wl_swapchain *chain = (struct wsi_wl_swapchain *)wsi_chain;

   for (uint32_t i = 0; i < chain->base.image_count; i++) {
      if (chain->images[i].buffer) {
         wl_buffer_destroy(chain->images[i].buffer);
         wsi_destroy_image(&chain->base, &chain->images[i].base);
         if (chain->images[i].data_ptr)
            munmap(chain->images[i].data_ptr, chain->images[i].data_size);
      }
   }

   if (chain->frame)
      wl_callback_destroy(chain->frame);
   if (chain->surface)
      wl_proxy_wrapper_destroy(chain->surface);

   if (chain->display)
      wsi_wl_display_unref(chain->display);

   wsi_swapchain_finish(&chain->base);

   vk_free(pAllocator, chain);

   return VK_SUCCESS;
}

static VkResult
wsi_wl_surface_create_swapchain(VkIcdSurfaceBase *icd_surface,
                                VkDevice device,
                                struct wsi_device *wsi_device,
                                const VkSwapchainCreateInfoKHR* pCreateInfo,
                                const VkAllocationCallbacks* pAllocator,
                                struct wsi_swapchain **swapchain_out)
{
   VkIcdSurfaceWayland *surface = (VkIcdSurfaceWayland *)icd_surface;
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
   struct wsi_wl_swapchain *chain;
   VkResult result;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR);

   int num_images = pCreateInfo->minImageCount;

   size_t size = sizeof(*chain) + num_images * sizeof(chain->images[0]);
   chain = vk_alloc(pAllocator, size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (chain == NULL)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   result = wsi_swapchain_init(wsi_device, &chain->base, device,
                               pCreateInfo, pAllocator);
   if (result != VK_SUCCESS) {
      vk_free(pAllocator, chain);
      return result;
   }

   /* Mark a bunch of stuff as NULL. This way we can just call
    * destroy_swapchain for cleanup.
    */
   for (uint32_t i = 0; i < num_images; i++) {
      chain->images[i].buffer = NULL;
      chain->images[i].data_ptr = NULL;
   }
   chain->surface = NULL;
   chain->frame = NULL;

   bool alpha = pCreateInfo->compositeAlpha ==
                VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR;

   chain->base.destroy = wsi_wl_swapchain_destroy;
   chain->base.get_wsi_image = wsi_wl_swapchain_get_wsi_image;
   chain->base.acquire_next_image = wsi_wl_swapchain_acquire_next_image;
   chain->base.queue_present = wsi_wl_swapchain_queue_present;
   chain->base.present_mode = wsi_swapchain_get_present_mode(wsi_device, pCreateInfo);
   chain->base.image_count = num_images;
   chain->extent = pCreateInfo->imageExtent;
   chain->vk_format = pCreateInfo->imageFormat;
   if (wsi_device->sw)
      chain->shm_format = wl_shm_format_for_vk_format(chain->vk_format, alpha);
   else
      chain->drm_format = wl_drm_format_for_vk_format(chain->vk_format, alpha);

   if (pCreateInfo->oldSwapchain) {
      /* If we have an oldSwapchain parameter, copy the display struct over
       * from the old one so we don't have to fully re-initialize it.
       */
      VK_FROM_HANDLE(wsi_wl_swapchain, old_chain, pCreateInfo->oldSwapchain);
      chain->display = wsi_wl_display_ref(old_chain->display);
   } else {
      chain->display = NULL;
      result = wsi_wl_display_create(wsi, surface->display,
                                     wsi_device->sw, &chain->display);
      if (result != VK_SUCCESS)
         goto fail;
   }

   chain->surface = wl_proxy_create_wrapper(surface->surface);
   if (!chain->surface) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }
   wl_proxy_set_queue((struct wl_proxy *) chain->surface,
                      chain->display->queue);

   chain->num_drm_modifiers = 0;
   chain->drm_modifiers = 0;

   /* Use explicit DRM format modifiers when both the server and the driver
    * support them.
    */
   if (chain->display->dmabuf.wl_dmabuf &&
       chain->base.wsi->supports_modifiers) {
      struct u_vector *modifiers;
      switch (chain->drm_format) {
      case WL_DRM_FORMAT_ARGB8888:
         modifiers = &chain->display->dmabuf.modifiers.argb8888;
         break;
      case WL_DRM_FORMAT_XRGB8888:
         modifiers = &chain->display->dmabuf.modifiers.xrgb8888;
         break;
      default:
         modifiers = NULL;
         break;
      }

      if (modifiers) {
         chain->drm_modifiers = u_vector_tail(modifiers);
         chain->num_drm_modifiers = u_vector_length(modifiers);
      }
   }

   chain->fifo_ready = true;

   for (uint32_t i = 0; i < chain->base.image_count; i++) {
      result = wsi_wl_image_init(chain, &chain->images[i],
                                 pCreateInfo, pAllocator);
      if (result != VK_SUCCESS)
         goto fail;
      chain->images[i].busy = false;
   }

   *swapchain_out = &chain->base;

   return VK_SUCCESS;

fail:
   wsi_wl_swapchain_destroy(&chain->base, pAllocator);

   return result;
}

VkResult
wsi_wl_init_wsi(struct wsi_device *wsi_device,
                const VkAllocationCallbacks *alloc,
                VkPhysicalDevice physical_device)
{
   struct wsi_wayland *wsi;
   VkResult result;

   wsi = vk_alloc(alloc, sizeof(*wsi), 8,
                  VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
   if (!wsi) {
      result = VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   wsi->physical_device = physical_device;
   wsi->alloc = alloc;
   wsi->wsi = wsi_device;

   wsi->base.get_support = wsi_wl_surface_get_support;
   wsi->base.get_capabilities2 = wsi_wl_surface_get_capabilities2;
   wsi->base.get_formats = wsi_wl_surface_get_formats;
   wsi->base.get_formats2 = wsi_wl_surface_get_formats2;
   wsi->base.get_present_modes = wsi_wl_surface_get_present_modes;
   wsi->base.get_present_rectangles = wsi_wl_surface_get_present_rectangles;
   wsi->base.create_swapchain = wsi_wl_surface_create_swapchain;

   wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = &wsi->base;

   return VK_SUCCESS;

fail:
   wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND] = NULL;

   return result;
}

void
wsi_wl_finish_wsi(struct wsi_device *wsi_device,
                  const VkAllocationCallbacks *alloc)
{
   struct wsi_wayland *wsi =
      (struct wsi_wayland *)wsi_device->wsi[VK_ICD_WSI_PLATFORM_WAYLAND];
   if (!wsi)
      return;

   vk_free(alloc, wsi);
}