CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutSign UpSign In
hrydgard

CoCalc provides the best real-time collaborative environment for Jupyter Notebooks, LaTeX documents, and SageMath, scalable from individual users to large groups and classes!

GitHub Repository: hrydgard/ppsspp
Path: blob/master/Common/GPU/Vulkan/VulkanContext.cpp
Views: 1401
1
#define __STDC_LIMIT_MACROS
2
3
#include <cstdlib>
4
#include <cstdint>
5
#include <cstring>
6
#include <iostream>
7
8
#include "Core/Config.h"
9
#include "Common/System/System.h"
10
#include "Common/System/Display.h"
11
#include "Common/Log.h"
12
#include "Common/GPU/Shader.h"
13
#include "Common/GPU/Vulkan/VulkanContext.h"
14
#include "Common/GPU/Vulkan/VulkanDebug.h"
15
#include "Common/StringUtils.h"
16
17
#ifdef USE_CRT_DBG
18
#undef new
19
#endif
20
21
#include "ext/vma/vk_mem_alloc.h"
22
23
24
// Change this to 1, 2, and 3 to fake failures in a few places, so that
25
// we can test our fallback-to-GL code.
26
#define SIMULATE_VULKAN_FAILURE 0
27
28
#include "ext/glslang/SPIRV/GlslangToSpv.h"
29
30
#ifdef USE_CRT_DBG
31
#define new DBG_NEW
32
#endif
33
34
using namespace PPSSPP_VK;

// Global options controlling Vulkan debug/validation logging (consumed by the
// debug-utils callback machinery in VulkanDebug).
VulkanLogOptions g_LogOptions;

// Layer names requested when validation is on (VULKAN_FLAG_VALIDATE).
// Only the unified Khronos layer is active; the per-feature LunarG/Google
// layers below are kept for reference (older Android NDK setups).
static const char * const validationLayers[] = {
	"VK_LAYER_KHRONOS_validation",
	/*
	// For layers included in the Android NDK.
	"VK_LAYER_GOOGLE_threading",
	"VK_LAYER_LUNARG_parameter_validation",
	"VK_LAYER_LUNARG_core_validation",
	"VK_LAYER_LUNARG_image",
	"VK_LAYER_LUNARG_object_tracker",
	"VK_LAYER_LUNARG_swapchain",
	"VK_LAYER_GOOGLE_unique_objects",
	*/
};
51
52
std::string VulkanVendorString(uint32_t vendorId) {
53
switch (vendorId) {
54
case VULKAN_VENDOR_INTEL: return "Intel";
55
case VULKAN_VENDOR_NVIDIA: return "NVIDIA";
56
case VULKAN_VENDOR_AMD: return "AMD";
57
case VULKAN_VENDOR_ARM: return "ARM";
58
case VULKAN_VENDOR_QUALCOMM: return "Qualcomm";
59
case VULKAN_VENDOR_IMGTEC: return "Imagination";
60
case VULKAN_VENDOR_APPLE: return "Apple";
61
case VULKAN_VENDOR_MESA: return "Mesa";
62
default:
63
return StringFromFormat("%08x", vendorId);
64
}
65
}
66
67
// Debug helper: readable name for a VkPresentModeKHR value.
const char *VulkanPresentModeToString(VkPresentModeKHR presentMode) {
	const char *name = "UNKNOWN";
	switch (presentMode) {
	case VK_PRESENT_MODE_IMMEDIATE_KHR:                 name = "IMMEDIATE"; break;
	case VK_PRESENT_MODE_MAILBOX_KHR:                   name = "MAILBOX"; break;
	case VK_PRESENT_MODE_FIFO_KHR:                      name = "FIFO"; break;
	case VK_PRESENT_MODE_FIFO_RELAXED_KHR:              name = "FIFO_RELAXED"; break;
	case VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR:     name = "SHARED_DEMAND_REFRESH_KHR"; break;
	case VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR: name = "SHARED_CONTINUOUS_REFRESH_KHR"; break;
	default: break;
	}
	return name;
}
78
79
// Debug helper: readable name for a VkImageLayout value (common layouts only).
const char *VulkanImageLayoutToString(VkImageLayout imageLayout) {
	const char *name = "OTHER";
	switch (imageLayout) {
	case VK_IMAGE_LAYOUT_UNDEFINED:                        name = "UNDEFINED"; break;
	case VK_IMAGE_LAYOUT_GENERAL:                          name = "GENERAL"; break;
	case VK_IMAGE_LAYOUT_PREINITIALIZED:                   name = "PREINITIALIZED"; break;
	case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:             name = "TRANSFER_SRC_OPTIMAL"; break;
	case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:             name = "TRANSFER_DST_OPTIMAL"; break;
	case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:         name = "SHADER_READ_ONLY_OPTIMAL"; break;
	case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:         name = "COLOR_ATTACHMENT_OPTIMAL"; break;
	case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL: name = "DEPTH_STENCIL_ATTACHMENT_OPTIMAL"; break;
	default: break;
	}
	return name;
}
92
93
// Deliberately empty - real setup happens in CreateInstance()/CreateDevice().
VulkanContext::VulkanContext() {}
96
97
// Creates the VkInstance: picks the API version, gathers layers/extensions,
// optionally enables validation, then enumerates physical devices and caches
// their properties. On failure, sets init_error_ and returns an error code;
// on success the context is ready for ChooseDevice/CreateDevice.
VkResult VulkanContext::CreateInstance(const CreateInfo &info) {
	if (!vkCreateInstance) {
		init_error_ = "Vulkan not loaded - can't create instance";
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	// Check which Vulkan version we should request.
	// We clamp to 1.3 - we don't know about higher versions.
	vulkanInstanceApiVersion_ = VK_API_VERSION_1_0;
	if (vkEnumerateInstanceVersion) {
		vkEnumerateInstanceVersion(&vulkanInstanceApiVersion_);
		vulkanInstanceApiVersion_ &= 0xFFFFF000;  // Remove patch version.
		vulkanInstanceApiVersion_ = std::min(VK_API_VERSION_1_3, vulkanInstanceApiVersion_);
		std::string versionString = FormatAPIVersion(vulkanInstanceApiVersion_);
		INFO_LOG(Log::G3D, "Detected Vulkan API version: %s", versionString.c_str());
	}

	instance_layer_names_.clear();
	device_layer_names_.clear();

	// We can get the list of layers and extensions without an instance so we can use this information
	// to enable the extensions we need that are available.
	GetInstanceLayerProperties();
	GetInstanceLayerExtensionList(nullptr, instance_extension_properties_);

	if (!IsInstanceExtensionAvailable(VK_KHR_SURFACE_EXTENSION_NAME)) {
		// Cannot create a Vulkan display without VK_KHR_SURFACE_EXTENSION.
		init_error_ = "Vulkan not loaded - no surface extension";
		return VK_ERROR_INITIALIZATION_FAILED;
	}
	flags_ = info.flags;

	// List extensions to try to enable. Platform-specific surface extension
	// first; on Windows/Android the platform extension is assumed present,
	// elsewhere we only add what's actually available.
	instance_extensions_enabled_.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
#ifdef _WIN32
	instance_extensions_enabled_.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
#elif defined(__ANDROID__)
	instance_extensions_enabled_.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
#else
#if defined(VK_USE_PLATFORM_XLIB_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_XLIB_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
	}
#endif
//#if defined(VK_USE_PLATFORM_XCB_KHR)
//	instance_extensions_enabled_.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
//#endif
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
	}
#endif
#if defined(VK_USE_PLATFORM_DISPLAY_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_DISPLAY_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_DISPLAY_EXTENSION_NAME);
	}
#endif
#if defined(VK_USE_PLATFORM_METAL_EXT)
	if (IsInstanceExtensionAvailable(VK_EXT_METAL_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_EXT_METAL_SURFACE_EXTENSION_NAME);
	}
#endif
#endif

	// Validation is skipped when a custom driver is configured.
	if ((flags_ & VULKAN_FLAG_VALIDATE) && g_Config.sCustomDriver.empty()) {
		if (IsInstanceExtensionAvailable(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) {
			// Enable the validation layers
			for (size_t i = 0; i < ARRAY_SIZE(validationLayers); i++) {
				instance_layer_names_.push_back(validationLayers[i]);
				device_layer_names_.push_back(validationLayers[i]);
			}
			instance_extensions_enabled_.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
			extensionsLookup_.EXT_debug_utils = true;
			INFO_LOG(Log::G3D, "Vulkan debug_utils validation enabled.");
		} else {
			ERROR_LOG(Log::G3D, "Validation layer extension not available - not enabling Vulkan validation.");
			flags_ &= ~VULKAN_FLAG_VALIDATE;
		}
	}

	// Temporary hack for libretro. For some reason, when we try to load the functions from this extension,
	// we get null pointers when running libretro. Quite strange.
#if !defined(__LIBRETRO__)
	if (EnableInstanceExtension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_API_VERSION_1_1)) {
		extensionsLookup_.KHR_get_physical_device_properties2 = true;
	}
#endif

	if (EnableInstanceExtension(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, 0)) {
		extensionsLookup_.EXT_swapchain_colorspace = true;
	}
#if PPSSPP_PLATFORM(IOS_APP_STORE)
	// Required with VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR below (MoltenVK).
	if (EnableInstanceExtension(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME, 0)) {

	}
#endif

	// Validate that all the instance extensions we ask for are actually available.
	for (auto ext : instance_extensions_enabled_) {
		if (!IsInstanceExtensionAvailable(ext))
			WARN_LOG(Log::G3D, "WARNING: Does not seem that instance extension '%s' is available. Trying to proceed anyway.", ext);
	}

	VkApplicationInfo app_info{ VK_STRUCTURE_TYPE_APPLICATION_INFO };
	app_info.pApplicationName = info.app_name;
	app_info.applicationVersion = info.app_ver;
	app_info.pEngineName = info.app_name;
	// Let's increment this when we make major engine/context changes.
	app_info.engineVersion = 2;
	app_info.apiVersion = vulkanInstanceApiVersion_;

	VkInstanceCreateInfo inst_info{ VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
	inst_info.flags = 0;
	inst_info.pApplicationInfo = &app_info;
	inst_info.enabledLayerCount = (uint32_t)instance_layer_names_.size();
	inst_info.ppEnabledLayerNames = instance_layer_names_.size() ? instance_layer_names_.data() : nullptr;
	inst_info.enabledExtensionCount = (uint32_t)instance_extensions_enabled_.size();
	inst_info.ppEnabledExtensionNames = instance_extensions_enabled_.size() ? instance_extensions_enabled_.data() : nullptr;

#if PPSSPP_PLATFORM(IOS_APP_STORE)
	inst_info.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
#endif

#if SIMULATE_VULKAN_FAILURE == 2
	VkResult res = VK_ERROR_INCOMPATIBLE_DRIVER;
#else
	VkResult res = vkCreateInstance(&inst_info, nullptr, &instance_);
#endif
	if (res != VK_SUCCESS) {
		if (res == VK_ERROR_LAYER_NOT_PRESENT) {
			WARN_LOG(Log::G3D, "Validation on but instance layer not available - dropping layers");
			// Drop the validation layers and try again.
			instance_layer_names_.clear();
			device_layer_names_.clear();
			inst_info.enabledLayerCount = 0;
			inst_info.ppEnabledLayerNames = nullptr;
			res = vkCreateInstance(&inst_info, nullptr, &instance_);
			if (res != VK_SUCCESS)
				ERROR_LOG(Log::G3D, "Failed to create instance even without validation: %d", res);
		} else {
			ERROR_LOG(Log::G3D, "Failed to create instance : %d", res);
		}
	}
	if (res != VK_SUCCESS) {
		init_error_ = "Failed to create Vulkan instance";
		return res;
	}

	VulkanLoadInstanceFunctions(instance_, extensionsLookup_, vulkanInstanceApiVersion_);
	if (!CheckLayers(instance_layer_properties_, instance_layer_names_)) {
		// Non-fatal: we proceed even if a requested layer is missing.
		WARN_LOG(Log::G3D, "CheckLayers for instance failed");
		// init_error_ = "Failed to validate instance layers";
		// return;
	}

	uint32_t gpu_count = 1;
#if SIMULATE_VULKAN_FAILURE == 3
	gpu_count = 0;
#else
	res = vkEnumeratePhysicalDevices(instance_, &gpu_count, nullptr);
#endif
	if (gpu_count <= 0) {
		ERROR_LOG(Log::G3D, "Vulkan driver found but no supported GPU is available");
		init_error_ = "No Vulkan physical devices found";
		vkDestroyInstance(instance_, nullptr);
		// NOTE(review): elsewhere this file uses VK_NULL_HANDLE for instance_;
		// nullptr is equivalent here but inconsistent.
		instance_ = nullptr;
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	_dbg_assert_(gpu_count > 0);
	physical_devices_.resize(gpu_count);
	physicalDeviceProperties_.resize(gpu_count);
	res = vkEnumeratePhysicalDevices(instance_, &gpu_count, physical_devices_.data());
	if (res != VK_SUCCESS) {
		init_error_ = "Failed to enumerate physical devices";
		vkDestroyInstance(instance_, nullptr);
		instance_ = nullptr;
		return res;
	}

	// Cache per-device properties. With properties2 available we also pull
	// push-descriptor, external-host-memory and depth/stencil-resolve info
	// through the pNext chain; otherwise just the core properties.
	if (extensionsLookup_.KHR_get_physical_device_properties2 && vkGetPhysicalDeviceProperties2) {
		for (uint32_t i = 0; i < gpu_count; i++) {
			VkPhysicalDeviceProperties2 props2{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2};
			VkPhysicalDevicePushDescriptorPropertiesKHR pushProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR};
			VkPhysicalDeviceExternalMemoryHostPropertiesEXT extHostMemProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT};
			VkPhysicalDeviceDepthStencilResolveProperties depthStencilResolveProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES};
			ChainStruct(props2, &pushProps);
			ChainStruct(props2, &extHostMemProps);
			ChainStruct(props2, &depthStencilResolveProps);
			vkGetPhysicalDeviceProperties2(physical_devices_[i], &props2);

			// Don't want bad pointers sitting around. Probably not really necessary.
			props2.pNext = nullptr;
			pushProps.pNext = nullptr;
			extHostMemProps.pNext = nullptr;
			depthStencilResolveProps.pNext = nullptr;
			physicalDeviceProperties_[i].properties = props2.properties;
			physicalDeviceProperties_[i].pushDescriptorProperties = pushProps;
			physicalDeviceProperties_[i].externalMemoryHostProperties = extHostMemProps;
			physicalDeviceProperties_[i].depthStencilResolve = depthStencilResolveProps;
		}
	} else {
		for (uint32_t i = 0; i < gpu_count; i++) {
			vkGetPhysicalDeviceProperties(physical_devices_[i], &physicalDeviceProperties_[i].properties);
		}
	}

	if (extensionsLookup_.EXT_debug_utils) {
		_assert_(vkCreateDebugUtilsMessengerEXT != nullptr);
		InitDebugUtilsCallback();
	}

	return VK_SUCCESS;
}
311
312
VulkanContext::~VulkanContext() {
	// DestroyInstance() must have been called before we get destroyed.
	_dbg_assert_(instance_ == VK_NULL_HANDLE);
}
315
316
void VulkanContext::DestroyInstance() {
317
if (extensionsLookup_.EXT_debug_utils) {
318
while (utils_callbacks.size() > 0) {
319
vkDestroyDebugUtilsMessengerEXT(instance_, utils_callbacks.back(), nullptr);
320
utils_callbacks.pop_back();
321
}
322
}
323
324
vkDestroyInstance(instance_, nullptr);
325
VulkanFree();
326
instance_ = VK_NULL_HANDLE;
327
}
328
329
// Starts a new CPU frame on the current frame slot: flushes that slot's
// pending deletes and kicks off GPU profiling if a command buffer was given.
void VulkanContext::BeginFrame(VkCommandBuffer firstCommandBuffer) {
	FrameData &frame = frame_[curFrame_];
	// Process pending deletes queued for this slot.
	frame.deleteList.PerformDeletes(this, allocator_);
	// firstCommandBuffer is VK_NULL_HANDLE when the profiler is disabled.
	if (firstCommandBuffer != VK_NULL_HANDLE) {
		frame.profiler.BeginFrame(this, firstCommandBuffer);
	}
}
338
339
void VulkanContext::EndFrame() {
340
frame_[curFrame_].deleteList.Take(globalDeleteList_);
341
curFrame_++;
342
if (curFrame_ >= inflightFrames_) {
343
curFrame_ = 0;
344
}
345
}
346
347
void VulkanContext::UpdateInflightFrames(int n) {
348
_dbg_assert_(n >= 1 && n <= MAX_INFLIGHT_FRAMES);
349
inflightFrames_ = n;
350
if (curFrame_ >= inflightFrames_) {
351
curFrame_ = 0;
352
}
353
}
354
355
void VulkanContext::WaitUntilQueueIdle() {
356
// Should almost never be used
357
vkQueueWaitIdle(gfx_queue_);
358
}
359
360
bool VulkanContext::MemoryTypeFromProperties(uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex) {
361
// Search memtypes to find first index with those properties
362
for (uint32_t i = 0; i < 32; i++) {
363
if ((typeBits & 1) == 1) {
364
// Type is available, does it match user properties?
365
if ((memory_properties_.memoryTypes[i].propertyFlags & requirements_mask) == requirements_mask) {
366
*typeIndex = i;
367
return true;
368
}
369
}
370
typeBits >>= 1;
371
}
372
// No memory types matched, return failure
373
return false;
374
}
375
376
void VulkanContext::DestroySwapchain() {
377
if (swapchain_ != VK_NULL_HANDLE) {
378
vkDestroySwapchainKHR(device_, swapchain_, nullptr);
379
swapchain_ = VK_NULL_HANDLE;
380
}
381
}
382
383
void VulkanContext::DestroySurface() {
384
if (surface_ != VK_NULL_HANDLE) {
385
vkDestroySurfaceKHR(instance_, surface_, nullptr);
386
surface_ = VK_NULL_HANDLE;
387
}
388
}
389
390
// Enumerates the instance extensions exposed by the given layer (or by the
// implementation itself when layerName is nullptr) into `extensions`,
// retrying while the loader reports VK_INCOMPLETE.
VkResult VulkanContext::GetInstanceLayerExtensionList(const char *layerName, std::vector<VkExtensionProperties> &extensions) {
	VkResult res;
	do {
		uint32_t count = 0;
		res = vkEnumerateInstanceExtensionProperties(layerName, &count, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (count == 0)
			return VK_SUCCESS;
		extensions.resize(count);
		res = vkEnumerateInstanceExtensionProperties(layerName, &count, extensions.data());
	} while (res == VK_INCOMPLETE);  // The set may have changed between the two calls.
	return res;
}
404
405
// Enumerates all available instance layers and, for each one, its extension
// list, appending the results to instance_layer_properties_.
VkResult VulkanContext::GetInstanceLayerProperties() {
	/*
	 * It's possible, though very rare, that the number of
	 * instance layers could change. For example, installing something
	 * could include new layers that the loader would pick up
	 * between the initial query for the count and the
	 * request for VkLayerProperties. The loader indicates that
	 * by returning a VK_INCOMPLETE status and will update
	 * the count parameter.
	 * The count parameter will be updated with the number of
	 * entries loaded into the data pointer - in case the number
	 * of layers went down or is smaller than the size given.
	 */
	uint32_t instance_layer_count;
	std::vector<VkLayerProperties> vk_props;
	VkResult res;
	do {
		res = vkEnumerateInstanceLayerProperties(&instance_layer_count, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (!instance_layer_count)
			return VK_SUCCESS;  // No layers installed - nothing to record.
		vk_props.resize(instance_layer_count);
		res = vkEnumerateInstanceLayerProperties(&instance_layer_count, vk_props.data());
	} while (res == VK_INCOMPLETE);

	// Now gather the extension list for each instance layer.
	for (uint32_t i = 0; i < instance_layer_count; i++) {
		LayerProperties layer_props;
		layer_props.properties = vk_props[i];
		res = GetInstanceLayerExtensionList(layer_props.properties.layerName, layer_props.extensions);
		if (res != VK_SUCCESS)
			return res;
		instance_layer_properties_.push_back(layer_props);
	}
	return res;
}
442
443
// Pass layerName == nullptr to get the extension list for the device.
444
VkResult VulkanContext::GetDeviceLayerExtensionList(const char *layerName, std::vector<VkExtensionProperties> &extensions) {
445
VkResult res;
446
do {
447
uint32_t device_extension_count;
448
res = vkEnumerateDeviceExtensionProperties(physical_devices_[physical_device_], layerName, &device_extension_count, nullptr);
449
if (res != VK_SUCCESS)
450
return res;
451
if (!device_extension_count)
452
return VK_SUCCESS;
453
extensions.resize(device_extension_count);
454
res = vkEnumerateDeviceExtensionProperties(physical_devices_[physical_device_], layerName, &device_extension_count, extensions.data());
455
} while (res == VK_INCOMPLETE);
456
return res;
457
}
458
459
// Enumerates all available device layers for the selected physical device
// and, for each one, its extension list, appending to device_layer_properties_.
VkResult VulkanContext::GetDeviceLayerProperties() {
	/*
	 * It's possible, though very rare, that the number of
	 * device layers could change. For example, installing something
	 * could include new layers that the loader would pick up
	 * between the initial query for the count and the
	 * request for VkLayerProperties. The loader indicates that
	 * by returning a VK_INCOMPLETE status and will update
	 * the count parameter.
	 * The count parameter will be updated with the number of
	 * entries loaded into the data pointer - in case the number
	 * of layers went down or is smaller than the size given.
	 */
	uint32_t device_layer_count;
	std::vector<VkLayerProperties> vk_props;
	VkResult res;
	do {
		res = vkEnumerateDeviceLayerProperties(physical_devices_[physical_device_], &device_layer_count, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (device_layer_count == 0)
			return VK_SUCCESS;  // No device layers - nothing to record.
		vk_props.resize(device_layer_count);
		res = vkEnumerateDeviceLayerProperties(physical_devices_[physical_device_], &device_layer_count, vk_props.data());
	} while (res == VK_INCOMPLETE);

	// Gather the list of extensions for each device layer.
	for (uint32_t i = 0; i < device_layer_count; i++) {
		LayerProperties layer_props;
		layer_props.properties = vk_props[i];
		res = GetDeviceLayerExtensionList(layer_props.properties.layerName, layer_props.extensions);
		if (res != VK_SUCCESS)
			return res;
		device_layer_properties_.push_back(layer_props);
	}
	return res;
}
496
497
// Returns true if all layer names specified in check_names can be found in given layer properties.
498
bool VulkanContext::CheckLayers(const std::vector<LayerProperties> &layer_props, const std::vector<const char *> &layer_names) const {
499
uint32_t check_count = (uint32_t)layer_names.size();
500
uint32_t layer_count = (uint32_t)layer_props.size();
501
for (uint32_t i = 0; i < check_count; i++) {
502
bool found = false;
503
for (uint32_t j = 0; j < layer_count; j++) {
504
if (!strcmp(layer_names[i], layer_props[j].properties.layerName)) {
505
found = true;
506
}
507
}
508
if (!found) {
509
std::cout << "Cannot find layer: " << layer_names[i] << std::endl;
510
return false;
511
}
512
}
513
return true;
514
}
515
516
int VulkanContext::GetPhysicalDeviceByName(const std::string &name) {
517
for (size_t i = 0; i < physical_devices_.size(); i++) {
518
if (physicalDeviceProperties_[i].properties.deviceName == name)
519
return (int)i;
520
}
521
return -1;
522
}
523
524
int VulkanContext::GetBestPhysicalDevice() {
525
// Rules: Prefer discrete over embedded.
526
// Prefer nVidia over Intel.
527
528
int maxScore = -1;
529
int best = -1;
530
531
for (size_t i = 0; i < physical_devices_.size(); i++) {
532
int score = 0;
533
VkPhysicalDeviceProperties props;
534
vkGetPhysicalDeviceProperties(physical_devices_[i], &props);
535
switch (props.deviceType) {
536
case VK_PHYSICAL_DEVICE_TYPE_CPU:
537
score += 1;
538
break;
539
case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
540
score += 2;
541
break;
542
case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
543
score += 20;
544
break;
545
case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
546
score += 10;
547
break;
548
default:
549
break;
550
}
551
if (props.vendorID == VULKAN_VENDOR_AMD) {
552
score += 5;
553
} else if (props.vendorID == VULKAN_VENDOR_NVIDIA) {
554
score += 5;
555
}
556
if (score > maxScore) {
557
best = (int)i;
558
maxScore = score;
559
}
560
}
561
return best;
562
}
563
564
bool VulkanContext::EnableDeviceExtension(const char *extension, uint32_t coreVersion) {
565
if (coreVersion != 0 && vulkanDeviceApiVersion_ >= coreVersion) {
566
return true;
567
}
568
for (auto &iter : device_extension_properties_) {
569
if (!strcmp(iter.extensionName, extension)) {
570
device_extensions_enabled_.push_back(extension);
571
return true;
572
}
573
}
574
return false;
575
}
576
577
bool VulkanContext::EnableInstanceExtension(const char *extension, uint32_t coreVersion) {
578
if (coreVersion != 0 && vulkanInstanceApiVersion_ >= coreVersion) {
579
return true;
580
}
581
for (auto &iter : instance_extension_properties_) {
582
if (!strcmp(iter.extensionName, extension)) {
583
instance_extensions_enabled_.push_back(extension);
584
return true;
585
}
586
}
587
return false;
588
}
589
590
VkResult VulkanContext::CreateDevice(int physical_device) {
591
physical_device_ = physical_device;
592
INFO_LOG(Log::G3D, "Chose physical device %d: %s", physical_device, physicalDeviceProperties_[physical_device].properties.deviceName);
593
594
vulkanDeviceApiVersion_ = physicalDeviceProperties_[physical_device].properties.apiVersion;
595
596
GetDeviceLayerProperties();
597
if (!CheckLayers(device_layer_properties_, device_layer_names_)) {
598
WARN_LOG(Log::G3D, "CheckLayers for device %d failed", physical_device);
599
}
600
601
vkGetPhysicalDeviceQueueFamilyProperties(physical_devices_[physical_device_], &queue_count, nullptr);
602
_dbg_assert_(queue_count >= 1);
603
604
queueFamilyProperties_.resize(queue_count);
605
vkGetPhysicalDeviceQueueFamilyProperties(physical_devices_[physical_device_], &queue_count, queueFamilyProperties_.data());
606
_dbg_assert_(queue_count >= 1);
607
608
// Detect preferred depth/stencil formats, in this order. All supported devices will support at least one of these.
609
static const VkFormat depthStencilFormats[] = {
610
VK_FORMAT_D24_UNORM_S8_UINT,
611
VK_FORMAT_D32_SFLOAT_S8_UINT,
612
VK_FORMAT_D16_UNORM_S8_UINT,
613
};
614
615
deviceInfo_.preferredDepthStencilFormat = VK_FORMAT_UNDEFINED;
616
for (size_t i = 0; i < ARRAY_SIZE(depthStencilFormats); i++) {
617
VkFormatProperties props;
618
vkGetPhysicalDeviceFormatProperties(physical_devices_[physical_device_], depthStencilFormats[i], &props);
619
if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
620
deviceInfo_.preferredDepthStencilFormat = depthStencilFormats[i];
621
break;
622
}
623
}
624
625
_assert_msg_(deviceInfo_.preferredDepthStencilFormat != VK_FORMAT_UNDEFINED, "Could not find a usable depth stencil format.");
626
VkFormatProperties preferredProps;
627
vkGetPhysicalDeviceFormatProperties(physical_devices_[physical_device_], deviceInfo_.preferredDepthStencilFormat, &preferredProps);
628
if ((preferredProps.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_SRC_BIT) &&
629
(preferredProps.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
630
deviceInfo_.canBlitToPreferredDepthStencilFormat = true;
631
}
632
633
// This is as good a place as any to do this. Though, we don't use this much anymore after we added
634
// support for VMA.
635
vkGetPhysicalDeviceMemoryProperties(physical_devices_[physical_device_], &memory_properties_);
636
INFO_LOG(Log::G3D, "Memory Types (%d):", memory_properties_.memoryTypeCount);
637
for (int i = 0; i < (int)memory_properties_.memoryTypeCount; i++) {
638
// Don't bother printing dummy memory types.
639
if (!memory_properties_.memoryTypes[i].propertyFlags)
640
continue;
641
INFO_LOG(Log::G3D, " %d: Heap %d; Flags: %s%s%s%s ", i, memory_properties_.memoryTypes[i].heapIndex,
642
(memory_properties_.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) ? "DEVICE_LOCAL " : "",
643
(memory_properties_.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) ? "HOST_VISIBLE " : "",
644
(memory_properties_.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) ? "HOST_CACHED " : "",
645
(memory_properties_.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) ? "HOST_COHERENT " : "");
646
}
647
648
GetDeviceLayerExtensionList(nullptr, device_extension_properties_);
649
650
device_extensions_enabled_.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
651
652
if (!init_error_.empty() || physical_device_ < 0) {
653
ERROR_LOG(Log::G3D, "Vulkan init failed: %s", init_error_.c_str());
654
return VK_ERROR_INITIALIZATION_FAILED;
655
}
656
657
VkDeviceQueueCreateInfo queue_info{ VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO };
658
float queue_priorities[1] = { 1.0f };
659
queue_info.queueCount = 1;
660
queue_info.pQueuePriorities = queue_priorities;
661
bool found = false;
662
for (int i = 0; i < (int)queue_count; i++) {
663
if (queueFamilyProperties_[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
664
queue_info.queueFamilyIndex = i;
665
found = true;
666
break;
667
}
668
}
669
_dbg_assert_(found);
670
671
// TODO: A lot of these are on by default in later Vulkan versions, should check for that, technically.
672
extensionsLookup_.KHR_maintenance1 = EnableDeviceExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_API_VERSION_1_1);
673
extensionsLookup_.KHR_maintenance2 = EnableDeviceExtension(VK_KHR_MAINTENANCE2_EXTENSION_NAME, VK_API_VERSION_1_1);
674
extensionsLookup_.KHR_maintenance3 = EnableDeviceExtension(VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_API_VERSION_1_1);
675
extensionsLookup_.KHR_maintenance4 = EnableDeviceExtension("VK_KHR_maintenance4", VK_API_VERSION_1_3);
676
extensionsLookup_.KHR_multiview = EnableDeviceExtension(VK_KHR_MULTIVIEW_EXTENSION_NAME, VK_API_VERSION_1_1);
677
678
if (EnableDeviceExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, VK_API_VERSION_1_1)) {
679
extensionsLookup_.KHR_get_memory_requirements2 = true;
680
extensionsLookup_.KHR_dedicated_allocation = EnableDeviceExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, VK_API_VERSION_1_1);
681
}
682
if (EnableDeviceExtension(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, VK_API_VERSION_1_2)) {
683
extensionsLookup_.KHR_create_renderpass2 = true;
684
extensionsLookup_.KHR_depth_stencil_resolve = EnableDeviceExtension(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, VK_API_VERSION_1_2);
685
}
686
687
extensionsLookup_.EXT_shader_stencil_export = EnableDeviceExtension(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, 0);
688
extensionsLookup_.EXT_fragment_shader_interlock = EnableDeviceExtension(VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME, 0);
689
extensionsLookup_.ARM_rasterization_order_attachment_access = EnableDeviceExtension(VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME, 0);
690
691
#if !PPSSPP_PLATFORM(MAC) && !PPSSPP_PLATFORM(IOS)
692
extensionsLookup_.GOOGLE_display_timing = EnableDeviceExtension(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME, 0);
693
#endif
694
if (!extensionsLookup_.GOOGLE_display_timing) {
695
extensionsLookup_.KHR_present_id = EnableDeviceExtension(VK_KHR_PRESENT_ID_EXTENSION_NAME, 0);
696
extensionsLookup_.KHR_present_wait = EnableDeviceExtension(VK_KHR_PRESENT_WAIT_EXTENSION_NAME, 0);
697
}
698
699
extensionsLookup_.EXT_provoking_vertex = EnableDeviceExtension(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, 0);
700
701
// Optional features
702
if (extensionsLookup_.KHR_get_physical_device_properties2 && vkGetPhysicalDeviceFeatures2) {
703
VkPhysicalDeviceFeatures2 features2{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR };
704
// Add to chain even if not supported, GetPhysicalDeviceFeatures is supposed to ignore unknown structs.
705
VkPhysicalDeviceMultiviewFeatures multiViewFeatures{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES };
706
VkPhysicalDevicePresentWaitFeaturesKHR presentWaitFeatures{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR };
707
VkPhysicalDevicePresentIdFeaturesKHR presentIdFeatures{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR };
708
VkPhysicalDeviceProvokingVertexFeaturesEXT provokingVertexFeatures{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT };
709
710
ChainStruct(features2, &multiViewFeatures);
711
if (extensionsLookup_.KHR_present_wait) {
712
ChainStruct(features2, &presentWaitFeatures);
713
}
714
if (extensionsLookup_.KHR_present_id) {
715
ChainStruct(features2, &presentIdFeatures);
716
}
717
if (extensionsLookup_.EXT_provoking_vertex) {
718
ChainStruct(features2, &provokingVertexFeatures);
719
}
720
vkGetPhysicalDeviceFeatures2(physical_devices_[physical_device_], &features2);
721
deviceFeatures_.available.standard = features2.features;
722
deviceFeatures_.available.multiview = multiViewFeatures;
723
if (extensionsLookup_.KHR_present_wait) {
724
deviceFeatures_.available.presentWait = presentWaitFeatures;
725
}
726
if (extensionsLookup_.KHR_present_id) {
727
deviceFeatures_.available.presentId = presentIdFeatures;
728
}
729
if (extensionsLookup_.EXT_provoking_vertex) {
730
deviceFeatures_.available.provokingVertex = provokingVertexFeatures;
731
}
732
} else {
733
vkGetPhysicalDeviceFeatures(physical_devices_[physical_device_], &deviceFeatures_.available.standard);
734
deviceFeatures_.available.multiview = {};
735
}
736
737
deviceFeatures_.enabled = {};
738
// Enable a few safe ones if they are available.
739
deviceFeatures_.enabled.standard.dualSrcBlend = deviceFeatures_.available.standard.dualSrcBlend;
740
deviceFeatures_.enabled.standard.logicOp = deviceFeatures_.available.standard.logicOp;
741
deviceFeatures_.enabled.standard.depthClamp = deviceFeatures_.available.standard.depthClamp;
742
deviceFeatures_.enabled.standard.depthBounds = deviceFeatures_.available.standard.depthBounds;
743
deviceFeatures_.enabled.standard.samplerAnisotropy = deviceFeatures_.available.standard.samplerAnisotropy;
744
deviceFeatures_.enabled.standard.shaderClipDistance = deviceFeatures_.available.standard.shaderClipDistance;
745
deviceFeatures_.enabled.standard.shaderCullDistance = deviceFeatures_.available.standard.shaderCullDistance;
746
deviceFeatures_.enabled.standard.geometryShader = deviceFeatures_.available.standard.geometryShader;
747
deviceFeatures_.enabled.standard.sampleRateShading = deviceFeatures_.available.standard.sampleRateShading;
748
749
#ifdef _DEBUG
750
// For debugging! Although, it might hide problems, so turning it off. Can be useful to rule out classes of issues.
751
// deviceFeatures_.enabled.standard.robustBufferAccess = deviceFeatures_.available.standard.robustBufferAccess;
752
#endif
753
754
deviceFeatures_.enabled.multiview = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES };
755
if (extensionsLookup_.KHR_multiview) {
756
deviceFeatures_.enabled.multiview.multiview = deviceFeatures_.available.multiview.multiview;
757
}
758
// Strangely, on Intel, it reports these as available even though the extension isn't in the list.
759
deviceFeatures_.enabled.presentId = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR };
760
if (extensionsLookup_.KHR_present_id) {
761
deviceFeatures_.enabled.presentId.presentId = deviceFeatures_.available.presentId.presentId;
762
}
763
deviceFeatures_.enabled.presentWait = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR };
764
if (extensionsLookup_.KHR_present_wait) {
765
deviceFeatures_.enabled.presentWait.presentWait = deviceFeatures_.available.presentWait.presentWait;
766
}
767
deviceFeatures_.enabled.provokingVertex = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT };
768
if (extensionsLookup_.EXT_provoking_vertex) {
769
deviceFeatures_.enabled.provokingVertex.provokingVertexLast = true;
770
}
771
772
// deviceFeatures_.enabled.multiview.multiviewGeometryShader = deviceFeatures_.available.multiview.multiviewGeometryShader;
773
774
VkPhysicalDeviceFeatures2 features2{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
775
776
VkDeviceCreateInfo device_info{ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
777
device_info.queueCreateInfoCount = 1;
778
device_info.pQueueCreateInfos = &queue_info;
779
device_info.enabledLayerCount = (uint32_t)device_layer_names_.size();
780
device_info.ppEnabledLayerNames = device_info.enabledLayerCount ? device_layer_names_.data() : nullptr;
781
device_info.enabledExtensionCount = (uint32_t)device_extensions_enabled_.size();
782
device_info.ppEnabledExtensionNames = device_info.enabledExtensionCount ? device_extensions_enabled_.data() : nullptr;
783
784
if (extensionsLookup_.KHR_get_physical_device_properties2) {
785
device_info.pNext = &features2;
786
features2.features = deviceFeatures_.enabled.standard;
787
ChainStruct(features2, &deviceFeatures_.enabled.multiview);
788
if (extensionsLookup_.KHR_present_wait) {
789
ChainStruct(features2, &deviceFeatures_.enabled.presentWait);
790
}
791
if (extensionsLookup_.KHR_present_id) {
792
ChainStruct(features2, &deviceFeatures_.enabled.presentId);
793
}
794
if (extensionsLookup_.EXT_provoking_vertex) {
795
ChainStruct(features2, &deviceFeatures_.enabled.provokingVertex);
796
}
797
} else {
798
device_info.pEnabledFeatures = &deviceFeatures_.enabled.standard;
799
}
800
801
VkResult res = vkCreateDevice(physical_devices_[physical_device_], &device_info, nullptr, &device_);
802
if (res != VK_SUCCESS) {
803
init_error_ = "Unable to create Vulkan device";
804
ERROR_LOG(Log::G3D, "%s", init_error_.c_str());
805
} else {
806
VulkanLoadDeviceFunctions(device_, extensionsLookup_, vulkanDeviceApiVersion_);
807
}
808
INFO_LOG(Log::G3D, "Vulkan Device created: %s", physicalDeviceProperties_[physical_device_].properties.deviceName);
809
810
// Since we successfully created a device (however we got here, might be interesting in debug), we force the choice to be visible in the menu.
811
VulkanSetAvailable(true);
812
813
VmaAllocatorCreateInfo allocatorInfo = {};
814
allocatorInfo.vulkanApiVersion = std::min(vulkanDeviceApiVersion_, vulkanInstanceApiVersion_);
815
allocatorInfo.physicalDevice = physical_devices_[physical_device_];
816
allocatorInfo.device = device_;
817
allocatorInfo.instance = instance_;
818
VkResult result = vmaCreateAllocator(&allocatorInfo, &allocator_);
819
_assert_(result == VK_SUCCESS);
820
_assert_(allocator_ != VK_NULL_HANDLE);
821
822
// Examine the physical device to figure out super rough performance grade.
823
// Basically all we want to do is to identify low performance mobile devices
824
// so we can make decisions on things like texture scaling strategy.
825
auto &props = physicalDeviceProperties_[physical_device_].properties;
826
switch (props.vendorID) {
827
case VULKAN_VENDOR_AMD:
828
case VULKAN_VENDOR_NVIDIA:
829
case VULKAN_VENDOR_INTEL:
830
devicePerfClass_ = PerfClass::FAST;
831
break;
832
833
case VULKAN_VENDOR_ARM:
834
devicePerfClass_ = PerfClass::SLOW;
835
{
836
// Parse the device name as an ultra rough heuristic.
837
int maliG = 0;
838
if (sscanf(props.deviceName, "Mali-G%d", &maliG) == 1) {
839
if (maliG >= 72) {
840
devicePerfClass_ = PerfClass::FAST;
841
}
842
}
843
}
844
break;
845
846
case VULKAN_VENDOR_QUALCOMM:
847
devicePerfClass_ = PerfClass::SLOW;
848
#if PPSSPP_PLATFORM(ANDROID)
849
if (System_GetPropertyInt(SYSPROP_SYSTEMVERSION) >= 30) {
850
devicePerfClass_ = PerfClass::FAST;
851
}
852
#endif
853
break;
854
855
case VULKAN_VENDOR_IMGTEC:
856
default:
857
devicePerfClass_ = PerfClass::SLOW;
858
break;
859
}
860
861
return res;
862
}
863
864
VkResult VulkanContext::InitDebugUtilsCallback() {
	// Registers a debug-utils messenger on the instance so validation/driver
	// messages get routed into our log. The created messenger handle is kept in
	// utils_callbacks for later destruction.
	//
	// We're intentionally skipping VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT and
	// VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, just too spammy.
	// (The ERROR bit was previously OR'd in twice - errors plus warnings is the intent.)
	int bits = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
		| VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;

	VkDebugUtilsMessengerCreateInfoEXT callback1{VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT};
	callback1.messageSeverity = bits;
	callback1.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
	callback1.pfnUserCallback = &VulkanDebugUtilsCallback;
	// The callback gets a pointer to our global log options so severity filtering
	// can be tweaked at runtime.
	callback1.pUserData = (void *)&g_LogOptions;
	VkDebugUtilsMessengerEXT messenger;
	VkResult res = vkCreateDebugUtilsMessengerEXT(instance_, &callback1, nullptr, &messenger);
	if (res != VK_SUCCESS) {
		ERROR_LOG(Log::G3D, "Failed to register debug callback with vkCreateDebugUtilsMessengerEXT");
		// Do error handling for VK_ERROR_OUT_OF_MEMORY
	} else {
		INFO_LOG(Log::G3D, "Debug callback registered with vkCreateDebugUtilsMessengerEXT.");
		utils_callbacks.push_back(messenger);
	}
	return res;
}
887
888
bool VulkanContext::CreateInstanceAndDevice(const CreateInfo &info) {
889
VkResult res = CreateInstance(info);
890
if (res != VK_SUCCESS) {
891
ERROR_LOG(Log::G3D, "Failed to create vulkan context: %s", InitError().c_str());
892
VulkanSetAvailable(false);
893
return false;
894
}
895
896
int physicalDevice = GetBestPhysicalDevice();
897
if (physicalDevice < 0) {
898
ERROR_LOG(Log::G3D, "No usable Vulkan device found.");
899
DestroyInstance();
900
return false;
901
}
902
903
INFO_LOG(Log::G3D, "Creating Vulkan device (flags: %08x)", info.flags);
904
if (CreateDevice(physicalDevice) != VK_SUCCESS) {
905
INFO_LOG(Log::G3D, "Failed to create vulkan device: %s", InitError().c_str());
906
DestroyInstance();
907
return false;
908
}
909
910
return true;
911
}
912
913
void VulkanContext::SetDebugNameImpl(uint64_t handle, VkObjectType type, const char *name) {
	// Attach a human-readable debug name to a Vulkan object (shows up in
	// tools like RenderDoc and in validation messages).
	VkDebugUtilsObjectNameInfoEXT nameInfo{ VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT };
	nameInfo.objectType = type;
	nameInfo.objectHandle = handle;
	nameInfo.pObjectName = name;
	vkSetDebugUtilsObjectNameEXT(device_, &nameInfo);
}
920
921
VkResult VulkanContext::InitSurface(WindowSystem winsys, void *data1, void *data2) {
	// Remember the window-system identity and its opaque handles, then create
	// the surface. ReinitSurface() can be called again later (e.g. after a
	// window recreation) using the same stored parameters.
	winsysData2_ = data2;
	winsysData1_ = data1;
	winsys_ = winsys;
	return ReinitSurface();
}
927
928
VkResult VulkanContext::ReinitSurface() {
	// (Re)creates the platform-specific VkSurfaceKHR from the window-system
	// data stashed by InitSurface(). Destroys any existing surface first.
	// On success, also (re)chooses the queue family and initializes per-frame
	// profilers. Returns VK_SUCCESS or a Vulkan error code.
	if (surface_ != VK_NULL_HANDLE) {
		INFO_LOG(Log::G3D, "Destroying Vulkan surface (%d, %d)", swapChainExtent_.width, swapChainExtent_.height);
		vkDestroySurfaceKHR(instance_, surface_, nullptr);
		surface_ = VK_NULL_HANDLE;
	}

	INFO_LOG(Log::G3D, "Creating Vulkan surface for window (data1=%p data2=%p)", winsysData1_, winsysData2_);

	VkResult retval = VK_SUCCESS;

	switch (winsys_) {
#ifdef _WIN32
	case WINDOWSYSTEM_WIN32:
	{
		VkWin32SurfaceCreateInfoKHR win32{ VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR };
		win32.flags = 0;
		win32.hwnd = (HWND)winsysData2_;
		win32.hinstance = (HINSTANCE)winsysData1_;
		retval = vkCreateWin32SurfaceKHR(instance_, &win32, nullptr, &surface_);
		break;
	}
#endif
#if defined(__ANDROID__)
	case WINDOWSYSTEM_ANDROID:
	{
		ANativeWindow *wnd = (ANativeWindow *)winsysData1_;
		VkAndroidSurfaceCreateInfoKHR android{ VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR };
		android.flags = 0;
		android.window = wnd;
		retval = vkCreateAndroidSurfaceKHR(instance_, &android, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_METAL_EXT)
	case WINDOWSYSTEM_METAL_EXT:
	{
		VkMetalSurfaceCreateInfoEXT metal{ VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT };
		metal.flags = 0;
		metal.pLayer = winsysData1_;
		metal.pNext = winsysData2_;
		retval = vkCreateMetalSurfaceEXT(instance_, &metal, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_XLIB_KHR)
	case WINDOWSYSTEM_XLIB:
	{
		VkXlibSurfaceCreateInfoKHR xlib{ VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR };
		xlib.flags = 0;
		xlib.dpy = (Display *)winsysData1_;
		xlib.window = (Window)winsysData2_;
		retval = vkCreateXlibSurfaceKHR(instance_, &xlib, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_XCB_KHR)
	case WINDOWSYSTEM_XCB:
	{
		// BUGFIX: The struct is named VkXcbSurfaceCreateInfoKHR and the native
		// handle types are xcb_connection_t / xcb_window_t. The previous
		// "VkXCBSurfaceCreateInfoKHR" / "Connection" spellings don't exist and
		// failed to compile on XCB platforms.
		VkXcbSurfaceCreateInfoKHR xcb{ VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR };
		xcb.flags = 0;
		xcb.connection = (xcb_connection_t *)winsysData1_;
		xcb.window = (xcb_window_t)(uintptr_t)winsysData2_;
		retval = vkCreateXcbSurfaceKHR(instance_, &xcb, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
	case WINDOWSYSTEM_WAYLAND:
	{
		VkWaylandSurfaceCreateInfoKHR wayland{ VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR };
		wayland.flags = 0;
		wayland.display = (wl_display *)winsysData1_;
		wayland.surface = (wl_surface *)winsysData2_;
		retval = vkCreateWaylandSurfaceKHR(instance_, &wayland, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_DISPLAY_KHR)
	case WINDOWSYSTEM_DISPLAY:
	{
		VkDisplaySurfaceCreateInfoKHR display{ VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR };
#if !defined(__LIBRETRO__)
		// Outside libretro we must fill in a complete VkDisplaySurfaceCreateInfoKHR
		// ourselves. These entry points are resolved with dlopen in VulkanLoader.cpp,
		// so verify that they all loaded before using them.
		// Reference implementation: https://github.com/vanfanel/ppsspp
		if (!vkGetPhysicalDeviceDisplayPropertiesKHR ||
			!vkGetPhysicalDeviceDisplayPlanePropertiesKHR ||
			!vkGetDisplayModePropertiesKHR ||
			!vkGetDisplayPlaneSupportedDisplaysKHR ||
			!vkGetDisplayPlaneCapabilitiesKHR ) {
			_assert_msg_(false, "DISPLAY Vulkan cannot find any vulkan function symbols.");
			return VK_ERROR_INITIALIZATION_FAILED;
		}

		uint32_t display_count;
		uint32_t plane_count;

		VkDisplayPropertiesKHR *display_props = NULL;
		VkDisplayPlanePropertiesKHR *plane_props = NULL;
		VkDisplayModePropertiesKHR* mode_props = NULL;

		VkExtent2D image_size;
		// This is the chosen physical_device, it has been chosen elsewhere.
		VkPhysicalDevice phys_device = physical_devices_[physical_device_];
		VkDisplayModeKHR display_mode = VK_NULL_HANDLE;
		VkDisplayPlaneAlphaFlagBitsKHR alpha_mode = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR;
		uint32_t plane = UINT32_MAX;

		// For now, use the first available (connected) display.
		int display_index = 0;

		// 1 physical device can have N displays connected.
		// Vulkan only counts the connected displays.

		// Get a list of displays on the physical device.
		display_count = 0;
		vkGetPhysicalDeviceDisplayPropertiesKHR(phys_device, &display_count, NULL);
		if (display_count == 0) {
			_assert_msg_(false, "DISPLAY Vulkan couldn't find any displays.");
			return VK_ERROR_INITIALIZATION_FAILED;
		}
		display_props = new VkDisplayPropertiesKHR[display_count];
		vkGetPhysicalDeviceDisplayPropertiesKHR(phys_device, &display_count, display_props);

		// Get a list of display planes on the physical device.
		plane_count = 0;
		vkGetPhysicalDeviceDisplayPlanePropertiesKHR(phys_device, &plane_count, NULL);
		if (plane_count == 0) {
			_assert_msg_(false, "DISPLAY Vulkan couldn't find any planes on the physical device");
			delete [] display_props;  // BUGFIX: was leaked on this error path.
			return VK_ERROR_INITIALIZATION_FAILED;
		}
		plane_props = new VkDisplayPlanePropertiesKHR[plane_count];
		vkGetPhysicalDeviceDisplayPlanePropertiesKHR(phys_device, &plane_count, plane_props);

		// Get the Vulkan display we are going to use.
		VkDisplayKHR myDisplay = display_props[display_index].display;

		// Get the list of display modes of the display.
		uint32_t mode_count = 0;
		vkGetDisplayModePropertiesKHR(phys_device, myDisplay, &mode_count, NULL);
		if (mode_count == 0) {
			_assert_msg_(false, "DISPLAY Vulkan couldn't find any video modes on the display");
			delete [] display_props;  // BUGFIX: was leaked on this error path.
			delete [] plane_props;
			return VK_ERROR_INITIALIZATION_FAILED;
		}
		mode_props = new VkDisplayModePropertiesKHR[mode_count];
		vkGetDisplayModePropertiesKHR(phys_device, myDisplay, &mode_count, mode_props);

		// See if there's an appropiate mode available on the display.
		display_mode = VK_NULL_HANDLE;
		for (uint32_t i = 0; i < mode_count; ++i) {
			const VkDisplayModePropertiesKHR* mode = &mode_props[i];
			if (mode->parameters.visibleRegion.width == g_display.pixel_xres &&
				mode->parameters.visibleRegion.height == g_display.pixel_yres) {
				display_mode = mode->displayMode;
				break;
			}
		}

		// Free the mode list now.
		delete [] mode_props;

		// If there are no useable modes found on the display, error out.
		if (display_mode == VK_NULL_HANDLE) {
			_assert_msg_(false, "DISPLAY Vulkan couldn't find any video modes on the display");
			delete [] display_props;  // BUGFIX: was leaked on this error path.
			delete [] plane_props;
			return VK_ERROR_INITIALIZATION_FAILED;
		}

		/* Iterate on the list of planes of the physical device
		   to find a plane that matches these criteria:
		   -It must be compatible with the chosen display + mode.
		   -It isn't currently bound to another display.
		   -It supports per-pixel alpha, if possible. */
		for (uint32_t i = 0; i < plane_count; i++) {
			uint32_t supported_displays_count = 0;
			VkDisplayKHR* supported_displays;
			VkDisplayPlaneCapabilitiesKHR plane_caps;

			/* See if the plane is compatible with the current display. */
			vkGetDisplayPlaneSupportedDisplaysKHR(phys_device, i, &supported_displays_count, NULL);
			if (supported_displays_count == 0) {
				/* This plane doesn't support any displays. Continue to the next plane. */
				continue;
			}

			/* Get the list of displays supported by this plane. */
			supported_displays = new VkDisplayKHR[supported_displays_count];
			vkGetDisplayPlaneSupportedDisplaysKHR(phys_device, i,
				&supported_displays_count, supported_displays);

			/* The plane must be bound to the chosen display, or not in use.
			   If none of these is true, iterate to another plane. */
			if (!((plane_props[i].currentDisplay == myDisplay) ||
				(plane_props[i].currentDisplay == VK_NULL_HANDLE))) {
				delete [] supported_displays;  // BUGFIX: was leaked when skipping this plane.
				continue;
			}

			/* Iterate the list of displays supported by this plane
			   in order to find out if the chosen display is among them. */
			bool plane_supports_display = false;
			for (uint32_t j = 0; j < supported_displays_count; j++) {
				if (supported_displays[j] == myDisplay) {
					plane_supports_display = true;
					break;
				}
			}

			/* Free the list of displays supported by this plane. */
			delete [] supported_displays;

			/* If the display is not supported by this plane, iterate to the next plane. */
			if (!plane_supports_display)
				continue;

			/* Want a plane that supports the alpha mode we have chosen. */
			vkGetDisplayPlaneCapabilitiesKHR(phys_device, display_mode, i, &plane_caps);
			if (plane_caps.supportedAlpha & alpha_mode) {
				/* Yep, this plane is alright. */
				plane = i;
				break;
			}
		}

		/* If we couldn't find an appropiate plane, error out. */
		if (plane == UINT32_MAX) {
			_assert_msg_(false, "DISPLAY Vulkan couldn't find an appropiate plane");
			delete [] display_props;  // BUGFIX: was leaked on this error path.
			delete [] plane_props;
			return VK_ERROR_INITIALIZATION_FAILED;
		}

		// Finally, create the vulkan surface.
		image_size.width = g_display.pixel_xres;
		image_size.height = g_display.pixel_yres;

		display.displayMode = display_mode;
		display.imageExtent = image_size;
		display.transform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
		display.alphaMode = alpha_mode;
		display.globalAlpha = 1.0f;
		display.planeIndex = plane;
		display.planeStackIndex = plane_props[plane].currentStackIndex;
		display.pNext = nullptr;
		delete [] display_props;
		delete [] plane_props;
#endif
		display.flags = 0;
		retval = vkCreateDisplayPlaneSurfaceKHR(instance_, &display, nullptr, &surface_);
		break;
	}
#endif

	default:
		_assert_msg_(false, "Vulkan support for chosen window system not implemented");
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	if (retval != VK_SUCCESS) {
		return retval;
	}

	if (!ChooseQueue()) {
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	for (int i = 0; i < ARRAY_SIZE(frame_); i++) {
		frame_[i].profiler.Init(this);
	}

	return VK_SUCCESS;
}
1216
1217
bool VulkanContext::ChooseQueue() {
1218
// Iterate over each queue to learn whether it supports presenting:
1219
VkBool32 *supportsPresent = new VkBool32[queue_count];
1220
for (uint32_t i = 0; i < queue_count; i++) {
1221
vkGetPhysicalDeviceSurfaceSupportKHR(physical_devices_[physical_device_], i, surface_, &supportsPresent[i]);
1222
}
1223
1224
// Search for a graphics queue and a present queue in the array of queue
1225
// families, try to find one that supports both
1226
uint32_t graphicsQueueNodeIndex = UINT32_MAX;
1227
uint32_t presentQueueNodeIndex = UINT32_MAX;
1228
for (uint32_t i = 0; i < queue_count; i++) {
1229
if ((queueFamilyProperties_[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
1230
if (graphicsQueueNodeIndex == UINT32_MAX) {
1231
graphicsQueueNodeIndex = i;
1232
}
1233
1234
if (supportsPresent[i] == VK_TRUE) {
1235
graphicsQueueNodeIndex = i;
1236
presentQueueNodeIndex = i;
1237
break;
1238
}
1239
}
1240
}
1241
if (presentQueueNodeIndex == UINT32_MAX) {
1242
// If didn't find a queue that supports both graphics and present, then
1243
// find a separate present queue. NOTE: We don't actually currently support this arrangement!
1244
for (uint32_t i = 0; i < queue_count; ++i) {
1245
if (supportsPresent[i] == VK_TRUE) {
1246
presentQueueNodeIndex = i;
1247
break;
1248
}
1249
}
1250
}
1251
delete[] supportsPresent;
1252
1253
// Generate error if could not find both a graphics and a present queue
1254
if (graphicsQueueNodeIndex == UINT32_MAX || presentQueueNodeIndex == UINT32_MAX) {
1255
ERROR_LOG(Log::G3D, "Could not find a graphics and a present queue");
1256
return false;
1257
}
1258
1259
graphics_queue_family_index_ = graphicsQueueNodeIndex;
1260
1261
// Get the list of VkFormats that are supported:
1262
uint32_t formatCount = 0;
1263
VkResult res = vkGetPhysicalDeviceSurfaceFormatsKHR(physical_devices_[physical_device_], surface_, &formatCount, nullptr);
1264
_assert_msg_(res == VK_SUCCESS, "Failed to get formats for device %d: %d", physical_device_, (int)res);
1265
if (res != VK_SUCCESS) {
1266
return false;
1267
}
1268
1269
surfFormats_.resize(formatCount);
1270
res = vkGetPhysicalDeviceSurfaceFormatsKHR(physical_devices_[physical_device_], surface_, &formatCount, surfFormats_.data());
1271
_dbg_assert_(res == VK_SUCCESS);
1272
if (res != VK_SUCCESS) {
1273
return false;
1274
}
1275
// If the format list includes just one entry of VK_FORMAT_UNDEFINED,
1276
// the surface has no preferred format. Otherwise, at least one
1277
// supported format will be returned.
1278
if (formatCount == 0 || (formatCount == 1 && surfFormats_[0].format == VK_FORMAT_UNDEFINED)) {
1279
INFO_LOG(Log::G3D, "swapchain_format: Falling back to B8G8R8A8_UNORM");
1280
swapchainFormat_ = VK_FORMAT_B8G8R8A8_UNORM;
1281
} else {
1282
swapchainFormat_ = VK_FORMAT_UNDEFINED;
1283
for (uint32_t i = 0; i < formatCount; ++i) {
1284
if (surfFormats_[i].colorSpace != VK_COLORSPACE_SRGB_NONLINEAR_KHR) {
1285
continue;
1286
}
1287
if (surfFormats_[i].format == VK_FORMAT_B8G8R8A8_UNORM || surfFormats_[i].format == VK_FORMAT_R8G8B8A8_UNORM) {
1288
swapchainFormat_ = surfFormats_[i].format;
1289
break;
1290
}
1291
}
1292
if (swapchainFormat_ == VK_FORMAT_UNDEFINED) {
1293
// Okay, take the first one then.
1294
swapchainFormat_ = surfFormats_[0].format;
1295
}
1296
INFO_LOG(Log::G3D, "swapchain_format: %d (/%d)", swapchainFormat_, formatCount);
1297
}
1298
1299
vkGetDeviceQueue(device_, graphics_queue_family_index_, 0, &gfx_queue_);
1300
return true;
1301
}
1302
1303
// Clamp x to the inclusive range [a, b]. Assumes a <= b.
int clamp(int x, int a, int b) {
	return x < a ? a : (x > b ? b : x);
}
1310
1311
// Build a space-separated list of the names of the transform bits set in
// transformFlags, for logging.
static std::string surface_transforms_to_string(VkSurfaceTransformFlagsKHR transformFlags) {
	static const struct {
		VkSurfaceTransformFlagsKHR bit;
		const char *name;
	} kTransformNames[] = {
		{ VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, "IDENTITY " },
		{ VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, "ROTATE_90 " },
		{ VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR, "ROTATE_180 " },
		{ VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR, "ROTATE_270 " },
		{ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR, "HMIRROR " },
		{ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR, "HMIRROR_90 " },
		{ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR, "HMIRROR_180 " },
		{ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR, "HMIRROR_270 " },
		{ VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR, "INHERIT " },
	};
	std::string str;
	for (const auto &entry : kTransformNames) {
		if (transformFlags & entry.bit) {
			str += entry.name;
		}
	}
	return str;
}
1324
1325
// Creates the swapchain for the current surface: queries surface capabilities,
// computes the extent (honoring pretransform rotation), selects a present mode
// according to flags_, and applies vendor-specific workarounds. Returns false
// on failure (lost surface, swapchain creation error).
bool VulkanContext::InitSwapchain() {
	_assert_(physical_device_ >= 0 && physical_device_ < physical_devices_.size());
	if (!surface_) {
		ERROR_LOG(Log::G3D, "VK: No surface, can't create swapchain");
		return false;
	}

	VkResult res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_devices_[physical_device_], surface_, &surfCapabilities_);
	if (res == VK_ERROR_SURFACE_LOST_KHR) {
		// Not much to do.
		ERROR_LOG(Log::G3D, "VK: Surface lost in InitSwapchain");
		return false;
	}
	_dbg_assert_(res == VK_SUCCESS);
	// Standard two-call pattern: query the count, then fetch the list.
	uint32_t presentModeCount;
	res = vkGetPhysicalDeviceSurfacePresentModesKHR(physical_devices_[physical_device_], surface_, &presentModeCount, nullptr);
	_dbg_assert_(res == VK_SUCCESS);
	VkPresentModeKHR *presentModes = new VkPresentModeKHR[presentModeCount];
	_dbg_assert_(presentModes);
	res = vkGetPhysicalDeviceSurfacePresentModesKHR(physical_devices_[physical_device_], surface_, &presentModeCount, presentModes);
	_dbg_assert_(res == VK_SUCCESS);

	VkExtent2D currentExtent { surfCapabilities_.currentExtent };
	// https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkSurfaceCapabilitiesKHR.html
	// currentExtent is the current width and height of the surface, or the special value (0xFFFFFFFF, 0xFFFFFFFF) indicating that the surface size will be determined by the extent of a swapchain targeting the surface.
	if (currentExtent.width == 0xFFFFFFFFu || currentExtent.height == 0xFFFFFFFFu
#if PPSSPP_PLATFORM(IOS)
		|| currentExtent.width == 0 || currentExtent.height == 0
#endif
		) {
		// Surface didn't report a size - ask the app via the registered callback.
		_dbg_assert_((bool)cbGetDrawSize_)
		if (cbGetDrawSize_) {
			currentExtent = cbGetDrawSize_();
		}
	}

	// Clamp the chosen extent into the range the surface actually supports.
	swapChainExtent_.width = clamp(currentExtent.width, surfCapabilities_.minImageExtent.width, surfCapabilities_.maxImageExtent.width);
	swapChainExtent_.height = clamp(currentExtent.height, surfCapabilities_.minImageExtent.height, surfCapabilities_.maxImageExtent.height);

	INFO_LOG(Log::G3D, "surfCapabilities_.current: %dx%d min: %dx%d max: %dx%d computed: %dx%d",
		currentExtent.width, currentExtent.height,
		surfCapabilities_.minImageExtent.width, surfCapabilities_.minImageExtent.height,
		surfCapabilities_.maxImageExtent.width, surfCapabilities_.maxImageExtent.height,
		swapChainExtent_.width, swapChainExtent_.height);

	availablePresentModes_.clear();
	// TODO: Find a better way to specify the prioritized present mode while being able
	// to fall back in a sensible way.
	VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_MAX_ENUM_KHR;
	std::string modes = "";
	for (size_t i = 0; i < presentModeCount; i++) {
		modes += VulkanPresentModeToString(presentModes[i]);
		if (i != presentModeCount - 1) {
			modes += ", ";
		}
		availablePresentModes_.push_back(presentModes[i]);
	}

	INFO_LOG(Log::G3D, "Supported present modes: %s", modes.c_str());
	// Pick the first mode that matches one of the requested flags; if none
	// matches, fall back to the first mode the surface reported.
	for (size_t i = 0; i < presentModeCount; i++) {
		bool match = false;
		match = match || ((flags_ & VULKAN_FLAG_PRESENT_MAILBOX) && presentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR);
		match = match || ((flags_ & VULKAN_FLAG_PRESENT_IMMEDIATE) && presentModes[i] == VK_PRESENT_MODE_IMMEDIATE_KHR);
		match = match || ((flags_ & VULKAN_FLAG_PRESENT_FIFO_RELAXED) && presentModes[i] == VK_PRESENT_MODE_FIFO_RELAXED_KHR);
		match = match || ((flags_ & VULKAN_FLAG_PRESENT_FIFO) && presentModes[i] == VK_PRESENT_MODE_FIFO_KHR);

		// Default to the first present mode from the list.
		if (match || swapchainPresentMode == VK_PRESENT_MODE_MAX_ENUM_KHR) {
			swapchainPresentMode = presentModes[i];
		}
		if (match) {
			break;
		}
	}
	delete[] presentModes;
	// Determine the number of VkImage's to use in the swap chain (we desire to
	// own only 1 image at a time, besides the images being displayed and
	// queued for display):
	uint32_t desiredNumberOfSwapChainImages = surfCapabilities_.minImageCount + 1;
	if ((surfCapabilities_.maxImageCount > 0) &&
		(desiredNumberOfSwapChainImages > surfCapabilities_.maxImageCount))
	{
		// Application must settle for fewer images than desired:
		desiredNumberOfSwapChainImages = surfCapabilities_.maxImageCount;
	}

	INFO_LOG(Log::G3D, "Chosen present mode: %d (%s). numSwapChainImages: %d/%d",
		swapchainPresentMode, VulkanPresentModeToString(swapchainPresentMode),
		desiredNumberOfSwapChainImages, surfCapabilities_.maxImageCount);

	// We mostly follow the practices from
	// https://arm-software.github.io/vulkan_best_practice_for_mobile_developers/samples/surface_rotation/surface_rotation_tutorial.html
	//
	VkSurfaceTransformFlagBitsKHR preTransform;
	std::string supportedTransforms = surface_transforms_to_string(surfCapabilities_.supportedTransforms);
	std::string currentTransform = surface_transforms_to_string(surfCapabilities_.currentTransform);
	g_display.rotation = DisplayRotation::ROTATE_0;
	g_display.rot_matrix.setIdentity();

	uint32_t allowedRotations = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR | VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR | VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR;
	// Hack: Don't allow 270 degrees pretransform (inverse landscape), it creates bizarre issues on some devices (see #15773).
	allowedRotations &= ~VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR;

	if (surfCapabilities_.currentTransform & (VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR | VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR)) {
		preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
	} else if (surfCapabilities_.currentTransform & allowedRotations) {
		// Normal, sensible rotations. Let's handle it.
		// We apply the rotation ourselves via g_display.rot_matrix and swap the
		// extent, so the compositor doesn't have to rotate (faster on mobile).
		preTransform = surfCapabilities_.currentTransform;
		g_display.rot_matrix.setIdentity();
		switch (surfCapabilities_.currentTransform) {
		case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
			g_display.rotation = DisplayRotation::ROTATE_90;
			g_display.rot_matrix.setRotationZ90();
			std::swap(swapChainExtent_.width, swapChainExtent_.height);
			break;
		case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
			g_display.rotation = DisplayRotation::ROTATE_180;
			g_display.rot_matrix.setRotationZ180();
			break;
		case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
			g_display.rotation = DisplayRotation::ROTATE_270;
			g_display.rot_matrix.setRotationZ270();
			std::swap(swapChainExtent_.width, swapChainExtent_.height);
			break;
		default:
			_dbg_assert_(false);
		}
	} else {
		// Let the OS rotate the image (potentially slower on many Android devices)
		preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
	}

	std::string preTransformStr = surface_transforms_to_string(preTransform);
	INFO_LOG(Log::G3D, "Transform supported: %s current: %s chosen: %s", supportedTransforms.c_str(), currentTransform.c_str(), preTransformStr.c_str());

	if (physicalDeviceProperties_[physical_device_].properties.vendorID == VULKAN_VENDOR_IMGTEC) {
		u32 driverVersion = physicalDeviceProperties_[physical_device_].properties.driverVersion;
		// Cutoff the hack at driver version 1.386.1368 (0x00582558, see issue #15773).
		if (driverVersion < 0x00582558) {
			INFO_LOG(Log::G3D, "Applying PowerVR hack (rounding off the width!) driverVersion=%08x", driverVersion);
			// Swap chain width hack to avoid issue #11743 (PowerVR driver bug).
			// To keep the size consistent even with pretransform, do this after the swap. Should be fine.
			// This is fixed in newer PowerVR drivers but I don't know the cutoff.
			swapChainExtent_.width &= ~31;

			// TODO: Also modify display_xres/display_yres appropriately for scissors to match.
			// This will get a bit messy. Ideally we should remove that logic from app-android.cpp
			// and move it here, but the OpenGL code still needs it.
		} else {
			INFO_LOG(Log::G3D, "PowerVR driver version new enough (%08x), not applying swapchain width hack", driverVersion);
		}
	}

	VkSwapchainCreateInfoKHR swap_chain_info{ VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR };
	swap_chain_info.surface = surface_;
	swap_chain_info.minImageCount = desiredNumberOfSwapChainImages;
	swap_chain_info.imageFormat = swapchainFormat_;
	swap_chain_info.imageColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
	swap_chain_info.imageExtent.width = swapChainExtent_.width;
	swap_chain_info.imageExtent.height = swapChainExtent_.height;
	swap_chain_info.preTransform = preTransform;
	swap_chain_info.imageArrayLayers = 1;
	swap_chain_info.presentMode = swapchainPresentMode;
	swap_chain_info.oldSwapchain = VK_NULL_HANDLE;
	swap_chain_info.clipped = true;
	swap_chain_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;

	presentMode_ = swapchainPresentMode;

	// Don't ask for TRANSFER_DST for the swapchain image, we don't use that.
	// if (surfCapabilities_.supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_DST_BIT)
	// swap_chain_info.imageUsage |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;

#ifndef ANDROID
	// We don't support screenshots on Android
	// Add more usage flags if they're supported.
	if (surfCapabilities_.supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT)
		swap_chain_info.imageUsage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
#endif

	swap_chain_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
	swap_chain_info.queueFamilyIndexCount = 0;
	swap_chain_info.pQueueFamilyIndices = NULL;
	// OPAQUE is not supported everywhere.
	if (surfCapabilities_.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR) {
		swap_chain_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
	} else {
		// This should be supported anywhere, and is the only thing supported on the SHIELD TV, for example.
		swap_chain_info.compositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
	}

	res = vkCreateSwapchainKHR(device_, &swap_chain_info, NULL, &swapchain_);
	if (res != VK_SUCCESS) {
		ERROR_LOG(Log::G3D, "vkCreateSwapchainKHR failed!");
		return false;
	}
	INFO_LOG(Log::G3D, "Created swapchain: %dx%d", swap_chain_info.imageExtent.width, swap_chain_info.imageExtent.height);
	return true;
}
1524
1525
void VulkanContext::SetCbGetDrawSize(std::function<VkExtent2D()> cb) {
1526
cbGetDrawSize_ = cb;
1527
}
1528
1529
// Creates a fence, optionally starting out in the signalled state
// (useful for frame-pacing fences that are "ready" on the first frame).
VkFence VulkanContext::CreateFence(bool presignalled) {
	VkFenceCreateInfo fenceInfo{ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO };
	// Brace-init zeroed flags already; only set the bit when requested.
	if (presignalled) {
		fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
	}
	VkFence fence;
	vkCreateFence(device_, &fenceInfo, NULL, &fence);
	return fence;
}
1536
1537
void VulkanContext::PerformPendingDeletes() {
1538
for (int i = 0; i < ARRAY_SIZE(frame_); i++) {
1539
frame_[i].deleteList.PerformDeletes(this, allocator_);
1540
}
1541
Delete().PerformDeletes(this, allocator_);
1542
}
1543
1544
void VulkanContext::DestroyDevice() {
1545
if (swapchain_) {
1546
ERROR_LOG(Log::G3D, "DestroyDevice: Swapchain should have been destroyed.");
1547
}
1548
if (surface_) {
1549
ERROR_LOG(Log::G3D, "DestroyDevice: Surface should have been destroyed.");
1550
}
1551
1552
for (int i = 0; i < ARRAY_SIZE(frame_); i++) {
1553
frame_[i].profiler.Shutdown();
1554
}
1555
1556
INFO_LOG(Log::G3D, "VulkanContext::DestroyDevice (performing deletes)");
1557
PerformPendingDeletes();
1558
1559
vmaDestroyAllocator(allocator_);
1560
allocator_ = VK_NULL_HANDLE;
1561
1562
vkDestroyDevice(device_, nullptr);
1563
device_ = nullptr;
1564
}
1565
1566
bool VulkanContext::CreateShaderModule(const std::vector<uint32_t> &spirv, VkShaderModule *shaderModule, const char *tag) {
1567
VkShaderModuleCreateInfo sm{ VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO };
1568
sm.pCode = spirv.data();
1569
sm.codeSize = spirv.size() * sizeof(uint32_t);
1570
sm.flags = 0;
1571
VkResult result = vkCreateShaderModule(device_, &sm, nullptr, shaderModule);
1572
if (tag) {
1573
SetDebugName(*shaderModule, VK_OBJECT_TYPE_SHADER_MODULE, tag);
1574
}
1575
if (result != VK_SUCCESS) {
1576
return false;
1577
} else {
1578
return true;
1579
}
1580
}
1581
1582
// Translates a Vulkan shader stage flag into the matching glslang stage enum.
// Unrecognized stages fall back to the vertex stage.
EShLanguage FindLanguage(const VkShaderStageFlagBits shader_type) {
	switch (shader_type) {
	case VK_SHADER_STAGE_VERTEX_BIT:                  return EShLangVertex;
	case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:    return EShLangTessControl;
	case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: return EShLangTessEvaluation;
	case VK_SHADER_STAGE_GEOMETRY_BIT:                return EShLangGeometry;
	case VK_SHADER_STAGE_FRAGMENT_BIT:                return EShLangFragment;
	case VK_SHADER_STAGE_COMPUTE_BIT:                 return EShLangCompute;
	default:                                          return EShLangVertex;
	}
}
1606
1607
// Compile a given string containing GLSL into SPV for use by VK
1608
// Return value of false means an error was encountered.
1609
bool GLSLtoSPV(const VkShaderStageFlagBits shader_type, const char *sourceCode, GLSLVariant variant,
1610
std::vector<unsigned int> &spirv, std::string *errorMessage) {
1611
1612
glslang::TProgram program;
1613
const char *shaderStrings[1];
1614
TBuiltInResource Resources{};
1615
InitShaderResources(Resources);
1616
1617
int defaultVersion = 0;
1618
EShMessages messages;
1619
EProfile profile;
1620
1621
switch (variant) {
1622
case GLSLVariant::VULKAN:
1623
// Enable SPIR-V and Vulkan rules when parsing GLSL
1624
messages = (EShMessages)(EShMsgSpvRules | EShMsgVulkanRules);
1625
defaultVersion = 450;
1626
profile = ECoreProfile;
1627
break;
1628
case GLSLVariant::GL140:
1629
messages = (EShMessages)(EShMsgDefault);
1630
defaultVersion = 140;
1631
profile = ECompatibilityProfile;
1632
break;
1633
case GLSLVariant::GLES300:
1634
messages = (EShMessages)(EShMsgDefault);
1635
defaultVersion = 300;
1636
profile = EEsProfile;
1637
break;
1638
default:
1639
return false;
1640
}
1641
1642
EShLanguage stage = FindLanguage(shader_type);
1643
glslang::TShader shader(stage);
1644
1645
shaderStrings[0] = sourceCode;
1646
shader.setStrings(shaderStrings, 1);
1647
1648
if (!shader.parse(&Resources, defaultVersion, profile, false, true, messages)) {
1649
puts(shader.getInfoLog());
1650
puts(shader.getInfoDebugLog());
1651
if (errorMessage) {
1652
*errorMessage = shader.getInfoLog();
1653
(*errorMessage) += shader.getInfoDebugLog();
1654
}
1655
return false; // something didn't work
1656
}
1657
1658
// TODO: Propagate warnings into errorMessages even if we succeeded here.
1659
1660
// Note that program does not take ownership of &shader, so this is fine.
1661
program.addShader(&shader);
1662
1663
if (!program.link(messages)) {
1664
puts(shader.getInfoLog());
1665
puts(shader.getInfoDebugLog());
1666
if (errorMessage) {
1667
*errorMessage = shader.getInfoLog();
1668
(*errorMessage) += shader.getInfoDebugLog();
1669
}
1670
return false;
1671
}
1672
1673
// Can't fail, parsing worked, "linking" worked.
1674
glslang::SpvOptions options;
1675
options.disableOptimizer = false;
1676
options.optimizeSize = false;
1677
options.generateDebugInfo = false;
1678
glslang::GlslangToSpv(*program.getIntermediate(stage), spirv, &options);
1679
return true;
1680
}
1681
1682
// One-time global initialization of the glslang compiler library.
// Per glslang's API contract, must be called before compiling any shader,
// and paired with finalize_glslang() at shutdown.
void init_glslang() {
	glslang::InitializeProcess();
}
1685
1686
// Global teardown of the glslang compiler library; counterpart to init_glslang().
void finalize_glslang() {
	glslang::FinalizeProcess();
}
1689
1690
// Takes ownership of all pending deletions queued in 'del', leaving 'del' empty.
// This list must itself be empty when called (asserted below).
void VulkanDeleteList::Take(VulkanDeleteList &del) {
	_dbg_assert_(cmdPools_.empty());
	_dbg_assert_(descPools_.empty());
	_dbg_assert_(modules_.empty());
	_dbg_assert_(buffers_.empty());
	_dbg_assert_(bufferViews_.empty());
	_dbg_assert_(buffersWithAllocs_.empty());
	_dbg_assert_(imageViews_.empty());
	_dbg_assert_(imagesWithAllocs_.empty());
	_dbg_assert_(deviceMemory_.empty());
	_dbg_assert_(samplers_.empty());
	_dbg_assert_(pipelines_.empty());
	_dbg_assert_(pipelineCaches_.empty());
	_dbg_assert_(renderPasses_.empty());
	_dbg_assert_(framebuffers_.empty());
	_dbg_assert_(pipelineLayouts_.empty());
	_dbg_assert_(descSetLayouts_.empty());
	// BUGFIX: queryPools_ was previously not asserted, moved, or cleared here,
	// even though PerformDeletes() destroys them - taken lists silently left
	// query pools behind.
	_dbg_assert_(queryPools_.empty());
	_dbg_assert_(callbacks_.empty());
	cmdPools_ = std::move(del.cmdPools_);
	descPools_ = std::move(del.descPools_);
	modules_ = std::move(del.modules_);
	buffers_ = std::move(del.buffers_);
	buffersWithAllocs_ = std::move(del.buffersWithAllocs_);
	bufferViews_ = std::move(del.bufferViews_);
	imageViews_ = std::move(del.imageViews_);
	imagesWithAllocs_ = std::move(del.imagesWithAllocs_);
	deviceMemory_ = std::move(del.deviceMemory_);
	samplers_ = std::move(del.samplers_);
	pipelines_ = std::move(del.pipelines_);
	pipelineCaches_ = std::move(del.pipelineCaches_);
	renderPasses_ = std::move(del.renderPasses_);
	framebuffers_ = std::move(del.framebuffers_);
	pipelineLayouts_ = std::move(del.pipelineLayouts_);
	descSetLayouts_ = std::move(del.descSetLayouts_);
	queryPools_ = std::move(del.queryPools_);
	callbacks_ = std::move(del.callbacks_);
	// Explicitly clear the moved-from containers: the standard only guarantees
	// a valid-but-unspecified state after a move.
	del.cmdPools_.clear();
	del.descPools_.clear();
	del.modules_.clear();
	del.buffers_.clear();
	del.buffersWithAllocs_.clear();
	del.bufferViews_.clear();  // BUGFIX: was missing from the clear section.
	del.imageViews_.clear();
	del.imagesWithAllocs_.clear();
	del.deviceMemory_.clear();
	del.samplers_.clear();
	del.pipelines_.clear();
	del.pipelineCaches_.clear();
	del.renderPasses_.clear();
	del.framebuffers_.clear();
	del.pipelineLayouts_.clear();
	del.descSetLayouts_.clear();
	del.queryPools_.clear();
	del.callbacks_.clear();
}
1742
1743
// Destroys every queued Vulkan object and runs every queued callback, then
// clears all the lists. Callbacks run first so user cleanup can still refer
// to objects queued in the same batch. Must only be called once the GPU is
// guaranteed to be done with these objects.
void VulkanDeleteList::PerformDeletes(VulkanContext *vulkan, VmaAllocator allocator) {
	// Tracked purely for stats/debugging; stored into deleteCount_ at the end.
	int deleteCount = 0;

	for (auto &callback : callbacks_) {
		callback.func(vulkan, callback.userdata);
		deleteCount++;
	}
	callbacks_.clear();

	VkDevice device = vulkan->GetDevice();
	for (auto &cmdPool : cmdPools_) {
		vkDestroyCommandPool(device, cmdPool, nullptr);
		deleteCount++;
	}
	cmdPools_.clear();
	for (auto &descPool : descPools_) {
		vkDestroyDescriptorPool(device, descPool, nullptr);
		deleteCount++;
	}
	descPools_.clear();
	for (auto &module : modules_) {
		vkDestroyShaderModule(device, module, nullptr);
		deleteCount++;
	}
	modules_.clear();
	for (auto &buf : buffers_) {
		vkDestroyBuffer(device, buf, nullptr);
		deleteCount++;
	}
	buffers_.clear();
	// Buffers allocated through VMA free their memory together with the buffer.
	for (auto &buf : buffersWithAllocs_) {
		vmaDestroyBuffer(allocator, buf.buffer, buf.alloc);
		deleteCount++;
	}
	buffersWithAllocs_.clear();
	for (auto &bufView : bufferViews_) {
		vkDestroyBufferView(device, bufView, nullptr);
		deleteCount++;
	}
	bufferViews_.clear();
	// Same for VMA-allocated images.
	for (auto &imageWithAlloc : imagesWithAllocs_) {
		vmaDestroyImage(allocator, imageWithAlloc.image, imageWithAlloc.alloc);
		deleteCount++;
	}
	imagesWithAllocs_.clear();
	for (auto &imageView : imageViews_) {
		vkDestroyImageView(device, imageView, nullptr);
		deleteCount++;
	}
	imageViews_.clear();
	for (auto &mem : deviceMemory_) {
		vkFreeMemory(device, mem, nullptr);
		deleteCount++;
	}
	deviceMemory_.clear();
	for (auto &sampler : samplers_) {
		vkDestroySampler(device, sampler, nullptr);
		deleteCount++;
	}
	samplers_.clear();
	for (auto &pipeline : pipelines_) {
		vkDestroyPipeline(device, pipeline, nullptr);
		deleteCount++;
	}
	pipelines_.clear();
	for (auto &pcache : pipelineCaches_) {
		vkDestroyPipelineCache(device, pcache, nullptr);
		deleteCount++;
	}
	pipelineCaches_.clear();
	for (auto &renderPass : renderPasses_) {
		vkDestroyRenderPass(device, renderPass, nullptr);
		deleteCount++;
	}
	renderPasses_.clear();
	for (auto &framebuffer : framebuffers_) {
		vkDestroyFramebuffer(device, framebuffer, nullptr);
		deleteCount++;
	}
	framebuffers_.clear();
	for (auto &pipeLayout : pipelineLayouts_) {
		vkDestroyPipelineLayout(device, pipeLayout, nullptr);
		deleteCount++;
	}
	pipelineLayouts_.clear();
	for (auto &descSetLayout : descSetLayouts_) {
		vkDestroyDescriptorSetLayout(device, descSetLayout, nullptr);
		deleteCount++;
	}
	descSetLayouts_.clear();
	for (auto &queryPool : queryPools_) {
		vkDestroyQueryPool(device, queryPool, nullptr);
		deleteCount++;
	}
	queryPools_.clear();
	deleteCount_ = deleteCount;
}
1840
1841
// Queries memory requirements for 'image'. When VK_KHR_dedicated_allocation
// is available, also reports whether the driver requires or prefers a
// dedicated allocation for this image; otherwise *dedicatedAllocation is false.
void VulkanContext::GetImageMemoryRequirements(VkImage image, VkMemoryRequirements *mem_reqs, bool *dedicatedAllocation) {
	if (!Extensions().KHR_dedicated_allocation) {
		// Plain path - no dedicated-allocation information available.
		vkGetImageMemoryRequirements(GetDevice(), image, mem_reqs);
		*dedicatedAllocation = false;
		return;
	}

	VkImageMemoryRequirementsInfo2KHR memReqInfo2{VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR};
	memReqInfo2.image = image;

	VkMemoryRequirements2KHR memReq2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
	VkMemoryDedicatedRequirementsKHR memDedicatedReq{VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR};
	// Chain the dedicated-requirements struct so the query fills it in too.
	ChainStruct(memReq2, &memDedicatedReq);

	vkGetImageMemoryRequirements2(GetDevice(), &memReqInfo2, &memReq2);

	*mem_reqs = memReq2.memoryRequirements;
	*dedicatedAllocation = memDedicatedReq.requiresDedicatedAllocation != VK_FALSE ||
		memDedicatedReq.prefersDedicatedAllocation != VK_FALSE;
}
1861
1862
bool IsHashMaliDriverVersion(const VkPhysicalDeviceProperties &props) {
1863
// ARM used to put a hash in place of the driver version.
1864
// Now they only use major versions. We'll just make a bad heuristic.
1865
uint32_t major = VK_VERSION_MAJOR(props.driverVersion);
1866
uint32_t branch = VK_VERSION_PATCH(props.driverVersion);
1867
if (branch > 0)
1868
return true;
1869
if (branch > 100 || major > 100)
1870
return true;
1871
// Can (in theory) have false negatives!
1872
return false;
1873
}
1874
1875
// From Sascha's code
1876
std::string FormatDriverVersion(const VkPhysicalDeviceProperties &props) {
1877
if (props.vendorID == VULKAN_VENDOR_NVIDIA) {
1878
// For whatever reason, NVIDIA has their own scheme.
1879
// 10 bits = major version (up to r1023)
1880
// 8 bits = minor version (up to 255)
1881
// 8 bits = secondary branch version/build version (up to 255)
1882
// 6 bits = tertiary branch/build version (up to 63)
1883
uint32_t major = (props.driverVersion >> 22) & 0x3ff;
1884
uint32_t minor = (props.driverVersion >> 14) & 0x0ff;
1885
uint32_t secondaryBranch = (props.driverVersion >> 6) & 0x0ff;
1886
uint32_t tertiaryBranch = (props.driverVersion) & 0x003f;
1887
return StringFromFormat("%d.%d.%d.%d", major, minor, secondaryBranch, tertiaryBranch);
1888
} else if (props.vendorID == VULKAN_VENDOR_ARM) {
1889
// ARM used to just put a hash here. No point in splitting it up.
1890
if (IsHashMaliDriverVersion(props)) {
1891
return StringFromFormat("(hash) %08x", props.driverVersion);
1892
}
1893
}
1894
// Qualcomm has an inscrutable versioning scheme. Let's just display it as normal.
1895
// Standard scheme, use the standard macros.
1896
uint32_t major = VK_VERSION_MAJOR(props.driverVersion);
1897
uint32_t minor = VK_VERSION_MINOR(props.driverVersion);
1898
uint32_t branch = VK_VERSION_PATCH(props.driverVersion);
1899
return StringFromFormat("%d.%d.%d (%08x)", major, minor, branch, props.driverVersion);
1900
}
1901
1902
// Formats a packed Vulkan API version (VK_MAKE_API_VERSION layout) as
// "major.minor.patch". The variant bits are intentionally ignored.
std::string FormatAPIVersion(u32 version) {
	return StringFromFormat("%d.%d.%d", VK_API_VERSION_MAJOR(version), VK_API_VERSION_MINOR(version), VK_API_VERSION_PATCH(version));
}
1905
1906
// Mainly just the formats seen on gpuinfo.org for swapchains, as this function is only used for listing
// those in the UI. Also depth buffers that we used in one place.
// Might add more in the future if we find more uses for this.
// Returns a static string; never allocates, never returns nullptr.
const char *VulkanFormatToString(VkFormat format) {
	switch (format) {
	// Color formats (swapchain candidates).
	case VK_FORMAT_A1R5G5B5_UNORM_PACK16: return "A1R5G5B5_UNORM_PACK16";
	case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return "A2B10G10R10_UNORM_PACK32";
	case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return "A2R10G10B10_UNORM_PACK32";
	case VK_FORMAT_A8B8G8R8_SNORM_PACK32: return "A8B8G8R8_SNORM_PACK32";
	case VK_FORMAT_A8B8G8R8_SRGB_PACK32: return "A8B8G8R8_SRGB_PACK32";
	case VK_FORMAT_A8B8G8R8_UNORM_PACK32: return "A8B8G8R8_UNORM_PACK32";
	case VK_FORMAT_B10G11R11_UFLOAT_PACK32: return "B10G11R11_UFLOAT_PACK32";
	case VK_FORMAT_B4G4R4A4_UNORM_PACK16: return "B4G4R4A4_UNORM_PACK16";
	case VK_FORMAT_B5G5R5A1_UNORM_PACK16: return "B5G5R5A1_UNORM_PACK16";
	case VK_FORMAT_B5G6R5_UNORM_PACK16: return "B5G6R5_UNORM_PACK16";
	case VK_FORMAT_B8G8R8A8_SNORM: return "B8G8R8A8_SNORM";
	case VK_FORMAT_B8G8R8A8_SRGB: return "B8G8R8A8_SRGB";
	case VK_FORMAT_B8G8R8A8_UNORM: return "B8G8R8A8_UNORM";
	case VK_FORMAT_R16G16B16A16_SFLOAT: return "R16G16B16A16_SFLOAT";
	case VK_FORMAT_R16G16B16A16_SNORM: return "R16G16B16A16_SNORM";
	case VK_FORMAT_R16G16B16A16_UNORM: return "R16G16B16A16_UNORM";
	case VK_FORMAT_R4G4B4A4_UNORM_PACK16: return "R4G4B4A4_UNORM_PACK16";
	case VK_FORMAT_R5G5B5A1_UNORM_PACK16: return "R5G5B5A1_UNORM_PACK16";
	case VK_FORMAT_R5G6B5_UNORM_PACK16: return "R5G6B5_UNORM_PACK16";
	case VK_FORMAT_R8G8B8A8_SNORM: return "R8G8B8A8_SNORM";
	case VK_FORMAT_R8G8B8A8_SRGB: return "R8G8B8A8_SRGB";
	case VK_FORMAT_R8G8B8A8_UNORM: return "R8G8B8A8_UNORM";

	// Depth/stencil formats, with shorter display names.
	case VK_FORMAT_D24_UNORM_S8_UINT: return "D24S8";
	case VK_FORMAT_D16_UNORM: return "D16";
	case VK_FORMAT_D16_UNORM_S8_UINT: return "D16S8";
	case VK_FORMAT_D32_SFLOAT: return "D32f";
	case VK_FORMAT_D32_SFLOAT_S8_UINT: return "D32fS8";
	case VK_FORMAT_S8_UINT: return "S8";
	case VK_FORMAT_UNDEFINED: return "UNDEFINED (BAD!)";

	default: return "(format not added to string list)";
	}
}
1945
1946
// I miss Rust where this is automatic :(
// Maps a VkColorSpaceKHR to a short display string for UI/logging.
// Returns a static string; never allocates, never returns nullptr.
const char *VulkanColorSpaceToString(VkColorSpaceKHR colorSpace) {
	switch (colorSpace) {
	case VK_COLOR_SPACE_SRGB_NONLINEAR_KHR: return "SRGB_NONLINEAR";
	case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT: return "DISPLAY_P3_NONLINEAR";
	case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT: return "EXTENDED_SRGB_LINEAR";
	case VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT: return "DISPLAY_P3_LINEAR";
	case VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT: return "DCI_P3_NONLINEAR";
	case VK_COLOR_SPACE_BT709_LINEAR_EXT: return "BT709_LINEAR";
	case VK_COLOR_SPACE_BT709_NONLINEAR_EXT: return "BT709_NONLINEAR";
	case VK_COLOR_SPACE_BT2020_LINEAR_EXT: return "BT2020_LINEAR";
	case VK_COLOR_SPACE_HDR10_ST2084_EXT: return "HDR10_ST2084";
	case VK_COLOR_SPACE_DOLBYVISION_EXT: return "DOLBYVISION";
	case VK_COLOR_SPACE_HDR10_HLG_EXT: return "HDR10_HLG";
	case VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT: return "ADOBERGB_LINEAR";
	case VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT: return "ADOBERGB_NONLINEAR";
	case VK_COLOR_SPACE_PASS_THROUGH_EXT: return "PASS_THROUGH";
	case VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT: return "EXTENDED_SRGB_NONLINEAR";
	case VK_COLOR_SPACE_DISPLAY_NATIVE_AMD: return "DISPLAY_NATIVE_AMD";
	default: return "(unknown)";
	}
}
1968
1969