Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
hrydgard
GitHub Repository: hrydgard/ppsspp
Path: blob/master/Common/GPU/Vulkan/VulkanContext.cpp
5659 views
1
#define __STDC_LIMIT_MACROS
2
3
#include <cstdlib>
4
#include <cstdint>
5
#include <cstring>
6
#include <iostream>
7
8
#include "Common/System/System.h"
9
#include "Common/System/Display.h"
10
#include "Common/Log.h"
11
#include "Common/GPU/Shader.h"
12
#include "Common/GPU/Vulkan/VulkanContext.h"
13
#include "Common/GPU/Vulkan/VulkanDebug.h"
14
#include "Common/StringUtils.h"
15
16
#ifdef USE_CRT_DBG
17
#undef new
18
#endif
19
20
#include "ext/vma/vk_mem_alloc.h"
21
22
23
// Change this to 1, 2, and 3 to fake failures in a few places, so that
24
// we can test our fallback-to-GL code.
25
#define SIMULATE_VULKAN_FAILURE 0
26
27
#include "ext/glslang/SPIRV/GlslangToSpv.h"
28
29
#ifdef USE_CRT_DBG
30
#define new DBG_NEW
31
#endif
32
33
using namespace PPSSPP_VK;

// Global options controlling what the Vulkan debug callback logs.
VulkanLogOptions g_LogOptions;

// Layers requested when validation is enabled (VulkanInitFlags::VALIDATE).
// Only the unified Khronos layer is used; the old per-component layers that
// shipped with the Android NDK are kept here commented out for reference.
static const char * const validationLayers[] = {
	"VK_LAYER_KHRONOS_validation",
	/*
	// For layers included in the Android NDK.
	"VK_LAYER_GOOGLE_threading",
	"VK_LAYER_LUNARG_parameter_validation",
	"VK_LAYER_LUNARG_core_validation",
	"VK_LAYER_LUNARG_image",
	"VK_LAYER_LUNARG_object_tracker",
	"VK_LAYER_LUNARG_swapchain",
	"VK_LAYER_GOOGLE_unique_objects",
	*/
};
50
51
std::string VulkanVendorString(uint32_t vendorId) {
52
switch (vendorId) {
53
case VULKAN_VENDOR_INTEL: return "Intel";
54
case VULKAN_VENDOR_NVIDIA: return "NVIDIA";
55
case VULKAN_VENDOR_AMD: return "AMD";
56
case VULKAN_VENDOR_ARM: return "ARM";
57
case VULKAN_VENDOR_QUALCOMM: return "Qualcomm";
58
case VULKAN_VENDOR_IMGTEC: return "Imagination";
59
case VULKAN_VENDOR_APPLE: return "Apple";
60
case VULKAN_VENDOR_MESA: return "Mesa";
61
default:
62
return StringFromFormat("%08x", vendorId);
63
}
64
}
65
66
// Returns a short human-readable name for a VkPresentModeKHR, for logging.
const char *VulkanPresentModeToString(VkPresentModeKHR presentMode) {
	if (presentMode == VK_PRESENT_MODE_IMMEDIATE_KHR)
		return "IMMEDIATE";
	if (presentMode == VK_PRESENT_MODE_MAILBOX_KHR)
		return "MAILBOX";
	if (presentMode == VK_PRESENT_MODE_FIFO_KHR)
		return "FIFO";
	if (presentMode == VK_PRESENT_MODE_FIFO_RELAXED_KHR)
		return "FIFO_RELAXED";
	if (presentMode == VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR)
		return "SHARED_DEMAND_REFRESH_KHR";
	if (presentMode == VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR)
		return "SHARED_CONTINUOUS_REFRESH_KHR";
	if (presentMode == VK_PRESENT_MODE_FIFO_LATEST_READY_KHR)
		return "FIFO_LATEST_READY";
	return "UNKNOWN";
}
78
79
// Returns a short human-readable name for a VkImageLayout, for logging.
// Layouts this code never logs fall through to "OTHER".
const char *VulkanImageLayoutToString(VkImageLayout imageLayout) {
	if (imageLayout == VK_IMAGE_LAYOUT_UNDEFINED)
		return "UNDEFINED";
	if (imageLayout == VK_IMAGE_LAYOUT_GENERAL)
		return "GENERAL";
	if (imageLayout == VK_IMAGE_LAYOUT_PREINITIALIZED)
		return "PREINITIALIZED";
	if (imageLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
		return "TRANSFER_SRC_OPTIMAL";
	if (imageLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
		return "TRANSFER_DST_OPTIMAL";
	if (imageLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
		return "SHADER_READ_ONLY_OPTIMAL";
	if (imageLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
		return "COLOR_ATTACHMENT_OPTIMAL";
	if (imageLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)
		return "DEPTH_STENCIL_ATTACHMENT_OPTIMAL";
	return "OTHER";
}
92
93
// Trivial constructor - all real initialization happens in CreateInstance()
// and CreateDevice().
VulkanContext::VulkanContext() {}
96
97
// Creates the VkInstance: picks the API version, enables the platform's
// surface extension(s) and, optionally, validation, then enumerates the
// physical devices and caches their properties.
// On failure, sets init_error_ and returns an error code; on success the
// instance and physical device lists are ready for CreateDevice().
VkResult VulkanContext::CreateInstance(const CreateInfo &info) {
	if (!vkCreateInstance) {
		// The loader never resolved the core entry points - Vulkan is unusable.
		init_error_ = "Vulkan not loaded - can't create instance";
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	if (info.flags & VulkanInitFlags::DISABLE_IMPLICIT_LAYERS) {
		// Ask the Vulkan loader to skip all implicit layers (overlays etc.),
		// a common source of instability.
		// https://github.com/KhronosGroup/Vulkan-Loader/blob/main/docs/LoaderDebugging.md
#if PPSSPP_PLATFORM(WINDOWS)
#if !PPSSPP_PLATFORM(UWP)
		// Windows uses _putenv_s
		_putenv_s("VK_LOADER_LAYERS_DISABLE", "~implicit~");
#endif
#else
		// POSIX: use setenv
		setenv("VK_LOADER_LAYERS_DISABLE", "~implicit~", 1); // overwrite = 1
#endif
	}

	// Check which Vulkan version we should request.
	// Our code is fine with any version from 1.0 to 1.2, we don't know about higher versions.
	vulkanInstanceApiVersion_ = VK_API_VERSION_1_0;
	if (vkEnumerateInstanceVersion) {
		vkEnumerateInstanceVersion(&vulkanInstanceApiVersion_);
		vulkanInstanceApiVersion_ &= 0xFFFFF000; // Remove patch version.
		// Cap at 1.4, the highest version this code knows about.
		vulkanInstanceApiVersion_ = std::min(VK_API_VERSION_1_4, vulkanInstanceApiVersion_);
		std::string versionString = FormatAPIVersion(vulkanInstanceApiVersion_);
		INFO_LOG(Log::G3D, "Detected Vulkan API version: %s", versionString.c_str());
	}

	instance_layer_names_.clear();
	device_layer_names_.clear();

	// We can get the list of layers and extensions without an instance so we can use this information
	// to enable the extensions we need that are available.
	GetInstanceLayerProperties();
	GetInstanceLayerExtensionList(nullptr, instance_extension_properties_);

	if (!IsInstanceExtensionAvailable(VK_KHR_SURFACE_EXTENSION_NAME)) {
		// Cannot create a Vulkan display without VK_KHR_SURFACE_EXTENSION.
		init_error_ = "Vulkan not loaded - no surface extension";
		return VK_ERROR_INITIALIZATION_FAILED;
	}
	createInfo_ = info;

	// List extensions to try to enable.
	instance_extensions_enabled_.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
#ifdef _WIN32
	instance_extensions_enabled_.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
#elif defined(__ANDROID__)
	instance_extensions_enabled_.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
#else
	// Generic desktop path: enable whichever window-system surface extensions
	// are compiled in AND reported available by the loader.
#if defined(VK_USE_PLATFORM_XLIB_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_XLIB_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
	}
#endif
//#if defined(VK_USE_PLATFORM_XCB_KHR)
//	instance_extensions_enabled_.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
//#endif
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
	}
#endif
#if defined(VK_USE_PLATFORM_DISPLAY_KHR)
	if (IsInstanceExtensionAvailable(VK_KHR_DISPLAY_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_KHR_DISPLAY_EXTENSION_NAME);
	}
#endif
#if defined(VK_USE_PLATFORM_METAL_EXT)
	if (IsInstanceExtensionAvailable(VK_EXT_METAL_SURFACE_EXTENSION_NAME)) {
		instance_extensions_enabled_.push_back(VK_EXT_METAL_SURFACE_EXTENSION_NAME);
	}
#endif
#endif

	// Validation is skipped when using a custom driver (e.g. Turnip on Android).
	if ((createInfo_.flags & VulkanInitFlags::VALIDATE) && info.customDriver.empty()) {
		if (IsInstanceExtensionAvailable(VK_EXT_DEBUG_UTILS_EXTENSION_NAME)) {
			// Enable the validation layers
			for (size_t i = 0; i < ARRAY_SIZE(validationLayers); i++) {
				instance_layer_names_.push_back(validationLayers[i]);
				device_layer_names_.push_back(validationLayers[i]);
			}
			instance_extensions_enabled_.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
			extensionsLookup_.EXT_debug_utils = true;
			INFO_LOG(Log::G3D, "Vulkan debug_utils validation enabled.");
		} else {
			ERROR_LOG(Log::G3D, "Validation layer extension not available - not enabling Vulkan validation.");
			createInfo_.flags &= ~VulkanInitFlags::VALIDATE;
		}
	}

	// Uncomment to test GPU backend fallback
	// abort();

	if (EnableInstanceExtension(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME, VK_API_VERSION_1_1)) {
		extensionsLookup_.KHR_get_physical_device_properties2 = true;
	}

	if (EnableInstanceExtension(VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME, 0)) {
		extensionsLookup_.EXT_swapchain_colorspace = true;
	}

	// Validate that all the instance extensions we ask for are actually available.
	for (auto ext : instance_extensions_enabled_) {
		if (!IsInstanceExtensionAvailable(ext))
			WARN_LOG(Log::G3D, "WARNING: Does not seem that instance extension '%s' is available. Trying to proceed anyway.", ext);
	}

	VkApplicationInfo app_info{ VK_STRUCTURE_TYPE_APPLICATION_INFO };
	app_info.pApplicationName = info.app_name;
	app_info.applicationVersion = info.app_ver;
	app_info.pEngineName = info.app_name;
	// Let's increment this when we make major engine/context changes.
	app_info.engineVersion = 2;
	app_info.apiVersion = vulkanInstanceApiVersion_;

	VkInstanceCreateInfo inst_info{ VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO };
	inst_info.flags = 0;
	inst_info.pApplicationInfo = &app_info;
	inst_info.enabledLayerCount = (uint32_t)instance_layer_names_.size();
	inst_info.ppEnabledLayerNames = instance_layer_names_.size() ? instance_layer_names_.data() : nullptr;
	inst_info.enabledExtensionCount = (uint32_t)instance_extensions_enabled_.size();
	inst_info.ppEnabledExtensionNames = instance_extensions_enabled_.size() ? instance_extensions_enabled_.data() : nullptr;

#if PPSSPP_PLATFORM(IOS_APP_STORE)
	// MoltenVK is a portability implementation; must opt in to enumerate it.
	inst_info.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
#endif

#if SIMULATE_VULKAN_FAILURE == 2
	VkResult res = VK_ERROR_INCOMPATIBLE_DRIVER;
#else
	VkResult res = vkCreateInstance(&inst_info, nullptr, &instance_);
#endif
	if (res != VK_SUCCESS) {
		if (res == VK_ERROR_LAYER_NOT_PRESENT) {
			WARN_LOG(Log::G3D, "Validation on but instance layer not available - dropping layers");
			// Drop the validation layers and try again.
			instance_layer_names_.clear();
			device_layer_names_.clear();
			inst_info.enabledLayerCount = 0;
			inst_info.ppEnabledLayerNames = nullptr;
			res = vkCreateInstance(&inst_info, nullptr, &instance_);
			if (res != VK_SUCCESS)
				ERROR_LOG(Log::G3D, "Failed to create instance even without validation: %d", res);
		} else {
			ERROR_LOG(Log::G3D, "Failed to create instance : %d", res);
		}
	}
	if (res != VK_SUCCESS) {
		init_error_ = "Failed to create Vulkan instance";
		return res;
	}

	VulkanLoadInstanceFunctions(instance_, extensionsLookup_, vulkanInstanceApiVersion_);
	// Layer check failure here is non-fatal by design - we only warn.
	if (!CheckLayers(instance_layer_properties_, instance_layer_names_)) {
		WARN_LOG(Log::G3D, "CheckLayers for instance failed");
		// init_error_ = "Failed to validate instance layers";
		// return;
	}

	uint32_t gpu_count = 1;
#if SIMULATE_VULKAN_FAILURE == 3
	gpu_count = 0;
#else
	res = vkEnumeratePhysicalDevices(instance_, &gpu_count, nullptr);
#endif
	if (gpu_count <= 0) {
		ERROR_LOG(Log::G3D, "Vulkan driver found but no supported GPU is available");
		init_error_ = "No Vulkan physical devices found";
		vkDestroyInstance(instance_, nullptr);
		instance_ = nullptr;
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	_dbg_assert_(gpu_count > 0);
	physical_devices_.resize(gpu_count);
	physicalDeviceProperties_.resize(gpu_count);
	res = vkEnumeratePhysicalDevices(instance_, &gpu_count, physical_devices_.data());
	if (res != VK_SUCCESS) {
		init_error_ = "Failed to enumerate physical devices";
		vkDestroyInstance(instance_, nullptr);
		instance_ = nullptr;
		return res;
	}

	if (extensionsLookup_.KHR_get_physical_device_properties2 && vkGetPhysicalDeviceProperties2) {
		// Query extended properties (push descriptors, external host memory,
		// depth/stencil resolve) for each GPU via a pNext chain.
		for (uint32_t i = 0; i < gpu_count; i++) {
			VkPhysicalDeviceProperties2 props2{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2};
			VkPhysicalDevicePushDescriptorPropertiesKHR pushProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR};
			VkPhysicalDeviceExternalMemoryHostPropertiesEXT extHostMemProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT};
			VkPhysicalDeviceDepthStencilResolveProperties depthStencilResolveProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES};
			ChainStruct(props2, &pushProps);
			ChainStruct(props2, &extHostMemProps);
			ChainStruct(props2, &depthStencilResolveProps);
			vkGetPhysicalDeviceProperties2(physical_devices_[i], &props2);

			// Don't want bad pointers sitting around. Probably not really necessary.
			props2.pNext = nullptr;
			pushProps.pNext = nullptr;
			extHostMemProps.pNext = nullptr;
			depthStencilResolveProps.pNext = nullptr;
			physicalDeviceProperties_[i].properties = props2.properties;
			physicalDeviceProperties_[i].pushDescriptorProperties = pushProps;
			physicalDeviceProperties_[i].externalMemoryHostProperties = extHostMemProps;
			physicalDeviceProperties_[i].depthStencilResolve = depthStencilResolveProps;
		}
	} else {
		// Fallback: plain Vulkan 1.0 properties only.
		for (uint32_t i = 0; i < gpu_count; i++) {
			vkGetPhysicalDeviceProperties(physical_devices_[i], &physicalDeviceProperties_[i].properties);
		}
	}

	// Log the list of devices.
	INFO_LOG(Log::G3D, "%d Vulkan devices found:", (int)physicalDeviceProperties_.size());
	for (const auto &props : physicalDeviceProperties_) {
		INFO_LOG(Log::G3D, "%s (vendor: %08x)", props.properties.deviceName, props.properties.vendorID);
	}

	if (extensionsLookup_.EXT_debug_utils) {
		_assert_(vkCreateDebugUtilsMessengerEXT != nullptr);
		InitDebugUtilsCallback();
	}

	return VK_SUCCESS;
}
324
325
// The instance must already have been torn down via DestroyInstance()
// before the context object itself is destroyed.
VulkanContext::~VulkanContext() {
	_dbg_assert_(instance_ == VK_NULL_HANDLE);
}
328
329
void VulkanContext::DestroyInstance() {
330
if (extensionsLookup_.EXT_debug_utils) {
331
while (utils_callbacks.size() > 0) {
332
vkDestroyDebugUtilsMessengerEXT(instance_, utils_callbacks.back(), nullptr);
333
utils_callbacks.pop_back();
334
}
335
}
336
337
vkDestroyInstance(instance_, nullptr);
338
VulkanFree();
339
instance_ = VK_NULL_HANDLE;
340
}
341
342
// Starts a CPU frame: flushes the deletes queued while this frame slot was
// previously in flight, and kicks the GPU profiler if one is active.
void VulkanContext::BeginFrame(VkCommandBuffer firstCommandBuffer) {
	FrameData &frame = frame_[curFrame_];
	// Process pending deletes for this slot.
	frame.deleteList.PerformDeletes(this, allocator_);
	// firstCommandBuffer is VK_NULL_HANDLE when the profiler is disabled.
	if (firstCommandBuffer != VK_NULL_HANDLE) {
		frame.profiler.BeginFrame(this, firstCommandBuffer);
	}
}
351
352
void VulkanContext::EndFrame() {
353
frame_[curFrame_].deleteList.Take(globalDeleteList_);
354
curFrame_++;
355
if (curFrame_ >= inflightFrames_) {
356
curFrame_ = 0;
357
}
358
}
359
360
void VulkanContext::UpdateInflightFrames(int n) {
361
_dbg_assert_(n >= 1 && n <= MAX_INFLIGHT_FRAMES);
362
inflightFrames_ = n;
363
if (curFrame_ >= inflightFrames_) {
364
curFrame_ = 0;
365
}
366
}
367
368
// Blocks until the graphics queue has drained all submitted work.
void VulkanContext::WaitUntilQueueIdle() {
	// Should almost never be used
	vkQueueWaitIdle(gfx_queue_);
}
372
373
bool VulkanContext::MemoryTypeFromProperties(uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex) {
374
// Search memtypes to find first index with those properties
375
for (uint32_t i = 0; i < 32; i++) {
376
if ((typeBits & 1) == 1) {
377
// Type is available, does it match user properties?
378
if ((memory_properties_.memoryTypes[i].propertyFlags & requirements_mask) == requirements_mask) {
379
*typeIndex = i;
380
return true;
381
}
382
}
383
typeBits >>= 1;
384
}
385
// No memory types matched, return failure
386
return false;
387
}
388
389
void VulkanContext::DestroySwapchain() {
390
if (swapchain_ != VK_NULL_HANDLE) {
391
vkDestroySwapchainKHR(device_, swapchain_, nullptr);
392
swapchain_ = VK_NULL_HANDLE;
393
}
394
swapchainInited_ = false;
395
}
396
397
void VulkanContext::DestroySurface() {
398
if (surface_ != VK_NULL_HANDLE) {
399
vkDestroySurfaceKHR(instance_, surface_, nullptr);
400
surface_ = VK_NULL_HANDLE;
401
402
// NOTE: We do not reset winSysData1 and 2, it's useful for debugging to compare them.
403
}
404
}
405
406
// Enumerates instance extensions for the given layer (or the implementation
// itself when layerName is nullptr) into `extensions`.
// Uses the standard Vulkan two-call pattern, retrying while the loader
// reports VK_INCOMPLETE (the list changed between the two calls).
VkResult VulkanContext::GetInstanceLayerExtensionList(const char *layerName, std::vector<VkExtensionProperties> &extensions) {
	while (true) {
		uint32_t count = 0;
		VkResult res = vkEnumerateInstanceExtensionProperties(layerName, &count, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (count == 0)
			return VK_SUCCESS;
		extensions.resize(count);
		res = vkEnumerateInstanceExtensionProperties(layerName, &count, extensions.data());
		if (res != VK_INCOMPLETE)
			return res;
	}
}
420
421
// Fills instance_layer_properties_ with every instance layer and the
// extensions each one provides.
// The set of layers can, in rare cases, change between the count query and
// the data query (e.g. something gets installed in between); the loader
// signals this with VK_INCOMPLETE, so we loop until the enumeration is stable.
VkResult VulkanContext::GetInstanceLayerProperties() {
	uint32_t layerCount = 0;
	std::vector<VkLayerProperties> props;
	VkResult res;
	do {
		res = vkEnumerateInstanceLayerProperties(&layerCount, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (layerCount == 0)
			return VK_SUCCESS;
		props.resize(layerCount);
		res = vkEnumerateInstanceLayerProperties(&layerCount, props.data());
	} while (res == VK_INCOMPLETE);

	// For each layer, also gather the extension list it exposes.
	for (uint32_t i = 0; i < layerCount; i++) {
		LayerProperties layerProps;
		layerProps.properties = props[i];
		res = GetInstanceLayerExtensionList(layerProps.properties.layerName, layerProps.extensions);
		if (res != VK_SUCCESS)
			return res;
		instance_layer_properties_.push_back(layerProps);
	}
	return res;
}
458
459
// Pass layerName == nullptr to get the extension list for the device.
460
VkResult VulkanContext::GetDeviceLayerExtensionList(const char *layerName, std::vector<VkExtensionProperties> &extensions) {
461
VkResult res;
462
do {
463
uint32_t device_extension_count;
464
res = vkEnumerateDeviceExtensionProperties(physical_devices_[physical_device_], layerName, &device_extension_count, nullptr);
465
if (res != VK_SUCCESS)
466
return res;
467
if (!device_extension_count)
468
return VK_SUCCESS;
469
extensions.resize(device_extension_count);
470
res = vkEnumerateDeviceExtensionProperties(physical_devices_[physical_device_], layerName, &device_extension_count, extensions.data());
471
} while (res == VK_INCOMPLETE);
472
return res;
473
}
474
475
// Fills device_layer_properties_ with every device layer and the extensions
// each one provides, for the currently selected physical device.
// As with the instance variant, the layer set can (very rarely) change
// between the count query and the data query; VK_INCOMPLETE triggers a retry.
VkResult VulkanContext::GetDeviceLayerProperties() {
	uint32_t layerCount = 0;
	std::vector<VkLayerProperties> props;
	VkResult res;
	do {
		res = vkEnumerateDeviceLayerProperties(physical_devices_[physical_device_], &layerCount, nullptr);
		if (res != VK_SUCCESS)
			return res;
		if (layerCount == 0)
			return VK_SUCCESS;
		props.resize(layerCount);
		res = vkEnumerateDeviceLayerProperties(physical_devices_[physical_device_], &layerCount, props.data());
	} while (res == VK_INCOMPLETE);

	// Also gather each layer's extension list.
	for (uint32_t i = 0; i < layerCount; i++) {
		LayerProperties layerProps;
		layerProps.properties = props[i];
		res = GetDeviceLayerExtensionList(layerProps.properties.layerName, layerProps.extensions);
		if (res != VK_SUCCESS)
			return res;
		device_layer_properties_.push_back(layerProps);
	}
	return res;
}
512
513
// Returns true if all layer names specified in check_names can be found in given layer properties.
514
bool VulkanContext::CheckLayers(const std::vector<LayerProperties> &layer_props, const std::vector<const char *> &layer_names) const {
515
uint32_t check_count = (uint32_t)layer_names.size();
516
uint32_t layer_count = (uint32_t)layer_props.size();
517
for (uint32_t i = 0; i < check_count; i++) {
518
bool found = false;
519
for (uint32_t j = 0; j < layer_count; j++) {
520
if (!strcmp(layer_names[i], layer_props[j].properties.layerName)) {
521
found = true;
522
}
523
}
524
if (!found) {
525
std::cout << "Cannot find layer: " << layer_names[i] << std::endl;
526
return false;
527
}
528
}
529
return true;
530
}
531
532
int VulkanContext::GetPhysicalDeviceByName(std::string_view name) const {
533
for (size_t i = 0; i < physical_devices_.size(); i++) {
534
if (equals(physicalDeviceProperties_[i].properties.deviceName, name))
535
return (int)i;
536
}
537
return -1;
538
}
539
540
int VulkanContext::GetBestPhysicalDevice() const {
541
// Rules: Prefer discrete over embedded.
542
// Prefer nVidia over Intel.
543
544
int maxScore = -1;
545
int best = -1;
546
547
for (size_t i = 0; i < physical_devices_.size(); i++) {
548
int score = 0;
549
VkPhysicalDeviceProperties props;
550
vkGetPhysicalDeviceProperties(physical_devices_[i], &props);
551
switch (props.deviceType) {
552
case VK_PHYSICAL_DEVICE_TYPE_CPU:
553
score += 1;
554
break;
555
case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
556
score += 2;
557
break;
558
case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
559
score += 20;
560
break;
561
case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
562
score += 10;
563
break;
564
default:
565
break;
566
}
567
if (props.vendorID == VULKAN_VENDOR_AMD) {
568
score += 5;
569
} else if (props.vendorID == VULKAN_VENDOR_NVIDIA) {
570
score += 5;
571
}
572
if (score > maxScore) {
573
best = (int)i;
574
maxScore = score;
575
}
576
}
577
return best;
578
}
579
580
bool VulkanContext::EnableDeviceExtension(const char *extension, uint32_t coreVersion) {
581
if (coreVersion != 0 && vulkanDeviceApiVersion_ >= coreVersion) {
582
return true;
583
}
584
for (auto &iter : device_extension_properties_) {
585
if (!strcmp(iter.extensionName, extension)) {
586
device_extensions_enabled_.push_back(extension);
587
return true;
588
}
589
}
590
return false;
591
}
592
593
bool VulkanContext::EnableInstanceExtension(const char *extension, uint32_t coreVersion) {
594
if (coreVersion != 0 && vulkanInstanceApiVersion_ >= coreVersion) {
595
return true;
596
}
597
for (auto &iter : instance_extension_properties_) {
598
if (!strcmp(iter.extensionName, extension)) {
599
instance_extensions_enabled_.push_back(extension);
600
return true;
601
}
602
}
603
return false;
604
}
605
606
VkResult VulkanContext::CreateDevice(int physical_device) {
607
physical_device_ = physical_device;
608
INFO_LOG(Log::G3D, "Chose physical device %d: %s", physical_device, physicalDeviceProperties_[physical_device].properties.deviceName);
609
610
vulkanDeviceApiVersion_ = physicalDeviceProperties_[physical_device].properties.apiVersion;
611
612
GetDeviceLayerProperties();
613
if (!CheckLayers(device_layer_properties_, device_layer_names_)) {
614
WARN_LOG(Log::G3D, "CheckLayers for device %d failed", physical_device);
615
}
616
617
vkGetPhysicalDeviceQueueFamilyProperties(physical_devices_[physical_device_], &queue_count, nullptr);
618
_dbg_assert_(queue_count >= 1);
619
620
queueFamilyProperties_.resize(queue_count);
621
vkGetPhysicalDeviceQueueFamilyProperties(physical_devices_[physical_device_], &queue_count, queueFamilyProperties_.data());
622
_dbg_assert_(queue_count >= 1);
623
624
// Detect preferred depth/stencil formats, in this order. All supported devices will support at least one of these.
625
static const VkFormat depthStencilFormats[] = {
626
VK_FORMAT_D24_UNORM_S8_UINT,
627
VK_FORMAT_D32_SFLOAT_S8_UINT,
628
VK_FORMAT_D16_UNORM_S8_UINT,
629
};
630
631
deviceInfo_.preferredDepthStencilFormat = VK_FORMAT_UNDEFINED;
632
for (size_t i = 0; i < ARRAY_SIZE(depthStencilFormats); i++) {
633
VkFormatProperties props;
634
vkGetPhysicalDeviceFormatProperties(physical_devices_[physical_device_], depthStencilFormats[i], &props);
635
if (props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) {
636
deviceInfo_.preferredDepthStencilFormat = depthStencilFormats[i];
637
break;
638
}
639
}
640
641
_assert_msg_(deviceInfo_.preferredDepthStencilFormat != VK_FORMAT_UNDEFINED, "Could not find a usable depth stencil format.");
642
VkFormatProperties preferredProps;
643
vkGetPhysicalDeviceFormatProperties(physical_devices_[physical_device_], deviceInfo_.preferredDepthStencilFormat, &preferredProps);
644
if ((preferredProps.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_SRC_BIT) &&
645
(preferredProps.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
646
deviceInfo_.canBlitToPreferredDepthStencilFormat = true;
647
}
648
649
// This is as good a place as any to do this. Though, we don't use this much anymore after we added
650
// support for VMA.
651
vkGetPhysicalDeviceMemoryProperties(physical_devices_[physical_device_], &memory_properties_);
652
DEBUG_LOG(Log::G3D, "Memory Types (%d):", memory_properties_.memoryTypeCount);
653
for (int i = 0; i < (int)memory_properties_.memoryTypeCount; i++) {
654
// Don't bother printing dummy memory types.
655
if (!memory_properties_.memoryTypes[i].propertyFlags)
656
continue;
657
DEBUG_LOG(Log::G3D, " %d: Heap %d; Flags: %s%s%s%s ", i, memory_properties_.memoryTypes[i].heapIndex,
658
(memory_properties_.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) ? "DEVICE_LOCAL " : "",
659
(memory_properties_.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) ? "HOST_VISIBLE " : "",
660
(memory_properties_.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) ? "HOST_CACHED " : "",
661
(memory_properties_.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) ? "HOST_COHERENT " : "");
662
}
663
664
GetDeviceLayerExtensionList(nullptr, device_extension_properties_);
665
666
device_extensions_enabled_.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
667
668
if (!init_error_.empty() || physical_device_ < 0) {
669
ERROR_LOG(Log::G3D, "Vulkan init failed: %s", init_error_.c_str());
670
return VK_ERROR_INITIALIZATION_FAILED;
671
}
672
673
VkDeviceQueueCreateInfo queue_info{ VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO };
674
float queue_priorities[1] = { 1.0f };
675
queue_info.queueCount = 1;
676
queue_info.pQueuePriorities = queue_priorities;
677
bool found = false;
678
for (int i = 0; i < (int)queue_count; i++) {
679
if (queueFamilyProperties_[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) {
680
queue_info.queueFamilyIndex = i;
681
found = true;
682
break;
683
}
684
}
685
_dbg_assert_(found);
686
687
// TODO: A lot of these are on by default in later Vulkan versions, should check for that, technically.
688
extensionsLookup_.KHR_maintenance1 = EnableDeviceExtension(VK_KHR_MAINTENANCE1_EXTENSION_NAME, VK_API_VERSION_1_1);
689
extensionsLookup_.KHR_maintenance2 = EnableDeviceExtension(VK_KHR_MAINTENANCE2_EXTENSION_NAME, VK_API_VERSION_1_1);
690
extensionsLookup_.KHR_maintenance3 = EnableDeviceExtension(VK_KHR_MAINTENANCE3_EXTENSION_NAME, VK_API_VERSION_1_1);
691
extensionsLookup_.KHR_maintenance4 = EnableDeviceExtension("VK_KHR_maintenance4", VK_API_VERSION_1_3);
692
extensionsLookup_.KHR_multiview = EnableDeviceExtension(VK_KHR_MULTIVIEW_EXTENSION_NAME, VK_API_VERSION_1_1);
693
694
if (EnableDeviceExtension(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME, VK_API_VERSION_1_1)) {
695
extensionsLookup_.KHR_get_memory_requirements2 = true;
696
extensionsLookup_.KHR_dedicated_allocation = EnableDeviceExtension(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME, VK_API_VERSION_1_1);
697
}
698
if (EnableDeviceExtension(VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME, VK_API_VERSION_1_2)) {
699
extensionsLookup_.KHR_create_renderpass2 = true;
700
extensionsLookup_.KHR_depth_stencil_resolve = EnableDeviceExtension(VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME, VK_API_VERSION_1_2);
701
}
702
703
extensionsLookup_.EXT_shader_stencil_export = EnableDeviceExtension(VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME, 0);
704
extensionsLookup_.EXT_fragment_shader_interlock = EnableDeviceExtension(VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME, 0);
705
extensionsLookup_.ARM_rasterization_order_attachment_access = EnableDeviceExtension(VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME, 0);
706
707
#if !PPSSPP_PLATFORM(MAC) && !PPSSPP_PLATFORM(IOS)
708
extensionsLookup_.GOOGLE_display_timing = EnableDeviceExtension(VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME, 0);
709
#endif
710
if (!extensionsLookup_.GOOGLE_display_timing) {
711
extensionsLookup_.KHR_present_id = EnableDeviceExtension(VK_KHR_PRESENT_ID_EXTENSION_NAME, 0);
712
extensionsLookup_.KHR_present_wait = EnableDeviceExtension(VK_KHR_PRESENT_WAIT_EXTENSION_NAME, 0);
713
}
714
715
extensionsLookup_.EXT_provoking_vertex = EnableDeviceExtension(VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME, 0);
716
717
extensionsLookup_.KHR_present_mode_fifo_latest_ready = EnableDeviceExtension(VK_KHR_PRESENT_MODE_FIFO_LATEST_READY_EXTENSION_NAME, 0);
718
if (!extensionsLookup_.KHR_present_mode_fifo_latest_ready) {
719
// Enable the EXT extension instead if available, it's equivalent (was promoted).
720
extensionsLookup_.KHR_present_mode_fifo_latest_ready = EnableDeviceExtension(VK_EXT_PRESENT_MODE_FIFO_LATEST_READY_EXTENSION_NAME, 0);
721
}
722
723
// Optional features
724
if (extensionsLookup_.KHR_get_physical_device_properties2 && vkGetPhysicalDeviceFeatures2) {
725
VkPhysicalDeviceFeatures2 features2{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR };
726
// Add to chain even if not supported, GetPhysicalDeviceFeatures is supposed to ignore unknown structs.
727
VkPhysicalDeviceMultiviewFeatures multiViewFeatures{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES };
728
VkPhysicalDevicePresentWaitFeaturesKHR presentWaitFeatures{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR };
729
VkPhysicalDevicePresentIdFeaturesKHR presentIdFeatures{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR };
730
VkPhysicalDeviceProvokingVertexFeaturesEXT provokingVertexFeatures{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT };
731
VkPhysicalDevicePresentModeFifoLatestReadyFeaturesKHR presentModeFifoProps{VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_KHR};
732
733
ChainStruct(features2, &multiViewFeatures);
734
if (extensionsLookup_.KHR_present_wait) {
735
ChainStruct(features2, &presentWaitFeatures);
736
}
737
if (extensionsLookup_.KHR_present_id) {
738
ChainStruct(features2, &presentIdFeatures);
739
}
740
if (extensionsLookup_.EXT_provoking_vertex) {
741
ChainStruct(features2, &provokingVertexFeatures);
742
}
743
if (extensionsLookup_.KHR_present_mode_fifo_latest_ready) {
744
ChainStruct(features2, &presentModeFifoProps);
745
}
746
vkGetPhysicalDeviceFeatures2(physical_devices_[physical_device_], &features2);
747
deviceFeatures_.available.standard = features2.features;
748
deviceFeatures_.available.multiview = multiViewFeatures;
749
if (extensionsLookup_.KHR_present_wait) {
750
deviceFeatures_.available.presentWait = presentWaitFeatures;
751
}
752
if (extensionsLookup_.KHR_present_id) {
753
deviceFeatures_.available.presentId = presentIdFeatures;
754
}
755
if (extensionsLookup_.EXT_provoking_vertex) {
756
deviceFeatures_.available.provokingVertex = provokingVertexFeatures;
757
}
758
if (extensionsLookup_.KHR_present_mode_fifo_latest_ready) {
759
deviceFeatures_.available.presentModeFifoProps = presentModeFifoProps;
760
}
761
} else {
762
vkGetPhysicalDeviceFeatures(physical_devices_[physical_device_], &deviceFeatures_.available.standard);
763
deviceFeatures_.available.multiview = {};
764
}
765
766
deviceFeatures_.enabled = {};
767
// Enable a few safe ones if they are available.
768
deviceFeatures_.enabled.standard.dualSrcBlend = deviceFeatures_.available.standard.dualSrcBlend;
769
deviceFeatures_.enabled.standard.logicOp = deviceFeatures_.available.standard.logicOp;
770
deviceFeatures_.enabled.standard.depthClamp = deviceFeatures_.available.standard.depthClamp;
771
deviceFeatures_.enabled.standard.depthBounds = deviceFeatures_.available.standard.depthBounds;
772
deviceFeatures_.enabled.standard.samplerAnisotropy = deviceFeatures_.available.standard.samplerAnisotropy;
773
deviceFeatures_.enabled.standard.shaderClipDistance = deviceFeatures_.available.standard.shaderClipDistance;
774
deviceFeatures_.enabled.standard.shaderCullDistance = deviceFeatures_.available.standard.shaderCullDistance;
775
deviceFeatures_.enabled.standard.geometryShader = deviceFeatures_.available.standard.geometryShader;
776
deviceFeatures_.enabled.standard.sampleRateShading = deviceFeatures_.available.standard.sampleRateShading;
777
778
#ifdef _DEBUG
779
// For debugging! Although, it might hide problems, so turning it off. Can be useful to rule out classes of issues.
780
// deviceFeatures_.enabled.standard.robustBufferAccess = deviceFeatures_.available.standard.robustBufferAccess;
781
#endif
782
783
deviceFeatures_.enabled.multiview = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES };
784
if (extensionsLookup_.KHR_multiview) {
785
deviceFeatures_.enabled.multiview.multiview = deviceFeatures_.available.multiview.multiview;
786
}
787
// Strangely, on Intel, it reports these as available even though the extension isn't in the list.
788
deviceFeatures_.enabled.presentId = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR };
789
if (extensionsLookup_.KHR_present_id) {
790
deviceFeatures_.enabled.presentId.presentId = deviceFeatures_.available.presentId.presentId;
791
}
792
deviceFeatures_.enabled.presentWait = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR };
793
if (extensionsLookup_.KHR_present_wait) {
794
deviceFeatures_.enabled.presentWait.presentWait = deviceFeatures_.available.presentWait.presentWait;
795
}
796
deviceFeatures_.enabled.provokingVertex = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT };
797
if (extensionsLookup_.EXT_provoking_vertex) {
798
deviceFeatures_.enabled.provokingVertex.provokingVertexLast = true;
799
}
800
deviceFeatures_.enabled.presentModeFifoProps = {VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_MODE_FIFO_LATEST_READY_FEATURES_KHR};
801
if (extensionsLookup_.KHR_present_mode_fifo_latest_ready) {
802
deviceFeatures_.enabled.presentModeFifoProps.presentModeFifoLatestReady = deviceFeatures_.available.presentModeFifoProps.presentModeFifoLatestReady;
803
}
804
805
// deviceFeatures_.enabled.multiview.multiviewGeometryShader = deviceFeatures_.available.multiview.multiviewGeometryShader;
806
807
VkPhysicalDeviceFeatures2 features2{ VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
808
809
VkDeviceCreateInfo device_info{ VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO };
810
device_info.queueCreateInfoCount = 1;
811
device_info.pQueueCreateInfos = &queue_info;
812
device_info.enabledLayerCount = (uint32_t)device_layer_names_.size();
813
device_info.ppEnabledLayerNames = device_info.enabledLayerCount ? device_layer_names_.data() : nullptr;
814
device_info.enabledExtensionCount = (uint32_t)device_extensions_enabled_.size();
815
device_info.ppEnabledExtensionNames = device_info.enabledExtensionCount ? device_extensions_enabled_.data() : nullptr;
816
817
if (extensionsLookup_.KHR_get_physical_device_properties2) {
818
device_info.pNext = &features2;
819
features2.features = deviceFeatures_.enabled.standard;
820
ChainStruct(features2, &deviceFeatures_.enabled.multiview);
821
if (extensionsLookup_.KHR_present_wait) {
822
ChainStruct(features2, &deviceFeatures_.enabled.presentWait);
823
}
824
if (extensionsLookup_.KHR_present_id) {
825
ChainStruct(features2, &deviceFeatures_.enabled.presentId);
826
}
827
if (extensionsLookup_.EXT_provoking_vertex) {
828
ChainStruct(features2, &deviceFeatures_.enabled.provokingVertex);
829
}
830
if (extensionsLookup_.KHR_present_mode_fifo_latest_ready) {
831
ChainStruct(features2, &deviceFeatures_.enabled.presentModeFifoProps);
832
}
833
} else {
834
device_info.pEnabledFeatures = &deviceFeatures_.enabled.standard;
835
}
836
837
VkResult res = vkCreateDevice(physical_devices_[physical_device_], &device_info, nullptr, &device_);
838
if (res != VK_SUCCESS) {
839
init_error_ = "Unable to create Vulkan device";
840
ERROR_LOG(Log::G3D, "%s", init_error_.c_str());
841
} else {
842
VulkanLoadDeviceFunctions(device_, extensionsLookup_, vulkanDeviceApiVersion_);
843
}
844
INFO_LOG(Log::G3D, "Vulkan Device created: %s", physicalDeviceProperties_[physical_device_].properties.deviceName);
845
846
// Since we successfully created a device (however we got here, might be interesting in debug), we force the choice to be visible in the menu.
847
VulkanSetAvailable(true);
848
849
VmaAllocatorCreateInfo allocatorInfo = {};
850
allocatorInfo.vulkanApiVersion = std::min(vulkanDeviceApiVersion_, vulkanInstanceApiVersion_);
851
allocatorInfo.physicalDevice = physical_devices_[physical_device_];
852
allocatorInfo.device = device_;
853
allocatorInfo.instance = instance_;
854
VkResult result = vmaCreateAllocator(&allocatorInfo, &allocator_);
855
_assert_(result == VK_SUCCESS);
856
_assert_(allocator_ != VK_NULL_HANDLE);
857
858
// Examine the physical device to figure out super rough performance grade.
859
// Basically all we want to do is to identify low performance mobile devices
860
// so we can make decisions on things like texture scaling strategy.
861
auto &props = physicalDeviceProperties_[physical_device_].properties;
862
switch (props.vendorID) {
863
case VULKAN_VENDOR_AMD:
864
case VULKAN_VENDOR_NVIDIA:
865
case VULKAN_VENDOR_INTEL:
866
devicePerfClass_ = PerfClass::FAST;
867
break;
868
869
case VULKAN_VENDOR_ARM:
870
devicePerfClass_ = PerfClass::SLOW;
871
{
872
// Parse the device name as an ultra rough heuristic.
873
int maliG = 0;
874
if (sscanf(props.deviceName, "Mali-G%d", &maliG) == 1) {
875
if (maliG >= 72) {
876
devicePerfClass_ = PerfClass::FAST;
877
}
878
}
879
}
880
break;
881
882
case VULKAN_VENDOR_QUALCOMM:
883
devicePerfClass_ = PerfClass::SLOW;
884
#if PPSSPP_PLATFORM(ANDROID)
885
if (System_GetPropertyInt(SYSPROP_SYSTEMVERSION) >= 30) {
886
devicePerfClass_ = PerfClass::FAST;
887
}
888
#endif
889
break;
890
891
case VULKAN_VENDOR_IMGTEC:
892
default:
893
devicePerfClass_ = PerfClass::SLOW;
894
break;
895
}
896
897
return res;
898
}
899
900
// Registers a debug-utils messenger so validation/driver messages reach our
// callback. On success the handle is kept in utils_callbacks for later destruction.
VkResult VulkanContext::InitDebugUtilsCallback() {
	VkDebugUtilsMessengerCreateInfoEXT createInfo{VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT};
	// We're intentionally skipping VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT and
	// VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT, just too spammy.
	createInfo.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT;
	createInfo.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
	createInfo.pfnUserCallback = &VulkanDebugUtilsCallback;
	createInfo.pUserData = (void *)&g_LogOptions;

	VkDebugUtilsMessengerEXT messenger;
	const VkResult res = vkCreateDebugUtilsMessengerEXT(instance_, &createInfo, nullptr, &messenger);
	if (res == VK_SUCCESS) {
		INFO_LOG(Log::G3D, "Debug callback registered with vkCreateDebugUtilsMessengerEXT.");
		utils_callbacks.push_back(messenger);
	} else {
		ERROR_LOG(Log::G3D, "Failed to register debug callback with vkCreateDebugUtilsMessengerEXT");
		// Do error handling for VK_ERROR_OUT_OF_MEMORY
	}
	return res;
}
919
920
bool VulkanContext::CreateInstanceAndDevice(const CreateInfo &info) {
921
VkResult res = CreateInstance(info);
922
if (res != VK_SUCCESS) {
923
ERROR_LOG(Log::G3D, "Failed to create vulkan context: %s", InitError().c_str());
924
VulkanSetAvailable(false);
925
return false;
926
}
927
928
int physicalDevice = GetBestPhysicalDevice();
929
if (physicalDevice < 0) {
930
ERROR_LOG(Log::G3D, "No usable Vulkan device found.");
931
DestroyInstance();
932
return false;
933
}
934
935
INFO_LOG(Log::G3D, "Creating Vulkan device (flags: %08x)", (u32)info.flags);
936
if (CreateDevice(physicalDevice) != VK_SUCCESS) {
937
INFO_LOG(Log::G3D, "Failed to create vulkan device: %s", InitError().c_str());
938
DestroyInstance();
939
return false;
940
}
941
return true;
942
}
943
944
void VulkanContext::SetDebugNameImpl(uint64_t handle, VkObjectType type, const char *name) {
945
VkDebugUtilsObjectNameInfoEXT info{ VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT };
946
info.pObjectName = name;
947
info.objectHandle = handle;
948
info.objectType = type;
949
vkSetDebugUtilsObjectNameEXT(device_, &info);
950
}
951
952
// Records the window-system handles and (re)creates the VkSurfaceKHR.
// The meaning of data1/data2 depends on winsys (e.g. HINSTANCE/HWND on Win32,
// ANativeWindow* on Android - see ReinitSurface()).
VkResult VulkanContext::InitSurface(WindowSystem winsys, void *data1, void *data2) {
	winsys_ = winsys;
	// Warn if the native handles change under us - callers are expected to
	// keep passing the same ones. (Compare against nullptr, not 0.)
	if (winsysData1_ != data1 && winsysData1_ != nullptr) {
		WARN_LOG(Log::G3D, "winsysData1 changed from %p to %p", winsysData1_, data1);
	}
	if (winsysData2_ != data2 && winsysData2_ != nullptr) {
		WARN_LOG(Log::G3D, "winsysData2 changed from %p to %p", winsysData2_, data2);
	}
	winsysData1_ = data1;
	winsysData2_ = data2;
	return ReinitSurface();
}
964
965
// Destroys any existing surface and creates a new one from the window-system
// handles stored by InitSurface(). Afterwards picks the queue, initializes the
// per-frame profilers, and queries the supported present modes, which
// InitSwapchain() relies on.
VkResult VulkanContext::ReinitSurface() {
	if (surface_ != VK_NULL_HANDLE) {
		INFO_LOG(Log::G3D, "Destroying Vulkan surface (%d, %d)", swapChainExtent_.width, swapChainExtent_.height);
		vkDestroySurfaceKHR(instance_, surface_, nullptr);
		surface_ = VK_NULL_HANDLE;
	}

	INFO_LOG(Log::G3D, "Creating Vulkan surface for window (data1=%p data2=%p)", winsysData1_, winsysData2_);

	VkResult retval = VK_SUCCESS;

	switch (winsys_) {
#ifdef _WIN32
	case WINDOWSYSTEM_WIN32:
	{
		VkWin32SurfaceCreateInfoKHR win32{ VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR };
		win32.flags = 0;
		win32.hwnd = (HWND)winsysData2_;
		win32.hinstance = (HINSTANCE)winsysData1_;
		retval = vkCreateWin32SurfaceKHR(instance_, &win32, nullptr, &surface_);
		break;
	}
#endif
#if defined(__ANDROID__)
	case WINDOWSYSTEM_ANDROID:
	{
		ANativeWindow *wnd = (ANativeWindow *)winsysData1_;
		VkAndroidSurfaceCreateInfoKHR android{ VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR };
		android.flags = 0;
		android.window = wnd;
		retval = vkCreateAndroidSurfaceKHR(instance_, &android, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_METAL_EXT)
	case WINDOWSYSTEM_METAL_EXT:
	{
		VkMetalSurfaceCreateInfoEXT metal{ VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT };
		metal.flags = 0;
		metal.pLayer = winsysData1_;
		metal.pNext = winsysData2_;
		retval = vkCreateMetalSurfaceEXT(instance_, &metal, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_XLIB_KHR)
	case WINDOWSYSTEM_XLIB:
	{
		VkXlibSurfaceCreateInfoKHR xlib{ VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR };
		xlib.flags = 0;
		xlib.dpy = (Display *)winsysData1_;
		xlib.window = (Window)winsysData2_;
		retval = vkCreateXlibSurfaceKHR(instance_, &xlib, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_XCB_KHR)
	case WINDOWSYSTEM_XCB:
	{
		// NOTE: Fixed the spec type name (VkXcbSurfaceCreateInfoKHR, not
		// VkXCBSurfaceCreateInfoKHR) and use the XCB handle types here -
		// the previous Xlib-style casts didn't belong in this path.
		VkXcbSurfaceCreateInfoKHR xcb{ VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR };
		xcb.flags = 0;
		xcb.connection = (xcb_connection_t *)winsysData1_;
		xcb.window = (xcb_window_t)(uintptr_t)winsysData2_;
		retval = vkCreateXcbSurfaceKHR(instance_, &xcb, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_WAYLAND_KHR)
	case WINDOWSYSTEM_WAYLAND:
	{
		VkWaylandSurfaceCreateInfoKHR wayland{ VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR };
		wayland.flags = 0;
		wayland.display = (wl_display *)winsysData1_;
		wayland.surface = (wl_surface *)winsysData2_;
		retval = vkCreateWaylandSurfaceKHR(instance_, &wayland, nullptr, &surface_);
		break;
	}
#endif
#if defined(VK_USE_PLATFORM_DISPLAY_KHR)
	case WINDOWSYSTEM_DISPLAY:
	{
		VkDisplaySurfaceCreateInfoKHR display{ VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR };
#if !defined(__LIBRETRO__)
		/*
		And when not to use libretro need VkDisplaySurfaceCreateInfoKHR this extension,
		then you need to use dlopen to read vulkan loader in VulkanLoader.cpp.
		huangzihan China
		*/

		if (!vkGetPhysicalDeviceDisplayPropertiesKHR ||
			!vkGetPhysicalDeviceDisplayPlanePropertiesKHR ||
			!vkGetDisplayModePropertiesKHR ||
			!vkGetDisplayPlaneSupportedDisplaysKHR ||
			!vkGetDisplayPlaneCapabilitiesKHR) {
			_assert_msg_(false, "DISPLAY Vulkan cannot find any vulkan function symbols.");
			return VK_ERROR_INITIALIZATION_FAILED;
		}

		// The following code is for reference:
		// https://github.com/vanfanel/ppsspp
		// When using the VK_KHR_display extension and not using LIBRETRO, a complete
		// VkDisplaySurfaceCreateInfoKHR is needed.

		// This is the chosen physical_device, it has been chosen elsewhere.
		VkPhysicalDevice phys_device = physical_devices_[physical_device_];
		VkDisplayModeKHR display_mode = VK_NULL_HANDLE;
		VkDisplayPlaneAlphaFlagBitsKHR alpha_mode = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR;
		uint32_t plane = UINT32_MAX;

		// For now, use the first available (connected) display.
		const int display_index = 0;

		// 1 physical device can have N displays connected.
		// Vulkan only counts the connected displays.

		// Get a list of displays on the physical device.
		uint32_t display_count = 0;
		vkGetPhysicalDeviceDisplayPropertiesKHR(phys_device, &display_count, NULL);
		if (display_count == 0) {
			_assert_msg_(false, "DISPLAY Vulkan couldn't find any displays.");
			return VK_ERROR_INITIALIZATION_FAILED;
		}
		VkDisplayPropertiesKHR *display_props = new VkDisplayPropertiesKHR[display_count];
		vkGetPhysicalDeviceDisplayPropertiesKHR(phys_device, &display_count, display_props);

		// Get a list of display planes on the physical device.
		uint32_t plane_count = 0;
		vkGetPhysicalDeviceDisplayPlanePropertiesKHR(phys_device, &plane_count, NULL);
		if (plane_count == 0) {
			_assert_msg_(false, "DISPLAY Vulkan couldn't find any planes on the physical device");
			delete[] display_props;  // Don't leak on the error path.
			return VK_ERROR_INITIALIZATION_FAILED;
		}
		VkDisplayPlanePropertiesKHR *plane_props = new VkDisplayPlanePropertiesKHR[plane_count];
		vkGetPhysicalDeviceDisplayPlanePropertiesKHR(phys_device, &plane_count, plane_props);

		// Get the Vulkan display we are going to use.
		VkDisplayKHR myDisplay = display_props[display_index].display;

		// Get the list of display modes of the display.
		uint32_t mode_count = 0;
		vkGetDisplayModePropertiesKHR(phys_device, myDisplay, &mode_count, NULL);
		if (mode_count == 0) {
			_assert_msg_(false, "DISPLAY Vulkan couldn't find any video modes on the display");
			delete[] display_props;
			delete[] plane_props;
			return VK_ERROR_INITIALIZATION_FAILED;
		}
		VkDisplayModePropertiesKHR *mode_props = new VkDisplayModePropertiesKHR[mode_count];
		vkGetDisplayModePropertiesKHR(phys_device, myDisplay, &mode_count, mode_props);

		// See if there's an appropriate mode available on the display.
		// We want one that exactly matches the current pixel resolution.
		display_mode = VK_NULL_HANDLE;
		for (uint32_t i = 0; i < mode_count; ++i) {
			const VkDisplayModePropertiesKHR *mode = &mode_props[i];
			if (mode->parameters.visibleRegion.width == g_display.pixel_xres &&
				mode->parameters.visibleRegion.height == g_display.pixel_yres) {
				display_mode = mode->displayMode;
				break;
			}
		}

		// Free the mode list now.
		delete[] mode_props;

		// If there are no usable modes found on the display, error out.
		if (display_mode == VK_NULL_HANDLE) {
			_assert_msg_(false, "DISPLAY Vulkan couldn't find any video modes on the display");
			delete[] display_props;
			delete[] plane_props;
			return VK_ERROR_INITIALIZATION_FAILED;
		}

		/* Iterate on the list of planes of the physical device
		   to find a plane that matches these criteria:
		   -It must be compatible with the chosen display + mode.
		   -It isn't currently bound to another display.
		   -It supports per-pixel alpha, if possible. */
		for (uint32_t i = 0; i < plane_count; i++) {
			/* The plane must be bound to the chosen display, or not in use.
			   If none of these is true, iterate to another plane. Checked before
			   querying supported displays so nothing is allocated that could leak. */
			if (!((plane_props[i].currentDisplay == myDisplay) ||
				(plane_props[i].currentDisplay == VK_NULL_HANDLE)))
				continue;

			/* See if the plane is compatible with the current display. */
			uint32_t supported_displays_count = 0;
			vkGetDisplayPlaneSupportedDisplaysKHR(phys_device, i, &supported_displays_count, NULL);
			if (supported_displays_count == 0) {
				/* This plane doesn't support any displays. Continue to the next plane. */
				continue;
			}

			/* Get the list of displays supported by this plane. */
			VkDisplayKHR *supported_displays = new VkDisplayKHR[supported_displays_count];
			vkGetDisplayPlaneSupportedDisplaysKHR(phys_device, i,
				&supported_displays_count, supported_displays);

			/* Iterate the list of displays supported by this plane
			   in order to find out if the chosen display is among them. */
			bool plane_supports_display = false;
			for (uint32_t j = 0; j < supported_displays_count; j++) {
				if (supported_displays[j] == myDisplay) {
					plane_supports_display = true;
					break;
				}
			}

			/* Free the list of displays supported by this plane. */
			delete[] supported_displays;

			/* If the display is not supported by this plane, iterate to the next plane. */
			if (!plane_supports_display)
				continue;

			/* Want a plane that supports the alpha mode we have chosen. */
			VkDisplayPlaneCapabilitiesKHR plane_caps;
			vkGetDisplayPlaneCapabilitiesKHR(phys_device, display_mode, i, &plane_caps);
			if (plane_caps.supportedAlpha & alpha_mode) {
				/* Yep, this plane is alright. */
				plane = i;
				break;
			}
		}

		/* If we couldn't find an appropriate plane, error out. */
		if (plane == UINT32_MAX) {
			_assert_msg_(false, "DISPLAY Vulkan couldn't find an appropiate plane");
			delete[] display_props;
			delete[] plane_props;
			return VK_ERROR_INITIALIZATION_FAILED;
		}

		// Finally, fill in the surface create info.
		VkExtent2D image_size;
		image_size.width = g_display.pixel_xres;
		image_size.height = g_display.pixel_yres;

		display.displayMode = display_mode;
		display.imageExtent = image_size;
		display.transform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
		display.alphaMode = alpha_mode;
		display.globalAlpha = 1.0f;
		display.planeIndex = plane;
		display.planeStackIndex = plane_props[plane].currentStackIndex;
		display.pNext = nullptr;
		delete[] display_props;
		delete[] plane_props;
#endif
		display.flags = 0;
		retval = vkCreateDisplayPlaneSurfaceKHR(instance_, &display, nullptr, &surface_);
		break;
	}
#endif

	default:
		_assert_msg_(false, "Vulkan support for chosen window system not implemented");
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	if (retval != VK_SUCCESS) {
		return retval;
	}

	if (!ChooseQueue()) {
		return VK_ERROR_INITIALIZATION_FAILED;
	}

	for (int i = 0; i < ARRAY_SIZE(frame_); i++) {
		frame_[i].profiler.Init(this);
	}

	// Query presentation modes. We need to know which ones are available for InitSwapchain().
	availablePresentModes_.clear();
	uint32_t presentModeCount;
	VkResult res = vkGetPhysicalDeviceSurfacePresentModesKHR(physical_devices_[physical_device_], surface_, &presentModeCount, nullptr);
	availablePresentModes_.resize(presentModeCount);
	_dbg_assert_(res == VK_SUCCESS);
	res = vkGetPhysicalDeviceSurfacePresentModesKHR(physical_devices_[physical_device_], surface_, &presentModeCount, availablePresentModes_.data());
	_dbg_assert_(res == VK_SUCCESS);

	return VK_SUCCESS;
}
1262
1263
bool VulkanContext::ChooseQueue() {
1264
// Iterate over each queue to learn whether it supports presenting:
1265
VkBool32 *supportsPresent = new VkBool32[queue_count];
1266
for (uint32_t i = 0; i < queue_count; i++) {
1267
vkGetPhysicalDeviceSurfaceSupportKHR(physical_devices_[physical_device_], i, surface_, &supportsPresent[i]);
1268
}
1269
1270
// Search for a graphics queue and a present queue in the array of queue
1271
// families, try to find one that supports both
1272
uint32_t graphicsQueueNodeIndex = UINT32_MAX;
1273
uint32_t presentQueueNodeIndex = UINT32_MAX;
1274
for (uint32_t i = 0; i < queue_count; i++) {
1275
if ((queueFamilyProperties_[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
1276
if (graphicsQueueNodeIndex == UINT32_MAX) {
1277
graphicsQueueNodeIndex = i;
1278
}
1279
1280
if (supportsPresent[i] == VK_TRUE) {
1281
graphicsQueueNodeIndex = i;
1282
presentQueueNodeIndex = i;
1283
break;
1284
}
1285
}
1286
}
1287
if (presentQueueNodeIndex == UINT32_MAX) {
1288
// If didn't find a queue that supports both graphics and present, then
1289
// find a separate present queue. NOTE: We don't actually currently support this arrangement!
1290
for (uint32_t i = 0; i < queue_count; ++i) {
1291
if (supportsPresent[i] == VK_TRUE) {
1292
presentQueueNodeIndex = i;
1293
break;
1294
}
1295
}
1296
}
1297
delete[] supportsPresent;
1298
1299
// Generate error if could not find both a graphics and a present queue
1300
if (graphicsQueueNodeIndex == UINT32_MAX || presentQueueNodeIndex == UINT32_MAX) {
1301
ERROR_LOG(Log::G3D, "Could not find a graphics and a present queue");
1302
return false;
1303
}
1304
1305
graphics_queue_family_index_ = graphicsQueueNodeIndex;
1306
1307
// Get the list of VkFormats that are supported:
1308
uint32_t formatCount = 0;
1309
VkResult res = vkGetPhysicalDeviceSurfaceFormatsKHR(physical_devices_[physical_device_], surface_, &formatCount, nullptr);
1310
_assert_msg_(res == VK_SUCCESS, "Failed to get formats for device %d: %d", physical_device_, (int)res);
1311
if (res != VK_SUCCESS) {
1312
return false;
1313
}
1314
1315
surfFormats_.resize(formatCount);
1316
res = vkGetPhysicalDeviceSurfaceFormatsKHR(physical_devices_[physical_device_], surface_, &formatCount, surfFormats_.data());
1317
_dbg_assert_(res == VK_SUCCESS);
1318
if (res != VK_SUCCESS) {
1319
return false;
1320
}
1321
// If the format list includes just one entry of VK_FORMAT_UNDEFINED,
1322
// the surface has no preferred format. Otherwise, at least one
1323
// supported format will be returned.
1324
if (formatCount == 0 || (formatCount == 1 && surfFormats_[0].format == VK_FORMAT_UNDEFINED)) {
1325
INFO_LOG(Log::G3D, "swapchain_format: Falling back to B8G8R8A8_UNORM");
1326
swapchainFormat_ = VK_FORMAT_B8G8R8A8_UNORM;
1327
} else {
1328
swapchainFormat_ = VK_FORMAT_UNDEFINED;
1329
for (uint32_t i = 0; i < formatCount; ++i) {
1330
if (surfFormats_[i].colorSpace != VK_COLORSPACE_SRGB_NONLINEAR_KHR) {
1331
continue;
1332
}
1333
if (surfFormats_[i].format == VK_FORMAT_B8G8R8A8_UNORM || surfFormats_[i].format == VK_FORMAT_R8G8B8A8_UNORM) {
1334
swapchainFormat_ = surfFormats_[i].format;
1335
break;
1336
}
1337
}
1338
if (swapchainFormat_ == VK_FORMAT_UNDEFINED) {
1339
// Okay, take the first one then.
1340
swapchainFormat_ = surfFormats_[0].format;
1341
}
1342
INFO_LOG(Log::G3D, "swapchain_format: %s (%d) (/%d)", VulkanFormatToString(swapchainFormat_), (int)swapchainFormat_, formatCount);
1343
}
1344
1345
vkGetDeviceQueue(device_, graphics_queue_family_index_, 0, &gfx_queue_);
1346
return true;
1347
}
1348
1349
// Clamps x into the inclusive range [a, b].
int clamp(int x, int a, int b) {
	return x < a ? a : (x > b ? b : x);
}
1356
1357
// Builds a space-separated list of the names of the transform bits set in
// transformFlags, for logging.
static std::string surface_transforms_to_string(VkSurfaceTransformFlagsKHR transformFlags) {
	// Table keeps the same bit order (and trailing spaces) as the old if-chain.
	static const struct {
		VkSurfaceTransformFlagsKHR bit;
		const char *name;
	} kTransformNames[] = {
		{ VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR, "IDENTITY " },
		{ VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR, "ROTATE_90 " },
		{ VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR, "ROTATE_180 " },
		{ VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR, "ROTATE_270 " },
		{ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR, "HMIRROR " },
		{ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR, "HMIRROR_90 " },
		{ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR, "HMIRROR_180 " },
		{ VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR, "HMIRROR_270 " },
		{ VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR, "INHERIT " },
	};
	std::string result;
	for (const auto &entry : kTransformNames) {
		if (transformFlags & entry.bit) {
			result += entry.name;
		}
	}
	return result;
}
1370
1371
bool VulkanContext::InitSwapchain(VkPresentModeKHR desiredPresentMode) {
1372
_assert_(physical_device_ >= 0 && physical_device_ < (int)physical_devices_.size());
1373
if (!surface_) {
1374
ERROR_LOG(Log::G3D, "VK: No surface, can't create swapchain");
1375
return false;
1376
}
1377
1378
if (swapchain_) {
1379
INFO_LOG(Log::G3D, "Swapchain already exists, recreating...");
1380
}
1381
1382
VkResult res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physical_devices_[physical_device_], surface_, &surfCapabilities_);
1383
if (res == VK_ERROR_SURFACE_LOST_KHR) {
1384
// Not much to do.
1385
ERROR_LOG(Log::G3D, "VK: Surface lost in InitSwapchain");
1386
return false;
1387
}
1388
1389
if (surfCapabilities_.maxImageExtent.width == 0 || surfCapabilities_.maxImageExtent.height == 0) {
1390
WARN_LOG(Log::G3D, "Max image extent is 0 - app is probably minimized. Faking having a swapchain.");
1391
swapChainExtent_ = {}; // makes it so querying width/height returns 0.
1392
// We pretend to have a swapchain initialized - though we won't actually render to it.
1393
swapchainInited_ = true;
1394
return true;
1395
}
1396
1397
VkExtent2D currentExtent{ surfCapabilities_.currentExtent };
1398
// https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkSurfaceCapabilitiesKHR.html
1399
// currentExtent is the current width and height of the surface, or the special value (0xFFFFFFFF, 0xFFFFFFFF) indicating that the surface size will be determined by the extent of a swapchain targeting the surface.
1400
if (currentExtent.width == 0xFFFFFFFFu || currentExtent.height == 0xFFFFFFFFu
1401
#if PPSSPP_PLATFORM(IOS)
1402
|| currentExtent.width == 0 || currentExtent.height == 0
1403
#endif
1404
) {
1405
_dbg_assert_((bool)cbGetDrawSize_);
1406
if (cbGetDrawSize_) {
1407
currentExtent = cbGetDrawSize_();
1408
}
1409
}
1410
1411
swapChainExtent_.width = clamp(currentExtent.width, surfCapabilities_.minImageExtent.width, surfCapabilities_.maxImageExtent.width);
1412
swapChainExtent_.height = clamp(currentExtent.height, surfCapabilities_.minImageExtent.height, surfCapabilities_.maxImageExtent.height);
1413
1414
INFO_LOG(Log::G3D, "surfCapabilities_.current: %dx%d min: %dx%d max: %dx%d computed: %dx%d",
1415
currentExtent.width, currentExtent.height,
1416
surfCapabilities_.minImageExtent.width, surfCapabilities_.minImageExtent.height,
1417
surfCapabilities_.maxImageExtent.width, surfCapabilities_.maxImageExtent.height,
1418
swapChainExtent_.width, swapChainExtent_.height);
1419
1420
// TODO: Find a better way to specify the prioritized present mode while being able
1421
// to fall back in a sensible way.
1422
VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_MAX_ENUM_KHR;
1423
// Kind of silly logic now, but at least it performs a final sanity check of the chosen value.
1424
for (size_t i = 0; i < availablePresentModes_.size(); i++) {
1425
bool match = availablePresentModes_[i] == desiredPresentMode;
1426
// Default to the first present mode from the list.
1427
if (match || swapchainPresentMode == VK_PRESENT_MODE_MAX_ENUM_KHR) {
1428
swapchainPresentMode = availablePresentModes_[i];
1429
}
1430
if (match) {
1431
break;
1432
}
1433
}
1434
// Determine the number of VkImage's to use in the swap chain (we desire to
1435
// own only 1 image at a time, besides the images being displayed and
1436
// queued for display):
1437
uint32_t desiredNumberOfSwapChainImages = surfCapabilities_.minImageCount + 1;
1438
if ((surfCapabilities_.maxImageCount > 0) &&
1439
(desiredNumberOfSwapChainImages > surfCapabilities_.maxImageCount)) {
1440
// Application must settle for fewer images than desired:
1441
desiredNumberOfSwapChainImages = surfCapabilities_.maxImageCount;
1442
}
1443
1444
std::string modes = "";
1445
for (size_t i = 0; i < availablePresentModes_.size(); i++) {
1446
modes += VulkanPresentModeToString(availablePresentModes_[i]);
1447
if (i != availablePresentModes_.size() - 1) {
1448
modes += ", ";
1449
}
1450
}
1451
1452
INFO_LOG(Log::G3D, "Supported present modes: %s. Chosen present mode: %d (%s). numSwapChainImages: %d (max: %d)",
1453
modes.c_str(), swapchainPresentMode, VulkanPresentModeToString(swapchainPresentMode),
1454
desiredNumberOfSwapChainImages, surfCapabilities_.maxImageCount);
1455
1456
// We mostly follow the practices from
1457
// https://arm-software.github.io/vulkan_best_practice_for_mobile_developers/samples/surface_rotation/surface_rotation_tutorial.html
1458
//
1459
VkSurfaceTransformFlagBitsKHR preTransform;
1460
std::string supportedTransforms = surface_transforms_to_string(surfCapabilities_.supportedTransforms);
1461
std::string currentTransform = surface_transforms_to_string(surfCapabilities_.currentTransform);
1462
g_display.rotation = DisplayRotation::ROTATE_0;
1463
g_display.rot_matrix.setIdentity();
1464
1465
uint32_t allowedRotations = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR | VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR | VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR;
1466
// Hack: Don't allow 270 degrees pretransform (inverse landscape), it creates bizarre issues on some devices (see #15773).
1467
allowedRotations &= ~VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR;
1468
1469
if (surfCapabilities_.currentTransform & (VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR | VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR)) {
1470
preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
1471
} else if (surfCapabilities_.currentTransform & allowedRotations) {
1472
// Normal, sensible rotations. Let's handle it.
1473
preTransform = surfCapabilities_.currentTransform;
1474
g_display.rot_matrix.setIdentity();
1475
switch (surfCapabilities_.currentTransform) {
1476
case VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR:
1477
g_display.rotation = DisplayRotation::ROTATE_90;
1478
g_display.rot_matrix.setRotationZ90();
1479
std::swap(swapChainExtent_.width, swapChainExtent_.height);
1480
break;
1481
case VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR:
1482
g_display.rotation = DisplayRotation::ROTATE_180;
1483
g_display.rot_matrix.setRotationZ180();
1484
break;
1485
case VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR:
1486
g_display.rotation = DisplayRotation::ROTATE_270;
1487
g_display.rot_matrix.setRotationZ270();
1488
std::swap(swapChainExtent_.width, swapChainExtent_.height);
1489
break;
1490
default:
1491
_dbg_assert_(false);
1492
}
1493
} else {
1494
// Let the OS rotate the image (potentially slower on many Android devices)
1495
preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
1496
}
1497
1498
// Only log transforms if relevant.
1499
if (surfCapabilities_.supportedTransforms != VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
1500
std::string preTransformStr = surface_transforms_to_string(preTransform);
1501
INFO_LOG(Log::G3D, "Transform supported: %s current: %s chosen: %s", supportedTransforms.c_str(), currentTransform.c_str(), preTransformStr.c_str());
1502
}
1503
1504
if (physicalDeviceProperties_[physical_device_].properties.vendorID == VULKAN_VENDOR_IMGTEC) {
1505
u32 driverVersion = physicalDeviceProperties_[physical_device_].properties.driverVersion;
1506
// Cutoff the hack at driver version 1.386.1368 (0x00582558, see issue #15773).
1507
if (driverVersion < 0x00582558) {
1508
INFO_LOG(Log::G3D, "Applying PowerVR hack (rounding off the width!) driverVersion=%08x", driverVersion);
1509
// Swap chain width hack to avoid issue #11743 (PowerVR driver bug).
1510
// To keep the size consistent even with pretransform, do this after the swap. Should be fine.
1511
// This is fixed in newer PowerVR drivers but I don't know the cutoff.
1512
swapChainExtent_.width &= ~31;
1513
1514
// TODO: Also modify display_xres/display_yres appropriately for scissors to match.
1515
// This will get a bit messy. Ideally we should remove that logic from app-android.cpp
1516
// and move it here, but the OpenGL code still needs it.
1517
} else {
1518
INFO_LOG(Log::G3D, "PowerVR driver version new enough (%08x), not applying swapchain width hack", driverVersion);
1519
}
1520
}
1521
1522
VkSwapchainKHR oldSwapchain = swapchain_;
1523
1524
VkSwapchainCreateInfoKHR swap_chain_info{ VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR };
1525
swap_chain_info.surface = surface_;
1526
swap_chain_info.minImageCount = desiredNumberOfSwapChainImages;
1527
swap_chain_info.imageFormat = swapchainFormat_;
1528
swap_chain_info.imageColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR;
1529
swap_chain_info.imageExtent.width = swapChainExtent_.width;
1530
swap_chain_info.imageExtent.height = swapChainExtent_.height;
1531
swap_chain_info.preTransform = preTransform;
1532
swap_chain_info.imageArrayLayers = 1;
1533
swap_chain_info.presentMode = swapchainPresentMode;
1534
swap_chain_info.oldSwapchain = swapchain_;
1535
swap_chain_info.clipped = true;
1536
swap_chain_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
1537
1538
presentMode_ = swapchainPresentMode;
1539
1540
// We don't support screenshots on Android if TRANSFER_SRC usage flag is not supported.
1541
if (surfCapabilities_.supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) {
1542
swap_chain_info.imageUsage |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
1543
}
1544
1545
swap_chain_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
1546
swap_chain_info.queueFamilyIndexCount = 0;
1547
swap_chain_info.pQueueFamilyIndices = NULL;
1548
// OPAQUE is not supported everywhere.
1549
if (surfCapabilities_.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR) {
1550
swap_chain_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
1551
} else {
1552
// This should be supported anywhere, and is the only thing supported on the SHIELD TV, for example.
1553
swap_chain_info.compositeAlpha = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR;
1554
}
1555
1556
res = vkCreateSwapchainKHR(device_, &swap_chain_info, NULL, &swapchain_);
1557
if (res != VK_SUCCESS) {
1558
ERROR_LOG(Log::G3D, "vkCreateSwapchainKHR failed! %s", VulkanResultToString(res));
1559
return false;
1560
}
1561
INFO_LOG(Log::G3D, "Created swapchain: %dx%d %s", swap_chain_info.imageExtent.width, swap_chain_info.imageExtent.height, (surfCapabilities_.supportedUsageFlags & VK_IMAGE_USAGE_TRANSFER_SRC_BIT) ? "(TRANSFER_SRC_BIT supported)" : "");
1562
swapchainInited_ = true;
1563
1564
if (oldSwapchain != VK_NULL_HANDLE) {
1565
vkDestroySwapchainKHR(device_, oldSwapchain, nullptr);
1566
INFO_LOG(Log::G3D, "Destroyed old swapchain.");
1567
}
1568
return true;
1569
}
1570
1571
void VulkanContext::SetCbGetDrawSize(std::function<VkExtent2D()> cb) {
1572
cbGetDrawSize_ = cb;
1573
}
1574
1575
// Creates a VkFence, optionally already in the signalled state (useful for
// frame fences that must not block the very first wait).
// Fix: the original ignored the vkCreateFence result and could return an
// uninitialized handle on failure; now we initialize to VK_NULL_HANDLE and
// assert on failure so callers never see garbage.
VkFence VulkanContext::CreateFence(bool presignalled) {
	VkFence fence = VK_NULL_HANDLE;
	VkFenceCreateInfo fenceInfo{ VK_STRUCTURE_TYPE_FENCE_CREATE_INFO };
	fenceInfo.flags = presignalled ? VK_FENCE_CREATE_SIGNALED_BIT : 0;
	VkResult res = vkCreateFence(device_, &fenceInfo, NULL, &fence);
	_dbg_assert_(res == VK_SUCCESS);
	return fence;
}
1582
1583
void VulkanContext::PerformPendingDeletes() {
1584
for (int i = 0; i < ARRAY_SIZE(frame_); i++) {
1585
frame_[i].deleteList.PerformDeletes(this, allocator_);
1586
}
1587
Delete().PerformDeletes(this, allocator_);
1588
}
1589
1590
void VulkanContext::DestroyDevice() {
1591
if (swapchain_) {
1592
ERROR_LOG(Log::G3D, "DestroyDevice: Swapchain should have been destroyed.");
1593
}
1594
if (surface_) {
1595
ERROR_LOG(Log::G3D, "DestroyDevice: Surface should have been destroyed.");
1596
}
1597
1598
for (int i = 0; i < ARRAY_SIZE(frame_); i++) {
1599
frame_[i].profiler.Shutdown();
1600
}
1601
1602
INFO_LOG(Log::G3D, "VulkanContext::DestroyDevice (performing deletes)");
1603
PerformPendingDeletes();
1604
1605
vmaDestroyAllocator(allocator_);
1606
allocator_ = VK_NULL_HANDLE;
1607
1608
vkDestroyDevice(device_, nullptr);
1609
device_ = nullptr;
1610
}
1611
1612
bool VulkanContext::CreateShaderModule(const std::vector<uint32_t> &spirv, VkShaderModule *shaderModule, const char *tag) {
1613
VkShaderModuleCreateInfo sm{ VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO };
1614
sm.pCode = spirv.data();
1615
sm.codeSize = spirv.size() * sizeof(uint32_t);
1616
sm.flags = 0;
1617
VkResult result = vkCreateShaderModule(device_, &sm, nullptr, shaderModule);
1618
if (tag) {
1619
SetDebugName(*shaderModule, VK_OBJECT_TYPE_SHADER_MODULE, tag);
1620
}
1621
if (result != VK_SUCCESS) {
1622
return false;
1623
} else {
1624
return true;
1625
}
1626
}
1627
1628
// Maps a Vulkan shader stage bit to the corresponding glslang stage enum.
// Unknown stages fall back to the vertex stage.
EShLanguage FindLanguage(const VkShaderStageFlagBits shader_type) {
	switch (shader_type) {
	case VK_SHADER_STAGE_VERTEX_BIT:                  return EShLangVertex;
	case VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT:    return EShLangTessControl;
	case VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT: return EShLangTessEvaluation;
	case VK_SHADER_STAGE_GEOMETRY_BIT:                return EShLangGeometry;
	case VK_SHADER_STAGE_FRAGMENT_BIT:                return EShLangFragment;
	case VK_SHADER_STAGE_COMPUTE_BIT:                 return EShLangCompute;
	default:                                          return EShLangVertex;
	}
}
1652
1653
// Compile a given string containing GLSL into SPV for use by VK
1654
// Return value of false means an error was encountered.
1655
bool GLSLtoSPV(const VkShaderStageFlagBits shader_type, const char *sourceCode, GLSLVariant variant,
1656
std::vector<unsigned int> &spirv, std::string *errorMessage) {
1657
1658
glslang::TProgram program;
1659
const char *shaderStrings[1];
1660
TBuiltInResource Resources{};
1661
InitShaderResources(Resources);
1662
1663
int defaultVersion = 0;
1664
EShMessages messages;
1665
EProfile profile;
1666
1667
switch (variant) {
1668
case GLSLVariant::VULKAN:
1669
// Enable SPIR-V and Vulkan rules when parsing GLSL
1670
messages = (EShMessages)(EShMsgSpvRules | EShMsgVulkanRules);
1671
defaultVersion = 450;
1672
profile = ECoreProfile;
1673
break;
1674
case GLSLVariant::GL140:
1675
messages = (EShMessages)(EShMsgDefault);
1676
defaultVersion = 140;
1677
profile = ECompatibilityProfile;
1678
break;
1679
case GLSLVariant::GLES300:
1680
messages = (EShMessages)(EShMsgDefault);
1681
defaultVersion = 300;
1682
profile = EEsProfile;
1683
break;
1684
default:
1685
return false;
1686
}
1687
1688
EShLanguage stage = FindLanguage(shader_type);
1689
glslang::TShader shader(stage);
1690
1691
shaderStrings[0] = sourceCode;
1692
shader.setStrings(shaderStrings, 1);
1693
1694
if (!shader.parse(&Resources, defaultVersion, profile, false, true, messages)) {
1695
puts(shader.getInfoLog());
1696
puts(shader.getInfoDebugLog());
1697
if (errorMessage) {
1698
*errorMessage = shader.getInfoLog();
1699
(*errorMessage) += shader.getInfoDebugLog();
1700
}
1701
return false; // something didn't work
1702
}
1703
1704
// TODO: Propagate warnings into errorMessages even if we succeeded here.
1705
1706
// Note that program does not take ownership of &shader, so this is fine.
1707
program.addShader(&shader);
1708
1709
if (!program.link(messages)) {
1710
puts(shader.getInfoLog());
1711
puts(shader.getInfoDebugLog());
1712
if (errorMessage) {
1713
*errorMessage = shader.getInfoLog();
1714
(*errorMessage) += shader.getInfoDebugLog();
1715
}
1716
return false;
1717
}
1718
1719
// Can't fail, parsing worked, "linking" worked.
1720
glslang::SpvOptions options;
1721
options.disableOptimizer = false;
1722
options.optimizeSize = false;
1723
options.generateDebugInfo = false;
1724
glslang::GlslangToSpv(*program.getIntermediate(stage), spirv, &options);
1725
return true;
1726
}
1727
1728
// Process-wide glslang initialization. Must be called once before any
// GLSLtoSPV() call; pair with finalize_glslang() on shutdown.
void init_glslang() {
	glslang::InitializeProcess();
}
1731
1732
// Process-wide glslang teardown, matching init_glslang().
void finalize_glslang() {
	glslang::FinalizeProcess();
}
1735
1736
// Transfers all queued deletions from 'del' into this list. This list must be
// empty (asserted below), otherwise previously queued objects would be lost.
// Fix: queryPools_ was not asserted, moved, or cleared - query pools queued in
// 'del' were silently dropped (leaked). Also added the missing
// del.bufferViews_.clear() for consistency with the other containers.
void VulkanDeleteList::Take(VulkanDeleteList &del) {
	_dbg_assert_(cmdPools_.empty());
	_dbg_assert_(descPools_.empty());
	_dbg_assert_(modules_.empty());
	_dbg_assert_(buffers_.empty());
	_dbg_assert_(bufferViews_.empty());
	_dbg_assert_(buffersWithAllocs_.empty());
	_dbg_assert_(imageViews_.empty());
	_dbg_assert_(imagesWithAllocs_.empty());
	_dbg_assert_(deviceMemory_.empty());
	_dbg_assert_(samplers_.empty());
	_dbg_assert_(pipelines_.empty());
	_dbg_assert_(pipelineCaches_.empty());
	_dbg_assert_(renderPasses_.empty());
	_dbg_assert_(framebuffers_.empty());
	_dbg_assert_(pipelineLayouts_.empty());
	_dbg_assert_(descSetLayouts_.empty());
	_dbg_assert_(queryPools_.empty());
	_dbg_assert_(callbacks_.empty());
	cmdPools_ = std::move(del.cmdPools_);
	descPools_ = std::move(del.descPools_);
	modules_ = std::move(del.modules_);
	buffers_ = std::move(del.buffers_);
	buffersWithAllocs_ = std::move(del.buffersWithAllocs_);
	bufferViews_ = std::move(del.bufferViews_);
	imageViews_ = std::move(del.imageViews_);
	imagesWithAllocs_ = std::move(del.imagesWithAllocs_);
	deviceMemory_ = std::move(del.deviceMemory_);
	samplers_ = std::move(del.samplers_);
	pipelines_ = std::move(del.pipelines_);
	pipelineCaches_ = std::move(del.pipelineCaches_);
	renderPasses_ = std::move(del.renderPasses_);
	framebuffers_ = std::move(del.framebuffers_);
	pipelineLayouts_ = std::move(del.pipelineLayouts_);
	descSetLayouts_ = std::move(del.descSetLayouts_);
	queryPools_ = std::move(del.queryPools_);
	callbacks_ = std::move(del.callbacks_);
	// Belt-and-suspenders: moved-from containers are cleared explicitly so
	// 'del' is guaranteed empty regardless of container implementation.
	del.cmdPools_.clear();
	del.descPools_.clear();
	del.modules_.clear();
	del.buffers_.clear();
	del.buffersWithAllocs_.clear();
	del.bufferViews_.clear();
	del.imageViews_.clear();
	del.imagesWithAllocs_.clear();
	del.deviceMemory_.clear();
	del.samplers_.clear();
	del.pipelines_.clear();
	del.pipelineCaches_.clear();
	del.renderPasses_.clear();
	del.framebuffers_.clear();
	del.pipelineLayouts_.clear();
	del.descSetLayouts_.clear();
	del.queryPools_.clear();
	del.callbacks_.clear();
}
1788
1789
// Destroys every queued Vulkan object, in the same order as before:
// callbacks first (they may reference objects deleted below), then handles
// from pools down to descriptor set layouts and query pools.
void VulkanDeleteList::PerformDeletes(VulkanContext *vulkan, VmaAllocator allocator) {
	int deleteCount = 0;

	// User callbacks run first.
	for (auto &callback : callbacks_) {
		callback.func(vulkan, callback.userdata);
		deleteCount++;
	}
	callbacks_.clear();

	VkDevice device = vulkan->GetDevice();

	// Destroys every handle in 'list' via 'destroyFunc(device, handle, nullptr)',
	// then empties the list.
	auto destroyAll = [&](auto &list, auto destroyFunc) {
		for (auto &handle : list) {
			destroyFunc(device, handle, nullptr);
			deleteCount++;
		}
		list.clear();
	};

	destroyAll(cmdPools_, vkDestroyCommandPool);
	destroyAll(descPools_, vkDestroyDescriptorPool);
	destroyAll(modules_, vkDestroyShaderModule);
	destroyAll(buffers_, vkDestroyBuffer);

	// VMA-backed buffers/images need the allocator, not the device.
	for (auto &buf : buffersWithAllocs_) {
		vmaDestroyBuffer(allocator, buf.buffer, buf.alloc);
		deleteCount++;
	}
	buffersWithAllocs_.clear();

	destroyAll(bufferViews_, vkDestroyBufferView);

	for (auto &img : imagesWithAllocs_) {
		vmaDestroyImage(allocator, img.image, img.alloc);
		deleteCount++;
	}
	imagesWithAllocs_.clear();

	destroyAll(imageViews_, vkDestroyImageView);
	destroyAll(deviceMemory_, vkFreeMemory);
	destroyAll(samplers_, vkDestroySampler);
	destroyAll(pipelines_, vkDestroyPipeline);
	destroyAll(pipelineCaches_, vkDestroyPipelineCache);
	destroyAll(renderPasses_, vkDestroyRenderPass);
	destroyAll(framebuffers_, vkDestroyFramebuffer);
	destroyAll(pipelineLayouts_, vkDestroyPipelineLayout);
	destroyAll(descSetLayouts_, vkDestroyDescriptorSetLayout);
	destroyAll(queryPools_, vkDestroyQueryPool);

	deleteCount_ = deleteCount;
}
1886
1887
// Queries memory requirements for an image. When VK_KHR_dedicated_allocation
// is available, also reports whether the driver requires or prefers a
// dedicated allocation for it; otherwise *dedicatedAllocation is false.
void VulkanContext::GetImageMemoryRequirements(VkImage image, VkMemoryRequirements *mem_reqs, bool *dedicatedAllocation) {
	if (!Extensions().KHR_dedicated_allocation) {
		// Plain path - no dedicated-allocation info available.
		vkGetImageMemoryRequirements(GetDevice(), image, mem_reqs);
		*dedicatedAllocation = false;
		return;
	}

	VkImageMemoryRequirementsInfo2KHR memReqInfo2{VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR};
	memReqInfo2.image = image;

	// Chain the dedicated-requirements struct onto the query output.
	VkMemoryRequirements2KHR memReq2 = {VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR};
	VkMemoryDedicatedRequirementsKHR memDedicatedReq{VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR};
	ChainStruct(memReq2, &memDedicatedReq);

	vkGetImageMemoryRequirements2(GetDevice(), &memReqInfo2, &memReq2);

	*mem_reqs = memReq2.memoryRequirements;
	*dedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE) ||
		(memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
}
1907
1908
bool IsHashMaliDriverVersion(const VkPhysicalDeviceProperties &props) {
1909
// ARM used to put a hash in place of the driver version.
1910
// Now they only use major versions. We'll just make a bad heuristic.
1911
uint32_t major = VK_VERSION_MAJOR(props.driverVersion);
1912
uint32_t branch = VK_VERSION_PATCH(props.driverVersion);
1913
if (branch > 0)
1914
return true;
1915
if (branch > 100 || major > 100)
1916
return true;
1917
// Can (in theory) have false negatives!
1918
return false;
1919
}
1920
1921
// From Sascha's code
1922
std::string FormatDriverVersion(const VkPhysicalDeviceProperties &props) {
1923
if (props.vendorID == VULKAN_VENDOR_NVIDIA) {
1924
// For whatever reason, NVIDIA has their own scheme.
1925
// 10 bits = major version (up to r1023)
1926
// 8 bits = minor version (up to 255)
1927
// 8 bits = secondary branch version/build version (up to 255)
1928
// 6 bits = tertiary branch/build version (up to 63)
1929
uint32_t major = (props.driverVersion >> 22) & 0x3ff;
1930
uint32_t minor = (props.driverVersion >> 14) & 0x0ff;
1931
uint32_t secondaryBranch = (props.driverVersion >> 6) & 0x0ff;
1932
uint32_t tertiaryBranch = (props.driverVersion) & 0x003f;
1933
return StringFromFormat("%d.%d.%d.%d", major, minor, secondaryBranch, tertiaryBranch);
1934
} else if (props.vendorID == VULKAN_VENDOR_ARM) {
1935
// ARM used to just put a hash here. No point in splitting it up.
1936
if (IsHashMaliDriverVersion(props)) {
1937
return StringFromFormat("(hash) %08x", props.driverVersion);
1938
}
1939
}
1940
// Qualcomm has an inscrutable versioning scheme. Let's just display it as normal.
1941
// Standard scheme, use the standard macros.
1942
uint32_t major = VK_VERSION_MAJOR(props.driverVersion);
1943
uint32_t minor = VK_VERSION_MINOR(props.driverVersion);
1944
uint32_t branch = VK_VERSION_PATCH(props.driverVersion);
1945
return StringFromFormat("%d.%d.%d (%08x)", major, minor, branch, props.driverVersion);
1946
}
1947
1948
// Formats a packed Vulkan API version as "major.minor.patch".
// Fix: %u instead of %d - the VK_API_VERSION_* macros yield unsigned values.
std::string FormatAPIVersion(u32 version) {
	return StringFromFormat("%u.%u.%u", VK_API_VERSION_MAJOR(version), VK_API_VERSION_MINOR(version), VK_API_VERSION_PATCH(version));
}
1951
1952
// Mainly just the formats seen on gpuinfo.org for swapchains, as this function is only used for listing
// those in the UI. Also depth buffers that we used in one place.
// Might add more in the future if we find more uses for this.
// Returns a static string; never returns null (unknown formats get a
// placeholder, and VK_FORMAT_UNDEFINED is called out explicitly).
const char *VulkanFormatToString(VkFormat format) {
	switch (format) {
	// Color formats (swapchain candidates).
	case VK_FORMAT_A1R5G5B5_UNORM_PACK16: return "A1R5G5B5_UNORM_PACK16";
	case VK_FORMAT_A2B10G10R10_UNORM_PACK32: return "A2B10G10R10_UNORM_PACK32";
	case VK_FORMAT_A2R10G10B10_UNORM_PACK32: return "A2R10G10B10_UNORM_PACK32";
	case VK_FORMAT_A8B8G8R8_SNORM_PACK32: return "A8B8G8R8_SNORM_PACK32";
	case VK_FORMAT_A8B8G8R8_SRGB_PACK32: return "A8B8G8R8_SRGB_PACK32";
	case VK_FORMAT_A8B8G8R8_UNORM_PACK32: return "A8B8G8R8_UNORM_PACK32";
	case VK_FORMAT_B10G11R11_UFLOAT_PACK32: return "B10G11R11_UFLOAT_PACK32";
	case VK_FORMAT_B4G4R4A4_UNORM_PACK16: return "B4G4R4A4_UNORM_PACK16";
	case VK_FORMAT_B5G5R5A1_UNORM_PACK16: return "B5G5R5A1_UNORM_PACK16";
	case VK_FORMAT_B5G6R5_UNORM_PACK16: return "B5G6R5_UNORM_PACK16";
	case VK_FORMAT_B8G8R8A8_SNORM: return "B8G8R8A8_SNORM";
	case VK_FORMAT_B8G8R8A8_SRGB: return "B8G8R8A8_SRGB";
	case VK_FORMAT_B8G8R8A8_UNORM: return "B8G8R8A8_UNORM";
	case VK_FORMAT_R16G16B16A16_SFLOAT: return "R16G16B16A16_SFLOAT";
	case VK_FORMAT_R16G16B16A16_SNORM: return "R16G16B16A16_SNORM";
	case VK_FORMAT_R16G16B16A16_UNORM: return "R16G16B16A16_UNORM";
	case VK_FORMAT_R4G4B4A4_UNORM_PACK16: return "R4G4B4A4_UNORM_PACK16";
	case VK_FORMAT_R5G5B5A1_UNORM_PACK16: return "R5G5B5A1_UNORM_PACK16";
	case VK_FORMAT_R5G6B5_UNORM_PACK16: return "R5G6B5_UNORM_PACK16";
	case VK_FORMAT_R8G8B8A8_SNORM: return "R8G8B8A8_SNORM";
	case VK_FORMAT_R8G8B8A8_SRGB: return "R8G8B8A8_SRGB";
	case VK_FORMAT_R8G8B8A8_UNORM: return "R8G8B8A8_UNORM";

	// Depth/stencil formats (shortened names).
	case VK_FORMAT_D24_UNORM_S8_UINT: return "D24S8";
	case VK_FORMAT_D16_UNORM: return "D16";
	case VK_FORMAT_D16_UNORM_S8_UINT: return "D16S8";
	case VK_FORMAT_D32_SFLOAT: return "D32f";
	case VK_FORMAT_D32_SFLOAT_S8_UINT: return "D32fS8";
	case VK_FORMAT_S8_UINT: return "S8";
	case VK_FORMAT_UNDEFINED: return "UNDEFINED (BAD!)";

	default: return "(format not added to string list)";
	}
}
1991
1992
// I miss Rust where this is automatic :(
// Returns a static display name for a swapchain color space; never null.
const char *VulkanColorSpaceToString(VkColorSpaceKHR colorSpace) {
	switch (colorSpace) {
	case VK_COLOR_SPACE_SRGB_NONLINEAR_KHR: return "SRGB_NONLINEAR";
	case VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT: return "DISPLAY_P3_NONLINEAR";
	case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT: return "EXTENDED_SRGB_LINEAR";
	case VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT: return "DISPLAY_P3_LINEAR";
	case VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT: return "DCI_P3_NONLINEAR";
	case VK_COLOR_SPACE_BT709_LINEAR_EXT: return "BT709_LINEAR";
	case VK_COLOR_SPACE_BT709_NONLINEAR_EXT: return "BT709_NONLINEAR";
	case VK_COLOR_SPACE_BT2020_LINEAR_EXT: return "BT2020_LINEAR";
	case VK_COLOR_SPACE_HDR10_ST2084_EXT: return "HDR10_ST2084";
	case VK_COLOR_SPACE_DOLBYVISION_EXT: return "DOLBYVISION";
	case VK_COLOR_SPACE_HDR10_HLG_EXT: return "HDR10_HLG";
	case VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT: return "ADOBERGB_LINEAR";
	case VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT: return "ADOBERGB_NONLINEAR";
	case VK_COLOR_SPACE_PASS_THROUGH_EXT: return "PASS_THROUGH";
	case VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT: return "EXTENDED_SRGB_NONLINEAR";
	case VK_COLOR_SPACE_DISPLAY_NATIVE_AMD: return "DISPLAY_NATIVE_AMD";
	default: return "(unknown)";
	}
}
2014
2015