CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutSign UpSign In
hrydgard

CoCalc provides the best real-time collaborative environment for Jupyter Notebooks, LaTeX documents, and SageMath, scalable from individual users to large groups and classes!

GitHub Repository: hrydgard/ppsspp
Path: blob/master/libretro/libretro_vulkan.cpp
Views: 1401
1
// Debugging notes
2
// The crash happens when we try to call vkGetPhysicalDeviceProperties2KHR which seems to be null.
3
//
4
// Apparently we don't manage to specify the extensions we want. Still something reports that this one
5
// is present?
6
// Failed to load : vkGetPhysicalDeviceProperties2KHR
7
// Failed to load : vkGetPhysicalDeviceFeatures2KHR
8
9
#include <cstring>
10
#include <cassert>
11
#include <vector>
12
#include <mutex>
13
#include <condition_variable>
14
15
#include "Common/GPU/Vulkan/VulkanLoader.h"
16
#include "Common/Log.h"
17
#include "Core/Config.h"
18
19
#define VK_NO_PROTOTYPES
20
#include "libretro/libretro_vulkan.h"
21
22
using namespace PPSSPP_VK;
23
24
// Frontend-provided Vulkan interface; set via vk_libretro_set_hwrender_interface().
static retro_hw_render_interface_vulkan *vulkan;

// Parameters captured from the frontend in vk_libretro_init(), consumed later by
// the intercepted vkCreateInstance / vkCreateDevice / surface-creation wrappers.
static struct {
	VkInstance instance;
	VkPhysicalDevice gpu;
	VkSurfaceKHR surface;
	PFN_vkGetInstanceProcAddr get_instance_proc_addr;
	const char **required_device_extensions;
	unsigned num_required_device_extensions;
	const char **required_device_layers;
	unsigned num_required_device_layers;
	const VkPhysicalDeviceFeatures *required_features;
} vk_init_info;
// Set when VK_KHR_dedicated_allocation ends up in the enabled device extension
// list (see vkCreateDevice_libretro); used by vkCreateSwapchainKHR_libretro.
static bool DEDICATED_ALLOCATION;
38
39
#define VULKAN_MAX_SWAPCHAIN_IMAGES 8
// Fake swapchain object handed to the core instead of a real VkSwapchainKHR.
// The images are created and owned here, and published to the libretro
// frontend through retro_vulkan_image / set_image().
struct VkSwapchainKHR_T {
	uint32_t count;                      // number of valid entries in images[]
	struct {
		VkImage handle;
		VkDeviceMemory memory;           // backing allocation for handle
		retro_vulkan_image retro_image;  // view + layout info the frontend consumes
	} images[VULKAN_MAX_SWAPCHAIN_IMAGES];
	std::mutex mutex;                    // guards current_index, paired with condVar
	std::condition_variable condVar;
	int current_index;                   // last presented image index, -1 when none
};
static VkSwapchainKHR_T chain;
52
53
// Every Vulkan entry point that gets intercepted ("warped") with a *_libretro
// wrapper. LIBRETRO_VK_WARP_FUNC is (re)defined before each expansion of this
// list: first to declare storage for the original pointers, later (see the
// GetProcAddr wrappers) to perform the name match and substitution.
#define LIBRETRO_VK_WARP_LIST() \
	LIBRETRO_VK_WARP_FUNC(vkCreateInstance); \
	LIBRETRO_VK_WARP_FUNC(vkDestroyInstance); \
	LIBRETRO_VK_WARP_FUNC(vkCreateDevice); \
	LIBRETRO_VK_WARP_FUNC(vkDestroyDevice); \
	LIBRETRO_VK_WARP_FUNC(vkGetPhysicalDeviceSurfaceCapabilitiesKHR); \
	LIBRETRO_VK_WARP_FUNC(vkDestroySurfaceKHR); \
	LIBRETRO_VK_WARP_FUNC(vkCreateSwapchainKHR); \
	LIBRETRO_VK_WARP_FUNC(vkGetSwapchainImagesKHR); \
	LIBRETRO_VK_WARP_FUNC(vkAcquireNextImageKHR); \
	LIBRETRO_VK_WARP_FUNC(vkQueuePresentKHR); \
	LIBRETRO_VK_WARP_FUNC(vkDestroySwapchainKHR); \
	LIBRETRO_VK_WARP_FUNC(vkQueueSubmit); \
	LIBRETRO_VK_WARP_FUNC(vkQueueWaitIdle); \
	LIBRETRO_VK_WARP_FUNC(vkCmdPipelineBarrier); \
	LIBRETRO_VK_WARP_FUNC(vkCreateRenderPass);

// First expansion: declare a PFN_<name> variable "<name>_org" that stores the
// real (un-warped) function pointer for each listed entry point.
#define LIBRETRO_VK_WARP_FUNC(x) \
	PFN_##x x##_org

LIBRETRO_VK_WARP_FUNC(vkGetInstanceProcAddr);
LIBRETRO_VK_WARP_FUNC(vkGetDeviceProcAddr);
LIBRETRO_VK_WARP_LIST();
76
77
// Intercepted vkCreateInstance: the libretro frontend already owns a VkInstance,
// so instance creation is a no-op that simply hands that instance back.
static VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance_libretro(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkInstance *pInstance) {
	// pCreateInfo and pAllocator are intentionally ignored.
	*pInstance = vk_init_info.instance;
	return VK_SUCCESS;
}
81
82
// Append value to list unless a string equal to it is already present
// (comparison by content via strcmp, not by pointer).
static void add_name_unique(std::vector<const char *> &list, const char *value) {
	for (size_t i = 0; i < list.size(); i++) {
		if (strcmp(list[i], value) == 0)
			return;  // duplicate — keep the list unchanged
	}
	list.push_back(value);
}
89
// Intercepted vkCreateDevice: augments the core's create info with the device
// layers, extensions and features the libretro frontend requires, then forwards
// to the real vkCreateDevice.
//
// Fix: pCreateInfo->pEnabledFeatures may legally be NULL (Vulkan spec), and the
// original dereferenced it unconditionally. required_features is also guarded.
static VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice_libretro(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkDevice *pDevice) {
	VkDeviceCreateInfo info = *pCreateInfo;
	std::vector<const char *> EnabledLayerNames(info.ppEnabledLayerNames, info.ppEnabledLayerNames + info.enabledLayerCount);
	std::vector<const char *> EnabledExtensionNames(info.ppEnabledExtensionNames, info.ppEnabledExtensionNames + info.enabledExtensionCount);

	// Start from the core's requested features, or all-zero if it asked for none.
	VkPhysicalDeviceFeatures EnabledFeatures{};
	if (info.pEnabledFeatures)
		EnabledFeatures = *info.pEnabledFeatures;

	for (unsigned i = 0; i < vk_init_info.num_required_device_layers; i++)
		add_name_unique(EnabledLayerNames, vk_init_info.required_device_layers[i]);

	for (unsigned i = 0; i < vk_init_info.num_required_device_extensions; i++)
		add_name_unique(EnabledExtensionNames, vk_init_info.required_device_extensions[i]);

	// OR in every feature the frontend requires. VkPhysicalDeviceFeatures is a
	// flat struct of VkBool32 members, so it is walked as an array here.
	if (vk_init_info.required_features) {
		for (unsigned i = 0; i < sizeof(VkPhysicalDeviceFeatures) / sizeof(VkBool32); i++) {
			if (((const VkBool32 *)vk_init_info.required_features)[i])
				((VkBool32 *)&EnabledFeatures)[i] = VK_TRUE;
		}
	}

	// Remember whether dedicated allocation got enabled — the swapchain image
	// allocation path checks this flag.
	for (auto extension_name : EnabledExtensionNames) {
		if (!strcmp(extension_name, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME))
			DEDICATED_ALLOCATION = true;
	}

	info.enabledLayerCount = (uint32_t)EnabledLayerNames.size();
	info.ppEnabledLayerNames = info.enabledLayerCount ? EnabledLayerNames.data() : nullptr;
	info.enabledExtensionCount = (uint32_t)EnabledExtensionNames.size();
	info.ppEnabledExtensionNames = info.enabledExtensionCount ? EnabledExtensionNames.data() : nullptr;
	info.pEnabledFeatures = &EnabledFeatures;

	return vkCreateDevice_org(physicalDevice, &info, pAllocator, pDevice);
}
119
120
// Every platform-specific vkCreate*SurfaceKHR call is routed here by
// vkGetInstanceProcAddr_libretro; the surface the frontend already created is
// returned instead of making a new one.
static VKAPI_ATTR VkResult VKAPI_CALL vkCreateLibretroSurfaceKHR(VkInstance instance, const void *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSurfaceKHR *pSurface) {
	// instance, pCreateInfo and pAllocator are intentionally unused.
	*pSurface = vk_init_info.surface;
	return VK_SUCCESS;
}
124
125
// Query the real surface capabilities, then pin the reported extents to the
// emulator's configured internal render resolution.
VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR_libretro(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR *pSurfaceCapabilities) {
	const VkResult res = vkGetPhysicalDeviceSurfaceCapabilitiesKHR_org(physicalDevice, surface, pSurfaceCapabilities);
	if (res != VK_SUCCESS)
		return res;

	const int scale = g_Config.iInternalResolution;
	const int w = scale * NATIVEWIDTH;
	int h = scale * NATIVEHEIGHT;
	if (g_Config.bDisplayCropTo16x9)
		h -= scale * 2;  // shave 2 native rows per scale step for a 16:9 crop

	// min == max == current forces the swapchain to exactly this size.
	pSurfaceCapabilities->currentExtent.width = w;
	pSurfaceCapabilities->currentExtent.height = h;
	pSurfaceCapabilities->minImageExtent = pSurfaceCapabilities->currentExtent;
	pSurfaceCapabilities->maxImageExtent = pSurfaceCapabilities->currentExtent;

	return res;
}
143
144
static bool MemoryTypeFromProperties(uint32_t typeBits, VkFlags requirements_mask, uint32_t *typeIndex) {
145
VkPhysicalDeviceMemoryProperties memory_properties;
146
vkGetPhysicalDeviceMemoryProperties(vulkan->gpu, &memory_properties);
147
// Search memtypes to find first index with those properties
148
for (uint32_t i = 0; i < 32; i++) {
149
if ((typeBits & 1) == 1) {
150
// Type is available, does it match user properties?
151
if ((memory_properties.memoryTypes[i].propertyFlags & requirements_mask) == requirements_mask) {
152
*typeIndex = i;
153
return true;
154
}
155
}
156
typeBits >>= 1;
157
}
158
// No memory types matched, return failure
159
return false;
160
}
161
162
// Intercepted vkCreateSwapchainKHR: builds a fake swapchain out of images we
// create ourselves (one per frontend sync index) and returns the global
// `chain` as the swapchain handle. No real VkSwapchainKHR is ever created.
static VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR_libretro(VkDevice device, const VkSwapchainCreateInfoKHR *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkSwapchainKHR *pSwapchain) {
	uint32_t swapchain_mask = vulkan->get_sync_index_mask(vulkan->handle);

	// Image count = bit width of the frontend's sync index mask.
	chain.count = 0;
	while (swapchain_mask) {
		chain.count++;
		swapchain_mask >>= 1;
	}
	assert(chain.count <= VULKAN_MAX_SWAPCHAIN_IMAGES);

	for (uint32_t i = 0; i < chain.count; i++) {
		{
			VkImageCreateInfo info{ VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO };
			info.flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
			info.imageType = VK_IMAGE_TYPE_2D;
			info.format = pCreateInfo->imageFormat;
			info.extent.width = pCreateInfo->imageExtent.width;
			info.extent.height = pCreateInfo->imageExtent.height;
			info.extent.depth = 1;
			info.mipLevels = 1;
			info.arrayLayers = 1;
			info.samples = VK_SAMPLE_COUNT_1_BIT;
			info.tiling = VK_IMAGE_TILING_OPTIMAL;
			// SAMPLED so the frontend can read the image; the rest covers the
			// core's rendering and blit paths.
			info.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
			info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

			// NOTE(review): result is not checked here, unlike the calls below.
			vkCreateImage(device, &info, pAllocator, &chain.images[i].handle);
		}

		VkMemoryRequirements memreq;
		vkGetImageMemoryRequirements(device, chain.images[i].handle, &memreq);

		VkMemoryAllocateInfo alloc{ VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
		alloc.allocationSize = memreq.size;

		// Chain in a dedicated allocation when the extension was enabled at
		// device-creation time (see vkCreateDevice_libretro).
		VkMemoryDedicatedAllocateInfoKHR dedicated{ VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
		if (DEDICATED_ALLOCATION) {
			alloc.pNext = &dedicated;
			dedicated.image = chain.images[i].handle;
		}

		MemoryTypeFromProperties(memreq.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, &alloc.memoryTypeIndex);
		VkResult res = vkAllocateMemory(device, &alloc, pAllocator, &chain.images[i].memory);
		assert(res == VK_SUCCESS);
		res = vkBindImageMemory(device, chain.images[i].handle, chain.images[i].memory, 0);
		assert(res == VK_SUCCESS);

		// Fill in the retro_vulkan_image that gets handed to the frontend via
		// set_image() in vkQueuePresentKHR_libretro.
		chain.images[i].retro_image.create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
		chain.images[i].retro_image.create_info.image = chain.images[i].handle;
		chain.images[i].retro_image.create_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
		chain.images[i].retro_image.create_info.format = pCreateInfo->imageFormat;
		chain.images[i].retro_image.create_info.components = { VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY };
		chain.images[i].retro_image.create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		chain.images[i].retro_image.create_info.subresourceRange.layerCount = 1;
		chain.images[i].retro_image.create_info.subresourceRange.levelCount = 1;
		res = vkCreateImageView(device, &chain.images[i].retro_image.create_info, pAllocator, &chain.images[i].retro_image.image_view);
		assert(res == VK_SUCCESS);

		chain.images[i].retro_image.image_layout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
	}

	chain.current_index = -1;
	*pSwapchain = (VkSwapchainKHR)&chain;

	return VK_SUCCESS;
}
228
// Report the images backing our fake swapchain, following the standard Vulkan
// two-call idiom: with pSwapchainImages == nullptr only the count is returned.
//
// Fix: loop index is uint32_t; the original compared a signed int against the
// unsigned *pSwapchainImageCount.
static VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR_libretro(VkDevice device, VkSwapchainKHR swapchain_, uint32_t *pSwapchainImageCount, VkImage *pSwapchainImages) {
	VkSwapchainKHR_T *swapchain = (VkSwapchainKHR_T *)swapchain_;
	if (pSwapchainImages) {
		assert(*pSwapchainImageCount <= swapchain->count);
		for (uint32_t i = 0; i < *pSwapchainImageCount; i++)
			pSwapchainImages[i] = swapchain->images[i].handle;
	} else {
		*pSwapchainImageCount = swapchain->count;
	}

	return VK_SUCCESS;
}
239
240
// Intercepted acquire: instead of acquiring from a real swapchain, wait on the
// frontend's frame pacing and use its sync index as the image index. The
// semaphore/fence parameters are ignored (see the disabled code below).
static VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR_libretro(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t *pImageIndex) {
	vulkan->wait_sync_index(vulkan->handle);
	*pImageIndex = vulkan->get_sync_index(vulkan->handle);
#if 0
	vulkan->set_signal_semaphore(vulkan->handle, semaphore);
#endif
	return VK_SUCCESS;
}
248
249
// Intercepted present: nothing is actually presented. The rendered image is
// published to the libretro frontend via set_image(), current_index is set so
// vk_libretro_wait_for_presentation() can see a frame is ready, and waiters
// are woken.
static VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR_libretro(VkQueue queue, const VkPresentInfoKHR *pPresentInfo) {
	VkSwapchainKHR_T *swapchain = (VkSwapchainKHR_T *)pPresentInfo->pSwapchains[0];
	// `swapchain` is the global `chain` in practice (vkCreateSwapchainKHR_libretro
	// always returns &chain), so this lock does guard chain.current_index below.
	std::unique_lock<std::mutex> lock(swapchain->mutex);
#if 0
	if(chain.current_index >= 0)
		chain.condVar.wait(lock);
#endif

	chain.current_index = pPresentInfo->pImageIndices[0];
#if 0
	vulkan->set_image(vulkan->handle, &swapchain->images[pPresentInfo->pImageIndices[0]].retro_image, pPresentInfo->waitSemaphoreCount, pPresentInfo->pWaitSemaphores, vulkan->queue_index);
#else
	// Wait semaphores are dropped; vkQueueSubmit_libretro already stripped all
	// semaphores from the core's submits.
	vulkan->set_image(vulkan->handle, &swapchain->images[pPresentInfo->pImageIndices[0]].retro_image, 0, nullptr, vulkan->queue_index);
#endif
	swapchain->condVar.notify_all();

	return VK_SUCCESS;
}
267
268
void vk_libretro_wait_for_presentation() {
269
std::unique_lock<std::mutex> lock(chain.mutex);
270
if (chain.current_index < 0)
271
chain.condVar.wait(lock);
272
#if 0
273
chain.current_index = -1;
274
chain.condVar.notify_all();
275
#endif
276
}
277
278
// The instance, device and surface are owned by the libretro frontend, so the
// core must not destroy them — these wrappers are deliberate no-ops.
static VKAPI_ATTR void VKAPI_CALL vkDestroyInstance_libretro(VkInstance instance, const VkAllocationCallbacks *pAllocator) {}
static VKAPI_ATTR void VKAPI_CALL vkDestroyDevice_libretro(VkDevice device, const VkAllocationCallbacks *pAllocator) {}
static VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR_libretro(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks *pAllocator) {}
281
// Tear down the fake swapchain: destroy the images, views and memory we
// created in vkCreateSwapchainKHR_libretro, then reset the chain state.
//
// Fix: loop index is uint32_t; the original compared a signed int against the
// unsigned chain.count.
static VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR_libretro(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks *pAllocator) {
	for (uint32_t i = 0; i < chain.count; i++) {
		vkDestroyImage(device, chain.images[i].handle, pAllocator);
		vkDestroyImageView(device, chain.images[i].retro_image.image_view, pAllocator);
		vkFreeMemory(device, chain.images[i].memory, pAllocator);
	}

	memset(&chain.images, 0x00, sizeof(chain.images));
	chain.count = 0;
	chain.current_index = -1;
}
292
293
// Intercepted vkQueueSubmit: strips all wait/signal semaphores from the core's
// submits (frame synchronization is handled by the frontend instead) and
// serializes queue access with the frontend's queue lock.
//
// Fix: loop index in the active branch is uint32_t; the original compared a
// signed int against the unsigned submitCount.
VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit_libretro(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) {
	VkResult res = VK_SUCCESS;

#if 0
	for(int i = 0; i < submitCount; i++)
		vulkan->set_command_buffers(vulkan->handle, pSubmits[i].commandBufferCount, pSubmits[i].pCommandBuffers);
#else
#if 1
	// Const is cast away to patch the caller's submit infos in place,
	// dropping every semaphore the core tried to use.
	for (uint32_t i = 0; i < submitCount; i++) {
		((VkSubmitInfo *)pSubmits)[i].waitSemaphoreCount = 0;
		((VkSubmitInfo *)pSubmits)[i].pWaitSemaphores = nullptr;
		((VkSubmitInfo *)pSubmits)[i].signalSemaphoreCount = 0;
		((VkSubmitInfo *)pSubmits)[i].pSignalSemaphores = nullptr;
	}
#endif
	vulkan->lock_queue(vulkan->handle);
	res = vkQueueSubmit_org(queue, submitCount, pSubmits, fence);
	vulkan->unlock_queue(vulkan->handle);
#endif

	return res;
}
315
316
// Queue access is shared with the libretro frontend, so even a wait-idle must
// hold the frontend's queue lock around the real call.
VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle_libretro(VkQueue queue) {
	vulkan->lock_queue(vulkan->handle);
	VkResult res = vkQueueWaitIdle_org(queue);
	vulkan->unlock_queue(vulkan->handle);
	return res;
}
322
323
// Intercepted vkCmdPipelineBarrier: rewrites PRESENT_SRC_KHR layouts to
// SHADER_READ_ONLY_OPTIMAL, because there is no real presentation — the
// frontend samples our images as shader-read-only textures instead.
// Const is cast away to patch the caller's barrier array in place.
//
// Fix: loop index is uint32_t; the original compared a signed int against the
// unsigned imageMemoryBarrierCount.
VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier_libretro(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier *pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier *pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier *pImageMemoryBarriers) {
	VkImageMemoryBarrier *barriers = (VkImageMemoryBarrier *)pImageMemoryBarriers;
	for (uint32_t i = 0; i < imageMemoryBarrierCount; i++) {
		if (pImageMemoryBarriers[i].oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
			barriers[i].oldLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
			barriers[i].srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
		}
		if (pImageMemoryBarriers[i].newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
			barriers[i].newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
			barriers[i].dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
		}
	}
	return vkCmdPipelineBarrier_org(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, barriers);
}
337
338
// Intercepted vkCreateRenderPass: render passes that would finish in
// PRESENT_SRC_KHR are patched to end in SHADER_READ_ONLY_OPTIMAL, matching
// how the frontend consumes our images. Const is cast away to edit the
// caller's attachment description in place.
VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass_libretro(VkDevice device, const VkRenderPassCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkRenderPass *pRenderPass) {
	if (pCreateInfo->pAttachments[0].finalLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
		((VkAttachmentDescription *)pCreateInfo->pAttachments)[0].finalLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

	return vkCreateRenderPass_org(device, pCreateInfo, pAllocator, pRenderPass);
}
344
345
// Second expansion of the warp list, used inside the GetProcAddr wrappers:
// if the requested name matches, stash the real pointer in <name>_org and
// return the <name>_libretro wrapper instead. Relies on `pName` and `fptr`
// being in scope at the expansion site.
#undef LIBRETRO_VK_WARP_FUNC
#define LIBRETRO_VK_WARP_FUNC(x) \
	if (!strcmp(pName, #x)) { \
		x##_org = (PFN_##x)fptr; \
		return (PFN_vkVoidFunction)x##_libretro; \
	}
351
352
// Intercepted vkGetInstanceProcAddr. Two jobs:
//  1. Route every platform-specific surface-creation entry point to
//     vkCreateLibretroSurfaceKHR (which returns the frontend's surface).
//  2. Resolve the real pointer via the frontend's loader and, for every
//     function in LIBRETRO_VK_WARP_LIST, stash it in *_org and hand back the
//     *_libretro wrapper instead.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr_libretro(VkInstance instance, const char *pName) {
	// `false ||` lets each platform clause below start uniformly with `||`.
	if (false
#ifdef _WIN32
	    || !strcmp(pName, "vkCreateWin32SurfaceKHR")
#endif
#ifdef __ANDROID__
	    || !strcmp(pName, "vkCreateAndroidSurfaceKHR")
#endif
#ifdef VK_USE_PLATFORM_METAL_EXT
	    || !strcmp(pName, "vkCreateMetalSurfaceEXT")
#endif
#ifdef VK_USE_PLATFORM_XLIB_KHR
	    || !strcmp(pName, "vkCreateXlibSurfaceKHR")
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
	    || !strcmp(pName, "vkCreateXcbSurfaceKHR")
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
	    || !strcmp(pName, "vkCreateWaylandSurfaceKHR")
#endif
#ifdef VK_USE_PLATFORM_DISPLAY_KHR
	    || !strcmp(pName, "vkCreateDisplayPlaneSurfaceKHR")
#endif
	    ) {
		return (PFN_vkVoidFunction)vkCreateLibretroSurfaceKHR;
	}

	PFN_vkVoidFunction fptr = vk_init_info.get_instance_proc_addr(instance, pName);
	if (!fptr) {
		ERROR_LOG(Log::G3D, "Failed to load VK instance function: %s", pName);
		return fptr;
	}

	// Expands to a strcmp/return chain over `pName` and `fptr` (see the macro above).
	LIBRETRO_VK_WARP_LIST();

	return fptr;
}
389
390
// Device-level counterpart of vkGetInstanceProcAddr_libretro: resolve with the
// real vkGetDeviceProcAddr, then substitute the warped wrapper where one exists.
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr_libretro(VkDevice device, const char *pName) {
	PFN_vkVoidFunction fptr = vkGetDeviceProcAddr_org(device, pName);
	if (!fptr)
		return fptr;

	// Expands to a strcmp/return chain over `pName` and `fptr`.
	LIBRETRO_VK_WARP_LIST();

	return fptr;
}
399
400
// Entry point for the libretro port, called before PPSSPP initializes Vulkan.
// Captures the frontend-provided instance/gpu/surface plus the device
// requirements, then hooks the loader's global function pointers so all
// subsequent instance/device/surface/swapchain creation goes through the
// wrappers in this file.
//
// Fix: removed a stray double semicolon after the vkGetDeviceProcAddr_org line.
void vk_libretro_init(VkInstance instance, VkPhysicalDevice gpu, VkSurfaceKHR surface, PFN_vkGetInstanceProcAddr get_instance_proc_addr, const char **required_device_extensions, unsigned num_required_device_extensions, const char **required_device_layers, unsigned num_required_device_layers, const VkPhysicalDeviceFeatures *required_features) {
	assert(surface);

	vk_init_info.instance = instance;
	vk_init_info.gpu = gpu;
	vk_init_info.surface = surface;
	vk_init_info.get_instance_proc_addr = get_instance_proc_addr;
	vk_init_info.required_device_extensions = required_device_extensions;
	vk_init_info.num_required_device_extensions = num_required_device_extensions;
	vk_init_info.required_device_layers = required_device_layers;
	vk_init_info.num_required_device_layers = num_required_device_layers;
	vk_init_info.required_features = required_features;

	// Swap the loader's global pointers for our intercepting versions.
	vkGetInstanceProcAddr_org = vkGetInstanceProcAddr;
	vkGetInstanceProcAddr = vkGetInstanceProcAddr_libretro;
	// vkGetInstanceProcAddr is already the wrapper at this point, so the lookup
	// below also stashes the real vkGetDeviceProcAddr into the _org pointer.
	vkGetDeviceProcAddr_org = (PFN_vkGetDeviceProcAddr)vkGetInstanceProcAddr(instance, "vkGetDeviceProcAddr");
	vkGetDeviceProcAddr = vkGetDeviceProcAddr_libretro;
	vkCreateInstance = vkCreateInstance_libretro;

	// Global (instance-less) entry points still come from the frontend's loader.
	vkEnumerateInstanceVersion = (PFN_vkEnumerateInstanceVersion)vkGetInstanceProcAddr(NULL, "vkEnumerateInstanceVersion");
	vkEnumerateInstanceExtensionProperties = (PFN_vkEnumerateInstanceExtensionProperties)vkGetInstanceProcAddr(NULL, "vkEnumerateInstanceExtensionProperties");
	vkEnumerateInstanceLayerProperties = (PFN_vkEnumerateInstanceLayerProperties)vkGetInstanceProcAddr(NULL, "vkEnumerateInstanceLayerProperties");
}
423
424
// Store the frontend's hardware-render interface; the generic pointer handed
// to us is the Vulkan flavor of the interface.
void vk_libretro_set_hwrender_interface(retro_hw_render_interface *hw_render_interface) {
	vulkan = reinterpret_cast<retro_hw_render_interface_vulkan *>(hw_render_interface);
}
427
428
void vk_libretro_shutdown() {
429
memset(&vk_init_info, 0, sizeof(vk_init_info));
430
vulkan = nullptr;
431
DEDICATED_ALLOCATION = false;
432
}
433
434