GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/virtio/vulkan/vn_android.c
/*
 * Copyright 2021 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_android.h"

#include <dlfcn.h>
#include <hardware/gralloc.h>
#include <hardware/hwvulkan.h>
#include <vndk/hardware_buffer.h>
#include <vulkan/vk_icd.h>

#include "drm-uapi/drm_fourcc.h"
#include "util/libsync.h"
#include "util/os_file.h"

#include "vn_buffer.h"
#include "vn_device.h"
#include "vn_device_memory.h"
#include "vn_image.h"
#include "vn_queue.h"

static int
vn_hal_open(const struct hw_module_t *mod,
            const char *id,
            struct hw_device_t **dev);

static void UNUSED
static_asserts(void)
{
   STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC == ICD_LOADER_MAGIC);
}

PUBLIC struct hwvulkan_module_t HAL_MODULE_INFO_SYM = {
   .common = {
      .tag = HARDWARE_MODULE_TAG,
      .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
      .hal_api_version = HARDWARE_HAL_API_VERSION,
      .id = HWVULKAN_HARDWARE_MODULE_ID,
      .name = "Venus Vulkan HAL",
      .author = "Google LLC",
      .methods = &(hw_module_methods_t) {
         .open = vn_hal_open,
      },
   },
};
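
/* Gralloc module opened in vn_hal_open; used to query buffer layout info
 * via gralloc->perform (see vn_android_get_gralloc_buffer_properties).
 */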
static const gralloc_module_t *gralloc = NULL;

static int
vn_hal_close(UNUSED struct hw_device_t *dev)
{
   dlclose(gralloc->common.dso);
   return 0;
}

static hwvulkan_device_t vn_hal_dev = {
   .common = {
      .tag = HARDWARE_DEVICE_TAG,
      .version = HWVULKAN_DEVICE_API_VERSION_0_1,
      .module = &HAL_MODULE_INFO_SYM.common,
      .close = vn_hal_close,
   },
   .EnumerateInstanceExtensionProperties =
      vn_EnumerateInstanceExtensionProperties,
   .CreateInstance = vn_CreateInstance,
   .GetInstanceProcAddr = vn_GetInstanceProcAddr,
};

static int
vn_hal_open(const struct hw_module_t *mod,
            const char *id,
            struct hw_device_t **dev)
{
   static const char CROS_GRALLOC_MODULE_NAME[] = "CrOS Gralloc";

   assert(mod == &HAL_MODULE_INFO_SYM.common);
   assert(strcmp(id, HWVULKAN_DEVICE_0) == 0);

   /* get gralloc module for gralloc buffer info query */
   int ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID,
                           (const hw_module_t **)&gralloc);
   if (ret) {
      if (VN_DEBUG(WSI))
         vn_log(NULL, "failed to open gralloc module(ret=%d)", ret);
      return ret;
   }

   if (VN_DEBUG(WSI))
      vn_log(NULL, "opened gralloc module name: %s", gralloc->common.name);

   if (strcmp(gralloc->common.name, CROS_GRALLOC_MODULE_NAME) != 0 ||
       !gralloc->perform) {
      dlclose(gralloc->common.dso);
      return -1;
   }

   *dev = &vn_hal_dev.common;

   return 0;
}
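
/* Map a VkFormat to the matching AHardwareBuffer format, or return 0 when
 * there is no equivalent.
 */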
static uint32_t
vn_android_ahb_format_from_vk_format(VkFormat format)
{
   switch (format) {
   case VK_FORMAT_R8G8B8A8_UNORM:
      return AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
   case VK_FORMAT_R8G8B8_UNORM:
      return AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
      return AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
   case VK_FORMAT_R16G16B16A16_SFLOAT:
      return AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
   case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
      return AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
   case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
      return AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420;
   default:
      return 0;
   }
}

VkFormat
vn_android_drm_format_to_vk_format(uint32_t format)
{
   switch (format) {
   case DRM_FORMAT_ABGR8888:
   case DRM_FORMAT_XBGR8888:
      return VK_FORMAT_R8G8B8A8_UNORM;
   case DRM_FORMAT_BGR888:
      return VK_FORMAT_R8G8B8_UNORM;
   case DRM_FORMAT_RGB565:
      return VK_FORMAT_R5G6B5_UNORM_PACK16;
   case DRM_FORMAT_ABGR16161616F:
      return VK_FORMAT_R16G16B16A16_SFLOAT;
   case DRM_FORMAT_ABGR2101010:
      return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
   case DRM_FORMAT_YVU420:
   case DRM_FORMAT_NV12:
      return VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
   default:
      return VK_FORMAT_UNDEFINED;
   }
}
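
/* Derive AHardwareBuffer usage bits from Vulkan image usage and create
 * flags, defaulting to GPU_SAMPLED_IMAGE so at least one GPU bit is set.
 */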
uint64_t
vn_android_get_ahb_usage(const VkImageUsageFlags usage,
                         const VkImageCreateFlags flags)
{
   uint64_t ahb_usage = 0;
   if (usage &
       (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
      ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   if (usage & (VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
                VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT))
      ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;

   if (flags & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)
      ahb_usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;

   if (flags & VK_IMAGE_CREATE_PROTECTED_BIT)
      ahb_usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;

   /* must include at least one GPU usage flag */
   if (ahb_usage == 0)
      ahb_usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   return ahb_usage;
}

VkResult
vn_GetSwapchainGrallocUsage2ANDROID(
   VkDevice device,
   VkFormat format,
   VkImageUsageFlags imageUsage,
   VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
   uint64_t *grallocConsumerUsage,
   uint64_t *grallocProducerUsage)
{
   struct vn_device *dev = vn_device_from_handle(device);
   *grallocConsumerUsage = 0;
   *grallocProducerUsage = 0;

   if (swapchainImageUsage & VK_SWAPCHAIN_IMAGE_USAGE_SHARED_BIT_ANDROID)
      return vn_error(dev->instance, VK_ERROR_INITIALIZATION_FAILED);

   if (VN_DEBUG(WSI))
      vn_log(dev->instance, "format=%d, imageUsage=0x%x", format, imageUsage);

   if (imageUsage & (VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                     VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
      *grallocProducerUsage |= AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER;

   if (imageUsage &
       (VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_SAMPLED_BIT |
        VK_IMAGE_USAGE_STORAGE_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
      *grallocConsumerUsage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   return VK_SUCCESS;
}
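
/* Buffer info returned by the CrOS gralloc CROS_GRALLOC_DRM_GET_BUFFER_INFO
 * perform call used in vn_android_get_gralloc_buffer_properties.
 */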
struct cros_gralloc0_buffer_info {
   uint32_t drm_fourcc;
   int num_fds; /* ignored */
   int fds[4]; /* ignored */
   uint64_t modifier;
   uint32_t offset[4];
   uint32_t stride[4];
};

struct vn_android_gralloc_buffer_properties {
   uint32_t drm_fourcc;
   uint64_t modifier;
   uint32_t offset[4];
   uint32_t stride[4];
};

static VkResult
vn_android_get_dma_buf_from_native_handle(const native_handle_t *handle,
                                          int *out_dma_buf)
{
   /* There can be multiple fds wrapped inside a native_handle_t, but we
    * expect only the first one to point to the dma_buf. For multi-planar
    * formats, there should also be only one dma_buf. The other fd(s) may
    * point to shared memory used to store buffer metadata or other vendor
    * specific bits.
    */
   if (handle->numFds < 1) {
      vn_log(NULL, "handle->numFds is %d, expected >= 1", handle->numFds);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   if (handle->data[0] < 0) {
      vn_log(NULL, "handle->data[0] < 0");
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   *out_dma_buf = handle->data[0];
   return VK_SUCCESS;
}

static bool
vn_android_get_gralloc_buffer_properties(
   buffer_handle_t handle,
   struct vn_android_gralloc_buffer_properties *out_props)
{
   static const int32_t CROS_GRALLOC_DRM_GET_BUFFER_INFO = 4;
   struct cros_gralloc0_buffer_info info;
   if (gralloc->perform(gralloc, CROS_GRALLOC_DRM_GET_BUFFER_INFO, handle,
                        &info) != 0) {
      vn_log(NULL, "CROS_GRALLOC_DRM_GET_BUFFER_INFO failed");
      return false;
   }

   if (info.modifier == DRM_FORMAT_MOD_INVALID) {
      vn_log(NULL, "Unexpected DRM_FORMAT_MOD_INVALID");
      return false;
   }

   out_props->drm_fourcc = info.drm_fourcc;
   for (uint32_t i = 0; i < 4; i++) {
      out_props->stride[i] = info.stride[i];
      out_props->offset[i] = info.offset[i];
   }
   out_props->modifier = info.modifier;

   return true;
}
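
/* Query the DRM format modifier property list for the format (first call
 * gets the count, second fills the array) and copy out the entry matching
 * the requested modifier.
 */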
static VkResult
vn_android_get_modifier_properties(struct vn_device *dev,
                                   VkFormat format,
                                   uint64_t modifier,
                                   const VkAllocationCallbacks *alloc,
                                   VkDrmFormatModifierPropertiesEXT *out_props)
{
   VkPhysicalDevice physical_device =
      vn_physical_device_to_handle(dev->physical_device);
   VkDrmFormatModifierPropertiesListEXT mod_prop_list = {
      .sType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
      .pNext = NULL,
      .drmFormatModifierCount = 0,
      .pDrmFormatModifierProperties = NULL,
   };
   VkFormatProperties2 format_prop = {
      .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
      .pNext = &mod_prop_list,
   };
   VkDrmFormatModifierPropertiesEXT *mod_props = NULL;
   bool modifier_found = false;

   vn_GetPhysicalDeviceFormatProperties2(physical_device, format,
                                         &format_prop);

   if (!mod_prop_list.drmFormatModifierCount) {
      vn_log(dev->instance, "No compatible modifier for VkFormat(%u)",
             format);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   mod_props = vk_zalloc(
      alloc, sizeof(*mod_props) * mod_prop_list.drmFormatModifierCount,
      VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
   if (!mod_props)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   mod_prop_list.pDrmFormatModifierProperties = mod_props;
   vn_GetPhysicalDeviceFormatProperties2(physical_device, format,
                                         &format_prop);

   for (uint32_t i = 0; i < mod_prop_list.drmFormatModifierCount; i++) {
      if (mod_props[i].drmFormatModifier == modifier) {
         *out_props = mod_props[i];
         modifier_found = true;
         break;
      }
   }

   vk_free(alloc, mod_props);

   if (!modifier_found) {
      vn_log(dev->instance,
             "No matching modifier(%" PRIu64 ") properties for VkFormat(%u)",
             modifier, format);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   return VK_SUCCESS;
}
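
/* Scratch storage for the VkImageCreateInfo pNext chain built by
 * vn_android_get_image_builder from gralloc buffer properties.
 */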
struct vn_android_image_builder {
   VkImageCreateInfo create;
   VkSubresourceLayout layouts[4];
   VkImageDrmFormatModifierExplicitCreateInfoEXT modifier;
   VkExternalMemoryImageCreateInfo external;
};

static VkResult
vn_android_get_image_builder(struct vn_device *dev,
                             const VkImageCreateInfo *create_info,
                             const native_handle_t *handle,
                             const VkAllocationCallbacks *alloc,
                             struct vn_android_image_builder *out_builder)
{
   VkResult result = VK_SUCCESS;
   struct vn_android_gralloc_buffer_properties buf_props;
   VkDrmFormatModifierPropertiesEXT mod_props;

   if (!vn_android_get_gralloc_buffer_properties(handle, &buf_props))
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   result = vn_android_get_modifier_properties(
      dev, create_info->format, buf_props.modifier, alloc, &mod_props);
   if (result != VK_SUCCESS)
      return result;

   memset(out_builder->layouts, 0, sizeof(out_builder->layouts));
   for (uint32_t i = 0; i < mod_props.drmFormatModifierPlaneCount; i++) {
      out_builder->layouts[i].offset = buf_props.offset[i];
      out_builder->layouts[i].rowPitch = buf_props.stride[i];
   }
   out_builder->modifier = (VkImageDrmFormatModifierExplicitCreateInfoEXT){
      .sType =
         VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
      .pNext = create_info->pNext,
      .drmFormatModifier = buf_props.modifier,
      .drmFormatModifierPlaneCount = mod_props.drmFormatModifierPlaneCount,
      .pPlaneLayouts = out_builder->layouts,
   };
   out_builder->external = (VkExternalMemoryImageCreateInfo){
      .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
      .pNext = &out_builder->modifier,
      .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
   };
   out_builder->create = *create_info;
   out_builder->create.pNext = &out_builder->external;
   out_builder->create.tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;

   return VK_SUCCESS;
}

VkResult
vn_android_image_from_anb(struct vn_device *dev,
                          const VkImageCreateInfo *create_info,
                          const VkNativeBufferANDROID *anb_info,
                          const VkAllocationCallbacks *alloc,
                          struct vn_image **out_img)
{
   /* If anb_info->handle points to a classic resource created from
    * virtio_gpu_cmd_resource_create_3d, anb_info->stride is the stride of
    * the guest shadow storage rather than the host gpu storage.
    *
    * We also need to pass the correct stride to vn_CreateImage, which will
    * be done via VkImageDrmFormatModifierExplicitCreateInfoEXT and will
    * require VK_EXT_image_drm_format_modifier support in the host driver.
    * The struct needs host storage info, which can be queried from cros
    * gralloc.
    */
   VkResult result = VK_SUCCESS;
   VkDevice device = vn_device_to_handle(dev);
   VkDeviceMemory memory = VK_NULL_HANDLE;
   VkImage image = VK_NULL_HANDLE;
   struct vn_image *img = NULL;
   uint64_t alloc_size = 0;
   uint32_t mem_type_bits = 0;
   int dma_buf_fd = -1;
   int dup_fd = -1;
   struct vn_android_image_builder builder;

   result = vn_android_get_dma_buf_from_native_handle(anb_info->handle,
                                                      &dma_buf_fd);
   if (result != VK_SUCCESS)
      goto fail;

   result = vn_android_get_image_builder(dev, create_info, anb_info->handle,
                                         alloc, &builder);
   if (result != VK_SUCCESS)
      goto fail;

   /* encoder will strip the Android specific pNext structs */
   result = vn_image_create(dev, &builder.create, alloc, &img);
   if (result != VK_SUCCESS) {
      if (VN_DEBUG(WSI))
         vn_log(dev->instance, "vn_image_create failed");
      goto fail;
   }

   image = vn_image_to_handle(img);
   VkMemoryRequirements mem_req;
   vn_GetImageMemoryRequirements(device, image, &mem_req);
   if (!mem_req.memoryTypeBits) {
      if (VN_DEBUG(WSI))
         vn_log(dev->instance, "mem_req.memoryTypeBits cannot be zero");
      result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
      goto fail;
   }

   result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
                                             &mem_type_bits);
   if (result != VK_SUCCESS)
      goto fail;

   if (VN_DEBUG(WSI)) {
      vn_log(dev->instance,
             "size = img(%" PRIu64 ") fd(%" PRIu64 "), "
             "memoryTypeBits = img(0x%X) & fd(0x%X)",
             mem_req.size, alloc_size, mem_req.memoryTypeBits, mem_type_bits);
   }

   if (alloc_size < mem_req.size) {
      if (VN_DEBUG(WSI)) {
         vn_log(dev->instance,
                "alloc_size(%" PRIu64 ") mem_req.size(%" PRIu64 ")",
                alloc_size, mem_req.size);
      }
      result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
      goto fail;
   }

   mem_type_bits &= mem_req.memoryTypeBits;
   if (!mem_type_bits) {
      result = VK_ERROR_INVALID_EXTERNAL_HANDLE;
      goto fail;
   }

   dup_fd = os_dupfd_cloexec(dma_buf_fd);
   if (dup_fd < 0) {
      result = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS
                                 : VK_ERROR_OUT_OF_HOST_MEMORY;
      goto fail;
   }

   const VkImportMemoryFdInfoKHR import_fd_info = {
      .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
      .pNext = NULL,
      .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
      .fd = dup_fd,
   };
   const VkMemoryAllocateInfo memory_info = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
      .pNext = &import_fd_info,
      .allocationSize = mem_req.size,
      .memoryTypeIndex = ffs(mem_type_bits) - 1,
   };
   result = vn_AllocateMemory(device, &memory_info, alloc, &memory);
   if (result != VK_SUCCESS) {
      /* only need to close the dup_fd on import failure */
      close(dup_fd);
      goto fail;
   }

   result = vn_BindImageMemory(device, image, memory, 0);
   if (result != VK_SUCCESS)
      goto fail;

   img->is_wsi = true;
   /* Android WSI image owns the memory */
   img->private_memory = memory;
   *out_img = img;

   return VK_SUCCESS;

fail:
   if (image != VK_NULL_HANDLE)
      vn_DestroyImage(device, image, alloc);
   if (memory != VK_NULL_HANDLE)
      vn_FreeMemory(device, memory, alloc);
   return vn_error(dev->instance, result);
}
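
/* Import the acquire fence fd from the Android loader into the
 * app-provided semaphore and/or fence, duplicating the fd when both are
 * given; without global fencing support, wait on the fd here instead.
 */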
VkResult
vn_AcquireImageANDROID(VkDevice device,
                       UNUSED VkImage image,
                       int nativeFenceFd,
                       VkSemaphore semaphore,
                       VkFence fence)
{
   struct vn_device *dev = vn_device_from_handle(device);
   VkResult result = VK_SUCCESS;

   if (dev->instance->experimental.globalFencing == VK_FALSE) {
      /* Fallback when VkVenusExperimentalFeatures100000MESA::globalFencing
       * is VK_FALSE: the out semaphore and fence are filled with
       * already-signaled payloads, and the native fence fd is waited on
       * here until signaled.
       */
      if (nativeFenceFd >= 0) {
         int ret = sync_wait(nativeFenceFd, -1);
         /* Android loader expects the ICD to always close the fd */
         close(nativeFenceFd);
         if (ret)
            return vn_error(dev->instance, VK_ERROR_SURFACE_LOST_KHR);
      }

      if (semaphore != VK_NULL_HANDLE)
         vn_semaphore_signal_wsi(dev, vn_semaphore_from_handle(semaphore));

      if (fence != VK_NULL_HANDLE)
         vn_fence_signal_wsi(dev, vn_fence_from_handle(fence));

      return VK_SUCCESS;
   }

   int semaphore_fd = -1;
   int fence_fd = -1;
   if (nativeFenceFd >= 0) {
      if (semaphore != VK_NULL_HANDLE && fence != VK_NULL_HANDLE) {
         semaphore_fd = nativeFenceFd;
         fence_fd = os_dupfd_cloexec(nativeFenceFd);
         if (fence_fd < 0) {
            result = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS
                                       : VK_ERROR_OUT_OF_HOST_MEMORY;
            close(nativeFenceFd);
            return vn_error(dev->instance, result);
         }
      } else if (semaphore != VK_NULL_HANDLE) {
         semaphore_fd = nativeFenceFd;
      } else if (fence != VK_NULL_HANDLE) {
         fence_fd = nativeFenceFd;
      } else {
         close(nativeFenceFd);
      }
   }

   if (semaphore != VK_NULL_HANDLE) {
      const VkImportSemaphoreFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
         .pNext = NULL,
         .semaphore = semaphore,
         .flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = semaphore_fd,
      };
      result = vn_ImportSemaphoreFdKHR(device, &info);
      if (result == VK_SUCCESS)
         semaphore_fd = -1;
   }

   if (result == VK_SUCCESS && fence != VK_NULL_HANDLE) {
      const VkImportFenceFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
         .pNext = NULL,
         .fence = fence,
         .flags = VK_FENCE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = fence_fd,
      };
      result = vn_ImportFenceFdKHR(device, &info);
      if (result == VK_SUCCESS)
         fence_fd = -1;
   }

   if (semaphore_fd >= 0)
      close(semaphore_fd);
   if (fence_fd >= 0)
      close(fence_fd);

   return vn_result(dev->instance, result);
}

VkResult
vn_QueueSignalReleaseImageANDROID(VkQueue queue,
                                  uint32_t waitSemaphoreCount,
                                  const VkSemaphore *pWaitSemaphores,
                                  VkImage image,
                                  int *pNativeFenceFd)
{
   struct vn_queue *que = vn_queue_from_handle(queue);
   struct vn_device *dev = que->device;
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;
   VkDevice device = vn_device_to_handle(dev);
   VkPipelineStageFlags local_stage_masks[8];
   VkPipelineStageFlags *stage_masks = local_stage_masks;
   VkResult result = VK_SUCCESS;
   int fd = -1;

   if (waitSemaphoreCount == 0) {
      *pNativeFenceFd = -1;
      return VK_SUCCESS;
   }

   if (waitSemaphoreCount > ARRAY_SIZE(local_stage_masks)) {
      stage_masks =
         vk_alloc(alloc, sizeof(*stage_masks) * waitSemaphoreCount,
                  VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
      if (!stage_masks)
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   for (uint32_t i = 0; i < waitSemaphoreCount; i++)
      stage_masks[i] = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT;

   const VkSubmitInfo submit_info = {
      .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
      .pNext = NULL,
      .waitSemaphoreCount = waitSemaphoreCount,
      .pWaitSemaphores = pWaitSemaphores,
      .pWaitDstStageMask = stage_masks,
      .commandBufferCount = 0,
      .pCommandBuffers = NULL,
      .signalSemaphoreCount = 0,
      .pSignalSemaphores = NULL,
   };
   /* XXX When globalFencing is supported, our implementation is not able to
    * reset the fence during vn_GetFenceFdKHR currently. Thus to ensure
    * proper host driver behavior, we pass VK_NULL_HANDLE here.
    */
   result = vn_QueueSubmit(
      queue, 1, &submit_info,
      dev->instance->experimental.globalFencing == VK_TRUE ? VK_NULL_HANDLE
                                                           : que->wait_fence);

   if (stage_masks != local_stage_masks)
      vk_free(alloc, stage_masks);

   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   if (dev->instance->experimental.globalFencing == VK_TRUE) {
      const VkFenceGetFdInfoKHR fd_info = {
         .sType = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
         .pNext = NULL,
         .fence = que->wait_fence,
         .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
      };
      result = vn_GetFenceFdKHR(device, &fd_info, &fd);
   } else {
      result =
         vn_WaitForFences(device, 1, &que->wait_fence, VK_TRUE, UINT64_MAX);
      if (result != VK_SUCCESS)
         return vn_error(dev->instance, result);

      result = vn_ResetFences(device, 1, &que->wait_fence);
   }

   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   *pNativeFenceFd = fd;

   return VK_SUCCESS;
}

static VkResult
vn_android_get_ahb_format_properties(
   struct vn_device *dev,
   const struct AHardwareBuffer *ahb,
   VkAndroidHardwareBufferFormatPropertiesANDROID *out_props)
{
   AHardwareBuffer_Desc desc;
   VkFormat format;
   struct vn_android_gralloc_buffer_properties buf_props;
   VkDrmFormatModifierPropertiesEXT mod_props;

   AHardwareBuffer_describe(ahb, &desc);
   if (!(desc.usage & (AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
                       AHARDWAREBUFFER_USAGE_GPU_FRAMEBUFFER |
                       AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER))) {
      vn_log(dev->instance,
             "AHB usage(%" PRIu64 ") must include at least one GPU bit",
             desc.usage);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   /* Handle the special AHARDWAREBUFFER_FORMAT_BLOB for VkBuffer case. */
   if (desc.format == AHARDWAREBUFFER_FORMAT_BLOB) {
      out_props->format = VK_FORMAT_UNDEFINED;
      return VK_SUCCESS;
   }

   if (!vn_android_get_gralloc_buffer_properties(
          AHardwareBuffer_getNativeHandle(ahb), &buf_props))
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* We implement AHB extension support with EXT_image_drm_format_modifier,
    * which requires a compatible VkFormat rather than a raw DRM format. So
    * if the AHB is not intended for backing a VkBuffer, error out early
    * when the format maps to VK_FORMAT_UNDEFINED.
    */
   format = vn_android_drm_format_to_vk_format(buf_props.drm_fourcc);
   if (format == VK_FORMAT_UNDEFINED) {
      vn_log(dev->instance, "Unknown drm_fourcc(%u) from AHB format(0x%X)",
             buf_props.drm_fourcc, desc.format);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   VkResult result = vn_android_get_modifier_properties(
      dev, format, buf_props.modifier, &dev->base.base.alloc, &mod_props);
   if (result != VK_SUCCESS)
      return result;

   /* The spec requires that formatFeatures must include at least one of
    * VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or
    * VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT.
    */
   const VkFormatFeatureFlags format_features =
      mod_props.drmFormatModifierTilingFeatures |
      VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT;
   *out_props = (VkAndroidHardwareBufferFormatPropertiesANDROID) {
      .sType = out_props->sType,
      .pNext = out_props->pNext,
      .format = format,
      .externalFormat = buf_props.drm_fourcc,
      .formatFeatures = format_features,
      .samplerYcbcrConversionComponents = {
         .r = VK_COMPONENT_SWIZZLE_IDENTITY,
         .g = VK_COMPONENT_SWIZZLE_IDENTITY,
         .b = VK_COMPONENT_SWIZZLE_IDENTITY,
         .a = VK_COMPONENT_SWIZZLE_IDENTITY,
      },
      .suggestedYcbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
      .suggestedYcbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
      .suggestedXChromaOffset = VK_CHROMA_LOCATION_MIDPOINT,
      .suggestedYChromaOffset = VK_CHROMA_LOCATION_MIDPOINT,
   };

   return VK_SUCCESS;
}

VkResult
vn_GetAndroidHardwareBufferPropertiesANDROID(
   VkDevice device,
   const struct AHardwareBuffer *buffer,
   VkAndroidHardwareBufferPropertiesANDROID *pProperties)
{
   struct vn_device *dev = vn_device_from_handle(device);
   VkResult result = VK_SUCCESS;
   int dma_buf_fd = -1;
   uint64_t alloc_size = 0;
   uint32_t mem_type_bits = 0;

   VkAndroidHardwareBufferFormatPropertiesANDROID *format_props =
      vk_find_struct(pProperties->pNext,
                     ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID);
   if (format_props) {
      result =
         vn_android_get_ahb_format_properties(dev, buffer, format_props);
      if (result != VK_SUCCESS)
         return vn_error(dev->instance, result);
   }

   const native_handle_t *handle = AHardwareBuffer_getNativeHandle(buffer);
   result = vn_android_get_dma_buf_from_native_handle(handle, &dma_buf_fd);
   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
                                             &mem_type_bits);
   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   pProperties->allocationSize = alloc_size;
   pProperties->memoryTypeBits = mem_type_bits;

   return VK_SUCCESS;
}

static AHardwareBuffer *
vn_android_ahb_allocate(uint32_t width,
                        uint32_t height,
                        uint32_t layers,
                        uint32_t format,
                        uint64_t usage)
{
   AHardwareBuffer *ahb = NULL;
   AHardwareBuffer_Desc desc;
   int ret = 0;

   memset(&desc, 0, sizeof(desc));
   desc.width = width;
   desc.height = height;
   desc.layers = layers;
   desc.format = format;
   desc.usage = usage;

   ret = AHardwareBuffer_allocate(&desc, &ahb);
   if (ret) {
      /* We just log the error code here for now since the platform falsely
       * maps all gralloc allocation failures to oom.
       */
      vn_log(NULL, "AHB alloc(w=%u,h=%u,l=%u,f=%u,u=%" PRIu64 ") failed(%d)",
             width, height, layers, format, usage, ret);
      return NULL;
   }

   return ahb;
}

bool
vn_android_get_drm_format_modifier_info(
   const VkPhysicalDeviceImageFormatInfo2 *format_info,
   VkPhysicalDeviceImageDrmFormatModifierInfoEXT *out_info)
{
   /* To properly fill VkPhysicalDeviceImageDrmFormatModifierInfoEXT, we
    * have to allocate an ahb to retrieve the drm format modifier. For the
    * image sharing mode, we assume VK_SHARING_MODE_EXCLUSIVE for now.
    */
   AHardwareBuffer *ahb = NULL;
   uint32_t format = 0;
   uint64_t usage = 0;
   struct vn_android_gralloc_buffer_properties buf_props;

   assert(format_info->tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);

   format = vn_android_ahb_format_from_vk_format(format_info->format);
   if (!format)
      return false;

   usage = vn_android_get_ahb_usage(format_info->usage, format_info->flags);
   ahb = vn_android_ahb_allocate(16, 16, 1, format, usage);
   if (!ahb)
      return false;

   if (!vn_android_get_gralloc_buffer_properties(
          AHardwareBuffer_getNativeHandle(ahb), &buf_props)) {
      AHardwareBuffer_release(ahb);
      return false;
   }

   *out_info = (VkPhysicalDeviceImageDrmFormatModifierInfoEXT){
      .sType =
         VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
      .pNext = NULL,
      .drmFormatModifier = buf_props.modifier,
      .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
      .queueFamilyIndexCount = 0,
      .pQueueFamilyIndices = NULL,
   };

   AHardwareBuffer_release(ahb);
   return true;
}

VkResult
vn_android_image_from_ahb(struct vn_device *dev,
                          const VkImageCreateInfo *create_info,
                          const VkAllocationCallbacks *alloc,
                          struct vn_image **out_img)
{
   const VkExternalFormatANDROID *ext_info =
      vk_find_struct_const(create_info->pNext, EXTERNAL_FORMAT_ANDROID);

   VkImageCreateInfo local_info;
   if (ext_info && ext_info->externalFormat) {
      assert(create_info->format == VK_FORMAT_UNDEFINED);
      assert(create_info->imageType == VK_IMAGE_TYPE_2D);
      assert(create_info->usage == VK_IMAGE_USAGE_SAMPLED_BIT);
      assert(create_info->tiling == VK_IMAGE_TILING_OPTIMAL);

      local_info = *create_info;
      local_info.format =
         vn_android_drm_format_to_vk_format(ext_info->externalFormat);
      create_info = &local_info;
   }

   return vn_image_create_deferred(dev, create_info, alloc, out_img);
}
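
/* Import an AHardwareBuffer as device memory: extract the dma_buf fd from
 * the AHB, validate it against the dma_buf memory properties, finish any
 * deferred dedicated image/buffer initialization, then import a dup'ed fd
 * via vn_device_memory_import_dma_buf.
 */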
VkResult
vn_android_device_import_ahb(struct vn_device *dev,
                             struct vn_device_memory *mem,
                             const VkMemoryAllocateInfo *alloc_info,
                             const VkAllocationCallbacks *alloc,
                             struct AHardwareBuffer *ahb)
{
   VkDevice device = vn_device_to_handle(dev);
   const VkMemoryDedicatedAllocateInfo *dedicated_info =
      vk_find_struct_const(alloc_info->pNext, MEMORY_DEDICATED_ALLOCATE_INFO);
   const native_handle_t *handle = NULL;
   int dma_buf_fd = -1;
   int dup_fd = -1;
   uint64_t alloc_size = 0;
   uint32_t mem_type_bits = 0;
   VkResult result = VK_SUCCESS;

   handle = AHardwareBuffer_getNativeHandle(ahb);
   result = vn_android_get_dma_buf_from_native_handle(handle, &dma_buf_fd);
   if (result != VK_SUCCESS)
      return result;

   result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
                                             &mem_type_bits);
   if (result != VK_SUCCESS)
      return result;

   if (((1 << alloc_info->memoryTypeIndex) & mem_type_bits) == 0) {
      vn_log(dev->instance, "memoryTypeIndex(%u) mem_type_bits(0x%X)",
             alloc_info->memoryTypeIndex, mem_type_bits);
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
   }

   /* If ahb is for an image, finish the deferred image creation first */
   if (dedicated_info && dedicated_info->image != VK_NULL_HANDLE) {
      struct vn_image *img = vn_image_from_handle(dedicated_info->image);
      struct vn_android_image_builder builder;

      result = vn_android_get_image_builder(dev, &img->deferred_info->create,
                                            handle, alloc, &builder);
      if (result != VK_SUCCESS)
         return result;

      result = vn_image_init_deferred(dev, &builder.create, img);
      if (result != VK_SUCCESS)
         return result;

      VkMemoryRequirements mem_req;
      vn_GetImageMemoryRequirements(device, dedicated_info->image, &mem_req);
      if (alloc_size < mem_req.size) {
         vn_log(dev->instance,
                "alloc_size(%" PRIu64 ") mem_req.size(%" PRIu64 ")",
                alloc_size, mem_req.size);
         return VK_ERROR_INVALID_EXTERNAL_HANDLE;
      }

      alloc_size = mem_req.size;
   }

   if (dedicated_info && dedicated_info->buffer != VK_NULL_HANDLE) {
      VkMemoryRequirements mem_req;
      vn_GetBufferMemoryRequirements(device, dedicated_info->buffer,
                                     &mem_req);
      if (alloc_size < mem_req.size) {
         vn_log(dev->instance,
                "alloc_size(%" PRIu64 ") mem_req.size(%" PRIu64 ")",
                alloc_size, mem_req.size);
         return VK_ERROR_INVALID_EXTERNAL_HANDLE;
      }

      alloc_size = mem_req.size;
   }

   errno = 0;
   dup_fd = os_dupfd_cloexec(dma_buf_fd);
   if (dup_fd < 0)
      return (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS
                               : VK_ERROR_OUT_OF_HOST_MEMORY;

   /* Spec requires AHB export info to be present, so we must strip it. In
    * practice, the AHB import path here only needs the main allocation info
    * and the dedicated_info.
    */
   VkMemoryDedicatedAllocateInfo local_dedicated_info;
   /* Override when dedicated_info exists and is not the tail struct. */
   if (dedicated_info && dedicated_info->pNext) {
      local_dedicated_info = *dedicated_info;
      local_dedicated_info.pNext = NULL;
      dedicated_info = &local_dedicated_info;
   }
   const VkMemoryAllocateInfo local_alloc_info = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
      .pNext = dedicated_info,
      .allocationSize = alloc_size,
      .memoryTypeIndex = alloc_info->memoryTypeIndex,
   };
   result =
      vn_device_memory_import_dma_buf(dev, mem, &local_alloc_info, dup_fd);
   if (result != VK_SUCCESS) {
      close(dup_fd);
      return result;
   }

   AHardwareBuffer_acquire(ahb);
   mem->ahb = ahb;

   return VK_SUCCESS;
}

VkResult
vn_android_device_allocate_ahb(struct vn_device *dev,
                               struct vn_device_memory *mem,
                               const VkMemoryAllocateInfo *alloc_info,
                               const VkAllocationCallbacks *alloc)
{
   const VkMemoryDedicatedAllocateInfo *dedicated_info =
      vk_find_struct_const(alloc_info->pNext, MEMORY_DEDICATED_ALLOCATE_INFO);
   uint32_t width = 0;
   uint32_t height = 1;
   uint32_t layers = 1;
   uint32_t format = 0;
   uint64_t usage = 0;
   struct AHardwareBuffer *ahb = NULL;

   if (dedicated_info && dedicated_info->image != VK_NULL_HANDLE) {
      const VkImageCreateInfo *image_info =
         &vn_image_from_handle(dedicated_info->image)->deferred_info->create;
      assert(image_info);
      width = image_info->extent.width;
      height = image_info->extent.height;
      layers = image_info->arrayLayers;
      format = vn_android_ahb_format_from_vk_format(image_info->format);
      usage = vn_android_get_ahb_usage(image_info->usage, image_info->flags);
   } else {
      const VkPhysicalDeviceMemoryProperties *mem_props =
         &dev->physical_device->memory_properties.memoryProperties;

      assert(alloc_info->memoryTypeIndex < mem_props->memoryTypeCount);

      width = alloc_info->allocationSize;
      format = AHARDWAREBUFFER_FORMAT_BLOB;
      usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;
      if (mem_props->memoryTypes[alloc_info->memoryTypeIndex].propertyFlags &
          VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
         usage |= AHARDWAREBUFFER_USAGE_CPU_READ_RARELY |
                  AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY;
      }
   }

   ahb = vn_android_ahb_allocate(width, height, layers, format, usage);
   if (!ahb)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   VkResult result =
      vn_android_device_import_ahb(dev, mem, alloc_info, alloc, ahb);

   /* ahb alloc has already acquired a ref and import will acquire another,
    * so release one here to avoid a leak.
    */
   AHardwareBuffer_release(ahb);

   return result;
}

void
vn_android_release_ahb(struct AHardwareBuffer *ahb)
{
   AHardwareBuffer_release(ahb);
}

VkResult
vn_GetMemoryAndroidHardwareBufferANDROID(
   VkDevice device,
   const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
   struct AHardwareBuffer **pBuffer)
{
   struct vn_device_memory *mem = vn_device_memory_from_handle(pInfo->memory);

   AHardwareBuffer_acquire(mem->ahb);
   *pBuffer = mem->ahb;

   return VK_SUCCESS;
}
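
/* Local storage and helper for rebuilding the VkBufferCreateInfo pNext
 * chain of an AHB-backed buffer: only the external memory and opaque
 * capture address structs are kept, and the external handle type is forced
 * to dma_buf.
 */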
struct vn_android_buffer_create_info {
   VkBufferCreateInfo create;
   VkExternalMemoryBufferCreateInfo external;
   VkBufferOpaqueCaptureAddressCreateInfo address;
};

static const VkBufferCreateInfo *
vn_android_fix_buffer_create_info(
   const VkBufferCreateInfo *create_info,
   struct vn_android_buffer_create_info *local_info)
{
   local_info->create = *create_info;
   VkBaseOutStructure *dst = (void *)&local_info->create;

   vk_foreach_struct_const(src, create_info->pNext) {
      void *pnext = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO:
         memcpy(&local_info->external, src, sizeof(local_info->external));
         local_info->external.handleTypes =
            VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT;
         pnext = &local_info->external;
         break;
      case VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO:
         memcpy(&local_info->address, src, sizeof(local_info->address));
         pnext = &local_info->address;
         break;
      default:
         break;
      }

      if (pnext) {
         dst->pNext = pnext;
         dst = pnext;
      }
   }

   dst->pNext = NULL;

   return &local_info->create;
}
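
/* Allocate a small BLOB AHB to learn which memory types its dma_buf can be
 * imported into; the result is cached in dev->ahb_buffer_memory_type_bits
 * and used by vn_android_buffer_from_ahb.
 */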
VkResult
vn_android_init_ahb_buffer_memory_type_bits(struct vn_device *dev)
{
   const uint32_t format = AHARDWAREBUFFER_FORMAT_BLOB;
   /* ensure dma_buf_memory_type_bits covers host visible usage */
   const uint64_t usage = AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER |
                          AHARDWAREBUFFER_USAGE_CPU_READ_RARELY |
                          AHARDWAREBUFFER_USAGE_CPU_WRITE_RARELY;
   AHardwareBuffer *ahb = NULL;
   int dma_buf_fd = -1;
   uint64_t alloc_size = 0;
   uint32_t mem_type_bits = 0;
   VkResult result;

   ahb = vn_android_ahb_allocate(4096, 1, 1, format, usage);
   if (!ahb)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   result = vn_android_get_dma_buf_from_native_handle(
      AHardwareBuffer_getNativeHandle(ahb), &dma_buf_fd);
   if (result != VK_SUCCESS) {
      AHardwareBuffer_release(ahb);
      return result;
   }

   result = vn_get_memory_dma_buf_properties(dev, dma_buf_fd, &alloc_size,
                                             &mem_type_bits);

   AHardwareBuffer_release(ahb);

   if (result != VK_SUCCESS)
      return result;

   dev->ahb_buffer_memory_type_bits = mem_type_bits;

   return VK_SUCCESS;
}

VkResult
vn_android_buffer_from_ahb(struct vn_device *dev,
                           const VkBufferCreateInfo *create_info,
                           const VkAllocationCallbacks *alloc,
                           struct vn_buffer **out_buf)
{
   struct vn_android_buffer_create_info local_info;
   VkResult result;

   create_info = vn_android_fix_buffer_create_info(create_info, &local_info);
   result = vn_buffer_create(dev, create_info, alloc, out_buf);
   if (result != VK_SUCCESS)
      return result;

   /* An AHB-backed buffer layers on top of a dma_buf, so we must combine
    * the type bits queried from the buffer memory requirements with those
    * from the dma_buf fd properties.
    */
   (*out_buf)->memory_requirements.memoryRequirements.memoryTypeBits &=
      dev->ahb_buffer_memory_type_bits;

   assert((*out_buf)->memory_requirements.memoryRequirements.memoryTypeBits);

   return VK_SUCCESS;
}