GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/intel/vulkan/anv_android.c
/*
 * Copyright © 2017, Google Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <hardware/gralloc.h>

#if ANDROID_API_LEVEL >= 26
#include <hardware/gralloc1.h>
#endif

#include <hardware/hardware.h>
#include <hardware/hwvulkan.h>
#include <vulkan/vk_android_native_buffer.h>
#include <vulkan/vk_icd.h>
#include <sync/sync.h>

#include "anv_private.h"
#include "vk_util.h"

static int anv_hal_open(const struct hw_module_t* mod, const char* id, struct hw_device_t** dev);
static int anv_hal_close(struct hw_device_t *dev);

static void UNUSED
static_asserts(void)
{
   STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC == ICD_LOADER_MAGIC);
}

PUBLIC struct hwvulkan_module_t HAL_MODULE_INFO_SYM = {
   .common = {
      .tag = HARDWARE_MODULE_TAG,
      .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
      .hal_api_version = HARDWARE_MAKE_API_VERSION(1, 0),
      .id = HWVULKAN_HARDWARE_MODULE_ID,
      .name = "Intel Vulkan HAL",
      .author = "Intel",
      .methods = &(hw_module_methods_t) {
         .open = anv_hal_open,
      },
   },
};

/* If any bits in test_mask are set, then unset them and return true. */
static inline bool
unmask32(uint32_t *inout_mask, uint32_t test_mask)
{
   uint32_t orig_mask = *inout_mask;
   *inout_mask &= ~test_mask;
   return *inout_mask != orig_mask;
}

static int
anv_hal_open(const struct hw_module_t* mod, const char* id,
             struct hw_device_t** dev)
{
   assert(mod == &HAL_MODULE_INFO_SYM.common);
   assert(strcmp(id, HWVULKAN_DEVICE_0) == 0);

   hwvulkan_device_t *hal_dev = malloc(sizeof(*hal_dev));
   if (!hal_dev)
      return -1;

   *hal_dev = (hwvulkan_device_t) {
      .common = {
         .tag = HARDWARE_DEVICE_TAG,
         .version = HWVULKAN_DEVICE_API_VERSION_0_1,
         .module = &HAL_MODULE_INFO_SYM.common,
         .close = anv_hal_close,
      },
      .EnumerateInstanceExtensionProperties = anv_EnumerateInstanceExtensionProperties,
      .CreateInstance = anv_CreateInstance,
      .GetInstanceProcAddr = anv_GetInstanceProcAddr,
   };

   *dev = &hal_dev->common;
   return 0;
}

static int
anv_hal_close(struct hw_device_t *dev)
{
   /* hwvulkan.h claims that hw_device_t::close() is never called. */
   return -1;
}

#if ANDROID_API_LEVEL >= 26
#include <vndk/hardware_buffer.h>
/* See i915_private_android_types.h in minigbm. */
#define HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL 0x100

enum {
   /* Usage bit equal to GRALLOC_USAGE_HW_CAMERA_MASK */
   AHARDWAREBUFFER_USAGE_CAMERA_MASK = 0x00060000U,
};

inline VkFormat
vk_format_from_android(unsigned android_format, unsigned android_usage)
{
   switch (android_format) {
   case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
      return VK_FORMAT_R8G8B8A8_UNORM;
   case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
   case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
      return VK_FORMAT_R8G8B8_UNORM;
   case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
      return VK_FORMAT_R5G6B5_UNORM_PACK16;
   case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
      return VK_FORMAT_R16G16B16A16_SFLOAT;
   case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
      return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
   case AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420:
   case HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL:
      return VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
   case AHARDWAREBUFFER_FORMAT_IMPLEMENTATION_DEFINED:
      if (android_usage & AHARDWAREBUFFER_USAGE_CAMERA_MASK)
         return VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
      else
         return VK_FORMAT_R8G8B8_UNORM;
   case AHARDWAREBUFFER_FORMAT_BLOB:
   default:
      return VK_FORMAT_UNDEFINED;
   }
}

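/* Reverse of vk_format_from_android() above: pick the AHardwareBuffer format
 * used when anv_create_ahw_memory() allocates a buffer for a dedicated image.
 * Formats without a direct mapping fall back to AHARDWAREBUFFER_FORMAT_BLOB.
 */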
static inline unsigned
android_format_from_vk(unsigned vk_format)
{
   switch (vk_format) {
   case VK_FORMAT_R8G8B8A8_UNORM:
      return AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
   case VK_FORMAT_R8G8B8_UNORM:
      return AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
      return AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
   case VK_FORMAT_R16G16B16A16_SFLOAT:
      return AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
   case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
      return AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
   case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
#ifdef HAVE_CROS_GRALLOC
      return AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420;
#else
      return HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL;
#endif
   default:
      return AHARDWAREBUFFER_FORMAT_BLOB;
   }
}

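/* Helper for anv_GetAndroidHardwareBufferPropertiesANDROID() below: describe
 * the AHardwareBuffer and translate that description into the format,
 * format-feature, and suggested YCbCr fields of
 * VkAndroidHardwareBufferFormatPropertiesANDROID.
 */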
static VkResult
get_ahw_buffer_format_properties(
    VkDevice device_h,
    const struct AHardwareBuffer *buffer,
    VkAndroidHardwareBufferFormatPropertiesANDROID *pProperties)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);

   /* Get a description of the buffer contents. */
   AHardwareBuffer_Desc desc;
   AHardwareBuffer_describe(buffer, &desc);

   /* Verify description. */
   uint64_t gpu_usage =
      AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
      AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT |
      AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;

   /* "Buffer must be a valid Android hardware buffer object with at least
    * one of the AHARDWAREBUFFER_USAGE_GPU_* usage flags."
    */
   if (!(desc.usage & (gpu_usage)))
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* Fill properties fields based on description. */
   VkAndroidHardwareBufferFormatPropertiesANDROID *p = pProperties;

   p->format = vk_format_from_android(desc.format, desc.usage);

   const struct anv_format *anv_format = anv_get_format(p->format);
   p->externalFormat = (uint64_t) (uintptr_t) anv_format;

   /* Default to OPTIMAL tiling but set to linear in case
    * of AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER usage.
    */
   VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

   if (desc.usage & AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER)
      tiling = VK_IMAGE_TILING_LINEAR;

   p->formatFeatures =
      anv_get_image_format_features(&device->info, p->format, anv_format,
                                    tiling, NULL);

   /* "Images can be created with an external format even if the Android hardware
    * buffer has a format which has an equivalent Vulkan format to enable
    * consistent handling of images from sources that might use either category
    * of format. However, all images created with an external format are subject
    * to the valid usage requirements associated with external formats, even if
    * the Android hardware buffer’s format has a Vulkan equivalent."
    *
    * "The formatFeatures member *must* include
    * VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT and at least one of
    * VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or
    * VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT"
    */
   p->formatFeatures |=
      VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT;

   /* "Implementations may not always be able to determine the color model,
    * numerical range, or chroma offsets of the image contents, so the values
    * in VkAndroidHardwareBufferFormatPropertiesANDROID are only suggestions.
    * Applications should treat these values as sensible defaults to use in
    * the absence of more reliable information obtained through some other
    * means."
    */
   p->samplerYcbcrConversionComponents.r = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.g = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.b = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.a = VK_COMPONENT_SWIZZLE_IDENTITY;

   p->suggestedYcbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601;
   p->suggestedYcbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;

   p->suggestedXChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
   p->suggestedYChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;

   return VK_SUCCESS;
}

VkResult
anv_GetAndroidHardwareBufferPropertiesANDROID(
    VkDevice device_h,
    const struct AHardwareBuffer *buffer,
    VkAndroidHardwareBufferPropertiesANDROID *pProperties)
{
   ANV_FROM_HANDLE(anv_device, dev, device_h);

   VkAndroidHardwareBufferFormatPropertiesANDROID *format_prop =
      vk_find_struct(pProperties->pNext,
                     ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID);

   /* Fill format properties of an Android hardware buffer. */
   if (format_prop)
      get_ahw_buffer_format_properties(device_h, buffer, format_prop);

   /* NOTE - We support buffers with only one handle, but do not error out in
    * the multiple-handle case. The reason is that we want to support YUV
    * formats with many logical planes that all point to the same buffer,
    * as is the case with VK_FORMAT_G8_B8R8_2PLANE_420_UNORM.
    */
   const native_handle_t *handle =
      AHardwareBuffer_getNativeHandle(buffer);
   int dma_buf = (handle && handle->numFds) ? handle->data[0] : -1;
   if (dma_buf < 0)
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* All memory types. */
   uint32_t memory_types = (1ull << dev->physical->memory.type_count) - 1;

   pProperties->allocationSize = lseek(dma_buf, 0, SEEK_END);
   pProperties->memoryTypeBits = memory_types;

   return VK_SUCCESS;
}

VkResult
anv_GetMemoryAndroidHardwareBufferANDROID(
    VkDevice device_h,
    const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
    struct AHardwareBuffer **pBuffer)
{
   ANV_FROM_HANDLE(anv_device_memory, mem, pInfo->memory);

   /* Some quotes from the Vulkan spec:
    *
    * "If the device memory was created by importing an Android hardware
    * buffer, vkGetMemoryAndroidHardwareBufferANDROID must return that same
    * Android hardware buffer object."
    *
    * "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must
    * have been included in VkExportMemoryAllocateInfo::handleTypes when
    * memory was created."
    */
   if (mem->ahw) {
      *pBuffer = mem->ahw;
      /* Increase refcount. */
      AHardwareBuffer_acquire(mem->ahw);
      return VK_SUCCESS;
   }

   return VK_ERROR_OUT_OF_HOST_MEMORY;
}

#endif

/* Construct an ahw usage mask from the image usage bits; see
 * 'AHardwareBuffer Usage Equivalence' in the Vulkan spec.
 */
uint64_t
anv_ahw_usage_from_vk_usage(const VkImageCreateFlags vk_create,
                            const VkImageUsageFlags vk_usage)
{
   uint64_t ahw_usage = 0;
#if ANDROID_API_LEVEL >= 26
   if (vk_usage & VK_IMAGE_USAGE_SAMPLED_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   if (vk_usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   if (vk_usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;

   if (vk_create & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;

   if (vk_create & VK_IMAGE_CREATE_PROTECTED_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;

   /* No usage bits set - set at least one GPU usage. */
   if (ahw_usage == 0)
      ahw_usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
#endif
   return ahw_usage;
}

/*
 * Called from anv_AllocateMemory when importing an AHardwareBuffer.
 */
VkResult
anv_import_ahw_memory(VkDevice device_h,
                      struct anv_device_memory *mem,
                      const VkImportAndroidHardwareBufferInfoANDROID *info)
{
#if ANDROID_API_LEVEL >= 26
   ANV_FROM_HANDLE(anv_device, device, device_h);

   /* Import from AHardwareBuffer to anv_device_memory. */
   const native_handle_t *handle =
      AHardwareBuffer_getNativeHandle(info->buffer);

   /* NOTE - We support buffers with only one handle, but do not error out in
    * the multiple-handle case. The reason is that we want to support YUV
    * formats with many logical planes that all point to the same buffer,
    * as is the case with VK_FORMAT_G8_B8R8_2PLANE_420_UNORM.
    */
   int dma_buf = (handle && handle->numFds) ? handle->data[0] : -1;
   if (dma_buf < 0)
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   VkResult result = anv_device_import_bo(device, dma_buf, 0,
                                          0 /* client_address */,
                                          &mem->bo);
   assert(result == VK_SUCCESS);

   /* "If the vkAllocateMemory command succeeds, the implementation must
    * acquire a reference to the imported hardware buffer, which it must
    * release when the device memory object is freed. If the command fails,
    * the implementation must not retain a reference."
    */
   AHardwareBuffer_acquire(info->buffer);
   mem->ahw = info->buffer;

   return VK_SUCCESS;
#else
   return VK_ERROR_EXTENSION_NOT_PRESENT;
#endif
}

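/* Allocate a new AHardwareBuffer described by the dedicated image or buffer
 * (if any, otherwise a BLOB of pAllocateInfo->allocationSize), import its
 * dma-buf via anv_import_ahw_memory(), then drop the local reference.
 */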
VkResult
anv_create_ahw_memory(VkDevice device_h,
                      struct anv_device_memory *mem,
                      const VkMemoryAllocateInfo *pAllocateInfo)
{
#if ANDROID_API_LEVEL >= 26
   ANV_FROM_HANDLE(anv_device, dev, device_h);

   const VkMemoryDedicatedAllocateInfo *dedicated_info =
      vk_find_struct_const(pAllocateInfo->pNext,
                           MEMORY_DEDICATED_ALLOCATE_INFO);

   uint32_t w = 0;
   uint32_t h = 1;
   uint32_t layers = 1;
   uint32_t format = 0;
   uint64_t usage = 0;

   /* If the caller passed dedicated allocation information. */
   if (dedicated_info && dedicated_info->image) {
      ANV_FROM_HANDLE(anv_image, image, dedicated_info->image);
      w = image->extent.width;
      h = image->extent.height;
      layers = image->array_size;
      format = android_format_from_vk(image->vk_format);
      usage = anv_ahw_usage_from_vk_usage(image->create_flags, image->usage);
   } else if (dedicated_info && dedicated_info->buffer) {
      ANV_FROM_HANDLE(anv_buffer, buffer, dedicated_info->buffer);
      w = buffer->size;
      format = AHARDWAREBUFFER_FORMAT_BLOB;
      usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
              AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
   } else {
      w = pAllocateInfo->allocationSize;
      format = AHARDWAREBUFFER_FORMAT_BLOB;
      usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
              AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
   }

   struct AHardwareBuffer *ahw = NULL;
   struct AHardwareBuffer_Desc desc = {
      .width = w,
      .height = h,
      .layers = layers,
      .format = format,
      .usage = usage,
   };

   if (AHardwareBuffer_allocate(&desc, &ahw) != 0)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   const VkImportAndroidHardwareBufferInfoANDROID import_info = {
      .buffer = ahw,
   };
   VkResult result = anv_import_ahw_memory(device_h, mem, &import_info);

   /* Release our reference so the AHardwareBuffer allocation is not leaked. */
   AHardwareBuffer_release(ahw);

   return result;
#else
   return VK_ERROR_EXTENSION_NOT_PRESENT;
#endif
}

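/* Create a VkImage for an Android external-memory/external-format image.
 * ISL_SURF_USAGE_DISABLE_AUX_BIT keeps the image from getting an aux
 * surface, and a non-zero VkExternalFormatANDROID is only accepted with the
 * restricted 2D/sampled/optimal parameters asserted below.
 */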
VkResult
anv_image_from_external(
    VkDevice device_h,
    const VkImageCreateInfo *base_info,
    const VkExternalMemoryImageCreateInfo *create_info,
    const VkAllocationCallbacks *alloc,
    VkImage *out_image_h)
{
#if ANDROID_API_LEVEL >= 26
   ANV_FROM_HANDLE(anv_device, device, device_h);

   const VkExternalFormatANDROID *ext_info =
      vk_find_struct_const(base_info->pNext, EXTERNAL_FORMAT_ANDROID);

   if (ext_info && ext_info->externalFormat != 0) {
      assert(base_info->format == VK_FORMAT_UNDEFINED);
      assert(base_info->imageType == VK_IMAGE_TYPE_2D);
      assert(base_info->usage == VK_IMAGE_USAGE_SAMPLED_BIT);
      assert(base_info->tiling == VK_IMAGE_TILING_OPTIMAL);
   }

   struct anv_image_create_info anv_info = {
      .vk_info = base_info,
      .isl_extra_usage_flags = ISL_SURF_USAGE_DISABLE_AUX_BIT,
      .external_format = true,
   };

   VkImage image_h;
   VkResult result = anv_image_create(device_h, &anv_info, alloc, &image_h);
   if (result != VK_SUCCESS)
      return result;

   *out_image_h = image_h;

   return VK_SUCCESS;
#else
   return VK_ERROR_EXTENSION_NOT_PRESENT;
#endif
}

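/* Create a VkImage backed by the dma-buf of a gralloc-allocated window
 * system buffer (VkNativeBufferANDROID), importing the buffer object and
 * matching the image's tiling to the buffer's i915 tiling mode.
 */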
VkResult
anv_image_from_gralloc(VkDevice device_h,
                       const VkImageCreateInfo *base_info,
                       const VkNativeBufferANDROID *gralloc_info,
                       const VkAllocationCallbacks *alloc,
                       VkImage *out_image_h)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkImage image_h = VK_NULL_HANDLE;
   struct anv_image *image = NULL;
   struct anv_bo *bo = NULL;
   VkResult result;

   struct anv_image_create_info anv_info = {
      .vk_info = base_info,
      .isl_extra_usage_flags = ISL_SURF_USAGE_DISABLE_AUX_BIT,
   };

   if (gralloc_info->handle->numFds != 1) {
      return vk_errorf(device, device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                       "VkNativeBufferANDROID::handle::numFds is %d, "
                       "expected 1", gralloc_info->handle->numFds);
   }

   /* Do not close the gralloc handle's dma_buf. The lifetime of the dma_buf
    * must exceed that of the gralloc handle, and we do not own the gralloc
    * handle.
    */
   int dma_buf = gralloc_info->handle->data[0];

   /* We need to set the WRITE flag on window system buffers so that GEM will
    * know we're writing to them and synchronize uses on other rings (for
    * example, if the display server uses the blitter ring).
    *
    * If this function fails and if the imported bo was resident in the cache,
    * we should avoid updating the bo's flags. Therefore, we defer updating
    * the flags until success is certain.
    */
   result = anv_device_import_bo(device, dma_buf,
                                 ANV_BO_ALLOC_IMPLICIT_SYNC |
                                 ANV_BO_ALLOC_IMPLICIT_WRITE,
                                 0 /* client_address */,
                                 &bo);
   if (result != VK_SUCCESS) {
      return vk_errorf(device, device, result,
                       "failed to import dma-buf from VkNativeBufferANDROID");
   }

   int i915_tiling = anv_gem_get_tiling(device, bo->gem_handle);
   switch (i915_tiling) {
   case I915_TILING_NONE:
      anv_info.isl_tiling_flags = ISL_TILING_LINEAR_BIT;
      break;
   case I915_TILING_X:
      anv_info.isl_tiling_flags = ISL_TILING_X_BIT;
      break;
   case I915_TILING_Y:
      anv_info.isl_tiling_flags = ISL_TILING_Y0_BIT;
      break;
   case -1:
      result = vk_errorf(device, device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "DRM_IOCTL_I915_GEM_GET_TILING failed for "
                         "VkNativeBufferANDROID");
      goto fail_tiling;
   default:
      result = vk_errorf(device, device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "DRM_IOCTL_I915_GEM_GET_TILING returned unknown "
                         "tiling %d for VkNativeBufferANDROID", i915_tiling);
      goto fail_tiling;
   }

   enum isl_format format = anv_get_isl_format(&device->info,
                                               base_info->format,
                                               VK_IMAGE_ASPECT_COLOR_BIT,
                                               base_info->tiling);
   assert(format != ISL_FORMAT_UNSUPPORTED);

   result = anv_image_create(device_h, &anv_info, alloc, &image_h);
   image = anv_image_from_handle(image_h);
   if (result != VK_SUCCESS)
      goto fail_create;

   VkImageMemoryRequirementsInfo2 mem_reqs_info = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
      .image = image_h,
   };

   VkMemoryRequirements2 mem_reqs = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
   };

   anv_GetImageMemoryRequirements2(device_h, &mem_reqs_info, &mem_reqs);

   VkDeviceSize aligned_image_size =
      align_u64(mem_reqs.memoryRequirements.size,
                mem_reqs.memoryRequirements.alignment);

   if (bo->size < aligned_image_size) {
      result = vk_errorf(device, device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "dma-buf from VkNativeBufferANDROID is too small for "
                         "VkImage: %"PRIu64"B < %"PRIu64"B",
                         bo->size, aligned_image_size);
      goto fail_size;
   }

   assert(!image->disjoint);
   assert(image->n_planes == 1);
   assert(image->planes[0].primary_surface.memory_range.binding ==
          ANV_IMAGE_MEMORY_BINDING_MAIN);
   assert(image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].address.bo == NULL);
   assert(image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].address.offset == 0);
   image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].address.bo = bo;
   image->from_gralloc = true;

   /* Don't clobber the out-parameter until success is certain. */
   *out_image_h = image_h;

   return VK_SUCCESS;

 fail_size:
   anv_DestroyImage(device_h, image_h, alloc);
 fail_create:
 fail_tiling:
   anv_device_release_bo(device, bo);

   return result;
}

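/* Like anv_image_from_gralloc() above, but binds an already-created
 * anv_image to the gralloc buffer's dma-buf instead of creating the image
 * here.
 */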
VkResult
anv_image_bind_from_gralloc(struct anv_device *device,
                            struct anv_image *image,
                            const VkNativeBufferANDROID *gralloc_info)
{
   /* Do not close the gralloc handle's dma_buf. The lifetime of the dma_buf
    * must exceed that of the gralloc handle, and we do not own the gralloc
    * handle.
    */
   int dma_buf = gralloc_info->handle->data[0];

   /* We need to set the WRITE flag on window system buffers so that GEM will
    * know we're writing to them and synchronize uses on other rings (for
    * example, if the display server uses the blitter ring).
    *
    * If this function fails and if the imported bo was resident in the cache,
    * we should avoid updating the bo's flags. Therefore, we defer updating
    * the flags until success is certain.
    */
   struct anv_bo *bo = NULL;
   VkResult result = anv_device_import_bo(device, dma_buf,
                                          ANV_BO_ALLOC_IMPLICIT_SYNC |
                                          ANV_BO_ALLOC_IMPLICIT_WRITE,
                                          0 /* client_address */,
                                          &bo);
   if (result != VK_SUCCESS) {
      return vk_errorf(device, &device->vk.base, result,
                       "failed to import dma-buf from VkNativeBufferANDROID");
   }

   uint64_t img_size = image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].memory_range.size;
   if (bo->size < img_size) {
      result = vk_errorf(device, &device->vk.base, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "dma-buf from VkNativeBufferANDROID is too small for "
                         "VkImage: %"PRIu64"B < %"PRIu64"B",
                         bo->size, img_size);
      anv_device_release_bo(device, bo);
      return result;
   }

   assert(!image->disjoint);
   assert(image->n_planes == 1);
   assert(image->planes[0].primary_surface.memory_range.binding ==
          ANV_IMAGE_MEMORY_BINDING_MAIN);
   assert(image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].address.bo == NULL);
   assert(image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].address.offset == 0);
   image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].address.bo = bo;
   image->from_gralloc = true;

   return VK_SUCCESS;
}

static VkResult
format_supported_with_usage(VkDevice device_h, VkFormat format,
                            VkImageUsageFlags imageUsage)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkPhysicalDevice phys_dev_h = anv_physical_device_to_handle(device->physical);
   VkResult result;

   const VkPhysicalDeviceImageFormatInfo2 image_format_info = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
      .format = format,
      .type = VK_IMAGE_TYPE_2D,
      .tiling = VK_IMAGE_TILING_OPTIMAL,
      .usage = imageUsage,
   };

   VkImageFormatProperties2 image_format_props = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
   };

   /* Check that requested format and usage are supported. */
   result = anv_GetPhysicalDeviceImageFormatProperties2(phys_dev_h,
               &image_format_info, &image_format_props);
   if (result != VK_SUCCESS) {
      return vk_errorf(device, device, result,
                       "anv_GetPhysicalDeviceImageFormatProperties2 failed "
                       "inside %s", __func__);
   }
   return VK_SUCCESS;
}

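/* Translate the swapchain's VkImageUsageFlags into gralloc0 usage bits,
 * rejecting any usage that gralloc swapchains cannot support.
 */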
static VkResult
setup_gralloc0_usage(struct anv_device *device, VkFormat format,
                     VkImageUsageFlags imageUsage, int *grallocUsage)
{
   /* WARNING: Android's libvulkan.so hardcodes the VkImageUsageFlags
    * returned to applications via VkSurfaceCapabilitiesKHR::supportedUsageFlags.
    * The relevant code in libvulkan/swapchain.cpp contains this fun comment:
    *
    *     TODO(jessehall): I think these are right, but haven't thought hard
    *     about it. Do we need to query the driver for support of any of
    *     these?
    *
    * Any disagreement between this function and the hardcoded
    * VkSurfaceCapabilitiesKHR::supportedUsageFlags causes the
    * dEQP-VK.wsi.android.swapchain.*.image_usage tests to fail.
    */

   if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                             VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
      *grallocUsage |= GRALLOC_USAGE_HW_RENDER;

   if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                             VK_IMAGE_USAGE_SAMPLED_BIT |
                             VK_IMAGE_USAGE_STORAGE_BIT |
                             VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
      *grallocUsage |= GRALLOC_USAGE_HW_TEXTURE;

   /* All VkImageUsageFlags not explicitly checked here are unsupported for
    * gralloc swapchains.
    */
   if (imageUsage != 0) {
      return vk_errorf(device, device, VK_ERROR_FORMAT_NOT_SUPPORTED,
                       "unsupported VkImageUsageFlags(0x%x) for gralloc "
                       "swapchain", imageUsage);
   }

   /* The formats below support GRALLOC_USAGE_HW_FB (that is, display
    * scanout). This short list of formats is universally supported on Intel
    * but is incomplete. The full set of supported formats depends on the
    * kernel and hardware.
    *
    * FINISHME: Advertise all display-supported formats.
    */
   switch (format) {
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
   case VK_FORMAT_R8G8B8A8_UNORM:
   case VK_FORMAT_R8G8B8A8_SRGB:
      *grallocUsage |= GRALLOC_USAGE_HW_FB |
                       GRALLOC_USAGE_HW_COMPOSER |
                       GRALLOC_USAGE_EXTERNAL_DISP;
      break;
   default:
      mesa_logw("%s: unsupported format=%d", __func__, format);
   }

   if (*grallocUsage == 0)
      return VK_ERROR_FORMAT_NOT_SUPPORTED;

   return VK_SUCCESS;
}

#if ANDROID_API_LEVEL >= 26
VkResult anv_GetSwapchainGrallocUsage2ANDROID(
    VkDevice device_h,
    VkFormat format,
    VkImageUsageFlags imageUsage,
    VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
    uint64_t* grallocConsumerUsage,
    uint64_t* grallocProducerUsage)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result;

   *grallocConsumerUsage = 0;
   *grallocProducerUsage = 0;
   mesa_logd("%s: format=%d, usage=0x%x", __func__, format, imageUsage);

   result = format_supported_with_usage(device_h, format, imageUsage);
   if (result != VK_SUCCESS)
      return result;

   int32_t grallocUsage = 0;
   result = setup_gralloc0_usage(device, format, imageUsage, &grallocUsage);
   if (result != VK_SUCCESS)
      return result;

   /* Set up the gralloc1 usage flags from the gralloc0 flags. */

   if (grallocUsage & GRALLOC_USAGE_HW_RENDER) {
      *grallocProducerUsage |= GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET;
      *grallocConsumerUsage |= GRALLOC1_CONSUMER_USAGE_CLIENT_TARGET;
   }

   if (grallocUsage & GRALLOC_USAGE_HW_TEXTURE) {
      *grallocConsumerUsage |= GRALLOC1_CONSUMER_USAGE_GPU_TEXTURE;
   }

   if (grallocUsage & (GRALLOC_USAGE_HW_FB |
                       GRALLOC_USAGE_HW_COMPOSER |
                       GRALLOC_USAGE_EXTERNAL_DISP)) {
      *grallocProducerUsage |= GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET;
      *grallocConsumerUsage |= GRALLOC1_CONSUMER_USAGE_HWCOMPOSER;
   }

   return VK_SUCCESS;
}
#endif

VkResult anv_GetSwapchainGrallocUsageANDROID(
    VkDevice device_h,
    VkFormat format,
    VkImageUsageFlags imageUsage,
    int* grallocUsage)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result;

   *grallocUsage = 0;
   mesa_logd("%s: format=%d, usage=0x%x", __func__, format, imageUsage);

   result = format_supported_with_usage(device_h, format, imageUsage);
   if (result != VK_SUCCESS)
      return result;

   return setup_gralloc0_usage(device, format, imageUsage, grallocUsage);
}

VkResult
anv_AcquireImageANDROID(
    VkDevice device_h,
    VkImage image_h,
    int nativeFenceFd,
    VkSemaphore semaphore_h,
    VkFence fence_h)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result = VK_SUCCESS;

   /* From https://source.android.com/devices/graphics/implement-vulkan :
    *
    * "The driver takes ownership of the fence file descriptor and closes
    * the fence file descriptor when no longer needed. The driver must do
    * so even if neither a semaphore or fence object is provided, or even
    * if vkAcquireImageANDROID fails and returns an error."
    *
    * The Vulkan spec for VkImportFence/SemaphoreFdKHR(), however, requires
    * the file descriptor to be left alone on failure.
    */
   int semaphore_fd = -1, fence_fd = -1;
   if (nativeFenceFd >= 0) {
      if (semaphore_h != VK_NULL_HANDLE && fence_h != VK_NULL_HANDLE) {
         /* We have both, so we have to import the sync file twice. One of
          * them needs to be a dup.
          */
         semaphore_fd = nativeFenceFd;
         fence_fd = dup(nativeFenceFd);
         if (fence_fd < 0) {
            VkResult err = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS :
                                               VK_ERROR_OUT_OF_HOST_MEMORY;
            close(nativeFenceFd);
            return vk_error(err);
         }
      } else if (semaphore_h != VK_NULL_HANDLE) {
         semaphore_fd = nativeFenceFd;
      } else if (fence_h != VK_NULL_HANDLE) {
         fence_fd = nativeFenceFd;
      } else {
         /* Nothing to import into, so we have to close the file. */
         close(nativeFenceFd);
      }
   }

   if (semaphore_h != VK_NULL_HANDLE) {
      const VkImportSemaphoreFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
         .semaphore = semaphore_h,
         .flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = semaphore_fd,
      };
      result = anv_ImportSemaphoreFdKHR(device_h, &info);
      if (result == VK_SUCCESS)
         semaphore_fd = -1; /* ANV took ownership */
   }

   if (result == VK_SUCCESS && fence_h != VK_NULL_HANDLE) {
      const VkImportFenceFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
         .fence = fence_h,
         .flags = VK_FENCE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = fence_fd,
      };
      result = anv_ImportFenceFdKHR(device_h, &info);
      if (result == VK_SUCCESS)
         fence_fd = -1; /* ANV took ownership */
   }

   if (semaphore_fd >= 0)
      close(semaphore_fd);
   if (fence_fd >= 0)
      close(fence_fd);

   return result;
}

VkResult
anv_QueueSignalReleaseImageANDROID(
    VkQueue queue,
    uint32_t waitSemaphoreCount,
    const VkSemaphore* pWaitSemaphores,
    VkImage image,
    int* pNativeFenceFd)
{
   VkResult result;

   if (waitSemaphoreCount == 0)
      goto done;

   result = anv_QueueSubmit(queue, 1,
      &(VkSubmitInfo) {
         .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
         .waitSemaphoreCount = waitSemaphoreCount,
         .pWaitSemaphores = pWaitSemaphores,
      },
      (VkFence) VK_NULL_HANDLE);
   if (result != VK_SUCCESS)
      return result;

done:
   if (pNativeFenceFd) {
      /* We can rely on implicit sync because we submitted all the wait
       * semaphores to the queue above.
       */
      *pNativeFenceFd = -1;
   }

   return VK_SUCCESS;
}