Path: blob/21.2-virgl/src/freedreno/vulkan/tu_descriptor_set.c
/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

/**
 * @file
 *
 * We use the bindless descriptor model, which maps fairly closely to how
 * Vulkan descriptor sets work. The two exceptions are input attachments and
 * dynamic descriptors, which have to be patched when recording command
 * buffers. We reserve an extra descriptor set for these. This descriptor set
 * contains all the input attachments in the pipeline, in order, and then all
 * the dynamic descriptors. The dynamic descriptors are stored in the CPU-side
 * data structure for each tu_descriptor_set, and then combined into one big
 * descriptor set at CmdBindDescriptors time/draw time.
 */
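
/* For illustration: the dynamic descriptors mentioned above are kept in a
 * CPU-side array attached to each tu_descriptor_set, one
 * A6XX_TEX_CONST_DWORDS-dword slot per dynamic UBO/SSBO, addressed roughly as
 *
 *    set->dynamic_descriptors + A6XX_TEX_CONST_DWORDS * idx
 *
 * tu_descriptor_set_create() below sizes that array as
 * A6XX_TEX_CONST_DWORDS * 4 * layout->dynamic_offset_count bytes.
 */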

#include "tu_private.h"

#include <assert.h>
#include <fcntl.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>

#include "util/mesa-sha1.h"
#include "vk_descriptors.h"
#include "vk_util.h"

static uint32_t
descriptor_size(VkDescriptorType type)
{
   switch (type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      /* These are remapped to the special driver-managed descriptor set,
       * hence they don't take up any space in the original descriptor set:
       * input attachments don't use descriptor sets at all.
       */
      return 0;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      /* We make offsets and sizes all 16 dwords, to match how the hardware
       * interprets indices passed to sample/load/store instructions in
       * multiples of 16 dwords. This means that "normal" descriptors are all
       * of size 16, with padding for smaller descriptors like uniform storage
       * descriptors which are less than 16 dwords. However combined images
       * and samplers are actually two descriptors, so they have size 2.
       */
      return A6XX_TEX_CONST_DWORDS * 4 * 2;
   default:
      return A6XX_TEX_CONST_DWORDS * 4;
   }
}
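
/* Concretely (assuming A6XX_TEX_CONST_DWORDS is 16 dwords, i.e. 64 bytes per
 * descriptor, as the comment above implies): a sampled image, storage buffer
 * or UBO descriptor occupies 64 bytes in the set, a combined image+sampler
 * occupies 128 bytes (image followed by sampler), and dynamic UBOs/SSBOs and
 * input attachments occupy 0 bytes because they live in the driver-managed
 * set instead.
 */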

VKAPI_ATTR VkResult VKAPI_CALL
tu_CreateDescriptorSetLayout(
   VkDevice _device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorSetLayout *pSetLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_set_layout *set_layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);
   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(
         pCreateInfo->pNext,
         DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);

   uint32_t num_bindings = 0;
   uint32_t immutable_sampler_count = 0;
   uint32_t ycbcr_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);
      if ((pCreateInfo->pBindings[j].descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           pCreateInfo->pBindings[j].descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          pCreateInfo->pBindings[j].pImmutableSamplers) {
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;

         bool has_ycbcr_sampler = false;
         for (unsigned i = 0; i < pCreateInfo->pBindings[j].descriptorCount; ++i) {
            if (tu_sampler_from_handle(pCreateInfo->pBindings[j].pImmutableSamplers[i])->ycbcr_sampler)
               has_ycbcr_sampler = true;
         }

         if (has_ycbcr_sampler)
            ycbcr_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
      }
   }

   uint32_t samplers_offset =
      offsetof(struct tu_descriptor_set_layout, binding[num_bindings]);

   /* note: only need to store TEX_SAMP_DWORDS for immutable samplers,
    * but using struct tu_sampler makes things simpler */
   uint32_t size = samplers_offset +
      immutable_sampler_count * sizeof(struct tu_sampler) +
      ycbcr_sampler_count * sizeof(struct tu_sampler_ycbcr_conversion);

   set_layout = vk_object_zalloc(&device->vk, pAllocator, size,
                                 VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT);
   if (!set_layout)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   set_layout->flags = pCreateInfo->flags;

   /* We just allocate all the immutable samplers at the end of the struct */
   struct tu_sampler *samplers = (void*) &set_layout->binding[num_bindings];
   struct tu_sampler_ycbcr_conversion *ycbcr_samplers =
      (void*) &samplers[immutable_sampler_count];

   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount, &bindings);
   if (result != VK_SUCCESS) {
      vk_object_free(&device->vk, pAllocator, set_layout);
      return vk_error(device->instance, result);
   }

   set_layout->binding_count = num_bindings;
   set_layout->shader_stages = 0;
   set_layout->has_immutable_samplers = false;
   set_layout->size = 0;
   set_layout->dynamic_ubo = 0;

   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + j;
      uint32_t b = binding->binding;

      set_layout->binding[b].type = binding->descriptorType;
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].offset = set_layout->size;
      set_layout->binding[b].dynamic_offset_offset = dynamic_offset_count;
      set_layout->binding[b].size = descriptor_size(binding->descriptorType);
      set_layout->binding[b].shader_stages = binding->stageFlags;

      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         assert(!binding->pImmutableSamplers); /* Terribly ill defined how
                                                  many samplers are valid */
         assert(binding->binding == num_bindings - 1);

         set_layout->has_variable_descriptors = true;
      }

      if ((binding->descriptorType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER ||
           binding->descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER) &&
          binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers_offset = samplers_offset;
         set_layout->has_immutable_samplers = true;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            samplers[i] = *tu_sampler_from_handle(binding->pImmutableSamplers[i]);

         samplers += binding->descriptorCount;
         samplers_offset += sizeof(struct tu_sampler) * binding->descriptorCount;

         bool has_ycbcr_sampler = false;
         for (unsigned i = 0; i < pCreateInfo->pBindings[j].descriptorCount; ++i) {
            if (tu_sampler_from_handle(binding->pImmutableSamplers[i])->ycbcr_sampler)
               has_ycbcr_sampler = true;
         }

         if (has_ycbcr_sampler) {
            set_layout->binding[b].ycbcr_samplers_offset =
               (const char*)ycbcr_samplers - (const char*)set_layout;
            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               struct tu_sampler *sampler = tu_sampler_from_handle(binding->pImmutableSamplers[i]);
               if (sampler->ycbcr_sampler)
                  ycbcr_samplers[i] = *sampler->ycbcr_sampler;
               else
                  ycbcr_samplers[i].ycbcr_model = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY;
            }
            ycbcr_samplers += binding->descriptorCount;
         } else {
            set_layout->binding[b].ycbcr_samplers_offset = 0;
         }
      }

      set_layout->size +=
         binding->descriptorCount * set_layout->binding[b].size;
      if (binding->descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||
          binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
         if (binding->descriptorType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
            STATIC_ASSERT(MAX_DYNAMIC_BUFFERS <= 8 * sizeof(set_layout->dynamic_ubo));
            set_layout->dynamic_ubo |=
               ((1u << binding->descriptorCount) - 1) << dynamic_offset_count;
         }

         dynamic_offset_count += binding->descriptorCount;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   free(bindings);

   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = tu_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
tu_DestroyDescriptorSetLayout(VkDevice _device,
                              VkDescriptorSetLayout _set_layout,
                              const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   vk_object_free(&device->vk, pAllocator, set_layout);
}
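
/* A minimal sketch of the application-side call that reaches
 * tu_CreateDescriptorSetLayout() above (illustrative only; the device handle
 * and stage flags are placeholders):
 *
 *    VkDescriptorSetLayoutBinding binding = {
 *       .binding = 0,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .descriptorCount = 1,
 *       .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
 *    };
 *    VkDescriptorSetLayoutCreateInfo info = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
 *       .bindingCount = 1,
 *       .pBindings = &binding,
 *    };
 *    VkDescriptorSetLayout layout;
 *    vkCreateDescriptorSetLayout(device, &info, NULL, &layout);
 *
 * With one non-dynamic UBO binding this would give set_layout->size == 64
 * and dynamic_offset_count == 0.
 */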

VKAPI_ATTR void VKAPI_CALL
tu_GetDescriptorSetLayoutSupport(
   VkDevice device,
   const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
   VkDescriptorSetLayoutSupport *pSupport)
{
   VkDescriptorSetLayoutBinding *bindings = NULL;
   VkResult result = vk_create_sorted_bindings(
      pCreateInfo->pBindings, pCreateInfo->bindingCount, &bindings);
   if (result != VK_SUCCESS) {
      pSupport->supported = false;
      return;
   }

   const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *variable_flags =
      vk_find_struct_const(
         pCreateInfo->pNext,
         DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);
   VkDescriptorSetVariableDescriptorCountLayoutSupportEXT *variable_count =
      vk_find_struct(
         (void *) pCreateInfo->pNext,
         DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT);
   if (variable_count) {
      variable_count->maxVariableDescriptorCount = 0;
   }

   bool supported = true;
   uint64_t size = 0;
   for (uint32_t i = 0; i < pCreateInfo->bindingCount; i++) {
      const VkDescriptorSetLayoutBinding *binding = bindings + i;

      uint64_t descriptor_sz = descriptor_size(binding->descriptorType);
      uint64_t descriptor_alignment = 8;

      if (size && !ALIGN_POT(size, descriptor_alignment)) {
         supported = false;
      }
      size = ALIGN_POT(size, descriptor_alignment);

      uint64_t max_count = UINT64_MAX;
      if (descriptor_sz)
         max_count = (UINT64_MAX - size) / descriptor_sz;

      if (max_count < binding->descriptorCount) {
         supported = false;
      }
      if (variable_flags && binding->binding < variable_flags->bindingCount &&
          variable_count &&
          (variable_flags->pBindingFlags[binding->binding] &
           VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)) {
         variable_count->maxVariableDescriptorCount =
            MIN2(UINT32_MAX, max_count);
      }
      size += binding->descriptorCount * descriptor_sz;
   }

   free(bindings);

   pSupport->supported = supported;
}

/*
 * Pipeline layouts. These have nothing to do with the pipeline. They are
 * just multiple descriptor set layouts pasted together.
 */

VKAPI_ATTR VkResult VKAPI_CALL
tu_CreatePipelineLayout(VkDevice _device,
                        const VkPipelineLayoutCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkPipelineLayout *pPipelineLayout)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_pipeline_layout *layout;

   assert(pCreateInfo->sType ==
          VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_object_alloc(&device->vk, pAllocator, sizeof(*layout),
                            VK_OBJECT_TYPE_PIPELINE_LAYOUT);
   if (layout == NULL)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;
   layout->dynamic_offset_count = 0;

   unsigned dynamic_offset_count = 0;

   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                     pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;
      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      dynamic_offset_count += set_layout->dynamic_offset_count;
   }

   layout->dynamic_offset_count = dynamic_offset_count;
   layout->push_constant_size = 0;

   for (unsigned i = 0; i < pCreateInfo->pushConstantRangeCount; ++i) {
      const VkPushConstantRange *range = pCreateInfo->pPushConstantRanges + i;
      layout->push_constant_size =
         MAX2(layout->push_constant_size, range->offset + range->size);
   }

   layout->push_constant_size = align(layout->push_constant_size, 16);
   *pPipelineLayout = tu_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
tu_DestroyPipelineLayout(VkDevice _device,
                         VkPipelineLayout _pipelineLayout,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   vk_object_free(&device->vk, pAllocator, pipeline_layout);
}
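
/* The dynamic-offset bookkeeping above simply concatenates the sets: for
 * example, if set 0 declares two dynamic buffers and set 1 declares three,
 * then set 0's dynamic_offset_start is 0, set 1's is 2, and the pipeline
 * layout's dynamic_offset_count is 5. This matches the order in which the
 * application supplies pDynamicOffsets to vkCmdBindDescriptorSets.
 */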

#define EMPTY 1

static VkResult
tu_descriptor_set_create(struct tu_device *device,
                         struct tu_descriptor_pool *pool,
                         const struct tu_descriptor_set_layout *layout,
                         const uint32_t *variable_count,
                         struct tu_descriptor_set **out_set)
{
   struct tu_descriptor_set *set;
   unsigned dynamic_offset = sizeof(struct tu_descriptor_set);
   unsigned mem_size = dynamic_offset +
      A6XX_TEX_CONST_DWORDS * 4 * layout->dynamic_offset_count;

   if (pool->host_memory_base) {
      if (pool->host_memory_end - pool->host_memory_ptr < mem_size)
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);

      set = (struct tu_descriptor_set*)pool->host_memory_ptr;
      pool->host_memory_ptr += mem_size;
   } else {
      set = vk_alloc2(&device->vk.alloc, NULL, mem_size, 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);

      if (!set)
         return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   memset(set, 0, mem_size);
   vk_object_base_init(&device->vk, &set->base, VK_OBJECT_TYPE_DESCRIPTOR_SET);

   if (layout->dynamic_offset_count) {
      set->dynamic_descriptors = (uint32_t *)((uint8_t*)set + dynamic_offset);
   }

   set->layout = layout;
   set->pool = pool;
   uint32_t layout_size = layout->size;
   if (variable_count) {
      assert(layout->has_variable_descriptors);
      uint32_t stride = layout->binding[layout->binding_count - 1].size;
      layout_size = layout->binding[layout->binding_count - 1].offset +
                    *variable_count * stride;
   }

   if (layout_size) {
      set->size = layout_size;

      if (!pool->host_memory_base && pool->entry_count == pool->max_entry_count) {
         vk_object_free(&device->vk, NULL, set);
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
      }

      /* try to allocate linearly first, so that we don't spend
       * time looking for gaps if the app only allocates &
       * resets via the pool. */
      if (pool->current_offset + layout_size <= pool->size) {
         set->mapped_ptr = (uint32_t*)(pool->bo.map + pool->current_offset);
         set->va = pool->bo.iova + pool->current_offset;
         if (!pool->host_memory_base) {
            pool->entries[pool->entry_count].offset = pool->current_offset;
            pool->entries[pool->entry_count].size = layout_size;
            pool->entries[pool->entry_count].set = set;
            pool->entry_count++;
         }
         pool->current_offset += layout_size;
      } else if (!pool->host_memory_base) {
         uint64_t offset = 0;
         int index;

         for (index = 0; index < pool->entry_count; ++index) {
            if (pool->entries[index].offset - offset >= layout_size)
               break;
            offset = pool->entries[index].offset + pool->entries[index].size;
         }

         if (pool->size - offset < layout_size) {
            vk_object_free(&device->vk, NULL, set);
            return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
         }

         set->mapped_ptr = (uint32_t*)(pool->bo.map + offset);
         set->va = pool->bo.iova + offset;
         memmove(&pool->entries[index + 1], &pool->entries[index],
                 sizeof(pool->entries[0]) * (pool->entry_count - index));
         pool->entries[index].offset = offset;
         pool->entries[index].size = layout_size;
         pool->entries[index].set = set;
         pool->entry_count++;
      } else
         return vk_error(device->instance, VK_ERROR_OUT_OF_POOL_MEMORY);
   }
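
   /* To illustrate the first-fit search above: with existing entries at
    * (offset, size) = (0, 64) and (192, 64), a request for 128 bytes first
    * advances past the entry at offset 0 (gap of 0 bytes), then finds
    * 192 - 64 = 128 free bytes, so the new set lands at offset 64 and its
    * entry is inserted at index 1, keeping the entry array sorted by offset.
    */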

   if (layout->has_immutable_samplers) {
      for (unsigned i = 0; i < layout->binding_count; ++i) {
         if (!layout->binding[i].immutable_samplers_offset)
            continue;

         unsigned offset = layout->binding[i].offset / 4;
         if (layout->binding[i].type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)
            offset += A6XX_TEX_CONST_DWORDS;

         const struct tu_sampler *samplers =
            (const struct tu_sampler *)((const char *)layout +
                                        layout->binding[i].immutable_samplers_offset);
         for (unsigned j = 0; j < layout->binding[i].array_size; ++j) {
            memcpy(set->mapped_ptr + offset, samplers[j].descriptor,
                   sizeof(samplers[j].descriptor));
            offset += layout->binding[i].size / 4;
         }
      }
   }

   *out_set = set;
   return VK_SUCCESS;
}

static void
tu_descriptor_set_destroy(struct tu_device *device,
                          struct tu_descriptor_pool *pool,
                          struct tu_descriptor_set *set,
                          bool free_bo)
{
   assert(!pool->host_memory_base);

   if (free_bo && set->size && !pool->host_memory_base) {
      uint32_t offset = (uint8_t*)set->mapped_ptr - (uint8_t*)pool->bo.map;
      for (int i = 0; i < pool->entry_count; ++i) {
         if (pool->entries[i].offset == offset) {
            memmove(&pool->entries[i], &pool->entries[i+1],
                    sizeof(pool->entries[i]) * (pool->entry_count - i - 1));
            --pool->entry_count;
            break;
         }
      }
   }

   vk_object_free(&device->vk, NULL, set);
}

VKAPI_ATTR VkResult VKAPI_CALL
tu_CreateDescriptorPool(VkDevice _device,
                        const VkDescriptorPoolCreateInfo *pCreateInfo,
                        const VkAllocationCallbacks *pAllocator,
                        VkDescriptorPool *pDescriptorPool)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_descriptor_pool *pool;
   uint64_t size = sizeof(struct tu_descriptor_pool);
   uint64_t bo_size = 0, bo_count = 0, dynamic_count = 0;
   VkResult ret;

   for (unsigned i = 0; i < pCreateInfo->poolSizeCount; ++i) {
      if (pCreateInfo->pPoolSizes[i].type != VK_DESCRIPTOR_TYPE_SAMPLER)
         bo_count += pCreateInfo->pPoolSizes[i].descriptorCount;

      switch(pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         dynamic_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      default:
         break;
      }

      bo_size += descriptor_size(pCreateInfo->pPoolSizes[i].type) *
                 pCreateInfo->pPoolSizes[i].descriptorCount;
   }

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      uint64_t host_size = pCreateInfo->maxSets * sizeof(struct tu_descriptor_set);
      host_size += sizeof(struct tu_bo*) * bo_count;
      host_size += A6XX_TEX_CONST_DWORDS * 4 * dynamic_count;
      size += host_size;
   } else {
      size += sizeof(struct tu_descriptor_pool_entry) * pCreateInfo->maxSets;
   }

   pool = vk_object_zalloc(&device->vk, pAllocator, size,
                           VK_OBJECT_TYPE_DESCRIPTOR_POOL);
   if (!pool)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   if (!(pCreateInfo->flags & VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT)) {
      pool->host_memory_base = (uint8_t*)pool + sizeof(struct tu_descriptor_pool);
      pool->host_memory_ptr = pool->host_memory_base;
      pool->host_memory_end = (uint8_t*)pool + size;
   }

   if (bo_size) {
      ret = tu_bo_init_new(device, &pool->bo, bo_size, TU_BO_ALLOC_ALLOW_DUMP);
      if (ret)
         goto fail_alloc;

      ret = tu_bo_map(device, &pool->bo);
      if (ret)
         goto fail_map;
   }
   pool->size = bo_size;
   pool->max_entry_count = pCreateInfo->maxSets;

   *pDescriptorPool = tu_descriptor_pool_to_handle(pool);
   return VK_SUCCESS;

fail_map:
   tu_bo_finish(device, &pool->bo);
fail_alloc:
   vk_object_free(&device->vk, pAllocator, pool);
   return ret;
}
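
/* A rough sizing example for the pool above (again assuming 64-byte
 * descriptors): a pool whose pPoolSizes request 8
 * VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER and 4
 * VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER descriptors gets
 * bo_size = 8 * 64 + 4 * 128 = 1024 bytes of GPU-visible descriptor memory;
 * plain VK_DESCRIPTOR_TYPE_SAMPLER descriptors still contribute to bo_size
 * but are excluded from bo_count.
 */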

VKAPI_ATTR void VKAPI_CALL
tu_DestroyDescriptorPool(VkDevice _device,
                         VkDescriptorPool _pool,
                         const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   if (!pool->host_memory_base) {
      for(int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
   }

   if (pool->size)
      tu_bo_finish(device, &pool->bo);

   vk_object_free(&device->vk, pAllocator, pool);
}

VKAPI_ATTR VkResult VKAPI_CALL
tu_ResetDescriptorPool(VkDevice _device,
                       VkDescriptorPool descriptorPool,
                       VkDescriptorPoolResetFlags flags)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   if (!pool->host_memory_base) {
      for(int i = 0; i < pool->entry_count; ++i) {
         tu_descriptor_set_destroy(device, pool, pool->entries[i].set, false);
      }
      pool->entry_count = 0;
   }

   pool->current_offset = 0;
   pool->host_memory_ptr = pool->host_memory_base;

   return VK_SUCCESS;
}

VKAPI_ATTR VkResult VKAPI_CALL
tu_AllocateDescriptorSets(VkDevice _device,
                          const VkDescriptorSetAllocateInfo *pAllocateInfo,
                          VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   uint32_t i;
   struct tu_descriptor_set *set = NULL;

   const VkDescriptorSetVariableDescriptorCountAllocateInfoEXT *variable_counts =
      vk_find_struct_const(pAllocateInfo->pNext, DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT);
   const uint32_t zero = 0;

   /* allocate a set of buffers for each shader to contain descriptors */
   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      TU_FROM_HANDLE(tu_descriptor_set_layout, layout,
                     pAllocateInfo->pSetLayouts[i]);

      const uint32_t *variable_count = NULL;
      if (variable_counts) {
         if (i < variable_counts->descriptorSetCount)
            variable_count = variable_counts->pDescriptorCounts + i;
         else
            variable_count = &zero;
      }

      assert(!(layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));

      result = tu_descriptor_set_create(device, pool, layout, variable_count, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = tu_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS) {
      tu_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                            i, pDescriptorSets);
      for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
         pDescriptorSets[i] = VK_NULL_HANDLE;
      }
   }
   return result;
}

VKAPI_ATTR VkResult VKAPI_CALL
tu_FreeDescriptorSets(VkDevice _device,
                      VkDescriptorPool descriptorPool,
                      uint32_t count,
                      const VkDescriptorSet *pDescriptorSets)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      TU_FROM_HANDLE(tu_descriptor_set, set, pDescriptorSets[i]);

      if (set && !pool->host_memory_base)
         tu_descriptor_set_destroy(device, pool, set, true);
   }
   return VK_SUCCESS;
}

static void
write_texel_buffer_descriptor(uint32_t *dst, const VkBufferView buffer_view)
{
   if (buffer_view == VK_NULL_HANDLE) {
      memset(dst, 0, A6XX_TEX_CONST_DWORDS * sizeof(uint32_t));
   } else {
      TU_FROM_HANDLE(tu_buffer_view, view, buffer_view);

      memcpy(dst, view->descriptor, sizeof(view->descriptor));
   }
}

static uint32_t get_range(struct tu_buffer *buf, VkDeviceSize offset,
                          VkDeviceSize range)
{
   if (range == VK_WHOLE_SIZE) {
      return buf->size - offset;
   } else {
      return range;
   }
}

static void
write_buffer_descriptor(const struct tu_device *device,
                        uint32_t *dst,
                        const VkDescriptorBufferInfo *buffer_info)
{
   if (buffer_info->buffer == VK_NULL_HANDLE) {
      memset(dst, 0, A6XX_TEX_CONST_DWORDS * sizeof(uint32_t));
      return;
   }

   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);

   assert((buffer_info->offset & 63) == 0); /* minStorageBufferOffsetAlignment */
   uint64_t va = tu_buffer_iova(buffer) + buffer_info->offset;
   uint32_t range = get_range(buffer, buffer_info->offset, buffer_info->range);
   /* newer a6xx allows using 16-bit descriptor for both 16-bit and 32-bit access */
   if (device->physical_device->info->a6xx.storage_16bit) {
      dst[0] = A6XX_IBO_0_TILE_MODE(TILE6_LINEAR) | A6XX_IBO_0_FMT(FMT6_16_UINT);
      dst[1] = DIV_ROUND_UP(range, 2);
   } else {
      dst[0] = A6XX_IBO_0_TILE_MODE(TILE6_LINEAR) | A6XX_IBO_0_FMT(FMT6_32_UINT);
      dst[1] = DIV_ROUND_UP(range, 4);
   }
   dst[2] =
      A6XX_IBO_2_UNK4 | A6XX_IBO_2_TYPE(A6XX_TEX_1D) | A6XX_IBO_2_UNK31;
   dst[3] = 0;
   dst[4] = A6XX_IBO_4_BASE_LO(va);
   dst[5] = A6XX_IBO_5_BASE_HI(va >> 32);
   for (int i = 6; i < A6XX_TEX_CONST_DWORDS; i++)
      dst[i] = 0;
}

static void
write_ubo_descriptor(uint32_t *dst, const VkDescriptorBufferInfo *buffer_info)
{
   if (buffer_info->buffer == VK_NULL_HANDLE) {
      dst[0] = dst[1] = 0;
      return;
   }

   TU_FROM_HANDLE(tu_buffer, buffer, buffer_info->buffer);

   uint32_t range = get_range(buffer, buffer_info->offset, buffer_info->range);
   /* The HW range is in vec4 units */
   range = ALIGN_POT(range, 16) / 16;
   uint64_t va = tu_buffer_iova(buffer) + buffer_info->offset;

   dst[0] = A6XX_UBO_0_BASE_LO(va);
   dst[1] = A6XX_UBO_1_BASE_HI(va >> 32) | A6XX_UBO_1_SIZE(range);
}
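
/* For example, a 256-byte uniform buffer bound at offset 0 is encoded above
 * with a size field of 256 / 16 = 16 vec4s, and the 64-bit iova split across
 * A6XX_UBO_0_BASE_LO / A6XX_UBO_1_BASE_HI. A VK_WHOLE_SIZE range is first
 * resolved by get_range() to buf->size - offset.
 */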

static void
write_image_descriptor(uint32_t *dst,
                       VkDescriptorType descriptor_type,
                       const VkDescriptorImageInfo *image_info)
{
   if (image_info->imageView == VK_NULL_HANDLE) {
      memset(dst, 0, A6XX_TEX_CONST_DWORDS * sizeof(uint32_t));
      return;
   }

   TU_FROM_HANDLE(tu_image_view, iview, image_info->imageView);

   if (descriptor_type == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
      memcpy(dst, iview->storage_descriptor, sizeof(iview->storage_descriptor));
   } else {
      memcpy(dst, iview->descriptor, sizeof(iview->descriptor));
   }
}

static void
write_combined_image_sampler_descriptor(uint32_t *dst,
                                        VkDescriptorType descriptor_type,
                                        const VkDescriptorImageInfo *image_info,
                                        bool has_sampler)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   write_image_descriptor(dst, descriptor_type, image_info);
   /* copy over sampler state */
   if (has_sampler) {
      memcpy(dst + A6XX_TEX_CONST_DWORDS, sampler->descriptor, sizeof(sampler->descriptor));
   }
}

static void
write_sampler_descriptor(uint32_t *dst, const VkDescriptorImageInfo *image_info)
{
   TU_FROM_HANDLE(tu_sampler, sampler, image_info->sampler);

   memcpy(dst, sampler->descriptor, sizeof(sampler->descriptor));
}

/* note: this is used with immutable samplers in push descriptors */
static void
write_sampler_push(uint32_t *dst, const struct tu_sampler *sampler)
{
   memcpy(dst, sampler->descriptor, sizeof(sampler->descriptor));
}

void
tu_update_descriptor_sets(const struct tu_device *device,
                          VkDescriptorSet dstSetOverride,
                          uint32_t descriptorWriteCount,
                          const VkWriteDescriptorSet *pDescriptorWrites,
                          uint32_t descriptorCopyCount,
                          const VkCopyDescriptorSet *pDescriptorCopies)
{
   uint32_t i, j;
   for (i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *writeset = &pDescriptorWrites[i];
      TU_FROM_HANDLE(tu_descriptor_set, set, dstSetOverride ?: writeset->dstSet);
      const struct tu_descriptor_set_binding_layout *binding_layout =
         set->layout->binding + writeset->dstBinding;
      uint32_t *ptr = set->mapped_ptr;
      /* for immutable samplers with push descriptors: */
      const bool copy_immutable_samplers =
         dstSetOverride && binding_layout->immutable_samplers_offset;
      const struct tu_sampler *samplers =
         tu_immutable_samplers(set->layout, binding_layout);

      ptr += binding_layout->offset / 4;

      ptr += (binding_layout->size / 4) * writeset->dstArrayElement;
      for (j = 0; j < writeset->descriptorCount; ++j) {
         switch(writeset->descriptorType) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: {
            assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
            unsigned idx = writeset->dstArrayElement + j;
            idx += binding_layout->dynamic_offset_offset;
            write_ubo_descriptor(set->dynamic_descriptors + A6XX_TEX_CONST_DWORDS * idx,
                                 writeset->pBufferInfo + j);
            break;
         }
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            write_ubo_descriptor(ptr, writeset->pBufferInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
            unsigned idx = writeset->dstArrayElement + j;
            idx += binding_layout->dynamic_offset_offset;
            write_buffer_descriptor(device, set->dynamic_descriptors + A6XX_TEX_CONST_DWORDS * idx,
                                    writeset->pBufferInfo + j);
            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            write_buffer_descriptor(device, ptr, writeset->pBufferInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            write_texel_buffer_descriptor(ptr, writeset->pTexelBufferView[j]);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            write_image_descriptor(ptr, writeset->descriptorType, writeset->pImageInfo + j);
            break;
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            write_combined_image_sampler_descriptor(ptr,
                                                    writeset->descriptorType,
                                                    writeset->pImageInfo + j,
                                                    !binding_layout->immutable_samplers_offset);

            if (copy_immutable_samplers)
               write_sampler_push(ptr + A6XX_TEX_CONST_DWORDS, &samplers[writeset->dstArrayElement + j]);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLER:
            if (!binding_layout->immutable_samplers_offset)
               write_sampler_descriptor(ptr, writeset->pImageInfo + j);
            else if (copy_immutable_samplers)
               write_sampler_push(ptr, &samplers[writeset->dstArrayElement + j]);
            break;
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            /* nothing in descriptor set - framebuffer state is used instead */
            break;
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         ptr += binding_layout->size / 4;
      }
   }

   for (i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copyset = &pDescriptorCopies[i];
      TU_FROM_HANDLE(tu_descriptor_set, src_set,
                     copyset->srcSet);
      TU_FROM_HANDLE(tu_descriptor_set, dst_set,
                     copyset->dstSet);
      const struct tu_descriptor_set_binding_layout *src_binding_layout =
         src_set->layout->binding + copyset->srcBinding;
      const struct tu_descriptor_set_binding_layout *dst_binding_layout =
         dst_set->layout->binding + copyset->dstBinding;
      uint32_t *src_ptr = src_set->mapped_ptr;
      uint32_t *dst_ptr = dst_set->mapped_ptr;

      src_ptr += src_binding_layout->offset / 4;
      dst_ptr += dst_binding_layout->offset / 4;

      src_ptr += src_binding_layout->size * copyset->srcArrayElement / 4;
      dst_ptr += dst_binding_layout->size * copyset->dstArrayElement / 4;

      for (j = 0; j < copyset->descriptorCount; ++j) {
         switch (src_binding_layout->type) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            unsigned src_idx = copyset->srcArrayElement + j;
            unsigned dst_idx = copyset->dstArrayElement + j;
            src_idx += src_binding_layout->dynamic_offset_offset;
            dst_idx += dst_binding_layout->dynamic_offset_offset;

            uint32_t *src_dynamic, *dst_dynamic;
            src_dynamic = src_set->dynamic_descriptors + src_idx * A6XX_TEX_CONST_DWORDS;
            dst_dynamic = dst_set->dynamic_descriptors + dst_idx * A6XX_TEX_CONST_DWORDS;
            memcpy(dst_dynamic, src_dynamic, A6XX_TEX_CONST_DWORDS * 4);
            break;
         }
         default:
            memcpy(dst_ptr, src_ptr, src_binding_layout->size);
         }

         src_ptr += src_binding_layout->size / 4;
         dst_ptr += dst_binding_layout->size / 4;
      }
   }
}

VKAPI_ATTR void VKAPI_CALL
tu_UpdateDescriptorSets(VkDevice _device,
                        uint32_t descriptorWriteCount,
                        const VkWriteDescriptorSet *pDescriptorWrites,
                        uint32_t descriptorCopyCount,
                        const VkCopyDescriptorSet *pDescriptorCopies)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   tu_update_descriptor_sets(device, VK_NULL_HANDLE,
                             descriptorWriteCount, pDescriptorWrites,
                             descriptorCopyCount, pDescriptorCopies);
}
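
/* A minimal sketch of the application-side write that lands in
 * tu_update_descriptor_sets() above (illustrative; `set` and `buffer` are
 * placeholders):
 *
 *    VkDescriptorBufferInfo buf_info = {
 *       .buffer = buffer,
 *       .offset = 0,
 *       .range = VK_WHOLE_SIZE,
 *    };
 *    VkWriteDescriptorSet write = {
 *       .sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
 *       .dstSet = set,
 *       .dstBinding = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .pBufferInfo = &buf_info,
 *    };
 *    vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
 *
 * which ends up in write_ubo_descriptor() at
 * mapped_ptr + binding_layout->offset / 4.
 */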

VKAPI_ATTR VkResult VKAPI_CALL
tu_CreateDescriptorUpdateTemplate(
   VkDevice _device,
   const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set_layout, set_layout,
                  pCreateInfo->descriptorSetLayout);
   const uint32_t entry_count = pCreateInfo->descriptorUpdateEntryCount;
   const size_t size =
      sizeof(struct tu_descriptor_update_template) +
      sizeof(struct tu_descriptor_update_template_entry) * entry_count;
   struct tu_descriptor_update_template *templ;

   templ = vk_object_alloc(&device->vk, pAllocator, size,
                           VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);
   if (!templ)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   templ->entry_count = entry_count;

   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR) {
      TU_FROM_HANDLE(tu_pipeline_layout, pipeline_layout, pCreateInfo->pipelineLayout);

      /* descriptorSetLayout should be ignored for push descriptors
       * and instead it refers to pipelineLayout and set.
       */
      assert(pCreateInfo->set < MAX_SETS);
      set_layout = pipeline_layout->set[pCreateInfo->set].layout;

      templ->bind_point = pCreateInfo->pipelineBindPoint;
   }

   for (uint32_t i = 0; i < entry_count; i++) {
      const VkDescriptorUpdateTemplateEntry *entry = &pCreateInfo->pDescriptorUpdateEntries[i];

      const struct tu_descriptor_set_binding_layout *binding_layout =
         set_layout->binding + entry->dstBinding;
      uint32_t dst_offset, dst_stride;
      const struct tu_sampler *immutable_samplers = NULL;

      /* dst_offset is an offset into dynamic_descriptors when the descriptor
       * is dynamic, and an offset into mapped_ptr otherwise.
       */
      switch (entry->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         dst_offset = (binding_layout->dynamic_offset_offset +
                       entry->dstArrayElement) * A6XX_TEX_CONST_DWORDS;
         dst_stride = A6XX_TEX_CONST_DWORDS;
         break;
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR &&
             binding_layout->immutable_samplers_offset) {
            immutable_samplers =
               tu_immutable_samplers(set_layout, binding_layout) + entry->dstArrayElement;
         }
         FALLTHROUGH;
      default:
         dst_offset = binding_layout->offset / 4;
         dst_offset += (binding_layout->size * entry->dstArrayElement) / 4;
         dst_stride = binding_layout->size / 4;
      }

      templ->entry[i] = (struct tu_descriptor_update_template_entry) {
         .descriptor_type = entry->descriptorType,
         .descriptor_count = entry->descriptorCount,
         .src_offset = entry->offset,
         .src_stride = entry->stride,
         .dst_offset = dst_offset,
         .dst_stride = dst_stride,
         .has_sampler = !binding_layout->immutable_samplers_offset,
         .immutable_samplers = immutable_samplers,
      };
   }

   *pDescriptorUpdateTemplate =
      tu_descriptor_update_template_to_handle(templ);

   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
tu_DestroyDescriptorUpdateTemplate(
   VkDevice _device,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);

   if (!templ)
      return;

   vk_object_free(&device->vk, pAllocator, templ);
}
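
/* To make the template bookkeeping above concrete: an update entry such as
 *
 *    (VkDescriptorUpdateTemplateEntry) {
 *       .dstBinding = 1,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 4,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
 *       .offset = 32,
 *       .stride = sizeof(VkDescriptorImageInfo),
 *    }
 *
 * is translated to src_offset = 32, src_stride = sizeof(VkDescriptorImageInfo),
 * dst_offset = binding 1's offset in dwords, and dst_stride = its size in
 * dwords (16 for a sampled image), so element j is read from
 * pData + 32 + j * stride and written 16 dwords apart.
 */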

void
tu_update_descriptor_set_with_template(
   const struct tu_device *device,
   struct tu_descriptor_set *set,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_descriptor_update_template, templ,
                  descriptorUpdateTemplate);

   for (uint32_t i = 0; i < templ->entry_count; i++) {
      uint32_t *ptr = set->mapped_ptr;
      const void *src = ((const char *) pData) + templ->entry[i].src_offset;
      const struct tu_sampler *samplers = templ->entry[i].immutable_samplers;

      ptr += templ->entry[i].dst_offset;
      unsigned dst_offset = templ->entry[i].dst_offset;
      for (unsigned j = 0; j < templ->entry[i].descriptor_count; ++j) {
         switch(templ->entry[i].descriptor_type) {
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: {
            assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
            write_ubo_descriptor(set->dynamic_descriptors + dst_offset, src);
            break;
         }
         case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            write_ubo_descriptor(ptr, src);
            break;
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
            assert(!(set->layout->flags & VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR));
            write_buffer_descriptor(device, set->dynamic_descriptors + dst_offset, src);
            break;
         }
         case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
            write_buffer_descriptor(device, ptr, src);
            break;
         case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
         case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
            write_texel_buffer_descriptor(ptr, *(VkBufferView *) src);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
         case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
            write_image_descriptor(ptr, templ->entry[i].descriptor_type, src);
            break;
         }
         case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            write_combined_image_sampler_descriptor(ptr,
                                                    templ->entry[i].descriptor_type,
                                                    src,
                                                    templ->entry[i].has_sampler);
            if (samplers)
               write_sampler_push(ptr + A6XX_TEX_CONST_DWORDS, &samplers[j]);
            break;
         case VK_DESCRIPTOR_TYPE_SAMPLER:
            if (templ->entry[i].has_sampler)
               write_sampler_descriptor(ptr, src);
            else if (samplers)
               write_sampler_push(ptr, &samplers[j]);
            break;
         case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
            /* nothing in descriptor set - framebuffer state is used instead */
            break;
         default:
            unreachable("unimplemented descriptor type");
            break;
         }
         src = (char *) src + templ->entry[i].src_stride;
         ptr += templ->entry[i].dst_stride;
         dst_offset += templ->entry[i].dst_stride;
      }
   }
}

VKAPI_ATTR void VKAPI_CALL
tu_UpdateDescriptorSetWithTemplate(
   VkDevice _device,
   VkDescriptorSet descriptorSet,
   VkDescriptorUpdateTemplate descriptorUpdateTemplate,
   const void *pData)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_descriptor_set, set, descriptorSet);

   tu_update_descriptor_set_with_template(device, set, descriptorUpdateTemplate, pData);
}

VKAPI_ATTR VkResult VKAPI_CALL
tu_CreateSamplerYcbcrConversion(
   VkDevice _device,
   const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkSamplerYcbcrConversion *pYcbcrConversion)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   struct tu_sampler_ycbcr_conversion *conversion;

   conversion = vk_object_alloc(&device->vk, pAllocator, sizeof(*conversion),
                                VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION);
   if (!conversion)
      return vk_error(device->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   conversion->format = pCreateInfo->format;
   conversion->ycbcr_model = pCreateInfo->ycbcrModel;
   conversion->ycbcr_range = pCreateInfo->ycbcrRange;
   conversion->components = pCreateInfo->components;
   conversion->chroma_offsets[0] = pCreateInfo->xChromaOffset;
   conversion->chroma_offsets[1] = pCreateInfo->yChromaOffset;
   conversion->chroma_filter = pCreateInfo->chromaFilter;

   *pYcbcrConversion = tu_sampler_ycbcr_conversion_to_handle(conversion);
   return VK_SUCCESS;
}

VKAPI_ATTR void VKAPI_CALL
tu_DestroySamplerYcbcrConversion(VkDevice _device,
                                 VkSamplerYcbcrConversion ycbcrConversion,
                                 const VkAllocationCallbacks *pAllocator)
{
   TU_FROM_HANDLE(tu_device, device, _device);
   TU_FROM_HANDLE(tu_sampler_ycbcr_conversion, ycbcr_conversion, ycbcrConversion);

   if (!ycbcr_conversion)
      return;

   vk_object_free(&device->vk, pAllocator, ycbcr_conversion);
}