/* Source: src/intel/vulkan/anv_descriptor_set.c (branch 21.2-virgl) */
/*1* Copyright © 2015 Intel Corporation2*3* Permission is hereby granted, free of charge, to any person obtaining a4* copy of this software and associated documentation files (the "Software"),5* to deal in the Software without restriction, including without limitation6* the rights to use, copy, modify, merge, publish, distribute, sublicense,7* and/or sell copies of the Software, and to permit persons to whom the8* Software is furnished to do so, subject to the following conditions:9*10* The above copyright notice and this permission notice (including the next11* paragraph) shall be included in all copies or substantial portions of the12* Software.13*14* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR15* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,16* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL17* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER18* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING19* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS20* IN THE SOFTWARE.21*/2223#include <assert.h>24#include <stdbool.h>25#include <string.h>26#include <unistd.h>27#include <fcntl.h>2829#include "util/mesa-sha1.h"30#include "vk_util.h"3132#include "anv_private.h"3334/*35* Descriptor set layouts.36*/3738static enum anv_descriptor_data39anv_descriptor_data_for_type(const struct anv_physical_device *device,40VkDescriptorType type)41{42enum anv_descriptor_data data = 0;4344switch (type) {45case VK_DESCRIPTOR_TYPE_SAMPLER:46data = ANV_DESCRIPTOR_SAMPLER_STATE;47if (device->has_bindless_samplers)48data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;49break;5051case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:52data = ANV_DESCRIPTOR_SURFACE_STATE |53ANV_DESCRIPTOR_SAMPLER_STATE;54if (device->has_bindless_images || device->has_bindless_samplers)55data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;56break;5758case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:59case 
VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:60data = ANV_DESCRIPTOR_SURFACE_STATE;61if (device->has_bindless_images)62data |= ANV_DESCRIPTOR_SAMPLED_IMAGE;63break;6465case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:66data = ANV_DESCRIPTOR_SURFACE_STATE;67break;6869case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:70case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:71data = ANV_DESCRIPTOR_SURFACE_STATE;72if (device->info.ver < 9)73data |= ANV_DESCRIPTOR_IMAGE_PARAM;74if (device->has_bindless_images)75data |= ANV_DESCRIPTOR_STORAGE_IMAGE;76break;7778case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:79case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:80data = ANV_DESCRIPTOR_SURFACE_STATE |81ANV_DESCRIPTOR_BUFFER_VIEW;82break;8384case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:85case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:86data = ANV_DESCRIPTOR_SURFACE_STATE;87break;8889case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:90data = ANV_DESCRIPTOR_INLINE_UNIFORM;91break;9293case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR:94data = ANV_DESCRIPTOR_ADDRESS_RANGE;95break;9697default:98unreachable("Unsupported descriptor type");99}100101/* On gfx8 and above when we have softpin enabled, we also need to push102* SSBO address ranges so that we can use A64 messages in the shader.103*/104if (device->has_a64_buffer_access &&105(type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER ||106type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC ||107type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||108type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC))109data |= ANV_DESCRIPTOR_ADDRESS_RANGE;110111/* On Ivy Bridge and Bay Trail, we need swizzles textures in the shader112* Do not handle VK_DESCRIPTOR_TYPE_STORAGE_IMAGE and113* VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT because they already must114* have identity swizzle.115*/116if (device->info.verx10 == 70 &&117(type == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE ||118type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER))119data |= ANV_DESCRIPTOR_TEXTURE_SWIZZLE;120121return data;122}123124static 
unsigned125anv_descriptor_data_size(enum anv_descriptor_data data)126{127unsigned size = 0;128129if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE)130size += sizeof(struct anv_sampled_image_descriptor);131132if (data & ANV_DESCRIPTOR_STORAGE_IMAGE)133size += sizeof(struct anv_storage_image_descriptor);134135if (data & ANV_DESCRIPTOR_IMAGE_PARAM)136size += BRW_IMAGE_PARAM_SIZE * 4;137138if (data & ANV_DESCRIPTOR_ADDRESS_RANGE)139size += sizeof(struct anv_address_range_descriptor);140141if (data & ANV_DESCRIPTOR_TEXTURE_SWIZZLE)142size += sizeof(struct anv_texture_swizzle_descriptor);143144return size;145}146147static bool148anv_needs_descriptor_buffer(VkDescriptorType desc_type,149enum anv_descriptor_data desc_data)150{151if (desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT ||152anv_descriptor_data_size(desc_data) > 0)153return true;154return false;155}156157/** Returns the size in bytes of each descriptor with the given layout */158unsigned159anv_descriptor_size(const struct anv_descriptor_set_binding_layout *layout)160{161if (layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {162assert(layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);163return layout->array_size;164}165166unsigned size = anv_descriptor_data_size(layout->data);167168/* For multi-planar bindings, we make every descriptor consume the maximum169* number of planes so we don't have to bother with walking arrays and170* adding things up every time. Fortunately, YCbCr samplers aren't all171* that common and likely won't be in the middle of big arrays.172*/173if (layout->max_plane_count > 1)174size *= layout->max_plane_count;175176return size;177}178179/** Returns the size in bytes of each descriptor of the given type180*181* This version of the function does not have access to the entire layout so182* it may only work on certain descriptor types where the descriptor size is183* entirely determined by the descriptor type. 
Whenever possible, code should184* use anv_descriptor_size() instead.185*/186unsigned187anv_descriptor_type_size(const struct anv_physical_device *pdevice,188VkDescriptorType type)189{190assert(type != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT &&191type != VK_DESCRIPTOR_TYPE_SAMPLER &&192type != VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE &&193type != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER);194195return anv_descriptor_data_size(anv_descriptor_data_for_type(pdevice, type));196}197198static bool199anv_descriptor_data_supports_bindless(const struct anv_physical_device *pdevice,200enum anv_descriptor_data data,201bool sampler)202{203if (data & ANV_DESCRIPTOR_ADDRESS_RANGE) {204assert(pdevice->has_a64_buffer_access);205return true;206}207208if (data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {209assert(pdevice->has_bindless_images || pdevice->has_bindless_samplers);210return sampler ? pdevice->has_bindless_samplers :211pdevice->has_bindless_images;212}213214if (data & ANV_DESCRIPTOR_STORAGE_IMAGE) {215assert(pdevice->has_bindless_images);216return true;217}218219return false;220}221222bool223anv_descriptor_supports_bindless(const struct anv_physical_device *pdevice,224const struct anv_descriptor_set_binding_layout *binding,225bool sampler)226{227return anv_descriptor_data_supports_bindless(pdevice, binding->data,228sampler);229}230231bool232anv_descriptor_requires_bindless(const struct anv_physical_device *pdevice,233const struct anv_descriptor_set_binding_layout *binding,234bool sampler)235{236if (pdevice->always_use_bindless)237return anv_descriptor_supports_bindless(pdevice, binding, sampler);238239static const VkDescriptorBindingFlagBitsEXT flags_requiring_bindless =240VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT |241VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT |242VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT;243244return (binding->flags & flags_requiring_bindless) != 0;245}246247void anv_GetDescriptorSetLayoutSupport(248VkDevice _device,249const 
VkDescriptorSetLayoutCreateInfo* pCreateInfo,250VkDescriptorSetLayoutSupport* pSupport)251{252ANV_FROM_HANDLE(anv_device, device, _device);253const struct anv_physical_device *pdevice = device->physical;254255uint32_t surface_count[MESA_VULKAN_SHADER_STAGES] = { 0, };256VkDescriptorType varying_desc_type = VK_DESCRIPTOR_TYPE_MAX_ENUM;257bool needs_descriptor_buffer = false;258259const VkDescriptorSetLayoutBindingFlagsCreateInfo *binding_flags_info =260vk_find_struct_const(pCreateInfo->pNext,261DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO);262263for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {264const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];265266VkDescriptorBindingFlags flags = 0;267if (binding_flags_info && binding_flags_info->bindingCount > 0) {268assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);269flags = binding_flags_info->pBindingFlags[b];270}271272enum anv_descriptor_data desc_data =273anv_descriptor_data_for_type(pdevice, binding->descriptorType);274275if (anv_needs_descriptor_buffer(binding->descriptorType, desc_data))276needs_descriptor_buffer = true;277278if (flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT)279varying_desc_type = binding->descriptorType;280281switch (binding->descriptorType) {282case VK_DESCRIPTOR_TYPE_SAMPLER:283/* There is no real limit on samplers */284break;285286case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:287/* Inline uniforms don't use a binding */288break;289290case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:291if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))292break;293294if (binding->pImmutableSamplers) {295for (uint32_t i = 0; i < binding->descriptorCount; i++) {296ANV_FROM_HANDLE(anv_sampler, sampler,297binding->pImmutableSamplers[i]);298anv_foreach_stage(s, binding->stageFlags)299surface_count[s] += sampler->n_planes;300}301} else {302anv_foreach_stage(s, binding->stageFlags)303surface_count[s] += 
binding->descriptorCount;304}305break;306307default:308if (anv_descriptor_data_supports_bindless(pdevice, desc_data, false))309break;310311anv_foreach_stage(s, binding->stageFlags)312surface_count[s] += binding->descriptorCount;313break;314}315}316317for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {318if (needs_descriptor_buffer)319surface_count[s] += 1;320}321322VkDescriptorSetVariableDescriptorCountLayoutSupport *vdcls =323vk_find_struct(pSupport->pNext,324DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT);325if (vdcls != NULL) {326if (varying_desc_type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {327vdcls->maxVariableDescriptorCount = MAX_INLINE_UNIFORM_BLOCK_SIZE;328} else if (varying_desc_type != VK_DESCRIPTOR_TYPE_MAX_ENUM) {329vdcls->maxVariableDescriptorCount = UINT16_MAX;330} else {331vdcls->maxVariableDescriptorCount = 0;332}333}334335bool supported = true;336for (unsigned s = 0; s < ARRAY_SIZE(surface_count); s++) {337/* Our maximum binding table size is 240 and we need to reserve 8 for338* render targets.339*/340if (surface_count[s] > MAX_BINDING_TABLE_SIZE - MAX_RTS)341supported = false;342}343344pSupport->supported = supported;345}346347VkResult anv_CreateDescriptorSetLayout(348VkDevice _device,349const VkDescriptorSetLayoutCreateInfo* pCreateInfo,350const VkAllocationCallbacks* pAllocator,351VkDescriptorSetLayout* pSetLayout)352{353ANV_FROM_HANDLE(anv_device, device, _device);354355assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);356357uint32_t num_bindings = 0;358uint32_t immutable_sampler_count = 0;359for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {360num_bindings = MAX2(num_bindings, pCreateInfo->pBindings[j].binding + 1);361362/* From the Vulkan 1.1.97 spec for VkDescriptorSetLayoutBinding:363*364* "If descriptorType specifies a VK_DESCRIPTOR_TYPE_SAMPLER or365* VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER type descriptor, then366* pImmutableSamplers can be used to initialize a set of 
immutable367* samplers. [...] If descriptorType is not one of these descriptor368* types, then pImmutableSamplers is ignored.369*370* We need to be careful here and only parse pImmutableSamplers if we371* have one of the right descriptor types.372*/373VkDescriptorType desc_type = pCreateInfo->pBindings[j].descriptorType;374if ((desc_type == VK_DESCRIPTOR_TYPE_SAMPLER ||375desc_type == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&376pCreateInfo->pBindings[j].pImmutableSamplers)377immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;378}379380/* We need to allocate decriptor set layouts off the device allocator381* with DEVICE scope because they are reference counted and may not be382* destroyed when vkDestroyDescriptorSetLayout is called.383*/384VK_MULTIALLOC(ma);385VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_layout, set_layout, 1);386VK_MULTIALLOC_DECL(&ma, struct anv_descriptor_set_binding_layout,387bindings, num_bindings);388VK_MULTIALLOC_DECL(&ma, struct anv_sampler *, samplers,389immutable_sampler_count);390391if (!vk_object_multizalloc(&device->vk, &ma, NULL,392VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT))393return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);394395set_layout->ref_cnt = 1;396set_layout->binding_count = num_bindings;397398for (uint32_t b = 0; b < num_bindings; b++) {399/* Initialize all binding_layout entries to -1 */400memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));401402set_layout->binding[b].flags = 0;403set_layout->binding[b].data = 0;404set_layout->binding[b].max_plane_count = 0;405set_layout->binding[b].array_size = 0;406set_layout->binding[b].immutable_samplers = NULL;407}408409/* Initialize all samplers to 0 */410memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));411412uint32_t buffer_view_count = 0;413uint32_t dynamic_offset_count = 0;414uint32_t descriptor_buffer_size = 0;415416for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {417const VkDescriptorSetLayoutBinding *binding = 
&pCreateInfo->pBindings[j];418uint32_t b = binding->binding;419/* We temporarily store pCreateInfo->pBindings[] index (plus one) in the420* immutable_samplers pointer. This provides us with a quick-and-dirty421* way to sort the bindings by binding number.422*/423set_layout->binding[b].immutable_samplers = (void *)(uintptr_t)(j + 1);424}425426const VkDescriptorSetLayoutBindingFlagsCreateInfoEXT *binding_flags_info =427vk_find_struct_const(pCreateInfo->pNext,428DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT);429430for (uint32_t b = 0; b < num_bindings; b++) {431/* We stashed the pCreateInfo->pBindings[] index (plus one) in the432* immutable_samplers pointer. Check for NULL (empty binding) and then433* reset it and compute the index.434*/435if (set_layout->binding[b].immutable_samplers == NULL)436continue;437const uint32_t info_idx =438(uintptr_t)(void *)set_layout->binding[b].immutable_samplers - 1;439set_layout->binding[b].immutable_samplers = NULL;440441const VkDescriptorSetLayoutBinding *binding =442&pCreateInfo->pBindings[info_idx];443444if (binding->descriptorCount == 0)445continue;446447set_layout->binding[b].type = binding->descriptorType;448449if (binding_flags_info && binding_flags_info->bindingCount > 0) {450assert(binding_flags_info->bindingCount == pCreateInfo->bindingCount);451set_layout->binding[b].flags =452binding_flags_info->pBindingFlags[info_idx];453454/* From the Vulkan spec:455*456* "If VkDescriptorSetLayoutCreateInfo::flags includes457* VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR, then458* all elements of pBindingFlags must not include459* VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,460* VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT, or461* VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT"462*/463if (pCreateInfo->flags &464VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR) {465assert(!(set_layout->binding[b].flags &466(VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT 
|467VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT |468VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT)));469}470}471472set_layout->binding[b].data =473anv_descriptor_data_for_type(device->physical,474binding->descriptorType);475set_layout->binding[b].array_size = binding->descriptorCount;476set_layout->binding[b].descriptor_index = set_layout->descriptor_count;477set_layout->descriptor_count += binding->descriptorCount;478479if (set_layout->binding[b].data & ANV_DESCRIPTOR_BUFFER_VIEW) {480set_layout->binding[b].buffer_view_index = buffer_view_count;481buffer_view_count += binding->descriptorCount;482}483484switch (binding->descriptorType) {485case VK_DESCRIPTOR_TYPE_SAMPLER:486case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:487set_layout->binding[b].max_plane_count = 1;488if (binding->pImmutableSamplers) {489set_layout->binding[b].immutable_samplers = samplers;490samplers += binding->descriptorCount;491492for (uint32_t i = 0; i < binding->descriptorCount; i++) {493ANV_FROM_HANDLE(anv_sampler, sampler,494binding->pImmutableSamplers[i]);495496set_layout->binding[b].immutable_samplers[i] = sampler;497if (set_layout->binding[b].max_plane_count < sampler->n_planes)498set_layout->binding[b].max_plane_count = sampler->n_planes;499}500}501break;502503case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:504set_layout->binding[b].max_plane_count = 1;505break;506507default:508break;509}510511switch (binding->descriptorType) {512case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:513case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:514set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;515set_layout->dynamic_offset_stages[dynamic_offset_count] = binding->stageFlags;516dynamic_offset_count += binding->descriptorCount;517assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);518break;519520default:521break;522}523524if (binding->descriptorType ==525VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {526/* Inline uniform blocks are specified to use the descriptor array527* size as 
the size in bytes of the block.528*/529descriptor_buffer_size = align_u32(descriptor_buffer_size,530ANV_UBO_ALIGNMENT);531set_layout->binding[b].descriptor_offset = descriptor_buffer_size;532descriptor_buffer_size += binding->descriptorCount;533} else {534set_layout->binding[b].descriptor_offset = descriptor_buffer_size;535descriptor_buffer_size += anv_descriptor_size(&set_layout->binding[b]) *536binding->descriptorCount;537}538539set_layout->shader_stages |= binding->stageFlags;540}541542set_layout->buffer_view_count = buffer_view_count;543set_layout->dynamic_offset_count = dynamic_offset_count;544set_layout->descriptor_buffer_size = descriptor_buffer_size;545546*pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);547548return VK_SUCCESS;549}550551void552anv_descriptor_set_layout_destroy(struct anv_device *device,553struct anv_descriptor_set_layout *layout)554{555assert(layout->ref_cnt == 0);556vk_object_free(&device->vk, NULL, layout);557}558559static const struct anv_descriptor_set_binding_layout *560set_layout_dynamic_binding(const struct anv_descriptor_set_layout *set_layout)561{562if (set_layout->binding_count == 0)563return NULL;564565const struct anv_descriptor_set_binding_layout *last_binding =566&set_layout->binding[set_layout->binding_count - 1];567if (!(last_binding->flags & VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT))568return NULL;569570return last_binding;571}572573static uint32_t574set_layout_descriptor_count(const struct anv_descriptor_set_layout *set_layout,575uint32_t var_desc_count)576{577const struct anv_descriptor_set_binding_layout *dynamic_binding =578set_layout_dynamic_binding(set_layout);579if (dynamic_binding == NULL)580return set_layout->descriptor_count;581582assert(var_desc_count <= dynamic_binding->array_size);583uint32_t shrink = dynamic_binding->array_size - var_desc_count;584585if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)586return set_layout->descriptor_count;587588return 
set_layout->descriptor_count - shrink;589}590591static uint32_t592set_layout_buffer_view_count(const struct anv_descriptor_set_layout *set_layout,593uint32_t var_desc_count)594{595const struct anv_descriptor_set_binding_layout *dynamic_binding =596set_layout_dynamic_binding(set_layout);597if (dynamic_binding == NULL)598return set_layout->buffer_view_count;599600assert(var_desc_count <= dynamic_binding->array_size);601uint32_t shrink = dynamic_binding->array_size - var_desc_count;602603if (!(dynamic_binding->data & ANV_DESCRIPTOR_BUFFER_VIEW))604return set_layout->buffer_view_count;605606return set_layout->buffer_view_count - shrink;607}608609uint32_t610anv_descriptor_set_layout_descriptor_buffer_size(const struct anv_descriptor_set_layout *set_layout,611uint32_t var_desc_count)612{613const struct anv_descriptor_set_binding_layout *dynamic_binding =614set_layout_dynamic_binding(set_layout);615if (dynamic_binding == NULL)616return ALIGN(set_layout->descriptor_buffer_size, ANV_UBO_ALIGNMENT);617618assert(var_desc_count <= dynamic_binding->array_size);619uint32_t shrink = dynamic_binding->array_size - var_desc_count;620uint32_t set_size;621622if (dynamic_binding->type == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {623/* Inline uniform blocks are specified to use the descriptor array624* size as the size in bytes of the block.625*/626set_size = set_layout->descriptor_buffer_size - shrink;627} else {628set_size = set_layout->descriptor_buffer_size -629shrink * anv_descriptor_size(dynamic_binding);630}631632return ALIGN(set_size, ANV_UBO_ALIGNMENT);633}634635void anv_DestroyDescriptorSetLayout(636VkDevice _device,637VkDescriptorSetLayout _set_layout,638const VkAllocationCallbacks* pAllocator)639{640ANV_FROM_HANDLE(anv_device, device, _device);641ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);642643if (!set_layout)644return;645646anv_descriptor_set_layout_unref(device, set_layout);647}648649#define SHA1_UPDATE_VALUE(ctx, x) _mesa_sha1_update(ctx, 
&(x), sizeof(x));650651static void652sha1_update_immutable_sampler(struct mesa_sha1 *ctx,653const struct anv_sampler *sampler)654{655if (!sampler->conversion)656return;657658/* The only thing that affects the shader is ycbcr conversion */659_mesa_sha1_update(ctx, sampler->conversion,660sizeof(*sampler->conversion));661}662663static void664sha1_update_descriptor_set_binding_layout(struct mesa_sha1 *ctx,665const struct anv_descriptor_set_binding_layout *layout)666{667SHA1_UPDATE_VALUE(ctx, layout->flags);668SHA1_UPDATE_VALUE(ctx, layout->data);669SHA1_UPDATE_VALUE(ctx, layout->max_plane_count);670SHA1_UPDATE_VALUE(ctx, layout->array_size);671SHA1_UPDATE_VALUE(ctx, layout->descriptor_index);672SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_index);673SHA1_UPDATE_VALUE(ctx, layout->buffer_view_index);674SHA1_UPDATE_VALUE(ctx, layout->descriptor_offset);675676if (layout->immutable_samplers) {677for (uint16_t i = 0; i < layout->array_size; i++)678sha1_update_immutable_sampler(ctx, layout->immutable_samplers[i]);679}680}681682static void683sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,684const struct anv_descriptor_set_layout *layout)685{686SHA1_UPDATE_VALUE(ctx, layout->binding_count);687SHA1_UPDATE_VALUE(ctx, layout->descriptor_count);688SHA1_UPDATE_VALUE(ctx, layout->shader_stages);689SHA1_UPDATE_VALUE(ctx, layout->buffer_view_count);690SHA1_UPDATE_VALUE(ctx, layout->dynamic_offset_count);691SHA1_UPDATE_VALUE(ctx, layout->descriptor_buffer_size);692693for (uint16_t i = 0; i < layout->binding_count; i++)694sha1_update_descriptor_set_binding_layout(ctx, &layout->binding[i]);695}696697/*698* Pipeline layouts. These have nothing to do with the pipeline. 
They are699* just multiple descriptor set layouts pasted together700*/701702VkResult anv_CreatePipelineLayout(703VkDevice _device,704const VkPipelineLayoutCreateInfo* pCreateInfo,705const VkAllocationCallbacks* pAllocator,706VkPipelineLayout* pPipelineLayout)707{708ANV_FROM_HANDLE(anv_device, device, _device);709struct anv_pipeline_layout *layout;710711assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);712713layout = vk_object_alloc(&device->vk, pAllocator, sizeof(*layout),714VK_OBJECT_TYPE_PIPELINE_LAYOUT);715if (layout == NULL)716return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);717718layout->num_sets = pCreateInfo->setLayoutCount;719720unsigned dynamic_offset_count = 0;721722for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {723ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,724pCreateInfo->pSetLayouts[set]);725layout->set[set].layout = set_layout;726anv_descriptor_set_layout_ref(set_layout);727728layout->set[set].dynamic_offset_start = dynamic_offset_count;729for (uint32_t b = 0; b < set_layout->binding_count; b++) {730if (set_layout->binding[b].dynamic_offset_index < 0)731continue;732733dynamic_offset_count += set_layout->binding[b].array_size;734}735}736assert(dynamic_offset_count < MAX_DYNAMIC_BUFFERS);737738struct mesa_sha1 ctx;739_mesa_sha1_init(&ctx);740for (unsigned s = 0; s < layout->num_sets; s++) {741sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);742_mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,743sizeof(layout->set[s].dynamic_offset_start));744}745_mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));746_mesa_sha1_final(&ctx, layout->sha1);747748*pPipelineLayout = anv_pipeline_layout_to_handle(layout);749750return VK_SUCCESS;751}752753void anv_DestroyPipelineLayout(754VkDevice _device,755VkPipelineLayout _pipelineLayout,756const VkAllocationCallbacks* pAllocator)757{758ANV_FROM_HANDLE(anv_device, device, _device);759ANV_FROM_HANDLE(anv_pipeline_layout, 
pipeline_layout, _pipelineLayout);760761if (!pipeline_layout)762return;763764for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)765anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);766767vk_object_free(&device->vk, pAllocator, pipeline_layout);768}769770/*771* Descriptor pools.772*773* These are implemented using a big pool of memory and a free-list for the774* host memory allocations and a state_stream and a free list for the buffer775* view surface state. The spec allows us to fail to allocate due to776* fragmentation in all cases but two: 1) after pool reset, allocating up777* until the pool size with no freeing must succeed and 2) allocating and778* freeing only descriptor sets with the same layout. Case 1) is easy enogh,779* and the free lists lets us recycle blocks for case 2).780*/781782/* The vma heap reserves 0 to mean NULL; we have to offset by some ammount to783* ensure we can allocate the entire BO without hitting zero. The actual784* amount doesn't matter.785*/786#define POOL_HEAP_OFFSET 64787788#define EMPTY 1789790VkResult anv_CreateDescriptorPool(791VkDevice _device,792const VkDescriptorPoolCreateInfo* pCreateInfo,793const VkAllocationCallbacks* pAllocator,794VkDescriptorPool* pDescriptorPool)795{796ANV_FROM_HANDLE(anv_device, device, _device);797struct anv_descriptor_pool *pool;798799const VkDescriptorPoolInlineUniformBlockCreateInfoEXT *inline_info =800vk_find_struct_const(pCreateInfo->pNext,801DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT);802803uint32_t descriptor_count = 0;804uint32_t buffer_view_count = 0;805uint32_t descriptor_bo_size = 0;806for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {807enum anv_descriptor_data desc_data =808anv_descriptor_data_for_type(device->physical,809pCreateInfo->pPoolSizes[i].type);810811if (desc_data & ANV_DESCRIPTOR_BUFFER_VIEW)812buffer_view_count += pCreateInfo->pPoolSizes[i].descriptorCount;813814unsigned desc_data_size = anv_descriptor_data_size(desc_data) 
*815pCreateInfo->pPoolSizes[i].descriptorCount;816817/* Combined image sampler descriptors can take up to 3 slots if they818* hold a YCbCr image.819*/820if (pCreateInfo->pPoolSizes[i].type ==821VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER)822desc_data_size *= 3;823824if (pCreateInfo->pPoolSizes[i].type ==825VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT) {826/* Inline uniform blocks are specified to use the descriptor array827* size as the size in bytes of the block.828*/829assert(inline_info);830desc_data_size += pCreateInfo->pPoolSizes[i].descriptorCount;831}832833descriptor_bo_size += desc_data_size;834835descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;836}837/* We have to align descriptor buffer allocations to 32B so that we can838* push descriptor buffers. This means that each descriptor buffer839* allocated may burn up to 32B of extra space to get the right alignment.840* (Technically, it's at most 28B because we're always going to start at841* least 4B aligned but we're being conservative here.) 
Allocate enough842* extra space that we can chop it into maxSets pieces and align each one843* of them to 32B.844*/845descriptor_bo_size += ANV_UBO_ALIGNMENT * pCreateInfo->maxSets;846/* We align inline uniform blocks to ANV_UBO_ALIGNMENT */847if (inline_info) {848descriptor_bo_size +=849ANV_UBO_ALIGNMENT * inline_info->maxInlineUniformBlockBindings;850}851descriptor_bo_size = ALIGN(descriptor_bo_size, 4096);852853const size_t pool_size =854pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +855descriptor_count * sizeof(struct anv_descriptor) +856buffer_view_count * sizeof(struct anv_buffer_view);857const size_t total_size = sizeof(*pool) + pool_size;858859pool = vk_object_alloc(&device->vk, pAllocator, total_size,860VK_OBJECT_TYPE_DESCRIPTOR_POOL);861if (!pool)862return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);863864pool->size = pool_size;865pool->next = 0;866pool->free_list = EMPTY;867868if (descriptor_bo_size > 0) {869VkResult result = anv_device_alloc_bo(device,870"descriptors",871descriptor_bo_size,872ANV_BO_ALLOC_MAPPED |873ANV_BO_ALLOC_SNOOPED,8740 /* explicit_address */,875&pool->bo);876if (result != VK_SUCCESS) {877vk_object_free(&device->vk, pAllocator, pool);878return result;879}880881util_vma_heap_init(&pool->bo_heap, POOL_HEAP_OFFSET, descriptor_bo_size);882} else {883pool->bo = NULL;884}885886anv_state_stream_init(&pool->surface_state_stream,887&device->surface_state_pool, 4096);888pool->surface_state_free_list = NULL;889890list_inithead(&pool->desc_sets);891892*pDescriptorPool = anv_descriptor_pool_to_handle(pool);893894return VK_SUCCESS;895}896897void anv_DestroyDescriptorPool(898VkDevice _device,899VkDescriptorPool _pool,900const VkAllocationCallbacks* pAllocator)901{902ANV_FROM_HANDLE(anv_device, device, _device);903ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);904905if (!pool)906return;907908list_for_each_entry_safe(struct anv_descriptor_set, set,909&pool->desc_sets, pool_link) {910anv_descriptor_set_layout_unref(device, 
set->layout);911}912913if (pool->bo) {914util_vma_heap_finish(&pool->bo_heap);915anv_device_release_bo(device, pool->bo);916}917anv_state_stream_finish(&pool->surface_state_stream);918919vk_object_free(&device->vk, pAllocator, pool);920}921922VkResult anv_ResetDescriptorPool(923VkDevice _device,924VkDescriptorPool descriptorPool,925VkDescriptorPoolResetFlags flags)926{927ANV_FROM_HANDLE(anv_device, device, _device);928ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);929930list_for_each_entry_safe(struct anv_descriptor_set, set,931&pool->desc_sets, pool_link) {932anv_descriptor_set_layout_unref(device, set->layout);933}934list_inithead(&pool->desc_sets);935936pool->next = 0;937pool->free_list = EMPTY;938939if (pool->bo) {940util_vma_heap_finish(&pool->bo_heap);941util_vma_heap_init(&pool->bo_heap, POOL_HEAP_OFFSET, pool->bo->size);942}943944anv_state_stream_finish(&pool->surface_state_stream);945anv_state_stream_init(&pool->surface_state_stream,946&device->surface_state_pool, 4096);947pool->surface_state_free_list = NULL;948949return VK_SUCCESS;950}951952struct pool_free_list_entry {953uint32_t next;954uint32_t size;955};956957static VkResult958anv_descriptor_pool_alloc_set(struct anv_descriptor_pool *pool,959uint32_t size,960struct anv_descriptor_set **set)961{962if (size <= pool->size - pool->next) {963*set = (struct anv_descriptor_set *) (pool->data + pool->next);964(*set)->size = size;965pool->next += size;966return VK_SUCCESS;967} else {968struct pool_free_list_entry *entry;969uint32_t *link = &pool->free_list;970for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {971entry = (struct pool_free_list_entry *) (pool->data + f);972if (size <= entry->size) {973*link = entry->next;974*set = (struct anv_descriptor_set *) entry;975(*set)->size = entry->size;976return VK_SUCCESS;977}978link = &entry->next;979}980981if (pool->free_list != EMPTY) {982return vk_error(VK_ERROR_FRAGMENTED_POOL);983} else {984return 
vk_error(VK_ERROR_OUT_OF_POOL_MEMORY);985}986}987}988989static void990anv_descriptor_pool_free_set(struct anv_descriptor_pool *pool,991struct anv_descriptor_set *set)992{993/* Put the descriptor set allocation back on the free list. */994const uint32_t index = (char *) set - pool->data;995if (index + set->size == pool->next) {996pool->next = index;997} else {998struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;999entry->next = pool->free_list;1000entry->size = set->size;1001pool->free_list = (char *) entry - pool->data;1002}1003}10041005struct surface_state_free_list_entry {1006void *next;1007struct anv_state state;1008};10091010static struct anv_state1011anv_descriptor_pool_alloc_state(struct anv_descriptor_pool *pool)1012{1013struct surface_state_free_list_entry *entry =1014pool->surface_state_free_list;10151016if (entry) {1017struct anv_state state = entry->state;1018pool->surface_state_free_list = entry->next;1019assert(state.alloc_size == 64);1020return state;1021} else {1022return anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);1023}1024}10251026static void1027anv_descriptor_pool_free_state(struct anv_descriptor_pool *pool,1028struct anv_state state)1029{1030/* Put the buffer view surface state back on the free list. 
*/1031struct surface_state_free_list_entry *entry = state.map;1032entry->next = pool->surface_state_free_list;1033entry->state = state;1034pool->surface_state_free_list = entry;1035}10361037size_t1038anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout,1039uint32_t var_desc_count)1040{1041const uint32_t descriptor_count =1042set_layout_descriptor_count(layout, var_desc_count);1043const uint32_t buffer_view_count =1044set_layout_buffer_view_count(layout, var_desc_count);10451046return sizeof(struct anv_descriptor_set) +1047descriptor_count * sizeof(struct anv_descriptor) +1048buffer_view_count * sizeof(struct anv_buffer_view);1049}10501051VkResult1052anv_descriptor_set_create(struct anv_device *device,1053struct anv_descriptor_pool *pool,1054struct anv_descriptor_set_layout *layout,1055uint32_t var_desc_count,1056struct anv_descriptor_set **out_set)1057{1058struct anv_descriptor_set *set;1059const size_t size = anv_descriptor_set_layout_size(layout, var_desc_count);10601061VkResult result = anv_descriptor_pool_alloc_set(pool, size, &set);1062if (result != VK_SUCCESS)1063return result;10641065uint32_t descriptor_buffer_size =1066anv_descriptor_set_layout_descriptor_buffer_size(layout, var_desc_count);1067if (descriptor_buffer_size) {1068uint64_t pool_vma_offset =1069util_vma_heap_alloc(&pool->bo_heap, descriptor_buffer_size,1070ANV_UBO_ALIGNMENT);1071if (pool_vma_offset == 0) {1072anv_descriptor_pool_free_set(pool, set);1073return vk_error(VK_ERROR_FRAGMENTED_POOL);1074}1075assert(pool_vma_offset >= POOL_HEAP_OFFSET &&1076pool_vma_offset - POOL_HEAP_OFFSET <= INT32_MAX);1077set->desc_mem.offset = pool_vma_offset - POOL_HEAP_OFFSET;1078set->desc_mem.alloc_size = descriptor_buffer_size;1079set->desc_mem.map = pool->bo->map + set->desc_mem.offset;10801081set->desc_addr = (struct anv_address) {1082.bo = pool->bo,1083.offset = set->desc_mem.offset,1084};10851086enum isl_format format 
=1087anv_isl_format_for_descriptor_type(device,1088VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER);10891090set->desc_surface_state = anv_descriptor_pool_alloc_state(pool);1091anv_fill_buffer_surface_state(device, set->desc_surface_state, format,1092ISL_SURF_USAGE_CONSTANT_BUFFER_BIT,1093set->desc_addr,1094descriptor_buffer_size, 1);1095} else {1096set->desc_mem = ANV_STATE_NULL;1097set->desc_addr = (struct anv_address) { .bo = NULL, .offset = 0 };1098set->desc_surface_state = ANV_STATE_NULL;1099}11001101vk_object_base_init(&device->vk, &set->base,1102VK_OBJECT_TYPE_DESCRIPTOR_SET);1103set->pool = pool;1104set->layout = layout;1105anv_descriptor_set_layout_ref(layout);11061107set->buffer_view_count =1108set_layout_buffer_view_count(layout, var_desc_count);1109set->descriptor_count =1110set_layout_descriptor_count(layout, var_desc_count);11111112set->buffer_views =1113(struct anv_buffer_view *) &set->descriptors[set->descriptor_count];11141115/* By defining the descriptors to be zero now, we can later verify that1116* a descriptor has not been populated with user data.1117*/1118memset(set->descriptors, 0,1119sizeof(struct anv_descriptor) * set->descriptor_count);11201121/* Go through and fill out immutable samplers if we have any */1122struct anv_descriptor *desc = set->descriptors;1123for (uint32_t b = 0; b < layout->binding_count; b++) {1124if (layout->binding[b].immutable_samplers) {1125for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {1126/* The type will get changed to COMBINED_IMAGE_SAMPLER in1127* UpdateDescriptorSets if needed. 
However, if the descriptor1128* set has an immutable sampler, UpdateDescriptorSets may never1129* touch it, so we need to make sure it's 100% valid now.1130*1131* We don't need to actually provide a sampler because the helper1132* will always write in the immutable sampler regardless of what1133* is in the sampler parameter.1134*/1135VkDescriptorImageInfo info = { };1136anv_descriptor_set_write_image_view(device, set, &info,1137VK_DESCRIPTOR_TYPE_SAMPLER,1138b, i);1139}1140}1141desc += layout->binding[b].array_size;1142}11431144/* Allocate surface state for the buffer views. */1145for (uint32_t b = 0; b < set->buffer_view_count; b++) {1146set->buffer_views[b].surface_state =1147anv_descriptor_pool_alloc_state(pool);1148}11491150list_addtail(&set->pool_link, &pool->desc_sets);11511152*out_set = set;11531154return VK_SUCCESS;1155}11561157void1158anv_descriptor_set_destroy(struct anv_device *device,1159struct anv_descriptor_pool *pool,1160struct anv_descriptor_set *set)1161{1162anv_descriptor_set_layout_unref(device, set->layout);11631164if (set->desc_mem.alloc_size) {1165util_vma_heap_free(&pool->bo_heap,1166(uint64_t)set->desc_mem.offset + POOL_HEAP_OFFSET,1167set->desc_mem.alloc_size);1168anv_descriptor_pool_free_state(pool, set->desc_surface_state);1169}11701171for (uint32_t b = 0; b < set->buffer_view_count; b++)1172anv_descriptor_pool_free_state(pool, set->buffer_views[b].surface_state);11731174list_del(&set->pool_link);11751176vk_object_base_finish(&set->base);1177anv_descriptor_pool_free_set(pool, set);1178}11791180VkResult anv_AllocateDescriptorSets(1181VkDevice _device,1182const VkDescriptorSetAllocateInfo* pAllocateInfo,1183VkDescriptorSet* pDescriptorSets)1184{1185ANV_FROM_HANDLE(anv_device, device, _device);1186ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);11871188VkResult result = VK_SUCCESS;1189struct anv_descriptor_set *set;1190uint32_t i;11911192const VkDescriptorSetVariableDescriptorCountAllocateInfo *vdcai 
=1193vk_find_struct_const(pAllocateInfo->pNext,1194DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO);11951196for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {1197ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,1198pAllocateInfo->pSetLayouts[i]);11991200uint32_t var_desc_count = 0;1201if (vdcai != NULL && vdcai->descriptorSetCount > 0) {1202assert(vdcai->descriptorSetCount == pAllocateInfo->descriptorSetCount);1203var_desc_count = vdcai->pDescriptorCounts[i];1204}12051206result = anv_descriptor_set_create(device, pool, layout,1207var_desc_count, &set);1208if (result != VK_SUCCESS)1209break;12101211pDescriptorSets[i] = anv_descriptor_set_to_handle(set);1212}12131214if (result != VK_SUCCESS)1215anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,1216i, pDescriptorSets);12171218return result;1219}12201221VkResult anv_FreeDescriptorSets(1222VkDevice _device,1223VkDescriptorPool descriptorPool,1224uint32_t count,1225const VkDescriptorSet* pDescriptorSets)1226{1227ANV_FROM_HANDLE(anv_device, device, _device);1228ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);12291230for (uint32_t i = 0; i < count; i++) {1231ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);12321233if (!set)1234continue;12351236anv_descriptor_set_destroy(device, pool, set);1237}12381239return VK_SUCCESS;1240}12411242static void1243anv_descriptor_set_write_image_param(uint32_t *param_desc_map,1244const struct brw_image_param *param)1245{1246#define WRITE_PARAM_FIELD(field, FIELD) \1247for (unsigned i = 0; i < ARRAY_SIZE(param->field); i++) \1248param_desc_map[BRW_IMAGE_PARAM_##FIELD##_OFFSET + i] = param->field[i]12491250WRITE_PARAM_FIELD(offset, OFFSET);1251WRITE_PARAM_FIELD(size, SIZE);1252WRITE_PARAM_FIELD(stride, STRIDE);1253WRITE_PARAM_FIELD(tiling, TILING);1254WRITE_PARAM_FIELD(swizzling, SWIZZLING);1255WRITE_PARAM_FIELD(size, SIZE);12561257#undef WRITE_PARAM_FIELD1258}12591260static uint32_t1261anv_surface_state_to_handle(struct anv_state 
state)1262{1263/* Bits 31:12 of the bindless surface offset in the extended message1264* descriptor is bits 25:6 of the byte-based address.1265*/1266assert(state.offset >= 0);1267uint32_t offset = state.offset;1268assert((offset & 0x3f) == 0 && offset < (1 << 26));1269return offset << 6;1270}12711272void1273anv_descriptor_set_write_image_view(struct anv_device *device,1274struct anv_descriptor_set *set,1275const VkDescriptorImageInfo * const info,1276VkDescriptorType type,1277uint32_t binding,1278uint32_t element)1279{1280const struct anv_descriptor_set_binding_layout *bind_layout =1281&set->layout->binding[binding];1282struct anv_descriptor *desc =1283&set->descriptors[bind_layout->descriptor_index + element];1284struct anv_image_view *image_view = NULL;1285struct anv_sampler *sampler = NULL;12861287/* We get called with just VK_DESCRIPTOR_TYPE_SAMPLER as part of descriptor1288* set initialization to set the bindless samplers.1289*/1290assert(type == bind_layout->type ||1291type == VK_DESCRIPTOR_TYPE_SAMPLER);12921293switch (type) {1294case VK_DESCRIPTOR_TYPE_SAMPLER:1295sampler = bind_layout->immutable_samplers ?1296bind_layout->immutable_samplers[element] :1297anv_sampler_from_handle(info->sampler);1298break;12991300case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:1301image_view = anv_image_view_from_handle(info->imageView);1302sampler = bind_layout->immutable_samplers ?1303bind_layout->immutable_samplers[element] :1304anv_sampler_from_handle(info->sampler);1305break;13061307case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:1308case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:1309case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:1310image_view = anv_image_view_from_handle(info->imageView);1311break;13121313default:1314unreachable("invalid descriptor type");1315}13161317*desc = (struct anv_descriptor) {1318.type = type,1319.layout = info->imageLayout,1320.image_view = image_view,1321.sampler = sampler,1322};13231324void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +1325element * 
anv_descriptor_size(bind_layout);1326memset(desc_map, 0, anv_descriptor_size(bind_layout));13271328if (bind_layout->data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {1329struct anv_sampled_image_descriptor desc_data[3];1330memset(desc_data, 0, sizeof(desc_data));13311332if (image_view) {1333for (unsigned p = 0; p < image_view->n_planes; p++) {1334struct anv_surface_state sstate =1335(desc->layout == VK_IMAGE_LAYOUT_GENERAL) ?1336image_view->planes[p].general_sampler_surface_state :1337image_view->planes[p].optimal_sampler_surface_state;1338desc_data[p].image = anv_surface_state_to_handle(sstate.state);1339}1340}13411342if (sampler) {1343for (unsigned p = 0; p < sampler->n_planes; p++)1344desc_data[p].sampler = sampler->bindless_state.offset + p * 32;1345}13461347/* We may have max_plane_count < 0 if this isn't a sampled image but it1348* can be no more than the size of our array of handles.1349*/1350assert(bind_layout->max_plane_count <= ARRAY_SIZE(desc_data));1351memcpy(desc_map, desc_data,1352MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));1353}13541355if (image_view == NULL)1356return;13571358if (bind_layout->data & ANV_DESCRIPTOR_STORAGE_IMAGE) {1359assert(!(bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM));1360assert(image_view->n_planes == 1);1361struct anv_storage_image_descriptor desc_data = {1362.read_write = anv_surface_state_to_handle(1363image_view->planes[0].storage_surface_state.state),1364.write_only = anv_surface_state_to_handle(1365image_view->planes[0].writeonly_storage_surface_state.state),1366};1367memcpy(desc_map, &desc_data, sizeof(desc_data));1368}13691370if (bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM) {1371/* Storage images can only ever have one plane */1372assert(image_view->n_planes == 1);1373const struct brw_image_param *image_param =1374&image_view->planes[0].storage_image_param;13751376anv_descriptor_set_write_image_param(desc_map, image_param);1377}13781379if (bind_layout->data & ANV_DESCRIPTOR_TEXTURE_SWIZZLE) 
{1380assert(!(bind_layout->data & ANV_DESCRIPTOR_SAMPLED_IMAGE));1381assert(image_view);1382struct anv_texture_swizzle_descriptor desc_data[3];1383memset(desc_data, 0, sizeof(desc_data));13841385for (unsigned p = 0; p < image_view->n_planes; p++) {1386desc_data[p] = (struct anv_texture_swizzle_descriptor) {1387.swizzle = {1388(uint8_t)image_view->planes[p].isl.swizzle.r,1389(uint8_t)image_view->planes[p].isl.swizzle.g,1390(uint8_t)image_view->planes[p].isl.swizzle.b,1391(uint8_t)image_view->planes[p].isl.swizzle.a,1392},1393};1394}1395memcpy(desc_map, desc_data,1396MAX2(1, bind_layout->max_plane_count) * sizeof(desc_data[0]));1397}1398}13991400void1401anv_descriptor_set_write_buffer_view(struct anv_device *device,1402struct anv_descriptor_set *set,1403VkDescriptorType type,1404struct anv_buffer_view *buffer_view,1405uint32_t binding,1406uint32_t element)1407{1408const struct anv_descriptor_set_binding_layout *bind_layout =1409&set->layout->binding[binding];1410struct anv_descriptor *desc =1411&set->descriptors[bind_layout->descriptor_index + element];14121413assert(type == bind_layout->type);14141415void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +1416element * anv_descriptor_size(bind_layout);14171418if (buffer_view == NULL) {1419*desc = (struct anv_descriptor) { .type = type, };1420memset(desc_map, 0, anv_descriptor_size(bind_layout));1421return;1422}14231424*desc = (struct anv_descriptor) {1425.type = type,1426.buffer_view = buffer_view,1427};14281429if (bind_layout->data & ANV_DESCRIPTOR_SAMPLED_IMAGE) {1430struct anv_sampled_image_descriptor desc_data = {1431.image = anv_surface_state_to_handle(buffer_view->surface_state),1432};1433memcpy(desc_map, &desc_data, sizeof(desc_data));1434}14351436if (bind_layout->data & ANV_DESCRIPTOR_STORAGE_IMAGE) {1437assert(!(bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM));1438struct anv_storage_image_descriptor desc_data = {1439.read_write = 
anv_surface_state_to_handle(1440buffer_view->storage_surface_state),1441.write_only = anv_surface_state_to_handle(1442buffer_view->writeonly_storage_surface_state),1443};1444memcpy(desc_map, &desc_data, sizeof(desc_data));1445}14461447if (bind_layout->data & ANV_DESCRIPTOR_IMAGE_PARAM) {1448anv_descriptor_set_write_image_param(desc_map,1449&buffer_view->storage_image_param);1450}1451}14521453void1454anv_descriptor_set_write_buffer(struct anv_device *device,1455struct anv_descriptor_set *set,1456struct anv_state_stream *alloc_stream,1457VkDescriptorType type,1458struct anv_buffer *buffer,1459uint32_t binding,1460uint32_t element,1461VkDeviceSize offset,1462VkDeviceSize range)1463{1464const struct anv_descriptor_set_binding_layout *bind_layout =1465&set->layout->binding[binding];1466struct anv_descriptor *desc =1467&set->descriptors[bind_layout->descriptor_index + element];14681469assert(type == bind_layout->type);14701471void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +1472element * anv_descriptor_size(bind_layout);14731474if (buffer == NULL) {1475*desc = (struct anv_descriptor) { .type = type, };1476memset(desc_map, 0, anv_descriptor_size(bind_layout));1477return;1478}14791480struct anv_address bind_addr = anv_address_add(buffer->address, offset);1481uint64_t bind_range = anv_buffer_get_range(buffer, offset, range);14821483/* We report a bounds checking alignment of 32B for the sake of block1484* messages which read an entire register worth at a time.1485*/1486if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||1487type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)1488bind_range = align_u64(bind_range, ANV_UBO_ALIGNMENT);14891490if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||1491type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {1492*desc = (struct anv_descriptor) {1493.type = type,1494.buffer = buffer,1495.offset = offset,1496.range = range,1497};1498} else {1499assert(bind_layout->data & ANV_DESCRIPTOR_BUFFER_VIEW);1500struct 
anv_buffer_view *bview =1501&set->buffer_views[bind_layout->buffer_view_index + element];15021503bview->format = anv_isl_format_for_descriptor_type(device, type);1504bview->range = bind_range;1505bview->address = bind_addr;15061507/* If we're writing descriptors through a push command, we need to1508* allocate the surface state from the command buffer. Otherwise it will1509* be allocated by the descriptor pool when calling1510* vkAllocateDescriptorSets. */1511if (alloc_stream)1512bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);15131514isl_surf_usage_flags_t usage =1515(type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER ||1516type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ?1517ISL_SURF_USAGE_CONSTANT_BUFFER_BIT :1518ISL_SURF_USAGE_STORAGE_BIT;15191520anv_fill_buffer_surface_state(device, bview->surface_state,1521bview->format, usage,1522bind_addr, bind_range, 1);15231524*desc = (struct anv_descriptor) {1525.type = type,1526.buffer_view = bview,1527};1528}15291530if (bind_layout->data & ANV_DESCRIPTOR_ADDRESS_RANGE) {1531struct anv_address_range_descriptor desc_data = {1532.address = anv_address_physical(bind_addr),1533.range = bind_range,1534};1535memcpy(desc_map, &desc_data, sizeof(desc_data));1536}1537}15381539void1540anv_descriptor_set_write_inline_uniform_data(struct anv_device *device,1541struct anv_descriptor_set *set,1542uint32_t binding,1543const void *data,1544size_t offset,1545size_t size)1546{1547const struct anv_descriptor_set_binding_layout *bind_layout =1548&set->layout->binding[binding];15491550assert(bind_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM);15511552void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset;15531554memcpy(desc_map + offset, data, size);1555}15561557void1558anv_descriptor_set_write_acceleration_structure(struct anv_device *device,1559struct anv_descriptor_set *set,1560struct anv_acceleration_structure *accel,1561uint32_t binding,1562uint32_t element)1563{1564const struct 
anv_descriptor_set_binding_layout *bind_layout =1565&set->layout->binding[binding];1566struct anv_descriptor *desc =1567&set->descriptors[bind_layout->descriptor_index + element];15681569assert(bind_layout->type == VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR);1570*desc = (struct anv_descriptor) {1571.type = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,1572};15731574struct anv_address_range_descriptor desc_data = { };1575if (accel != NULL) {1576desc_data.address = anv_address_physical(accel->address);1577desc_data.range = accel->size;1578}1579assert(anv_descriptor_size(bind_layout) == sizeof(desc_data));15801581void *desc_map = set->desc_mem.map + bind_layout->descriptor_offset +1582element * sizeof(desc_data);1583memcpy(desc_map, &desc_data, sizeof(desc_data));1584}15851586void anv_UpdateDescriptorSets(1587VkDevice _device,1588uint32_t descriptorWriteCount,1589const VkWriteDescriptorSet* pDescriptorWrites,1590uint32_t descriptorCopyCount,1591const VkCopyDescriptorSet* pDescriptorCopies)1592{1593ANV_FROM_HANDLE(anv_device, device, _device);15941595for (uint32_t i = 0; i < descriptorWriteCount; i++) {1596const VkWriteDescriptorSet *write = &pDescriptorWrites[i];1597ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);15981599switch (write->descriptorType) {1600case VK_DESCRIPTOR_TYPE_SAMPLER:1601case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:1602case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:1603case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:1604case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:1605for (uint32_t j = 0; j < write->descriptorCount; j++) {1606anv_descriptor_set_write_image_view(device, set,1607write->pImageInfo + j,1608write->descriptorType,1609write->dstBinding,1610write->dstArrayElement + j);1611}1612break;16131614case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:1615case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:1616for (uint32_t j = 0; j < write->descriptorCount; j++) {1617ANV_FROM_HANDLE(anv_buffer_view, 
bview,1618write->pTexelBufferView[j]);16191620anv_descriptor_set_write_buffer_view(device, set,1621write->descriptorType,1622bview,1623write->dstBinding,1624write->dstArrayElement + j);1625}1626break;16271628case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:1629case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:1630case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:1631case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:1632for (uint32_t j = 0; j < write->descriptorCount; j++) {1633ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);16341635anv_descriptor_set_write_buffer(device, set,1636NULL,1637write->descriptorType,1638buffer,1639write->dstBinding,1640write->dstArrayElement + j,1641write->pBufferInfo[j].offset,1642write->pBufferInfo[j].range);1643}1644break;16451646case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT: {1647const VkWriteDescriptorSetInlineUniformBlockEXT *inline_write =1648vk_find_struct_const(write->pNext,1649WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT);1650assert(inline_write->dataSize == write->descriptorCount);1651anv_descriptor_set_write_inline_uniform_data(device, set,1652write->dstBinding,1653inline_write->pData,1654write->dstArrayElement,1655inline_write->dataSize);1656break;1657}16581659case VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {1660const VkWriteDescriptorSetAccelerationStructureKHR *accel_write =1661vk_find_struct_const(write, WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR);1662assert(accel_write->accelerationStructureCount ==1663write->descriptorCount);1664for (uint32_t j = 0; j < write->descriptorCount; j++) {1665ANV_FROM_HANDLE(anv_acceleration_structure, accel,1666accel_write->pAccelerationStructures[j]);1667anv_descriptor_set_write_acceleration_structure(device, set, accel,1668write->dstBinding,1669write->dstArrayElement + j);1670}1671break;1672}16731674default:1675break;1676}1677}16781679for (uint32_t i = 0; i < descriptorCopyCount; i++) {1680const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];1681ANV_FROM_HANDLE(anv_descriptor_set, 
src, copy->srcSet);1682ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);16831684const struct anv_descriptor_set_binding_layout *src_layout =1685&src->layout->binding[copy->srcBinding];1686struct anv_descriptor *src_desc =1687&src->descriptors[src_layout->descriptor_index];1688src_desc += copy->srcArrayElement;16891690const struct anv_descriptor_set_binding_layout *dst_layout =1691&dst->layout->binding[copy->dstBinding];1692struct anv_descriptor *dst_desc =1693&dst->descriptors[dst_layout->descriptor_index];1694dst_desc += copy->dstArrayElement;16951696if (src_layout->data & ANV_DESCRIPTOR_INLINE_UNIFORM) {1697assert(src_layout->data == ANV_DESCRIPTOR_INLINE_UNIFORM);1698memcpy(dst->desc_mem.map + dst_layout->descriptor_offset +1699copy->dstArrayElement,1700src->desc_mem.map + src_layout->descriptor_offset +1701copy->srcArrayElement,1702copy->descriptorCount);1703} else {1704for (uint32_t j = 0; j < copy->descriptorCount; j++)1705dst_desc[j] = src_desc[j];17061707unsigned desc_size = anv_descriptor_size(src_layout);1708if (desc_size > 0) {1709assert(desc_size == anv_descriptor_size(dst_layout));1710memcpy(dst->desc_mem.map + dst_layout->descriptor_offset +1711copy->dstArrayElement * desc_size,1712src->desc_mem.map + src_layout->descriptor_offset +1713copy->srcArrayElement * desc_size,1714copy->descriptorCount * desc_size);1715}1716}1717}1718}17191720/*1721* Descriptor update templates.1722*/17231724void1725anv_descriptor_set_write_template(struct anv_device *device,1726struct anv_descriptor_set *set,1727struct anv_state_stream *alloc_stream,1728const struct anv_descriptor_update_template *template,1729const void *data)1730{1731for (uint32_t i = 0; i < template->entry_count; i++) {1732const struct anv_descriptor_template_entry *entry =1733&template->entries[i];17341735switch (entry->type) {1736case VK_DESCRIPTOR_TYPE_SAMPLER:1737case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:1738case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:1739case 
VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:1740case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:1741for (uint32_t j = 0; j < entry->array_count; j++) {1742const VkDescriptorImageInfo *info =1743data + entry->offset + j * entry->stride;1744anv_descriptor_set_write_image_view(device, set,1745info, entry->type,1746entry->binding,1747entry->array_element + j);1748}1749break;17501751case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:1752case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:1753for (uint32_t j = 0; j < entry->array_count; j++) {1754const VkBufferView *_bview =1755data + entry->offset + j * entry->stride;1756ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);17571758anv_descriptor_set_write_buffer_view(device, set,1759entry->type,1760bview,1761entry->binding,1762entry->array_element + j);1763}1764break;17651766case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:1767case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:1768case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:1769case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:1770for (uint32_t j = 0; j < entry->array_count; j++) {1771const VkDescriptorBufferInfo *info =1772data + entry->offset + j * entry->stride;1773ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);17741775anv_descriptor_set_write_buffer(device, set,1776alloc_stream,1777entry->type,1778buffer,1779entry->binding,1780entry->array_element + j,1781info->offset, info->range);1782}1783break;17841785case VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT:1786anv_descriptor_set_write_inline_uniform_data(device, set,1787entry->binding,1788data + entry->offset,1789entry->array_element,1790entry->array_count);1791break;17921793default:1794break;1795}1796}1797}17981799VkResult anv_CreateDescriptorUpdateTemplate(1800VkDevice _device,1801const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,1802const VkAllocationCallbacks* pAllocator,1803VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate)1804{1805ANV_FROM_HANDLE(anv_device, device, _device);1806struct anv_descriptor_update_template *template;18071808size_t size = 
sizeof(*template) +1809pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);1810template = vk_object_alloc(&device->vk, pAllocator, size,1811VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE);1812if (template == NULL)1813return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);18141815template->bind_point = pCreateInfo->pipelineBindPoint;18161817if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)1818template->set = pCreateInfo->set;18191820template->entry_count = pCreateInfo->descriptorUpdateEntryCount;1821for (uint32_t i = 0; i < template->entry_count; i++) {1822const VkDescriptorUpdateTemplateEntry *pEntry =1823&pCreateInfo->pDescriptorUpdateEntries[i];18241825template->entries[i] = (struct anv_descriptor_template_entry) {1826.type = pEntry->descriptorType,1827.binding = pEntry->dstBinding,1828.array_element = pEntry->dstArrayElement,1829.array_count = pEntry->descriptorCount,1830.offset = pEntry->offset,1831.stride = pEntry->stride,1832};1833}18341835*pDescriptorUpdateTemplate =1836anv_descriptor_update_template_to_handle(template);18371838return VK_SUCCESS;1839}18401841void anv_DestroyDescriptorUpdateTemplate(1842VkDevice _device,1843VkDescriptorUpdateTemplate descriptorUpdateTemplate,1844const VkAllocationCallbacks* pAllocator)1845{1846ANV_FROM_HANDLE(anv_device, device, _device);1847ANV_FROM_HANDLE(anv_descriptor_update_template, template,1848descriptorUpdateTemplate);18491850if (!template)1851return;18521853vk_object_free(&device->vk, pAllocator, template);1854}18551856void anv_UpdateDescriptorSetWithTemplate(1857VkDevice _device,1858VkDescriptorSet descriptorSet,1859VkDescriptorUpdateTemplate descriptorUpdateTemplate,1860const void* pData)1861{1862ANV_FROM_HANDLE(anv_device, device, _device);1863ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);1864ANV_FROM_HANDLE(anv_descriptor_update_template, template,1865descriptorUpdateTemplate);18661867anv_descriptor_set_write_template(device, set, NULL, template, 
pData);1868}186918701871