/* Path: blob/21.2-virgl/src/gallium/drivers/zink/zink_descriptors.c (4570 views) */
/*1* Copyright © 2020 Mike Blumenkrantz2*3* Permission is hereby granted, free of charge, to any person obtaining a4* copy of this software and associated documentation files (the "Software"),5* to deal in the Software without restriction, including without limitation6* the rights to use, copy, modify, merge, publish, distribute, sublicense,7* and/or sell copies of the Software, and to permit persons to whom the8* Software is furnished to do so, subject to the following conditions:9*10* The above copyright notice and this permission notice (including the next11* paragraph) shall be included in all copies or substantial portions of the12* Software.13*14* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR15* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,16* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL17* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER18* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING19* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS20* IN THE SOFTWARE.21*22* Authors:23* Mike Blumenkrantz <[email protected]>24*/2526#include "tgsi/tgsi_from_mesa.h"27282930#include "zink_context.h"31#include "zink_descriptors.h"32#include "zink_program.h"33#include "zink_resource.h"34#include "zink_screen.h"3536#define XXH_INLINE_ALL37#include "util/xxhash.h"383940struct zink_descriptor_pool {41struct pipe_reference reference;42enum zink_descriptor_type type;43struct hash_table *desc_sets;44struct hash_table *free_desc_sets;45struct util_dynarray alloc_desc_sets;46VkDescriptorPool descpool;47struct zink_descriptor_pool_key key;48unsigned num_resources;49unsigned num_sets_allocated;50simple_mtx_t mtx;51};5253struct zink_descriptor_set {54struct zink_descriptor_pool *pool;55struct pipe_reference reference; //incremented for batch usage56VkDescriptorSet desc_set;57uint32_t hash;58bool invalid;59bool punted;60bool 
recycled;61struct zink_descriptor_state_key key;62struct zink_batch_usage *batch_uses;63#ifndef NDEBUG64/* for extra debug asserts */65unsigned num_resources;66#endif67union {68struct zink_resource_object **res_objs;69struct {70struct zink_descriptor_surface *surfaces;71struct zink_sampler_state **sampler_states;72};73};74};7576union zink_program_descriptor_refs {77struct zink_resource **res;78struct zink_descriptor_surface *dsurf;79struct {80struct zink_descriptor_surface *dsurf;81struct zink_sampler_state **sampler_state;82} sampler;83};8485struct zink_program_descriptor_data_cached {86struct zink_program_descriptor_data base;87struct zink_descriptor_pool *pool[ZINK_DESCRIPTOR_TYPES];88struct zink_descriptor_set *last_set[ZINK_DESCRIPTOR_TYPES];89unsigned num_refs[ZINK_DESCRIPTOR_TYPES];90union zink_program_descriptor_refs *refs[ZINK_DESCRIPTOR_TYPES];91};929394static inline struct zink_program_descriptor_data_cached *95pdd_cached(struct zink_program *pg)96{97return (struct zink_program_descriptor_data_cached*)pg->dd;98}99100static bool101batch_add_desc_set(struct zink_batch *batch, struct zink_descriptor_set *zds)102{103if (zink_batch_usage_matches(zds->batch_uses, batch->state) ||104!batch_ptr_add_usage(batch, batch->state->dd->desc_sets, zds))105return false;106pipe_reference(NULL, &zds->reference);107zink_batch_usage_set(&zds->batch_uses, batch->state);108return true;109}110111static void112debug_describe_zink_descriptor_pool(char *buf, const struct zink_descriptor_pool *ptr)113{114sprintf(buf, "zink_descriptor_pool");115}116117static inline uint32_t118get_sampler_view_hash(const struct zink_sampler_view *sampler_view)119{120if (!sampler_view)121return 0;122return sampler_view->base.target == PIPE_BUFFER ?123sampler_view->buffer_view->hash : sampler_view->image_view->hash;124}125126static inline uint32_t127get_image_view_hash(const struct zink_image_view *image_view)128{129if (!image_view || !image_view->base.resource)130return 0;131return 
image_view->base.resource->target == PIPE_BUFFER ?132image_view->buffer_view->hash : image_view->surface->hash;133}134135uint32_t136zink_get_sampler_view_hash(struct zink_context *ctx, struct zink_sampler_view *sampler_view, bool is_buffer)137{138return get_sampler_view_hash(sampler_view) ? get_sampler_view_hash(sampler_view) :139(is_buffer ? zink_screen(ctx->base.screen)->null_descriptor_hashes.buffer_view :140zink_screen(ctx->base.screen)->null_descriptor_hashes.image_view);141}142143uint32_t144zink_get_image_view_hash(struct zink_context *ctx, struct zink_image_view *image_view, bool is_buffer)145{146return get_image_view_hash(image_view) ? get_image_view_hash(image_view) :147(is_buffer ? zink_screen(ctx->base.screen)->null_descriptor_hashes.buffer_view :148zink_screen(ctx->base.screen)->null_descriptor_hashes.image_view);149}150151#ifndef NDEBUG152static uint32_t153get_descriptor_surface_hash(struct zink_context *ctx, struct zink_descriptor_surface *dsurf)154{155return dsurf->is_buffer ? (dsurf->bufferview ? dsurf->bufferview->hash : zink_screen(ctx->base.screen)->null_descriptor_hashes.buffer_view) :156(dsurf->surface ? 
dsurf->surface->hash : zink_screen(ctx->base.screen)->null_descriptor_hashes.image_view);157}158#endif159160static bool161desc_state_equal(const void *a, const void *b)162{163const struct zink_descriptor_state_key *a_k = (void*)a;164const struct zink_descriptor_state_key *b_k = (void*)b;165166for (unsigned i = 0; i < ZINK_SHADER_COUNT; i++) {167if (a_k->exists[i] != b_k->exists[i])168return false;169if (a_k->exists[i] && b_k->exists[i] &&170a_k->state[i] != b_k->state[i])171return false;172}173return true;174}175176static uint32_t177desc_state_hash(const void *key)178{179const struct zink_descriptor_state_key *d_key = (void*)key;180uint32_t hash = 0;181bool first = true;182for (unsigned i = 0; i < ZINK_SHADER_COUNT; i++) {183if (d_key->exists[i]) {184if (!first)185hash = XXH32(&d_key->state[i], sizeof(uint32_t), hash);186else187hash = d_key->state[i];188first = false;189}190}191return hash;192}193194static void195pop_desc_set_ref(struct zink_descriptor_set *zds, struct util_dynarray *refs)196{197size_t size = sizeof(struct zink_descriptor_reference);198unsigned num_elements = refs->size / size;199for (unsigned i = 0; i < num_elements; i++) {200struct zink_descriptor_reference *ref = util_dynarray_element(refs, struct zink_descriptor_reference, i);201if (&zds->invalid == ref->invalid) {202memcpy(util_dynarray_element(refs, struct zink_descriptor_reference, i),203util_dynarray_pop_ptr(refs, struct zink_descriptor_reference), size);204break;205}206}207}208209static void210descriptor_set_invalidate(struct zink_descriptor_set *zds)211{212zds->invalid = true;213for (unsigned i = 0; i < zds->pool->key.layout->num_descriptors; i++) {214switch (zds->pool->type) {215case ZINK_DESCRIPTOR_TYPE_UBO:216case ZINK_DESCRIPTOR_TYPE_SSBO:217if (zds->res_objs[i])218pop_desc_set_ref(zds, &zds->res_objs[i]->desc_set_refs.refs);219zds->res_objs[i] = NULL;220break;221case ZINK_DESCRIPTOR_TYPE_IMAGE:222if (zds->surfaces[i].is_buffer) {223if 
(zds->surfaces[i].bufferview)224pop_desc_set_ref(zds, &zds->surfaces[i].bufferview->desc_set_refs.refs);225zds->surfaces[i].bufferview = NULL;226} else {227if (zds->surfaces[i].surface)228pop_desc_set_ref(zds, &zds->surfaces[i].surface->desc_set_refs.refs);229zds->surfaces[i].surface = NULL;230}231break;232case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:233if (zds->surfaces[i].is_buffer) {234if (zds->surfaces[i].bufferview)235pop_desc_set_ref(zds, &zds->surfaces[i].bufferview->desc_set_refs.refs);236zds->surfaces[i].bufferview = NULL;237} else {238if (zds->surfaces[i].surface)239pop_desc_set_ref(zds, &zds->surfaces[i].surface->desc_set_refs.refs);240zds->surfaces[i].surface = NULL;241}242if (zds->sampler_states[i])243pop_desc_set_ref(zds, &zds->sampler_states[i]->desc_set_refs.refs);244zds->sampler_states[i] = NULL;245break;246default:247break;248}249}250}251252#ifndef NDEBUG253static void254descriptor_pool_clear(struct hash_table *ht)255{256_mesa_hash_table_clear(ht, NULL);257}258#endif259260static void261descriptor_pool_free(struct zink_screen *screen, struct zink_descriptor_pool *pool)262{263if (!pool)264return;265if (pool->descpool)266vkDestroyDescriptorPool(screen->dev, pool->descpool, NULL);267268simple_mtx_lock(&pool->mtx);269#ifndef NDEBUG270if (pool->desc_sets)271descriptor_pool_clear(pool->desc_sets);272if (pool->free_desc_sets)273descriptor_pool_clear(pool->free_desc_sets);274#endif275if (pool->desc_sets)276_mesa_hash_table_destroy(pool->desc_sets, NULL);277if (pool->free_desc_sets)278_mesa_hash_table_destroy(pool->free_desc_sets, NULL);279280simple_mtx_unlock(&pool->mtx);281util_dynarray_fini(&pool->alloc_desc_sets);282simple_mtx_destroy(&pool->mtx);283ralloc_free(pool);284}285286static struct zink_descriptor_pool *287descriptor_pool_create(struct zink_screen *screen, enum zink_descriptor_type type,288struct zink_descriptor_layout_key *layout_key, VkDescriptorPoolSize *sizes, unsigned num_type_sizes)289{290struct zink_descriptor_pool *pool = rzalloc(NULL, struct 
zink_descriptor_pool);291if (!pool)292return NULL;293pipe_reference_init(&pool->reference, 1);294pool->type = type;295pool->key.layout = layout_key;296pool->key.num_type_sizes = num_type_sizes;297size_t types_size = num_type_sizes * sizeof(VkDescriptorPoolSize);298pool->key.sizes = ralloc_size(pool, types_size);299if (!pool->key.sizes) {300ralloc_free(pool);301return NULL;302}303memcpy(pool->key.sizes, sizes, types_size);304simple_mtx_init(&pool->mtx, mtx_plain);305for (unsigned i = 0; i < layout_key->num_descriptors; i++) {306pool->num_resources += layout_key->bindings[i].descriptorCount;307}308pool->desc_sets = _mesa_hash_table_create(NULL, desc_state_hash, desc_state_equal);309if (!pool->desc_sets)310goto fail;311312pool->free_desc_sets = _mesa_hash_table_create(NULL, desc_state_hash, desc_state_equal);313if (!pool->free_desc_sets)314goto fail;315316util_dynarray_init(&pool->alloc_desc_sets, NULL);317318VkDescriptorPoolCreateInfo dpci = {0};319dpci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;320dpci.pPoolSizes = sizes;321dpci.poolSizeCount = num_type_sizes;322dpci.flags = 0;323dpci.maxSets = ZINK_DEFAULT_MAX_DESCS;324if (vkCreateDescriptorPool(screen->dev, &dpci, 0, &pool->descpool) != VK_SUCCESS) {325debug_printf("vkCreateDescriptorPool failed\n");326goto fail;327}328329return pool;330fail:331descriptor_pool_free(screen, pool);332return NULL;333}334335static VkDescriptorSetLayout336descriptor_layout_create(struct zink_screen *screen, enum zink_descriptor_type t, VkDescriptorSetLayoutBinding *bindings, unsigned num_bindings)337{338VkDescriptorSetLayout dsl;339VkDescriptorSetLayoutCreateInfo dcslci = {0};340dcslci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;341dcslci.pNext = NULL;342VkDescriptorSetLayoutBindingFlagsCreateInfo fci = {0};343VkDescriptorBindingFlags flags[ZINK_MAX_DESCRIPTORS_PER_TYPE];344if (screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY) {345dcslci.pNext = &fci;346if (t == ZINK_DESCRIPTOR_TYPES)347dcslci.flags = 
VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR;348fci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO;349fci.bindingCount = num_bindings;350fci.pBindingFlags = flags;351for (unsigned i = 0; i < num_bindings; i++) {352flags[i] = 0;353}354}355dcslci.bindingCount = num_bindings;356dcslci.pBindings = bindings;357VkDescriptorSetLayoutSupport supp;358supp.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT;359supp.pNext = NULL;360supp.supported = VK_FALSE;361if (screen->vk.GetDescriptorSetLayoutSupport) {362screen->vk.GetDescriptorSetLayoutSupport(screen->dev, &dcslci, &supp);363if (supp.supported == VK_FALSE) {364debug_printf("vkGetDescriptorSetLayoutSupport claims layout is unsupported\n");365return VK_NULL_HANDLE;366}367}368if (vkCreateDescriptorSetLayout(screen->dev, &dcslci, 0, &dsl) != VK_SUCCESS)369debug_printf("vkCreateDescriptorSetLayout failed\n");370return dsl;371}372373static uint32_t374hash_descriptor_layout(const void *key)375{376uint32_t hash = 0;377const struct zink_descriptor_layout_key *k = key;378hash = XXH32(&k->num_descriptors, sizeof(unsigned), hash);379hash = XXH32(k->bindings, k->num_descriptors * sizeof(VkDescriptorSetLayoutBinding), hash);380381return hash;382}383384static bool385equals_descriptor_layout(const void *a, const void *b)386{387const struct zink_descriptor_layout_key *a_k = a;388const struct zink_descriptor_layout_key *b_k = b;389return a_k->num_descriptors == b_k->num_descriptors &&390!memcmp(a_k->bindings, b_k->bindings, a_k->num_descriptors * sizeof(VkDescriptorSetLayoutBinding));391}392393struct zink_descriptor_layout *394zink_descriptor_util_layout_get(struct zink_context *ctx, enum zink_descriptor_type type,395VkDescriptorSetLayoutBinding *bindings, unsigned num_bindings,396struct zink_descriptor_layout_key **layout_key)397{398struct zink_screen *screen = zink_screen(ctx->base.screen);399uint32_t hash = 0;400struct zink_descriptor_layout_key key = {401.num_descriptors = 
num_bindings,402.bindings = bindings,403};404405VkDescriptorSetLayoutBinding null_binding;406if (!bindings) {407null_binding.binding = 0;408null_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;409null_binding.descriptorCount = 1;410null_binding.pImmutableSamplers = NULL;411null_binding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT |412VK_SHADER_STAGE_GEOMETRY_BIT | VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT |413VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT | VK_SHADER_STAGE_COMPUTE_BIT;414key.bindings = &null_binding;415}416417if (type != ZINK_DESCRIPTOR_TYPES) {418hash = hash_descriptor_layout(&key);419struct hash_entry *he = _mesa_hash_table_search_pre_hashed(&ctx->desc_set_layouts[type], hash, &key);420if (he) {421*layout_key = (void*)he->key;422return he->data;423}424}425426VkDescriptorSetLayout dsl = descriptor_layout_create(screen, type, key.bindings, MAX2(num_bindings, 1));427if (!dsl)428return VK_NULL_HANDLE;429430struct zink_descriptor_layout_key *k = ralloc(ctx, struct zink_descriptor_layout_key);431k->num_descriptors = num_bindings;432size_t bindings_size = MAX2(num_bindings, 1) * sizeof(VkDescriptorSetLayoutBinding);433k->bindings = ralloc_size(k, bindings_size);434if (!k->bindings) {435ralloc_free(k);436vkDestroyDescriptorSetLayout(screen->dev, dsl, NULL);437return VK_NULL_HANDLE;438}439memcpy(k->bindings, key.bindings, bindings_size);440441struct zink_descriptor_layout *layout = rzalloc(ctx, struct zink_descriptor_layout);442layout->layout = dsl;443if (type != ZINK_DESCRIPTOR_TYPES) {444_mesa_hash_table_insert_pre_hashed(&ctx->desc_set_layouts[type], hash, k, layout);445}446*layout_key = k;447return layout;448}449450bool451zink_descriptor_util_push_layouts_get(struct zink_context *ctx, struct zink_descriptor_layout **dsls, struct zink_descriptor_layout_key **layout_keys)452{453struct zink_screen *screen = zink_screen(ctx->base.screen);454VkDescriptorSetLayoutBinding bindings[PIPE_SHADER_TYPES];455for (unsigned i = 0; i 
< PIPE_SHADER_TYPES; i++) {456bindings[i].binding = tgsi_processor_to_shader_stage(i);457bindings[i].descriptorType = screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY ?458VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER : VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;459bindings[i].descriptorCount = 1;460bindings[i].stageFlags = zink_shader_stage(i);461bindings[i].pImmutableSamplers = NULL;462}463enum zink_descriptor_type dsl_type = screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY &&464screen->info.have_KHR_push_descriptor ? ZINK_DESCRIPTOR_TYPES : ZINK_DESCRIPTOR_TYPE_UBO;465dsls[0] = zink_descriptor_util_layout_get(ctx, dsl_type, bindings, ZINK_SHADER_COUNT, &layout_keys[0]);466dsls[1] = zink_descriptor_util_layout_get(ctx, dsl_type, &bindings[PIPE_SHADER_COMPUTE], 1, &layout_keys[1]);467return dsls[0] && dsls[1];468}469470void471zink_descriptor_util_init_null_set(struct zink_context *ctx, VkDescriptorSet desc_set)472{473struct zink_screen *screen = zink_screen(ctx->base.screen);474VkDescriptorBufferInfo push_info;475VkWriteDescriptorSet push_wd;476push_wd.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;477push_wd.pNext = NULL;478push_wd.dstBinding = 0;479push_wd.dstArrayElement = 0;480push_wd.descriptorCount = 1;481push_wd.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;482push_wd.dstSet = desc_set;483push_wd.pBufferInfo = &push_info;484push_info.buffer = screen->info.rb2_feats.nullDescriptor ?485VK_NULL_HANDLE :486zink_resource(ctx->dummy_vertex_buffer)->obj->buffer;487push_info.offset = 0;488push_info.range = VK_WHOLE_SIZE;489vkUpdateDescriptorSets(screen->dev, 1, &push_wd, 0, NULL);490}491492VkImageLayout493zink_descriptor_util_image_layout_eval(const struct zink_resource *res, bool is_compute)494{495return res->image_bind_count[is_compute] ? VK_IMAGE_LAYOUT_GENERAL :496res->aspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT) ?497//Vulkan-Docs#1490498//(res->aspect == VK_IMAGE_ASPECT_DEPTH_BIT ? 
VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL :499//res->aspect == VK_IMAGE_ASPECT_STENCIL_BIT ? VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL :500(res->aspect == VK_IMAGE_ASPECT_DEPTH_BIT ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL :501res->aspect == VK_IMAGE_ASPECT_STENCIL_BIT ? VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL :502VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL) :503VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;504}505506static uint32_t507hash_descriptor_pool(const void *key)508{509uint32_t hash = 0;510const struct zink_descriptor_pool_key *k = key;511hash = XXH32(&k->num_type_sizes, sizeof(unsigned), hash);512hash = XXH32(&k->layout, sizeof(k->layout), hash);513hash = XXH32(k->sizes, k->num_type_sizes * sizeof(VkDescriptorPoolSize), hash);514515return hash;516}517518static bool519equals_descriptor_pool(const void *a, const void *b)520{521const struct zink_descriptor_pool_key *a_k = a;522const struct zink_descriptor_pool_key *b_k = b;523return a_k->num_type_sizes == b_k->num_type_sizes &&524a_k->layout == b_k->layout &&525!memcmp(a_k->sizes, b_k->sizes, a_k->num_type_sizes * sizeof(VkDescriptorPoolSize));526}527528static struct zink_descriptor_pool *529descriptor_pool_get(struct zink_context *ctx, enum zink_descriptor_type type,530struct zink_descriptor_layout_key *layout_key, VkDescriptorPoolSize *sizes, unsigned num_type_sizes)531{532uint32_t hash = 0;533if (type != ZINK_DESCRIPTOR_TYPES) {534struct zink_descriptor_pool_key key = {535.layout = layout_key,536.num_type_sizes = num_type_sizes,537.sizes = sizes,538};539540hash = hash_descriptor_pool(&key);541struct hash_entry *he = _mesa_hash_table_search_pre_hashed(ctx->dd->descriptor_pools[type], hash, &key);542if (he)543return (void*)he->data;544}545struct zink_descriptor_pool *pool = descriptor_pool_create(zink_screen(ctx->base.screen), type, layout_key, sizes, num_type_sizes);546if (type != ZINK_DESCRIPTOR_TYPES)547_mesa_hash_table_insert_pre_hashed(ctx->dd->descriptor_pools[type], hash, &pool->key, pool);548return 
pool;549}550551static bool552get_invalidated_desc_set(struct zink_descriptor_set *zds)553{554if (!zds->invalid)555return false;556return p_atomic_read(&zds->reference.count) == 1;557}558559bool560zink_descriptor_util_alloc_sets(struct zink_screen *screen, VkDescriptorSetLayout dsl, VkDescriptorPool pool, VkDescriptorSet *sets, unsigned num_sets)561{562VkDescriptorSetAllocateInfo dsai;563VkDescriptorSetLayout *layouts = alloca(sizeof(*layouts) * num_sets);564memset((void *)&dsai, 0, sizeof(dsai));565dsai.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;566dsai.pNext = NULL;567dsai.descriptorPool = pool;568dsai.descriptorSetCount = num_sets;569for (unsigned i = 0; i < num_sets; i ++)570layouts[i] = dsl;571dsai.pSetLayouts = layouts;572573if (vkAllocateDescriptorSets(screen->dev, &dsai, sets) != VK_SUCCESS) {574debug_printf("ZINK: %" PRIu64 " failed to allocate descriptor set :/\n", (uint64_t)dsl);575return false;576}577return true;578}579580unsigned581zink_descriptor_program_num_sizes(struct zink_program *pg, enum zink_descriptor_type type)582{583switch (type) {584case ZINK_DESCRIPTOR_TYPE_UBO:585return 1;586case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:587return !!pg->dd->sizes[ZDS_INDEX_COMBINED_SAMPLER].descriptorCount +588!!pg->dd->sizes[ZDS_INDEX_UNIFORM_TEXELS].descriptorCount;589case ZINK_DESCRIPTOR_TYPE_SSBO:590return 1;591case ZINK_DESCRIPTOR_TYPE_IMAGE:592return !!pg->dd->sizes[ZDS_INDEX_STORAGE_IMAGE].descriptorCount +593!!pg->dd->sizes[ZDS_INDEX_STORAGE_TEXELS].descriptorCount;594default: break;595}596unreachable("unknown type");597}598599static struct zink_descriptor_set *600allocate_desc_set(struct zink_context *ctx, struct zink_program *pg, enum zink_descriptor_type type, unsigned descs_used, bool is_compute)601{602struct zink_screen *screen = zink_screen(ctx->base.screen);603bool push_set = type == ZINK_DESCRIPTOR_TYPES;604struct zink_descriptor_pool *pool = push_set ? 
ctx->dd->push_pool[is_compute] : pdd_cached(pg)->pool[type];605#define DESC_BUCKET_FACTOR 10606unsigned bucket_size = pool->key.layout->num_descriptors ? DESC_BUCKET_FACTOR : 1;607if (pool->key.layout->num_descriptors) {608for (unsigned desc_factor = DESC_BUCKET_FACTOR; desc_factor < descs_used; desc_factor *= DESC_BUCKET_FACTOR)609bucket_size = desc_factor;610}611VkDescriptorSet *desc_set = alloca(sizeof(*desc_set) * bucket_size);612if (!zink_descriptor_util_alloc_sets(screen, push_set ? ctx->dd->push_dsl[is_compute]->layout : pg->dsl[type + 1], pool->descpool, desc_set, bucket_size))613return VK_NULL_HANDLE;614615struct zink_descriptor_set *alloc = ralloc_array(pool, struct zink_descriptor_set, bucket_size);616assert(alloc);617unsigned num_resources = pool->num_resources;618struct zink_resource_object **res_objs = NULL;619void **samplers = NULL;620struct zink_descriptor_surface *surfaces = NULL;621switch (type) {622case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:623samplers = rzalloc_array(pool, void*, num_resources * bucket_size);624assert(samplers);625FALLTHROUGH;626case ZINK_DESCRIPTOR_TYPE_IMAGE:627surfaces = rzalloc_array(pool, struct zink_descriptor_surface, num_resources * bucket_size);628assert(surfaces);629break;630default:631res_objs = rzalloc_array(pool, struct zink_resource_object*, num_resources * bucket_size);632assert(res_objs);633break;634}635for (unsigned i = 0; i < bucket_size; i ++) {636struct zink_descriptor_set *zds = &alloc[i];637pipe_reference_init(&zds->reference, 1);638zds->pool = pool;639zds->hash = 0;640zds->batch_uses = NULL;641zds->invalid = true;642zds->punted = zds->recycled = false;643#ifndef NDEBUG644zds->num_resources = num_resources;645#endif646switch (type) {647case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:648zds->sampler_states = (struct zink_sampler_state**)&samplers[i * pool->key.layout->num_descriptors];649FALLTHROUGH;650case ZINK_DESCRIPTOR_TYPE_IMAGE:651zds->surfaces = &surfaces[i * 
pool->key.layout->num_descriptors];652break;653default:654zds->res_objs = (struct zink_resource_object**)&res_objs[i * pool->key.layout->num_descriptors];655break;656}657zds->desc_set = desc_set[i];658if (i > 0)659util_dynarray_append(&pool->alloc_desc_sets, struct zink_descriptor_set *, zds);660}661pool->num_sets_allocated += bucket_size;662return alloc;663}664665static void666populate_zds_key(struct zink_context *ctx, enum zink_descriptor_type type, bool is_compute,667struct zink_descriptor_state_key *key, uint32_t push_usage)668{669if (is_compute) {670for (unsigned i = 1; i < ZINK_SHADER_COUNT; i++)671key->exists[i] = false;672key->exists[0] = true;673if (type == ZINK_DESCRIPTOR_TYPES)674key->state[0] = ctx->dd->push_state[is_compute];675else676key->state[0] = ctx->dd->descriptor_states[is_compute].state[type];677} else if (type == ZINK_DESCRIPTOR_TYPES) {678/* gfx only */679for (unsigned i = 0; i < ZINK_SHADER_COUNT; i++) {680if (push_usage & BITFIELD_BIT(i)) {681key->exists[i] = true;682key->state[i] = ctx->dd->gfx_push_state[i];683} else684key->exists[i] = false;685}686} else {687for (unsigned i = 0; i < ZINK_SHADER_COUNT; i++) {688key->exists[i] = ctx->dd->gfx_descriptor_states[i].valid[type];689key->state[i] = ctx->dd->gfx_descriptor_states[i].state[type];690}691}692}693694static void695punt_invalid_set(struct zink_descriptor_set *zds, struct hash_entry *he)696{697/* this is no longer usable, so we punt it for now until it gets recycled */698assert(!zds->recycled);699if (!he)700he = _mesa_hash_table_search_pre_hashed(zds->pool->desc_sets, zds->hash, &zds->key);701_mesa_hash_table_remove(zds->pool->desc_sets, he);702zds->punted = true;703}704705static struct zink_descriptor_set *706zink_descriptor_set_get(struct zink_context *ctx,707enum zink_descriptor_type type,708bool is_compute,709bool *cache_hit)710{711*cache_hit = false;712struct zink_descriptor_set *zds;713struct zink_program *pg = is_compute ? 
(struct zink_program *)ctx->curr_compute : (struct zink_program *)ctx->curr_program;714struct zink_batch *batch = &ctx->batch;715bool push_set = type == ZINK_DESCRIPTOR_TYPES;716struct zink_descriptor_pool *pool = push_set ? ctx->dd->push_pool[is_compute] : pdd_cached(pg)->pool[type];717unsigned descs_used = 1;718assert(type <= ZINK_DESCRIPTOR_TYPES);719720assert(pool->key.layout->num_descriptors);721uint32_t hash = push_set ? ctx->dd->push_state[is_compute] :722ctx->dd->descriptor_states[is_compute].state[type];723724struct zink_descriptor_set *last_set = push_set ? ctx->dd->last_set[is_compute] : pdd_cached(pg)->last_set[type];725/* if the current state hasn't changed since the last time it was used,726* it's impossible for this set to not be valid, which means that an727* early return here can be done safely and with no locking728*/729if (last_set && ((push_set && !ctx->dd->changed[is_compute][ZINK_DESCRIPTOR_TYPES]) ||730(!push_set && !ctx->dd->changed[is_compute][type]))) {731*cache_hit = true;732return last_set;733}734735struct zink_descriptor_state_key key;736populate_zds_key(ctx, type, is_compute, &key, pg->dd->push_usage);737738simple_mtx_lock(&pool->mtx);739if (last_set && last_set->hash == hash && desc_state_equal(&last_set->key, &key)) {740zds = last_set;741*cache_hit = !zds->invalid;742if (zds->recycled) {743struct hash_entry *he = _mesa_hash_table_search_pre_hashed(pool->free_desc_sets, hash, &key);744if (he)745_mesa_hash_table_remove(pool->free_desc_sets, he);746zds->recycled = false;747}748if (zds->invalid) {749if (zink_batch_usage_exists(zds->batch_uses))750punt_invalid_set(zds, NULL);751else752/* this set is guaranteed to be in pool->alloc_desc_sets */753goto skip_hash_tables;754zds = NULL;755}756if (zds)757goto out;758}759760struct hash_entry *he = _mesa_hash_table_search_pre_hashed(pool->desc_sets, hash, &key);761bool recycled = false, punted = false;762if (he) {763zds = (void*)he->data;764if (zds->invalid && 
zink_batch_usage_exists(zds->batch_uses)) {765punt_invalid_set(zds, he);766zds = NULL;767punted = true;768}769}770if (!he) {771he = _mesa_hash_table_search_pre_hashed(pool->free_desc_sets, hash, &key);772recycled = true;773}774if (he && !punted) {775zds = (void*)he->data;776*cache_hit = !zds->invalid;777if (recycled) {778/* need to migrate this entry back to the in-use hash */779_mesa_hash_table_remove(pool->free_desc_sets, he);780goto out;781}782goto quick_out;783}784skip_hash_tables:785if (util_dynarray_num_elements(&pool->alloc_desc_sets, struct zink_descriptor_set *)) {786/* grab one off the allocated array */787zds = util_dynarray_pop(&pool->alloc_desc_sets, struct zink_descriptor_set *);788goto out;789}790791if (_mesa_hash_table_num_entries(pool->free_desc_sets)) {792/* try for an invalidated set first */793unsigned count = 0;794hash_table_foreach(pool->free_desc_sets, he) {795struct zink_descriptor_set *tmp = he->data;796if ((count++ >= 100 && tmp->reference.count == 1) || get_invalidated_desc_set(he->data)) {797zds = tmp;798assert(p_atomic_read(&zds->reference.count) == 1);799descriptor_set_invalidate(zds);800_mesa_hash_table_remove(pool->free_desc_sets, he);801goto out;802}803}804}805806if (pool->num_sets_allocated + pool->key.layout->num_descriptors > ZINK_DEFAULT_MAX_DESCS) {807simple_mtx_unlock(&pool->mtx);808zink_fence_wait(&ctx->base);809zink_batch_reference_program(batch, pg);810return zink_descriptor_set_get(ctx, type, is_compute, cache_hit);811}812813zds = allocate_desc_set(ctx, pg, type, descs_used, is_compute);814out:815zds->hash = hash;816populate_zds_key(ctx, type, is_compute, &zds->key, pg->dd->push_usage);817zds->recycled = false;818_mesa_hash_table_insert_pre_hashed(pool->desc_sets, hash, &zds->key, zds);819quick_out:820zds->punted = zds->invalid = false;821batch_add_desc_set(batch, zds);822if (push_set)823ctx->dd->last_set[is_compute] = zds;824else825pdd_cached(pg)->last_set[type] = zds;826simple_mtx_unlock(&pool->mtx);827828return 
zds;829}830831void832zink_descriptor_set_recycle(struct zink_descriptor_set *zds)833{834struct zink_descriptor_pool *pool = zds->pool;835/* if desc set is still in use by a batch, don't recache */836uint32_t refcount = p_atomic_read(&zds->reference.count);837if (refcount != 1)838return;839/* this is a null set */840if (!pool->key.layout->num_descriptors)841return;842simple_mtx_lock(&pool->mtx);843if (zds->punted)844zds->invalid = true;845else {846/* if we've previously punted this set, then it won't have a hash or be in either of the tables */847struct hash_entry *he = _mesa_hash_table_search_pre_hashed(pool->desc_sets, zds->hash, &zds->key);848if (!he) {849/* desc sets can be used multiple times in the same batch */850simple_mtx_unlock(&pool->mtx);851return;852}853_mesa_hash_table_remove(pool->desc_sets, he);854}855856if (zds->invalid) {857descriptor_set_invalidate(zds);858util_dynarray_append(&pool->alloc_desc_sets, struct zink_descriptor_set *, zds);859} else {860zds->recycled = true;861_mesa_hash_table_insert_pre_hashed(pool->free_desc_sets, zds->hash, &zds->key, zds);862}863simple_mtx_unlock(&pool->mtx);864}865866867static void868desc_set_ref_add(struct zink_descriptor_set *zds, struct zink_descriptor_refs *refs, void **ref_ptr, void *ptr)869{870struct zink_descriptor_reference ref = {ref_ptr, &zds->invalid};871*ref_ptr = ptr;872if (ptr)873util_dynarray_append(&refs->refs, struct zink_descriptor_reference, ref);874}875876static void877zink_descriptor_surface_desc_set_add(struct zink_descriptor_surface *dsurf, struct zink_descriptor_set *zds, unsigned idx)878{879assert(idx < zds->num_resources);880zds->surfaces[idx].is_buffer = dsurf->is_buffer;881if (dsurf->is_buffer)882desc_set_ref_add(zds, &dsurf->bufferview->desc_set_refs, (void**)&zds->surfaces[idx].bufferview, dsurf->bufferview);883else884desc_set_ref_add(zds, &dsurf->surface->desc_set_refs, (void**)&zds->surfaces[idx].surface, dsurf->surface);885}886887static void888zink_image_view_desc_set_add(struct 
zink_image_view *image_view, struct zink_descriptor_set *zds, unsigned idx, bool is_buffer)889{890assert(idx < zds->num_resources);891if (is_buffer)892desc_set_ref_add(zds, &image_view->buffer_view->desc_set_refs, (void**)&zds->surfaces[idx].bufferview, image_view->buffer_view);893else894desc_set_ref_add(zds, &image_view->surface->desc_set_refs, (void**)&zds->surfaces[idx].surface, image_view->surface);895}896897static void898zink_sampler_state_desc_set_add(struct zink_sampler_state *sampler_state, struct zink_descriptor_set *zds, unsigned idx)899{900assert(idx < zds->num_resources);901if (sampler_state)902desc_set_ref_add(zds, &sampler_state->desc_set_refs, (void**)&zds->sampler_states[idx], sampler_state);903else904zds->sampler_states[idx] = NULL;905}906907static void908zink_resource_desc_set_add(struct zink_resource *res, struct zink_descriptor_set *zds, unsigned idx)909{910assert(idx < zds->num_resources);911desc_set_ref_add(zds, res ? &res->obj->desc_set_refs : NULL, (void**)&zds->res_objs[idx], res ? res->obj : NULL);912}913914void915zink_descriptor_set_refs_clear(struct zink_descriptor_refs *refs, void *ptr)916{917util_dynarray_foreach(&refs->refs, struct zink_descriptor_reference, ref) {918if (*ref->ref == ptr) {919*ref->invalid = true;920*ref->ref = NULL;921}922}923util_dynarray_fini(&refs->refs);924}925926static inline void927zink_descriptor_pool_reference(struct zink_screen *screen,928struct zink_descriptor_pool **dst,929struct zink_descriptor_pool *src)930{931struct zink_descriptor_pool *old_dst = dst ? *dst : NULL;932933if (pipe_reference_described(old_dst ? 
&old_dst->reference : NULL, &src->reference,
                               (debug_reference_descriptor)debug_describe_zink_descriptor_pool))
      descriptor_pool_free(screen, old_dst);
   if (dst) *dst = src;
}

/* Precompute, per descriptor type, the list of context locations this program's
 * descriptors are sourced from (sampler states, descriptor surfaces, resources),
 * so set_descriptor_set_refs() can walk a flat array instead of re-deriving the
 * bindings from the shaders on every update.
 */
static void
create_descriptor_ref_template(struct zink_context *ctx, struct zink_program *pg, enum zink_descriptor_type type)
{
   struct zink_shader **stages;
   if (pg->is_compute)
      stages = &((struct zink_compute_program*)pg)->shader;
   else
      stages = ((struct zink_gfx_program*)pg)->shaders;
   unsigned num_shaders = pg->is_compute ? 1 : ZINK_SHADER_COUNT;

   /* first pass: count the refs so the array can be allocated in one shot */
   for (int i = 0; i < num_shaders; i++) {
      struct zink_shader *shader = stages[i];
      if (!shader)
         continue;

      for (int j = 0; j < shader->num_bindings[type]; j++) {
         int index = shader->bindings[type][j].index;
         /* ubo slot 0 is the push set and is not part of this template */
         if (type == ZINK_DESCRIPTOR_TYPE_UBO && !index)
            continue;
         pdd_cached(pg)->num_refs[type] += shader->bindings[type][j].size;
      }
   }

   pdd_cached(pg)->refs[type] = ralloc_array(pg->dd, union zink_program_descriptor_refs, pdd_cached(pg)->num_refs[type]);
   if (!pdd_cached(pg)->refs[type])
      return;

   /* second pass: record the context location backing each binding element */
   unsigned ref_idx = 0;
   for (int i = 0; i < num_shaders; i++) {
      struct zink_shader *shader = stages[i];
      if (!shader)
         continue;

      enum pipe_shader_type stage = pipe_shader_type_from_mesa(shader->nir->info.stage);
      for (int j = 0; j < shader->num_bindings[type]; j++) {
         int index = shader->bindings[type][j].index;
         for (unsigned k = 0; k < shader->bindings[type][j].size; k++) {
            switch (type) {
            case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:
               pdd_cached(pg)->refs[type][ref_idx].sampler.sampler_state = (struct zink_sampler_state**)&ctx->sampler_states[stage][index + k];
               pdd_cached(pg)->refs[type][ref_idx].sampler.dsurf = &ctx->di.sampler_surfaces[stage][index + k];
               break;
            case ZINK_DESCRIPTOR_TYPE_IMAGE:
               pdd_cached(pg)->refs[type][ref_idx].dsurf = &ctx->di.image_surfaces[stage][index + k];
               break;
            case ZINK_DESCRIPTOR_TYPE_UBO:
               /* skip the push-set ubo; it was not counted above */
               if (!index)
                  continue;
               FALLTHROUGH;
            default:
               pdd_cached(pg)->refs[type][ref_idx].res = &ctx->di.descriptor_res[type][stage][index + k];
               break;
            }
            assert(ref_idx < pdd_cached(pg)->num_refs[type]);
            ref_idx++;
         }
      }
   }
}

/* Set up cached-descriptor state for a program: lazy-mode data first, then one
 * pool per used descriptor type plus (optionally) the update ref templates.
 * Returns false on allocation/pool failure.
 */
bool
zink_descriptor_program_init(struct zink_context *ctx, struct zink_program *pg)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);

   pg->dd = (void*)rzalloc(pg, struct zink_program_descriptor_data_cached);
   if (!pg->dd)
      return false;

   if (!zink_descriptor_program_init_lazy(ctx, pg))
      return false;

   /* no descriptors */
   if (!pg->dd)
      return true;

   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
      if (!pg->dd->layout_key[i])
         continue;

      unsigned idx = zink_descriptor_type_to_size_idx(i);
      VkDescriptorPoolSize *size = &pg->dd->sizes[idx];
      /* this is a sampler/image set with no images only texels */
      if (!size->descriptorCount)
         size++;
      unsigned num_sizes = zink_descriptor_program_num_sizes(pg, i);
      struct zink_descriptor_pool *pool = descriptor_pool_get(ctx, i, pg->dd->layout_key[i], size, num_sizes);
      if (!pool)
         return false;
      zink_descriptor_pool_reference(screen, &pdd_cached(pg)->pool[i], pool);

      if (screen->info.have_KHR_descriptor_update_template &&
          screen->descriptor_mode != ZINK_DESCRIPTOR_MODE_NOTEMPLATES)
         create_descriptor_ref_template(ctx, pg, i);
   }

   return true;
}

/* Drop the program's per-type pool references, then tear down the lazy-mode data. */
void
zink_descriptor_program_deinit(struct zink_screen *screen, struct zink_program *pg)
{
   if (!pg->dd)
      return;
   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++)
      zink_descriptor_pool_reference(screen, &pdd_cached(pg)->pool[i], NULL);

   zink_descriptor_program_deinit_lazy(screen, pg);
}

/* Release every pool cached in the context's per-type pool tables and destroy the tables. */
static void
zink_descriptor_pool_deinit(struct zink_context *ctx)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) 
{
      hash_table_foreach(ctx->dd->descriptor_pools[i], entry) {
         struct zink_descriptor_pool *pool = (void*)entry->data;
         zink_descriptor_pool_reference(screen, &pool, NULL);
      }
      _mesa_hash_table_destroy(ctx->dd->descriptor_pools[i], NULL);
   }
}

/* Create the per-type pool hash tables and the two push-set pools
 * (index 0: gfx, sized for all gfx stages; index 1: compute).
 */
static bool
zink_descriptor_pool_init(struct zink_context *ctx)
{
   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
      ctx->dd->descriptor_pools[i] = _mesa_hash_table_create(ctx, hash_descriptor_pool, equals_descriptor_pool);
      if (!ctx->dd->descriptor_pools[i])
         return false;
   }
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   VkDescriptorPoolSize sizes;
   sizes.type = screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY ? VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER : VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
   sizes.descriptorCount = ZINK_SHADER_COUNT * ZINK_DEFAULT_MAX_DESCS;
   ctx->dd->push_pool[0] = descriptor_pool_get(ctx, 0, ctx->dd->push_layout_keys[0], &sizes, 1);
   sizes.descriptorCount = ZINK_DEFAULT_MAX_DESCS;
   ctx->dd->push_pool[1] = descriptor_pool_get(ctx, 0, ctx->dd->push_layout_keys[1], &sizes, 1);
   return ctx->dd->push_pool[0] && ctx->dd->push_pool[1];
}


/* Store (or, on a cache hit, verify) the resource backing slot 'i' of the set. */
static void
desc_set_res_add(struct zink_descriptor_set *zds, struct zink_resource *res, unsigned int i, bool cache_hit)
{
   /* if we got a cache hit, we have to verify that the cached set is still valid;
    * we store the vk resource to the set here to avoid a more complex and costly mechanism of maintaining a
    * hash table on every resource with the associated descriptor sets that then needs to be iterated through
    * whenever a resource is destroyed
    */
   assert(!cache_hit || zds->res_objs[i] == (res ? res->obj : NULL));
   if (!cache_hit)
      zink_resource_desc_set_add(res, zds, i);
}

/* Store (or, on a cache hit, verify via surface hash) the sampler surface + sampler state for slot 'i'. */
static void
desc_set_sampler_add(struct zink_context *ctx, struct zink_descriptor_set *zds, struct zink_descriptor_surface *dsurf,
                     struct zink_sampler_state *state, unsigned int i, bool cache_hit)
{
   /* if we got a cache hit, we have to verify that the cached set is still valid;
    * we store the vk resource to the set here to avoid a more complex and costly mechanism of maintaining a
    * hash table on every resource with the associated descriptor sets that then needs to be iterated through
    * whenever a resource is destroyed
    */
#ifndef NDEBUG
   uint32_t cur_hash = get_descriptor_surface_hash(ctx, &zds->surfaces[i]);
   uint32_t new_hash = get_descriptor_surface_hash(ctx, dsurf);
#endif
   assert(!cache_hit || cur_hash == new_hash);
   assert(!cache_hit || zds->sampler_states[i] == state);
   if (!cache_hit) {
      zink_descriptor_surface_desc_set_add(dsurf, zds, i);
      zink_sampler_state_desc_set_add(state, zds, i);
   }
}

/* Store (or, on a cache hit, verify via hash) the shader image view for slot 'i'. */
static void
desc_set_image_add(struct zink_context *ctx, struct zink_descriptor_set *zds, struct zink_image_view *image_view,
                   unsigned int i, bool is_buffer, bool cache_hit)
{
   /* if we got a cache hit, we have to verify that the cached set is still valid;
    * we store the vk resource to the set here to avoid a more complex and costly mechanism of maintaining a
    * hash table on every resource with the associated descriptor sets that then needs to be iterated through
    * whenever a resource is destroyed
    */
#ifndef NDEBUG
   uint32_t cur_hash = get_descriptor_surface_hash(ctx, &zds->surfaces[i]);
   uint32_t new_hash = zink_get_image_view_hash(ctx, image_view, is_buffer);
#endif
   assert(!cache_hit || cur_hash == new_hash);
   if (!cache_hit)
      zink_image_view_desc_set_add(image_view, zds, i, is_buffer);
}

/* Store (or, on a cache hit, verify via hash) a generic descriptor surface for slot 'i'. */
static void
desc_set_descriptor_surface_add(struct zink_context *ctx, struct zink_descriptor_set *zds, struct zink_descriptor_surface *dsurf,
                                unsigned int i, bool cache_hit)
{
   /* if we got a cache hit, we have to verify that the cached set is still valid;
    * we store the vk resource to the set here to avoid a more complex and costly mechanism of maintaining a
    * hash table on every resource with the associated descriptor sets that then needs to be iterated through
    * whenever a resource is destroyed
    */
#ifndef NDEBUG
   uint32_t cur_hash = get_descriptor_surface_hash(ctx, &zds->surfaces[i]);
   uint32_t new_hash = get_descriptor_surface_hash(ctx, dsurf);
#endif
   assert(!cache_hit || cur_hash == new_hash);
   if (!cache_hit)
      zink_descriptor_surface_desc_set_add(dsurf, zds, i);
}

/* Fill the fixed fields of a VkWriteDescriptorSet from the shader binding
 * (or from push-set defaults when shader == NULL) and return num_wds + 1.
 * The caller still has to set the pBufferInfo/pImageInfo/pTexelBufferView payload.
 */
static unsigned
init_write_descriptor(struct zink_shader *shader, struct zink_descriptor_set *zds, enum zink_descriptor_type type, int idx, VkWriteDescriptorSet *wd, unsigned num_wds)
{
   wd->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
   wd->pNext = NULL;
   wd->dstBinding = shader ? shader->bindings[type][idx].binding : idx;
   wd->dstArrayElement = 0;
   wd->descriptorCount = shader ? shader->bindings[type][idx].size : 1;
   wd->descriptorType = shader ? shader->bindings[type][idx].type : VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
   wd->dstSet = zds->desc_set;
   return num_wds + 1;
}

/* Write the per-stage dynamic-UBO push descriptors and collect their dynamic
 * offsets; returns the number of dynamic offsets produced.
 */
static unsigned
update_push_ubo_descriptors(struct zink_context *ctx, struct zink_descriptor_set *zds,
                            bool is_compute, bool cache_hit, uint32_t *dynamic_offsets)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   VkWriteDescriptorSet wds[ZINK_SHADER_COUNT];
   VkDescriptorBufferInfo buffer_infos[ZINK_SHADER_COUNT];
   struct zink_shader **stages;

   unsigned num_stages = is_compute ? 
1 : ZINK_SHADER_COUNT;1174if (is_compute)1175stages = &ctx->curr_compute->shader;1176else1177stages = &ctx->gfx_stages[0];11781179for (int i = 0; i < num_stages; i++) {1180struct zink_shader *shader = stages[i];1181enum pipe_shader_type pstage = shader ? pipe_shader_type_from_mesa(shader->nir->info.stage) : i;1182VkDescriptorBufferInfo *info = &ctx->di.ubos[pstage][0];1183unsigned dynamic_idx = is_compute ? 0 : tgsi_processor_to_shader_stage(pstage);11841185/* Values are taken from pDynamicOffsets in an order such that all entries for set N come before set N+1;1186* within a set, entries are ordered by the binding numbers in the descriptor set layouts1187* - vkCmdBindDescriptorSets spec1188*1189* because of this, we have to populate the dynamic offsets by their shader stage to ensure they1190* match what the driver expects1191*/1192dynamic_offsets[dynamic_idx] = info->offset;1193if (!cache_hit) {1194struct zink_resource *res = zink_get_resource_for_descriptor(ctx, ZINK_DESCRIPTOR_TYPE_UBO, pstage, 0);1195init_write_descriptor(NULL, zds, ZINK_DESCRIPTOR_TYPE_UBO, tgsi_processor_to_shader_stage(pstage), &wds[i], 0);1196desc_set_res_add(zds, res, i, cache_hit);1197/* these are dynamic UBO descriptors, so we have to always set 0 as the descriptor offset */1198buffer_infos[i] = *info;1199buffer_infos[i].offset = 0;1200wds[i].pBufferInfo = &buffer_infos[i];1201}1202}12031204if (!cache_hit)1205vkUpdateDescriptorSets(screen->dev, num_stages, wds, 0, NULL);1206return num_stages;1207}12081209static void1210set_descriptor_set_refs(struct zink_context *ctx, struct zink_descriptor_set *zds, struct zink_program *pg, bool cache_hit)1211{1212enum zink_descriptor_type type = zds->pool->type;1213for (unsigned i = 0; i < pdd_cached(pg)->num_refs[type]; i++) {1214switch (type) {1215case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:1216desc_set_sampler_add(ctx, zds, pdd_cached(pg)->refs[type][i].sampler.dsurf,1217*pdd_cached(pg)->refs[type][i].sampler.sampler_state, i, 
cache_hit);1218break;1219case ZINK_DESCRIPTOR_TYPE_IMAGE:1220desc_set_descriptor_surface_add(ctx, zds, pdd_cached(pg)->refs[type][i].dsurf, i, cache_hit);1221break;1222default:1223desc_set_res_add(zds, *pdd_cached(pg)->refs[type][i].res, i, cache_hit);1224break;1225}1226}1227}12281229static void1230update_descriptors_internal(struct zink_context *ctx, struct zink_descriptor_set **zds, struct zink_program *pg, bool *cache_hit)1231{1232struct zink_screen *screen = zink_screen(ctx->base.screen);1233struct zink_shader **stages;12341235unsigned num_stages = pg->is_compute ? 1 : ZINK_SHADER_COUNT;1236if (pg->is_compute)1237stages = &ctx->curr_compute->shader;1238else1239stages = &ctx->gfx_stages[0];12401241for (unsigned h = 0; h < ZINK_DESCRIPTOR_TYPES; h++) {1242if (cache_hit[h] || !zds[h])1243continue;12441245if (screen->info.have_KHR_descriptor_update_template &&1246screen->descriptor_mode != ZINK_DESCRIPTOR_MODE_NOTEMPLATES) {1247set_descriptor_set_refs(ctx, zds[h], pg, cache_hit[h]);1248zink_descriptor_set_update_lazy(ctx, pg, h, zds[h]->desc_set);1249continue;1250}12511252unsigned num_resources = 0;1253ASSERTED unsigned num_bindings = zds[h]->pool->num_resources;1254VkWriteDescriptorSet wds[ZINK_MAX_DESCRIPTORS_PER_TYPE];1255unsigned num_wds = 0;12561257for (int i = 0; i < num_stages; i++) {1258struct zink_shader *shader = stages[i];1259if (!shader)1260continue;1261enum pipe_shader_type stage = pipe_shader_type_from_mesa(shader->nir->info.stage);1262for (int j = 0; j < shader->num_bindings[h]; j++) {1263int index = shader->bindings[h][j].index;1264switch (h) {1265case ZINK_DESCRIPTOR_TYPE_UBO:1266if (!index)1267continue;1268FALLTHROUGH;1269case ZINK_DESCRIPTOR_TYPE_SSBO: {1270VkDescriptorBufferInfo *info;1271struct zink_resource *res = zink_get_resource_for_descriptor(ctx, h, stage, index);1272if (h == ZINK_DESCRIPTOR_TYPE_UBO)1273info = &ctx->di.ubos[stage][index];1274else1275info = &ctx->di.ssbos[stage][index];1276assert(num_resources < 
num_bindings);1277desc_set_res_add(zds[h], res, num_resources++, cache_hit[h]);1278wds[num_wds].pBufferInfo = info;1279}1280break;1281case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:1282case ZINK_DESCRIPTOR_TYPE_IMAGE: {1283VkDescriptorImageInfo *image_info;1284VkBufferView *buffer_info;1285if (h == ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW) {1286image_info = &ctx->di.textures[stage][index];1287buffer_info = &ctx->di.tbos[stage][index];1288} else {1289image_info = &ctx->di.images[stage][index];1290buffer_info = &ctx->di.texel_images[stage][index];1291}1292bool is_buffer = zink_shader_descriptor_is_buffer(shader, h, j);1293for (unsigned k = 0; k < shader->bindings[h][j].size; k++) {1294assert(num_resources < num_bindings);1295if (h == ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW) {1296struct zink_sampler_state *sampler = NULL;1297if (!is_buffer && image_info->imageView)1298sampler = ctx->sampler_states[stage][index + k];;12991300desc_set_sampler_add(ctx, zds[h], &ctx->di.sampler_surfaces[stage][index + k], sampler, num_resources++, cache_hit[h]);1301} else {1302struct zink_image_view *image_view = &ctx->image_views[stage][index + k];1303desc_set_image_add(ctx, zds[h], image_view, num_resources++, is_buffer, cache_hit[h]);1304}1305}1306if (is_buffer)1307wds[num_wds].pTexelBufferView = buffer_info;1308else1309wds[num_wds].pImageInfo = image_info;1310}1311break;1312default:1313unreachable("unknown descriptor type");1314}1315num_wds = init_write_descriptor(shader, zds[h], h, j, &wds[num_wds], num_wds);1316}1317}1318if (num_wds)1319vkUpdateDescriptorSets(screen->dev, num_wds, wds, 0, NULL);1320}1321}13221323static void1324zink_context_update_descriptor_states(struct zink_context *ctx, struct zink_program *pg);13251326void1327zink_descriptors_update(struct zink_context *ctx, bool is_compute)1328{1329struct zink_program *pg = is_compute ? 
(struct zink_program *)ctx->curr_compute : (struct zink_program *)ctx->curr_program;

   zink_context_update_descriptor_states(ctx, pg);
   bool cache_hit[ZINK_DESCRIPTOR_TYPES + 1];
   VkDescriptorSet sets[ZINK_DESCRIPTOR_TYPES + 1];
   struct zink_descriptor_set *zds[ZINK_DESCRIPTOR_TYPES + 1];
   /* push set is indexed in vulkan as 0 but isn't in the general pool array */
   ctx->dd->changed[is_compute][ZINK_DESCRIPTOR_TYPES] |= ctx->dd->pg[is_compute] != pg;
   if (pg->dd->push_usage)
      zds[ZINK_DESCRIPTOR_TYPES] = zink_descriptor_set_get(ctx, ZINK_DESCRIPTOR_TYPES, is_compute, &cache_hit[ZINK_DESCRIPTOR_TYPES]);
   else {
      zds[ZINK_DESCRIPTOR_TYPES] = NULL;
      cache_hit[ZINK_DESCRIPTOR_TYPES] = false;
   }
   ctx->dd->changed[is_compute][ZINK_DESCRIPTOR_TYPES] = false;
   sets[0] = zds[ZINK_DESCRIPTOR_TYPES] ? zds[ZINK_DESCRIPTOR_TYPES]->desc_set : ctx->dd->dummy_set;
   for (int h = 0; h < ZINK_DESCRIPTOR_TYPES; h++) {
      ctx->dd->changed[is_compute][h] |= ctx->dd->pg[is_compute] != pg;
      if (pg->dsl[h + 1]) {
         /* null set has null pool */
         if (pdd_cached(pg)->pool[h])
            zds[h] = zink_descriptor_set_get(ctx, h, is_compute, &cache_hit[h]);
         else
            zds[h] = NULL;
         /* reuse dummy set for bind */
         sets[h + 1] = zds[h] ? zds[h]->desc_set : ctx->dd->dummy_set;
      } else {
         zds[h] = NULL;
      }
      if (!zds[h])
         cache_hit[h] = false;
      ctx->dd->changed[is_compute][h] = false;
   }
   struct zink_batch *batch = &ctx->batch;
   zink_batch_reference_program(batch, pg);

   uint32_t dynamic_offsets[PIPE_MAX_CONSTANT_BUFFERS];
   unsigned dynamic_offset_idx = 0;

   if (pg->dd->push_usage) // push set
      dynamic_offset_idx = update_push_ubo_descriptors(ctx, zds[ZINK_DESCRIPTOR_TYPES],
                                                       is_compute, cache_hit[ZINK_DESCRIPTOR_TYPES], dynamic_offsets);

   update_descriptors_internal(ctx, zds, pg, cache_hit);

   vkCmdBindDescriptorSets(batch->state->cmdbuf, is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
                           pg->layout, 0, pg->num_dsl, sets,
                           dynamic_offset_idx, dynamic_offsets);
   ctx->dd->pg[is_compute] = pg;
}

/* Destroy the batch state's tracking set of in-use descriptor sets. */
void
zink_batch_descriptor_deinit(struct zink_screen *screen, struct zink_batch_state *bs)
{
   if (!bs->dd)
      return;
   _mesa_set_destroy(bs->dd->desc_sets, NULL);
   zink_batch_descriptor_deinit_lazy(screen, bs);
}

/* On batch reset: drop this batch's usage/reference on every descriptor set it
 * touched and try to recycle each one back into its pool.
 */
void
zink_batch_descriptor_reset(struct zink_screen *screen, struct zink_batch_state *bs)
{
   set_foreach(bs->dd->desc_sets, entry) {
      struct zink_descriptor_set *zds = (void*)entry->key;
      zink_batch_usage_unset(&zds->batch_uses, bs);
      /* reset descriptor pools when no bs is using this program to avoid
       * having some inactive program hogging a billion descriptors
       */
      pipe_reference(&zds->reference, NULL);
      zink_descriptor_set_recycle(zds);
      _mesa_set_remove(bs->dd->desc_sets, entry);
   }
   zink_batch_descriptor_reset_lazy(screen, bs);
}

/* Allocate the batch state's descriptor-set tracking set. */
bool
zink_batch_descriptor_init(struct zink_screen *screen, struct zink_batch_state *bs)
{
   if (!zink_batch_descriptor_init_lazy(screen, bs))
      return false;
   bs->dd->desc_sets = _mesa_pointer_set_create(bs);
   return !!bs->dd->desc_sets;
}

/* Map a (descriptor type, stage, slot) binding to the zink_resource currently
 * bound there in the context, or NULL if the slot is empty.
 */
struct zink_resource *
zink_get_resource_for_descriptor(struct zink_context *ctx, enum zink_descriptor_type type, enum pipe_shader_type shader, int idx)
{
   switch (type) {
   case ZINK_DESCRIPTOR_TYPE_UBO:
      return zink_resource(ctx->ubos[shader][idx].buffer);
   case ZINK_DESCRIPTOR_TYPE_SSBO:
      return zink_resource(ctx->ssbos[shader][idx].buffer);
   case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:
      return ctx->sampler_views[shader][idx] ? 
zink_resource(ctx->sampler_views[shader][idx]->texture) : NULL;
   case ZINK_DESCRIPTOR_TYPE_IMAGE:
      return zink_resource(ctx->image_views[shader][idx].base.resource);
   default:
      break;
   }
   unreachable("unknown descriptor type!");
   return NULL;
}

/* Fold a UBO binding into the running descriptor-state hash: resource object
 * pointer + buffer size, plus the offset when this is not the push (offset-less) set.
 */
static uint32_t
calc_descriptor_state_hash_ubo(struct zink_context *ctx, enum pipe_shader_type shader, int idx, uint32_t hash, bool need_offset)
{
   struct zink_resource *res = zink_get_resource_for_descriptor(ctx, ZINK_DESCRIPTOR_TYPE_UBO, shader, idx);
   struct zink_resource_object *obj = res ? res->obj : NULL;
   hash = XXH32(&obj, sizeof(void*), hash);
   void *hash_data = &ctx->ubos[shader][idx].buffer_size;
   size_t data_size = sizeof(unsigned);
   hash = XXH32(hash_data, data_size, hash);
   if (need_offset)
      hash = XXH32(&ctx->ubos[shader][idx].buffer_offset, sizeof(unsigned), hash);
   return hash;
}

/* Fold an SSBO binding (object pointer, and offset+size when bound) into the hash. */
static uint32_t
calc_descriptor_state_hash_ssbo(struct zink_context *ctx, struct zink_shader *zs, enum pipe_shader_type shader, int i, int idx, uint32_t hash)
{
   struct zink_resource *res = zink_get_resource_for_descriptor(ctx, ZINK_DESCRIPTOR_TYPE_SSBO, shader, idx);
   struct zink_resource_object *obj = res ? res->obj : NULL;
   hash = XXH32(&obj, sizeof(void*), hash);
   if (obj) {
      struct pipe_shader_buffer *ssbo = &ctx->ssbos[shader][idx];
      hash = XXH32(&ssbo->buffer_offset, sizeof(ssbo->buffer_offset), hash);
      hash = XXH32(&ssbo->buffer_size, sizeof(ssbo->buffer_size), hash);
   }
   return hash;
}

/* Fold every element of a sampler-view binding into the hash: the view hash,
 * plus the sampler-state hash for non-buffer views. Also records is_buffer on
 * the context's cached sampler surface.
 */
static uint32_t
calc_descriptor_state_hash_sampler(struct zink_context *ctx, struct zink_shader *zs, enum pipe_shader_type shader, int i, int idx, uint32_t hash)
{
   for (unsigned k = 0; k < zs->bindings[ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW][i].size; k++) {
      struct zink_sampler_view *sampler_view = zink_sampler_view(ctx->sampler_views[shader][idx + k]);
      bool is_buffer = zink_shader_descriptor_is_buffer(zs, ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW, i);
      ctx->di.sampler_surfaces[shader][idx + k].is_buffer = is_buffer;
      uint32_t val = zink_get_sampler_view_hash(ctx, sampler_view, is_buffer);
      hash = XXH32(&val, sizeof(uint32_t), hash);
      if (is_buffer)
         continue;

      struct zink_sampler_state *sampler_state = ctx->sampler_states[shader][idx + k];

      if (sampler_state)
         hash = XXH32(&sampler_state->hash, sizeof(uint32_t), hash);
   }
   return hash;
}

/* Fold every element of a shader-image binding into the hash via the image view hash. */
static uint32_t
calc_descriptor_state_hash_image(struct zink_context *ctx, struct zink_shader *zs, enum pipe_shader_type shader, int i, int idx, uint32_t hash)
{
   for (unsigned k = 0; k < zs->bindings[ZINK_DESCRIPTOR_TYPE_IMAGE][i].size; k++) {
      bool is_buffer = zink_shader_descriptor_is_buffer(zs, ZINK_DESCRIPTOR_TYPE_IMAGE, i);
      uint32_t val = zink_get_image_view_hash(ctx, &ctx->image_views[shader][idx + k], is_buffer);
      ctx->di.image_surfaces[shader][idx + k].is_buffer = is_buffer;
      hash = XXH32(&val, sizeof(uint32_t), hash);
   }
   return hash;
}

/* Hash one shader stage's bindings of the given descriptor type into a single
 * 32-bit state value used for descriptor-set cache lookups.
 */
static uint32_t
update_descriptor_stage_state(struct zink_context *ctx, enum pipe_shader_type shader, enum zink_descriptor_type type)
{
   struct zink_shader *zs = shader == PIPE_SHADER_COMPUTE ? ctx->compute_stage : ctx->gfx_stages[shader];

   uint32_t hash = 0;
   for (int i = 0; i < zs->num_bindings[type]; i++) {
      /* skip push set members */
      if (zs->bindings[type][i].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC)
         continue;

      int idx = zs->bindings[type][i].index;
      switch (type) {
      case ZINK_DESCRIPTOR_TYPE_UBO:
         hash = calc_descriptor_state_hash_ubo(ctx, shader, idx, hash, true);
         break;
      case ZINK_DESCRIPTOR_TYPE_SSBO:
         hash = calc_descriptor_state_hash_ssbo(ctx, zs, shader, i, idx, hash);
         break;
      case ZINK_DESCRIPTOR_TYPE_SAMPLER_VIEW:
         hash = calc_descriptor_state_hash_sampler(ctx, zs, shader, i, idx, hash);
         break;
      case ZINK_DESCRIPTOR_TYPE_IMAGE:
         hash = calc_descriptor_state_hash_image(ctx, zs, shader, i, idx, hash);
         break;
      default:
         unreachable("unknown descriptor type");
      }
   }
   return hash;
}

/* Recompute the combined (per-pipeline) descriptor state hash for 'type',
 * either directly for compute or by folding the per-stage gfx hashes together.
 */
static void
update_descriptor_state(struct zink_context *ctx, enum zink_descriptor_type type, bool is_compute)
{
   /* we shouldn't be calling this if we don't have to */
   assert(!ctx->dd->descriptor_states[is_compute].valid[type]);
   bool has_any_usage = false;

   if (is_compute) {
      /* just update compute state */
      bool has_usage = zink_program_get_descriptor_usage(ctx, PIPE_SHADER_COMPUTE, type);
      if (has_usage)
         ctx->dd->descriptor_states[is_compute].state[type] = update_descriptor_stage_state(ctx, PIPE_SHADER_COMPUTE, type);
      else
         ctx->dd->descriptor_states[is_compute].state[type] = 0;
      has_any_usage = has_usage;
   } else {
      /* update all gfx states */
      bool first = true;
      for (unsigned i = 0; i < ZINK_SHADER_COUNT; i++) {
         bool has_usage = false;
         /* this is the incremental update for the shader stage */
         if (!ctx->dd->gfx_descriptor_states[i].valid[type]) {
            ctx->dd->gfx_descriptor_states[i].state[type] = 0;
            if (ctx->gfx_stages[i]) {
               has_usage = zink_program_get_descriptor_usage(ctx, i, type);
               if 
(has_usage)
                  ctx->dd->gfx_descriptor_states[i].state[type] = update_descriptor_stage_state(ctx, i, type);
               ctx->dd->gfx_descriptor_states[i].valid[type] = has_usage;
            }
         }
         if (ctx->dd->gfx_descriptor_states[i].valid[type]) {
            /* this is the overall state update for the descriptor set hash */
            if (first) {
               /* no need to double hash the first state */
               ctx->dd->descriptor_states[is_compute].state[type] = ctx->dd->gfx_descriptor_states[i].state[type];
               first = false;
            } else {
               ctx->dd->descriptor_states[is_compute].state[type] = XXH32(&ctx->dd->gfx_descriptor_states[i].state[type],
                                                                          sizeof(uint32_t),
                                                                          ctx->dd->descriptor_states[is_compute].state[type]);
            }
         }
         has_any_usage |= has_usage;
      }
   }
   ctx->dd->descriptor_states[is_compute].valid[type] = has_any_usage;
}

/* Bring all cached descriptor-state hashes for the given program up to date:
 * the push-set hash (when the program uses the push set and it is stale) and
 * every per-type state the program actually has a pool for.
 */
static void
zink_context_update_descriptor_states(struct zink_context *ctx, struct zink_program *pg)
{
   if (pg->dd->push_usage && (!ctx->dd->push_valid[pg->is_compute] ||
                              pg->dd->push_usage != ctx->dd->last_push_usage[pg->is_compute])) {
      uint32_t hash = 0;
      if (pg->is_compute) {
         hash = calc_descriptor_state_hash_ubo(ctx, PIPE_SHADER_COMPUTE, 0, 0, false);
      } else {
         bool first = true;
         u_foreach_bit(stage, pg->dd->push_usage) {
            /* refresh the per-stage push hash only when it has been invalidated */
            if (!ctx->dd->gfx_push_valid[stage]) {
               ctx->dd->gfx_push_state[stage] = calc_descriptor_state_hash_ubo(ctx, stage, 0, 0, false);
               ctx->dd->gfx_push_valid[stage] = true;
            }
            if (first)
               hash = ctx->dd->gfx_push_state[stage];
            else
               hash = XXH32(&ctx->dd->gfx_push_state[stage], sizeof(uint32_t), hash);
            first = false;
         }
      }
      ctx->dd->push_state[pg->is_compute] = hash;
      ctx->dd->push_valid[pg->is_compute] = true;
      ctx->dd->last_push_usage[pg->is_compute] = pg->dd->push_usage;
   }
   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
      if (pdd_cached(pg)->pool[i] && !ctx->dd->descriptor_states[pg->is_compute].valid[i])
         update_descriptor_state(ctx, i, pg->is_compute);
   }
}

/* Invalidate cached descriptor-state hashes after a binding change.
 * UBO slot 0 is special-cased as the push set; 'start'/'count' describe the
 * changed binding range (only 'start' is inspected here).
 */
void
zink_context_invalidate_descriptor_state(struct zink_context *ctx, enum pipe_shader_type shader, enum zink_descriptor_type type, unsigned start, unsigned count)
{
   if (type == ZINK_DESCRIPTOR_TYPE_UBO && !start) {
      /* ubo 0 is the push set */
      ctx->dd->push_state[shader == PIPE_SHADER_COMPUTE] = 0;
      ctx->dd->push_valid[shader == PIPE_SHADER_COMPUTE] = false;
      if (shader != PIPE_SHADER_COMPUTE) {
         ctx->dd->gfx_push_state[shader] = 0;
         ctx->dd->gfx_push_valid[shader] = false;
      }
      ctx->dd->changed[shader == PIPE_SHADER_COMPUTE][ZINK_DESCRIPTOR_TYPES] = true;
      return;
   }
   if (shader != PIPE_SHADER_COMPUTE) {
      ctx->dd->gfx_descriptor_states[shader].valid[type] = false;
      ctx->dd->gfx_descriptor_states[shader].state[type] = 0;
   }
   ctx->dd->descriptor_states[shader == PIPE_SHADER_COMPUTE].valid[type] = false;
   ctx->dd->descriptor_states[shader == PIPE_SHADER_COMPUTE].state[type] = 0;
   ctx->dd->changed[shader == PIPE_SHADER_COMPUTE][type] = true;
}

/* Context-level init for cached descriptor management (lazy data + pools). */
bool
zink_descriptors_init(struct zink_context *ctx)
{
   zink_descriptors_init_lazy(ctx);
   if (!ctx->dd)
      return false;
   return zink_descriptor_pool_init(ctx);
}

/* Context-level teardown; mirrors zink_descriptors_init(). */
void
zink_descriptors_deinit(struct zink_context *ctx)
{
   zink_descriptor_pool_deinit(ctx);
   zink_descriptors_deinit_lazy(ctx);
}

/* Create the per-type descriptor-set-layout caches on the context. */
bool
zink_descriptor_layouts_init(struct zink_context *ctx)
{
   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++)
      if (!_mesa_hash_table_init(&ctx->desc_set_layouts[i], ctx, hash_descriptor_layout, equals_descriptor_layout))
         return false;
   return true;
}

/* Destroy every cached VkDescriptorSetLayout (and its update template, if any)
 * and empty the caches created by zink_descriptor_layouts_init().
 */
void
zink_descriptor_layouts_deinit(struct zink_context *ctx)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
      hash_table_foreach(&ctx->desc_set_layouts[i], he) {
         struct zink_descriptor_layout *layout = he->data;
         vkDestroyDescriptorSetLayout(screen->dev, layout->layout, NULL);
         if (layout->desc_template)
            screen->vk.DestroyDescriptorUpdateTemplate(screen->dev, layout->desc_template, NULL);
         ralloc_free(layout);
         _mesa_hash_table_remove(&ctx->desc_set_layouts[i], he);
      }
   }
}