/* Path: blob/21.2-virgl/src/gallium/drivers/zink/zink_descriptors_lazy.c (4570 views) */
/*
 * Copyright © 2021 Valve Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Mike Blumenkrantz <[email protected]>
 */
#include "tgsi/tgsi_from_mesa.h"


#include "zink_context.h"
#include "zink_compiler.h"
#include "zink_descriptors.h"
#include "zink_program.h"
#include "zink_resource.h"
#include "zink_screen.h"

/* Context-wide state for the "lazy" descriptor manager: descriptors are
 * written with VkDescriptorUpdateTemplates straight from the zink_context
 * `di` (descriptor info) arrays instead of being hashed/cached.
 */
struct zink_descriptor_data_lazy {
   struct zink_descriptor_data base;
   /* one push-set (dynamic UBO) template entry per shader stage */
   VkDescriptorUpdateTemplateEntry push_entries[PIPE_SHADER_TYPES];
   bool push_state_changed[2]; //gfx, compute
   /* bitmask of zink_descriptor_type sets needing re-update */
   uint8_t state_changed[2]; //gfx, compute
};

/* A grow-on-demand pool of descriptor sets; sets are handed out linearly
 * via set_idx and only recycled when the batch state is reset.
 */
struct zink_descriptor_pool {
   VkDescriptorPool pool;
   VkDescriptorSet sets[ZINK_DEFAULT_MAX_DESCS];
   unsigned set_idx;    /* next unused slot in sets[] */
   unsigned sets_alloc; /* number of sets actually allocated from pool */
};

/* Per-batch-state descriptor bookkeeping: one pool hash table per
 * descriptor type (keyed by layout key), plus dedicated pools for the
 * push-set fallback path when KHR_push_descriptor is unavailable.
 */
struct zink_batch_descriptor_data_lazy {
   struct zink_batch_descriptor_data base;
   struct hash_table pools[ZINK_DESCRIPTOR_TYPES];
   struct zink_descriptor_pool *push_pool[2];
   struct zink_program *pg[2]; //gfx, compute
   /* last-bound dsl per set, used to detect layout changes on program switch */
   VkDescriptorSetLayout dsl[2][ZINK_DESCRIPTOR_TYPES];
   unsigned push_usage[2];
};

/* Downcast helper: ctx->dd is always a zink_descriptor_data_lazy in this mode. */
ALWAYS_INLINE static struct zink_descriptor_data_lazy *
dd_lazy(struct zink_context *ctx)
{
   return (struct zink_descriptor_data_lazy*)ctx->dd;
}

/* Downcast helper: bs->dd is always a zink_batch_descriptor_data_lazy in this mode. */
ALWAYS_INLINE static struct zink_batch_descriptor_data_lazy *
bdd_lazy(struct zink_batch_state *bs)
{
   return (struct zink_batch_descriptor_data_lazy*)bs->dd;
}

/* Fill one VkDescriptorUpdateTemplateEntry for binding `idx` of `type` in
 * `shader`, sourcing descriptor data from the matching zink_context::di
 * array at byte offsets computed with offsetof().  `offset` selects the
 * array element for descriptors that are flattened one-entry-per-element
 * (UBOs/SSBOs).  `flatten_dynamic` rewrites DYNAMIC UBOs to plain UBOs so
 * the push set can be templated.  *entry_idx is advanced by one.
 */
static void
init_template_entry(struct zink_shader *shader, enum zink_descriptor_type type,
                    unsigned idx, unsigned offset, VkDescriptorUpdateTemplateEntry *entry, unsigned *entry_idx, bool flatten_dynamic)
{
   int index = shader->bindings[type][idx].index;
   enum pipe_shader_type stage = pipe_shader_type_from_mesa(shader->nir->info.stage);
   entry->dstArrayElement = 0;
   entry->dstBinding = shader->bindings[type][idx].binding;
   if (shader->bindings[type][idx].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC && flatten_dynamic)
      /* filter out DYNAMIC type here */
      entry->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
   else
      entry->descriptorType = shader->bindings[type][idx].type;
   switch (shader->bindings[type][idx].type) {
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
   case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      entry->descriptorCount = 1;
      entry->offset = offsetof(struct zink_context, di.ubos[stage][index + offset]);
      entry->stride = sizeof(VkDescriptorBufferInfo);
      break;
   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      entry->descriptorCount = shader->bindings[type][idx].size;
      entry->offset = offsetof(struct zink_context, di.textures[stage][index + offset]);
      entry->stride = sizeof(VkDescriptorImageInfo);
      break;
   case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      entry->descriptorCount = shader->bindings[type][idx].size;
      entry->offset = offsetof(struct zink_context, di.tbos[stage][index + offset]);
      entry->stride = sizeof(VkBufferView);
      break;
   case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      entry->descriptorCount = 1;
      entry->offset = offsetof(struct zink_context, di.ssbos[stage][index + offset]);
      entry->stride = sizeof(VkDescriptorBufferInfo);
      break;
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      entry->descriptorCount = shader->bindings[type][idx].size;
      entry->offset = offsetof(struct zink_context, di.images[stage][index + offset]);
      entry->stride = sizeof(VkDescriptorImageInfo);
      break;
   case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      entry->descriptorCount = shader->bindings[type][idx].size;
      entry->offset = offsetof(struct zink_context, di.texel_images[stage][index + offset]);
      entry->stride = sizeof(VkBufferView);
      break;
   default:
      unreachable("unknown type");
   }
   (*entry_idx)++;
}

/* Build all lazy-mode descriptor state for a program: per-type set layout
 * bindings, the pipeline layout (set 0 = push set, sets 1..N = descriptor
 * types), per-type descriptor pool sizes, and (when supported) the
 * VkDescriptorUpdateTemplates used to write sets in a single call.
 * Returns false on allocation/creation failure.
 */
bool
zink_descriptor_program_init_lazy(struct zink_context *ctx, struct zink_program *pg)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   VkDescriptorSetLayoutBinding bindings[ZINK_DESCRIPTOR_TYPES][PIPE_SHADER_TYPES * 32];
   VkDescriptorUpdateTemplateEntry entries[ZINK_DESCRIPTOR_TYPES][PIPE_SHADER_TYPES * 32];
   unsigned num_bindings[ZINK_DESCRIPTOR_TYPES] = {0};
   uint8_t has_bindings = 0;

   struct zink_shader **stages;
   if (pg->is_compute)
      stages = &((struct zink_compute_program*)pg)->shader;
   else
      stages = ((struct zink_gfx_program*)pg)->shaders;

   if (!pg->dd)
      pg->dd = (void*)rzalloc(pg, struct zink_program_descriptor_data);
   if (!pg->dd)
      return false;

   unsigned push_count = 0;
   unsigned entry_idx[ZINK_DESCRIPTOR_TYPES] = {0};

   unsigned num_shaders = pg->is_compute ? 1 : ZINK_SHADER_COUNT;
   bool have_push = screen->info.have_KHR_push_descriptor;
   /* walk all shader bindings, accumulating layout bindings, pool sizes,
    * and template entries per descriptor type */
   for (int i = 0; i < num_shaders; i++) {
      struct zink_shader *shader = stages[i];
      if (!shader)
         continue;

      enum pipe_shader_type stage = pipe_shader_type_from_mesa(shader->nir->info.stage);
      VkShaderStageFlagBits stage_flags = zink_shader_stage(stage);
      for (int j = 0; j < ZINK_DESCRIPTOR_TYPES; j++) {
         for (int k = 0; k < shader->num_bindings[j]; k++) {
            /* dynamic ubos handled in push */
            if (shader->bindings[j][k].type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) {
               pg->dd->push_usage |= BITFIELD64_BIT(stage);

               push_count++;
               continue;
            }

            assert(num_bindings[j] < ARRAY_SIZE(bindings[j]));
            VkDescriptorSetLayoutBinding *binding = &bindings[j][num_bindings[j]];
            binding->binding = shader->bindings[j][k].binding;
            binding->descriptorType = shader->bindings[j][k].type;
            binding->descriptorCount = shader->bindings[j][k].size;
            binding->stageFlags = stage_flags;
            binding->pImmutableSamplers = NULL;

            enum zink_descriptor_size_index idx = zink_vktype_to_size_idx(shader->bindings[j][k].type);
            pg->dd->sizes[idx].descriptorCount += shader->bindings[j][k].size;
            pg->dd->sizes[idx].type = shader->bindings[j][k].type;
            switch (shader->bindings[j][k].type) {
            case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
            case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
            case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
            case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
               /* arrayed types use a single entry covering `size` elements */
               init_template_entry(shader, j, k, 0, &entries[j][entry_idx[j]], &entry_idx[j], screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY);
               break;
            case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
            case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
               /* buffers get one entry per array element (descriptorCount==1 each) */
               for (unsigned l = 0; l < shader->bindings[j][k].size; l++)
                  init_template_entry(shader, j, k, l, &entries[j][entry_idx[j]], &entry_idx[j], screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY);
               break;
            default:
               break;
            }
            num_bindings[j]++;
            has_bindings |= BITFIELD_BIT(j);
         }
      }
   }
   pg->dd->binding_usage = has_bindings;
   if (!has_bindings && !push_count) {
      /* no descriptors at all: drop the dd and create a bare layout */
      ralloc_free(pg->dd);
      pg->dd = NULL;

      pg->layout = zink_pipeline_layout_create(screen, pg);
      return !!pg->layout;
   }

   pg->dsl[pg->num_dsl++] = push_count ? ctx->dd->push_dsl[pg->is_compute]->layout : ctx->dd->dummy_dsl->layout;
   if (has_bindings) {
      u_foreach_bit(type, has_bindings) {
         for (unsigned i = 0; i < type; i++) {
            /* push set is always 0 */
            if (!pg->dsl[i + 1]) {
               /* inject a null dsl */
               pg->dsl[pg->num_dsl++] = ctx->dd->dummy_dsl->layout;
               pg->dd->binding_usage |= BITFIELD_BIT(i);
            }
         }
         pg->dd->layouts[pg->num_dsl] = zink_descriptor_util_layout_get(ctx, type, bindings[type], num_bindings[type], &pg->dd->layout_key[type]);
         pg->dd->layout_key[type]->use_count++;
         pg->dsl[pg->num_dsl] = pg->dd->layouts[pg->num_dsl]->layout;
         pg->num_dsl++;
      }
      /* pool sizes were per-set counts; scale to the pool's max set count */
      for (unsigned i = 0; i < ARRAY_SIZE(pg->dd->sizes); i++)
         pg->dd->sizes[i].descriptorCount *= ZINK_DEFAULT_MAX_DESCS;
   }

   pg->layout = zink_pipeline_layout_create(screen, pg);
   if (!pg->layout)
      return false;
   /* without update templates (or in NOTEMPLATES mode) we're done */
   if (!screen->info.have_KHR_descriptor_update_template || screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_NOTEMPLATES)
      return true;

   VkDescriptorUpdateTemplateCreateInfo template[ZINK_DESCRIPTOR_TYPES + 1] = {0};
   /* type of template */
   VkDescriptorUpdateTemplateType types[ZINK_DESCRIPTOR_TYPES + 1] = {VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET};
   if (have_push && screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY)
      types[0] = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR;

   /* number of descriptors in template */
   unsigned wd_count[ZINK_DESCRIPTOR_TYPES + 1];
   if (push_count)
      wd_count[0] = pg->is_compute ? 1 : ZINK_SHADER_COUNT;
   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++)
      wd_count[i + 1] = pg->dd->layout_key[i] ? pg->dd->layout_key[i]->num_descriptors : 0;

   /* gfx uses the first ZINK_SHADER_COUNT push entries, compute the last one */
   VkDescriptorUpdateTemplateEntry *push_entries[2] = {
      dd_lazy(ctx)->push_entries,
      &dd_lazy(ctx)->push_entries[PIPE_SHADER_COMPUTE],
   };
   for (unsigned i = 0; i < pg->num_dsl; i++) {
      bool is_push = i == 0;
      /* no need for empty templates */
      if (pg->dsl[i] == ctx->dd->dummy_dsl->layout ||
          (!is_push && pg->dd->layouts[i]->desc_template))
         continue;
      template[i].sType = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO;
      assert(wd_count[i]);
      template[i].descriptorUpdateEntryCount = wd_count[i];
      if (is_push)
         template[i].pDescriptorUpdateEntries = push_entries[pg->is_compute];
      else
         template[i].pDescriptorUpdateEntries = entries[i - 1];
      template[i].templateType = types[i];
      template[i].descriptorSetLayout = pg->dsl[i];
      template[i].pipelineBindPoint = pg->is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS;
      template[i].pipelineLayout = pg->layout;
      template[i].set = i;
      VkDescriptorUpdateTemplateKHR t;
      if (screen->vk.CreateDescriptorUpdateTemplate(screen->dev, &template[i], NULL, &t) != VK_SUCCESS)
         return false;
      if (is_push)
         pg->dd->push_template = t;
      else
         pg->dd->layouts[i]->desc_template = t;
   }
   return true;
}

/* Release a program's lazy descriptor state: drop layout-key refcounts,
 * destroy the push template, and free the dd allocation.
 * NOTE(review): the loop dereferences pg->dd before the `pg->dd &&` NULL
 * check below it — looks like pg->num_dsl==0 is relied on to guard the
 * pg->dd==NULL case (see init: dd is freed only when num_dsl stays 0);
 * confirm that invariant holds for all callers.
 */
void
zink_descriptor_program_deinit_lazy(struct zink_screen *screen, struct zink_program *pg)
{
   for (unsigned i = 0; pg->num_dsl && i < ZINK_DESCRIPTOR_TYPES; i++) {
      if (pg->dd->layout_key[i])
         pg->dd->layout_key[i]->use_count--;
   }
   if (pg->dd && pg->dd->push_template)
      screen->vk.DestroyDescriptorUpdateTemplate(screen->dev, pg->dd->push_template, NULL);
   ralloc_free(pg->dd);
}

/* Create a VkDescriptorPool sized for ZINK_DEFAULT_MAX_DESCS sets with the
 * given pool sizes; returns VK_NULL_HANDLE on failure.
 */
static VkDescriptorPool
create_pool(struct zink_screen *screen, unsigned num_type_sizes, VkDescriptorPoolSize *sizes, unsigned flags)
{
   VkDescriptorPool pool;
   VkDescriptorPoolCreateInfo dpci = {0};
   dpci.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
   dpci.pPoolSizes = sizes;
   dpci.poolSizeCount = num_type_sizes;
   dpci.flags = flags;
   dpci.maxSets = ZINK_DEFAULT_MAX_DESCS;
   if (vkCreateDescriptorPool(screen->dev, &dpci, 0, &pool) != VK_SUCCESS) {
      debug_printf("vkCreateDescriptorPool failed\n");
      return VK_NULL_HANDLE;
   }
   return pool;
}

/* Look up (or create and cache) the batch-state pool for this program's
 * layout key of `type`.  Pools are keyed by layout key in the batch's
 * per-type hash table and ralloc'd off the batch state.
 */
static struct zink_descriptor_pool *
get_descriptor_pool_lazy(struct zink_context *ctx, struct zink_program *pg, enum zink_descriptor_type type, struct zink_batch_state *bs)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   struct hash_entry *he = _mesa_hash_table_search(&bdd_lazy(bs)->pools[type], pg->dd->layout_key[type]);
   if (he)
      return he->data;
   struct zink_descriptor_pool *pool = rzalloc(bs, struct zink_descriptor_pool);
   if (!pool)
      return NULL;
   unsigned idx = zink_descriptor_type_to_size_idx(type);
   VkDescriptorPoolSize *size = &pg->dd->sizes[idx];
   /* this is a sampler/image set with no images only texels */
   if (!size->descriptorCount)
      size++;
   pool->pool = create_pool(screen, zink_descriptor_program_num_sizes(pg, type), size, 0);
   if (!pool->pool) {
      ralloc_free(pool);
      return NULL;
   }
   _mesa_hash_table_insert(&bdd_lazy(bs)->pools[type], pg->dd->layout_key[type], pool);
   return pool;
}

/* Hand out the next descriptor set from `pool`, allocating more sets in
 * growing batches as needed.  When the pool is exhausted, waits for the
 * current fence (flushing the batch) and retries — which is why callers
 * must re-check ctx->batch.state afterwards.  `pg` may be NULL for the
 * push-set fallback path, which uses the context's push dsl instead.
 */
static VkDescriptorSet
get_descriptor_set_lazy(struct zink_context *ctx, struct zink_program *pg, enum zink_descriptor_type type, struct zink_descriptor_pool *pool, bool is_compute)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   if (!pool)
      return VK_NULL_HANDLE;

   if (pool->set_idx < pool->sets_alloc)
      return pool->sets[pool->set_idx++];

   /* allocate up to $current * 10, e.g., 10 -> 100 or 100 -> 1000 */
   unsigned sets_to_alloc = MIN2(MAX2(pool->sets_alloc * 10, 10), ZINK_DEFAULT_MAX_DESCS) - pool->sets_alloc;
   if (!sets_to_alloc) {//pool full
      zink_fence_wait(&ctx->base);
      return get_descriptor_set_lazy(ctx, pg, type, pool, is_compute);
   }
   if (!zink_descriptor_util_alloc_sets(screen, pg ? pg->dsl[type + 1] : ctx->dd->push_dsl[is_compute]->layout,
                                        pool->pool, &pool->sets[pool->sets_alloc], sets_to_alloc))
      return VK_NULL_HANDLE;
   pool->sets_alloc += sets_to_alloc;
   return pool->sets[pool->set_idx++];
}

/* Fetch all descriptor sets needed for this update into `sets`:
 * sets[0] is the push-set fallback (VK_NULL_HANDLE when push descriptors
 * are available or not needed); sets[type+1] holds each changed type.
 * Because set allocation can trigger a flush (new batch state), the
 * function detects mid-loop flushes and restarts itself with all sets
 * marked changed.  Returns false on allocation failure.
 */
static bool
populate_sets(struct zink_context *ctx, struct zink_program *pg, uint8_t *changed_sets, bool need_push, VkDescriptorSet *sets)
{
   struct zink_batch_state *bs = ctx->batch.state;
   if (need_push && !zink_screen(ctx->base.screen)->info.have_KHR_push_descriptor) {
      struct zink_descriptor_pool *pool = bdd_lazy(bs)->push_pool[pg->is_compute];
      sets[0] = get_descriptor_set_lazy(ctx, NULL, 0, pool, pg->is_compute);
      if (!sets[0])
         return false;
   } else
      sets[0] = VK_NULL_HANDLE;
   /* may have flushed */
   if (bs != ctx->batch.state)
      *changed_sets = pg->dd->binding_usage;
   bs = ctx->batch.state;
   u_foreach_bit(type, *changed_sets) {
      if (pg->dd->layout_key[type]) {
         struct zink_descriptor_pool *pool = get_descriptor_pool_lazy(ctx, pg, type, bs);
         sets[type + 1] = get_descriptor_set_lazy(ctx, pg, type, pool, pg->is_compute);
         /* NOTE(review): ffs() is 1-based while `type` is 0-based, so this
          * "is it the first fetched set?" test looks off by one — confirm
          * against upstream whether `ffs(*changed_sets) - 1` was intended.
          */
         if (ctx->batch.state != bs && (sets[0] || type != ffs(*changed_sets))) {
            /* sets are allocated by batch state, so if flush occurs on anything
             * but the first set that has been fetched here, get all new sets
             */
            *changed_sets = pg->dd->binding_usage;
            if (pg->dd->push_usage)
               need_push = true;
            return populate_sets(ctx, pg, changed_sets, need_push, sets);
         }
      } else
         sets[type + 1] = ctx->dd->dummy_set;
      if (!sets[type + 1])
         return false;
   }
   return true;
}

/* Write `set` using the program's cached update template for `type`,
 * sourcing all descriptor data directly from the zink_context.
 */
void
zink_descriptor_set_update_lazy(struct zink_context *ctx, struct zink_program *pg, enum zink_descriptor_type type, VkDescriptorSet set)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   screen->vk.UpdateDescriptorSetWithTemplate(screen->dev, set, pg->dd->layouts[type + 1]->desc_template, ctx);
}

/* Main per-draw/dispatch entry point: determine which descriptor sets are
 * dirty (program change, batch change, or explicit invalidation), allocate
 * and template-update them, and bind them — push set at index 0, typed
 * sets at index type+1.
 */
void
zink_descriptors_update_lazy(struct zink_context *ctx, bool is_compute)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   struct zink_batch *batch = &ctx->batch;
   struct zink_batch_state *bs = ctx->batch.state;
   struct zink_program *pg = is_compute ? &ctx->curr_compute->base : &ctx->curr_program->base;

   bool batch_changed = !bdd_lazy(bs)->pg[is_compute];
   if (batch_changed) {
      /* update all sets and bind null sets */
      dd_lazy(ctx)->state_changed[is_compute] = pg->dd->binding_usage;
      dd_lazy(ctx)->push_state_changed[is_compute] = !!pg->dd->push_usage;
   }

   if (pg != bdd_lazy(bs)->pg[is_compute]) {
      /* if we don't already know that we have to update all sets,
       * check to see if any dsls changed
       *
       * also always update the dsl pointers on program change
       */
      for (unsigned i = 0; i < ARRAY_SIZE(bdd_lazy(bs)->dsl[is_compute]); i++) {
         /* push set is already detected, start at 1 */
         if (bdd_lazy(bs)->dsl[is_compute][i] != pg->dsl[i + 1])
            dd_lazy(ctx)->state_changed[is_compute] |= BITFIELD_BIT(i);
         bdd_lazy(bs)->dsl[is_compute][i] = pg->dsl[i + 1];
      }
      dd_lazy(ctx)->push_state_changed[is_compute] |= bdd_lazy(bs)->push_usage[is_compute] != pg->dd->push_usage;
      bdd_lazy(bs)->push_usage[is_compute] = pg->dd->push_usage;
   }
   bdd_lazy(bs)->pg[is_compute] = pg;

   VkDescriptorSet desc_sets[5];
   uint8_t changed_sets = pg->dd->binding_usage & dd_lazy(ctx)->state_changed[is_compute];
   bool need_push = pg->dd->push_usage &&
                    (dd_lazy(ctx)->push_state_changed[is_compute] || batch_changed);
   if (!populate_sets(ctx, pg, &changed_sets, need_push, desc_sets)) {
      debug_printf("ZINK: couldn't get descriptor sets!\n");
      return;
   }
   if (ctx->batch.state != bs) {
      /* recheck: populate may have overflowed the pool and triggered a flush */
      batch_changed = true;
      dd_lazy(ctx)->state_changed[is_compute] = pg->dd->binding_usage;
      changed_sets = pg->dd->binding_usage & dd_lazy(ctx)->state_changed[is_compute];
      dd_lazy(ctx)->push_state_changed[is_compute] = !!pg->dd->push_usage;
   }
   bs = ctx->batch.state;

   if (pg->dd->binding_usage && changed_sets) {
      u_foreach_bit(type, changed_sets) {
         if (pg->dd->layout_key[type])
            screen->vk.UpdateDescriptorSetWithTemplate(screen->dev, desc_sets[type + 1], pg->dd->layouts[type + 1]->desc_template, ctx);
         assert(type + 1 < pg->num_dsl);
         vkCmdBindDescriptorSets(bs->cmdbuf,
                                 is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
                                 /* set index incremented by 1 to account for push set */
                                 pg->layout, type + 1, 1, &desc_sets[type + 1],
                                 0, NULL);
      }
      dd_lazy(ctx)->state_changed[is_compute] = false;
   }

   if (pg->dd->push_usage && dd_lazy(ctx)->push_state_changed[is_compute]) {
      if (screen->info.have_KHR_push_descriptor)
         screen->vk.CmdPushDescriptorSetWithTemplateKHR(batch->state->cmdbuf, pg->dd->push_template,
                                                        pg->layout, 0, ctx);
      else {
         /* fallback: write + bind a real set allocated from the push pool */
         assert(desc_sets[0]);
         screen->vk.UpdateDescriptorSetWithTemplate(screen->dev, desc_sets[0], pg->dd->push_template, ctx);
         vkCmdBindDescriptorSets(batch->state->cmdbuf,
                                 is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
                                 pg->layout, 0, 1, &desc_sets[0],
                                 0, NULL);
      }
      dd_lazy(ctx)->push_state_changed[is_compute] = false;
   } else if (dd_lazy(ctx)->push_state_changed[is_compute]) {
      /* program has no push usage but set 0 still needs a (dummy) binding */
      vkCmdBindDescriptorSets(bs->cmdbuf,
                              is_compute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
                              pg->layout, 0, 1, &ctx->dd->dummy_set,
                              0, NULL);
      dd_lazy(ctx)->push_state_changed[is_compute] = false;
   }
   /* set again in case of flushing */
   bdd_lazy(bs)->pg[is_compute] = pg;
   ctx->dd->pg[is_compute] = pg;
}

/* Mark descriptor state dirty for a shader stage.  UBO slot 0 lives in the
 * push set, so invalidating it flags push state instead of a typed set.
 */
void
zink_context_invalidate_descriptor_state_lazy(struct zink_context *ctx, enum pipe_shader_type shader, enum zink_descriptor_type type, unsigned start, unsigned count)
{
   if (type == ZINK_DESCRIPTOR_TYPE_UBO && !start)
      dd_lazy(ctx)->push_state_changed[shader == PIPE_SHADER_COMPUTE] = true;
   else
      dd_lazy(ctx)->state_changed[shader == PIPE_SHADER_COMPUTE] |= BITFIELD_BIT(type);
}

/* Destroy all descriptor pools owned by a batch state and free its dd.
 * Pool hash tables and pool structs are ralloc children of bs->dd/bs, so
 * only the Vulkan handles need explicit destruction here.
 */
void
zink_batch_descriptor_deinit_lazy(struct zink_screen *screen, struct zink_batch_state *bs)
{
   if (!bs->dd)
      return;
   if (screen->info.have_KHR_descriptor_update_template) {
      for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
         hash_table_foreach(&bdd_lazy(bs)->pools[i], entry) {
            struct zink_descriptor_pool *pool = (void*)entry->data;
            vkDestroyDescriptorPool(screen->dev, pool->pool, NULL);
         }
      }
      if (bdd_lazy(bs)->push_pool[0])
         vkDestroyDescriptorPool(screen->dev, bdd_lazy(bs)->push_pool[0]->pool, NULL);
      if (bdd_lazy(bs)->push_pool[1])
         vkDestroyDescriptorPool(screen->dev, bdd_lazy(bs)->push_pool[1]->pool, NULL);
   }
   ralloc_free(bs->dd);
}

/* Reset a batch state for reuse: rewind pools whose layouts are still
 * referenced by live programs (use_count), destroy the rest, and clear the
 * cached program pointers so the next update treats the batch as changed.
 */
void
zink_batch_descriptor_reset_lazy(struct zink_screen *screen, struct zink_batch_state *bs)
{
   if (!screen->info.have_KHR_descriptor_update_template)
      return;
   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
      hash_table_foreach(&bdd_lazy(bs)->pools[i], entry) {
         const struct zink_descriptor_layout_key *key = entry->key;
         struct zink_descriptor_pool *pool = (void*)entry->data;
         if (key->use_count)
            pool->set_idx = 0;
         else {
            vkDestroyDescriptorPool(screen->dev, pool->pool, NULL);
            ralloc_free(pool);
            _mesa_hash_table_remove(&bdd_lazy(bs)->pools[i], entry);
         }
      }
   }
   for (unsigned i = 0; i < 2; i++) {
      bdd_lazy(bs)->pg[i] = NULL;
      if (bdd_lazy(bs)->push_pool[i])
         bdd_lazy(bs)->push_pool[i]->set_idx = 0;
   }
}

/* Allocate per-batch descriptor data: one pool hash table per descriptor
 * type and, when push descriptors are unavailable, the fallback UBO pools
 * for the gfx [0] and compute [1] push sets.
 */
bool
zink_batch_descriptor_init_lazy(struct zink_screen *screen, struct zink_batch_state *bs)
{
   bs->dd = (void*)rzalloc(bs, struct zink_batch_descriptor_data_lazy);
   if (!bs->dd)
      return false;
   if (!screen->info.have_KHR_descriptor_update_template)
      return true;
   for (unsigned i = 0; i < ZINK_DESCRIPTOR_TYPES; i++) {
      if (!_mesa_hash_table_init(&bdd_lazy(bs)->pools[i], bs->dd, _mesa_hash_pointer, _mesa_key_pointer_equal))
         return false;
   }
   if (!screen->info.have_KHR_push_descriptor) {
      VkDescriptorPoolSize sizes;
      sizes.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
      /* gfx push set holds one UBO per shader stage; compute just one */
      sizes.descriptorCount = ZINK_SHADER_COUNT * ZINK_DEFAULT_MAX_DESCS;
      bdd_lazy(bs)->push_pool[0] = rzalloc(bs, struct zink_descriptor_pool);
      bdd_lazy(bs)->push_pool[0]->pool = create_pool(screen, 1, &sizes, 0);
      sizes.descriptorCount = ZINK_DEFAULT_MAX_DESCS;
      bdd_lazy(bs)->push_pool[1] = rzalloc(bs, struct zink_descriptor_pool);
      bdd_lazy(bs)->push_pool[1]->pool = create_pool(screen, 1, &sizes, 0);
   }
   return true;
}

/* Create the context-wide lazy descriptor data: the per-stage push-set
 * template entries, the shared push/dummy set layouts, and the dummy
 * (null) descriptor set used for programs with no bindings of a type.
 */
bool
zink_descriptors_init_lazy(struct zink_context *ctx)
{
   struct zink_screen *screen = zink_screen(ctx->base.screen);
   ctx->dd = (void*)rzalloc(ctx, struct zink_descriptor_data_lazy);
   if (!ctx->dd)
      return false;

   if (screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_NOTEMPLATES)
      printf("ZINK: CACHED/NOTEMPLATES DESCRIPTORS\n");
   else if (screen->info.have_KHR_descriptor_update_template) {
      /* one fixed UBO entry per stage, binding == the Vulkan stage index */
      for (unsigned i = 0; i < PIPE_SHADER_TYPES; i++) {
         VkDescriptorUpdateTemplateEntry *entry = &dd_lazy(ctx)->push_entries[i];
         entry->dstBinding = tgsi_processor_to_shader_stage(i);
         entry->descriptorCount = 1;
         entry->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
         entry->offset = offsetof(struct zink_context, di.ubos[i][0]);
         entry->stride = sizeof(VkDescriptorBufferInfo);
      }
      if (screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY)
         printf("ZINK: USING LAZY DESCRIPTORS\n");
   }
   struct zink_descriptor_layout_key *layout_key;
   if (!zink_descriptor_util_push_layouts_get(ctx, ctx->dd->push_dsl, ctx->dd->push_layout_keys))
      return false;

   ctx->dd->dummy_dsl = zink_descriptor_util_layout_get(ctx, 0, NULL, 0, &layout_key);
   if (!ctx->dd->dummy_dsl)
      return false;
   VkDescriptorPoolSize null_size = {VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 1};
   ctx->dd->dummy_pool = create_pool(screen, 1, &null_size, 0);
   zink_descriptor_util_alloc_sets(screen, ctx->dd->dummy_dsl->layout,
                                   ctx->dd->dummy_pool, &ctx->dd->dummy_set, 1);
   zink_descriptor_util_init_null_set(ctx, ctx->dd->dummy_set);
   return true;
}

/* Tear down context-wide lazy descriptor state.  The push dsls are only
 * owned here (and thus destroyed) in LAZY mode with push descriptors;
 * otherwise they are managed by the shared descriptor-util layout cache.
 */
void
zink_descriptors_deinit_lazy(struct zink_context *ctx)
{
   if (ctx->dd) {
      struct zink_screen *screen = zink_screen(ctx->base.screen);
      if (ctx->dd->dummy_pool)
         vkDestroyDescriptorPool(screen->dev, ctx->dd->dummy_pool, NULL);
      if (screen->descriptor_mode == ZINK_DESCRIPTOR_MODE_LAZY &&
          screen->info.have_KHR_push_descriptor) {
         vkDestroyDescriptorSetLayout(screen->dev, ctx->dd->push_dsl[0]->layout, NULL);
         vkDestroyDescriptorSetLayout(screen->dev, ctx->dd->push_dsl[1]->layout, NULL);
      }
   }
   ralloc_free(ctx->dd);
}