GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/virtio/vulkan/vn_command_buffer.c
/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_command_buffer.h"

#include "venus-protocol/vn_protocol_driver_command_buffer.h"
#include "venus-protocol/vn_protocol_driver_command_pool.h"

#include "vn_device.h"
#include "vn_image.h"
#include "vn_render_pass.h"

static bool
vn_image_memory_barrier_has_present_src(
   const VkImageMemoryBarrier *img_barriers, uint32_t count)
{
   for (uint32_t i = 0; i < count; i++) {
      if (img_barriers[i].oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ||
          img_barriers[i].newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         return true;
   }
   return false;
}

static VkImageMemoryBarrier *
vn_cmd_get_image_memory_barriers(struct vn_command_buffer *cmd,
                                 uint32_t count)
{
   /* avoid shrinking in case of an inefficient reallocation implementation */
   if (count > cmd->builder.image_barrier_count) {
      size_t size = sizeof(VkImageMemoryBarrier) * count;
      VkImageMemoryBarrier *img_barriers =
         vk_realloc(&cmd->allocator, cmd->builder.image_barriers, size,
                    VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (!img_barriers)
         return NULL;

      /* update upon successful reallocation */
      cmd->builder.image_barrier_count = count;
      cmd->builder.image_barriers = img_barriers;
   }

   return cmd->builder.image_barriers;
}
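
/* A minimal usage sketch (illustrative, not part of the upstream file;
 * assumes a valid vn_command_buffer):
 *
 *    VkImageMemoryBarrier *a = vn_cmd_get_image_memory_barriers(cmd, 8);
 *    VkImageMemoryBarrier *b = vn_cmd_get_image_memory_barriers(cmd, 4);
 *    // b == a: the second request fits in the existing storage, so no
 *    // reallocation (and no shrinking) happens
 *
 * The returned array is scratch storage owned by the command buffer and is
 * freed in vn_FreeCommandBuffers.  A NULL return means reallocation failed;
 * callers mark the command buffer invalid in that case.
 */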

/* About VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, the spec says
 *
 *    VK_IMAGE_LAYOUT_PRESENT_SRC_KHR must only be used for presenting a
 *    presentable image for display. A swapchain's image must be transitioned
 *    to this layout before calling vkQueuePresentKHR, and must be
 *    transitioned away from this layout after calling vkAcquireNextImageKHR.
 *
 * That allows us to treat the layout internally as
 *
 *  - VK_IMAGE_LAYOUT_GENERAL
 *  - VK_QUEUE_FAMILY_FOREIGN_EXT has the ownership, if the image is not a
 *    prime blit source
 *
 * while staying performant.
 *
 * About queue family ownerships, the spec says
 *
 *    A queue family can take ownership of an image subresource or buffer
 *    range of a resource created with VK_SHARING_MODE_EXCLUSIVE, without an
 *    ownership transfer, in the same way as for a resource that was just
 *    created; however, taking ownership in this way has the effect that the
 *    contents of the image subresource or buffer range are undefined.
 *
 * It is unclear whether that is applicable to external resources, which
 * supposedly have the same semantics
 *
 *    Binding a resource to a memory object shared between multiple Vulkan
 *    instances or other APIs does not change the ownership of the underlying
 *    memory. The first entity to access the resource implicitly acquires
 *    ownership. Accessing a resource backed by memory that is owned by a
 *    particular instance or API has the same semantics as accessing a
 *    VK_SHARING_MODE_EXCLUSIVE resource[...]
 *
 * We should get the spec clarified, or get rid of this completely broken code
 * (TODO).
 *
 * Assuming a queue family can acquire the ownership implicitly when the
 * contents are not needed, we do not need to worry about
 * VK_IMAGE_LAYOUT_UNDEFINED.  We can use VK_IMAGE_LAYOUT_PRESENT_SRC_KHR as
 * the sole signal to trigger queue family ownership transfers.
 *
 * When the image has VK_SHARING_MODE_CONCURRENT, we can, and are required to,
 * use VK_QUEUE_FAMILY_IGNORED as the other queue family whether we are
 * transitioning to or from VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.
 *
 * When the image has VK_SHARING_MODE_EXCLUSIVE, we have to work out who the
 * other queue family is.  It is easier when the barrier does not also define
 * a queue family ownership transfer (i.e., srcQueueFamilyIndex equals
 * dstQueueFamilyIndex).  The other queue family must be the queue family the
 * command buffer was allocated for.
 *
 * When the barrier also defines a queue family ownership transfer, it is
 * submitted both to the source queue family to release the ownership and to
 * the destination queue family to acquire the ownership.  Depending on
 * whether the barrier transitions to or from VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
 * we are interested only in the ownership release or acquire, respectively,
 * and should be careful to avoid double releases/acquires.
 *
 * I haven't followed all transition paths mentally to verify the
 * correctness.  I likely also violate some VUs or miss some cases below.
 * They are hopefully fixable and are left as TODOs.
 */
static void
vn_cmd_fix_image_memory_barrier(const struct vn_command_buffer *cmd,
                                const VkImageMemoryBarrier *src_barrier,
                                VkImageMemoryBarrier *out_barrier)
{
   const struct vn_image *img = vn_image_from_handle(src_barrier->image);

   *out_barrier = *src_barrier;

   /* no fix needed */
   if (out_barrier->oldLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR &&
       out_barrier->newLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   assert(img->is_wsi);

   if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   /* prime blit src or no layout transition */
   if (img->prime_blit_buffer != VK_NULL_HANDLE ||
       out_barrier->oldLayout == out_barrier->newLayout) {
      if (out_barrier->oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         out_barrier->oldLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
      if (out_barrier->newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         out_barrier->newLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
      return;
   }

   if (out_barrier->oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
      out_barrier->oldLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      /* no availability operation needed */
      out_barrier->srcAccessMask = 0;

      const uint32_t dst_qfi = out_barrier->dstQueueFamilyIndex;
      if (img->sharing_mode == VK_SHARING_MODE_CONCURRENT) {
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
      } else if (dst_qfi == out_barrier->srcQueueFamilyIndex ||
                 dst_qfi == cmd->queue_family_index) {
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
         out_barrier->dstQueueFamilyIndex = cmd->queue_family_index;
      } else {
         /* The barrier also defines a queue family ownership transfer, and
          * this is the one that gets submitted to the source queue family to
          * release the ownership.  Skip both the transfer and the
          * transition.
          */
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->newLayout = out_barrier->oldLayout;
      }
   } else {
      out_barrier->newLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      /* no visibility operation needed */
      out_barrier->dstAccessMask = 0;

      const uint32_t src_qfi = out_barrier->srcQueueFamilyIndex;
      if (img->sharing_mode == VK_SHARING_MODE_CONCURRENT) {
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
      } else if (src_qfi == out_barrier->dstQueueFamilyIndex ||
                 src_qfi == cmd->queue_family_index) {
         out_barrier->srcQueueFamilyIndex = cmd->queue_family_index;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
      } else {
         /* The barrier also defines a queue family ownership transfer, and
          * this is the one that gets submitted to the destination queue
          * family to acquire the ownership.  Skip both the transfer and the
          * transition.
          */
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->oldLayout = out_barrier->newLayout;
      }
   }
}
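
/* A worked example of the fix-up above (illustrative; the concrete values
 * assume VN_PRESENT_SRC_INTERNAL_LAYOUT is VK_IMAGE_LAYOUT_GENERAL, per the
 * comment block above): an app transitions a VK_SHARING_MODE_EXCLUSIVE
 * swapchain image away from the present layout after vkAcquireNextImageKHR
 * with
 *
 *    .oldLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
 *    .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
 *    .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
 *    .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
 *
 * which this function rewrites into an acquire from the foreign queue
 * family:
 *
 *    .oldLayout = VK_IMAGE_LAYOUT_GENERAL,
 *    .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
 *    .srcAccessMask = 0,
 *    .srcQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT,
 *    .dstQueueFamilyIndex = cmd->queue_family_index,
 */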

static const VkImageMemoryBarrier *
vn_cmd_wait_events_fix_image_memory_barriers(
   struct vn_command_buffer *cmd,
   const VkImageMemoryBarrier *src_barriers,
   uint32_t count,
   uint32_t *out_transfer_count)
{
   *out_transfer_count = 0;

   if (cmd->builder.render_pass ||
       !vn_image_memory_barrier_has_present_src(src_barriers, count))
      return src_barriers;

   VkImageMemoryBarrier *img_barriers =
      vn_cmd_get_image_memory_barriers(cmd, count * 2);
   if (!img_barriers) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return src_barriers;
   }

   /* vkCmdWaitEvents cannot be used for queue family ownership transfers.
    * Nothing appears to be said about the submission order of image memory
    * barriers in the same array.  We take the liberty of moving queue family
    * ownership transfers to the tail.
    */
   VkImageMemoryBarrier *transfer_barriers = img_barriers + count;
   uint32_t transfer_count = 0;
   uint32_t valid_count = 0;
   for (uint32_t i = 0; i < count; i++) {
      VkImageMemoryBarrier *img_barrier = &img_barriers[valid_count];
      vn_cmd_fix_image_memory_barrier(cmd, &src_barriers[i], img_barrier);

      if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
         valid_count++;
         continue;
      }

      if (img_barrier->srcQueueFamilyIndex ==
          img_barrier->dstQueueFamilyIndex) {
         valid_count++;
      } else {
         transfer_barriers[transfer_count++] = *img_barrier;
      }
   }

   assert(valid_count + transfer_count == count);
   if (transfer_count) {
      /* copy back to the tail */
      memcpy(&img_barriers[valid_count], transfer_barriers,
             sizeof(*transfer_barriers) * transfer_count);
      *out_transfer_count = transfer_count;
   }

   return img_barriers;
}
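
/* A sketch of the array this produces for count == 4 when barriers 1 and 3
 * turn out to be queue family ownership transfers (illustrative indices, not
 * from the original file):
 *
 *    img_barriers: [ fixed 0 ][ fixed 2 ][ xfer 1 ][ xfer 3 ]
 *                  |<-- valid_count == 2 -->|<-- transfer_count == 2 -->|
 *
 * vn_CmdWaitEvents then passes only the first valid_count barriers to
 * vkCmdWaitEvents and encodes the tail as a separate vkCmdPipelineBarrier,
 * since ownership transfers are not allowed in vkCmdWaitEvents.
 */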

static const VkImageMemoryBarrier *
vn_cmd_pipeline_barrier_fix_image_memory_barriers(
   struct vn_command_buffer *cmd,
   const VkImageMemoryBarrier *src_barriers,
   uint32_t count)
{
   if (cmd->builder.render_pass ||
       !vn_image_memory_barrier_has_present_src(src_barriers, count))
      return src_barriers;

   VkImageMemoryBarrier *img_barriers =
      vn_cmd_get_image_memory_barriers(cmd, count);
   if (!img_barriers) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return src_barriers;
   }

   for (uint32_t i = 0; i < count; i++) {
      vn_cmd_fix_image_memory_barrier(cmd, &src_barriers[i],
                                      &img_barriers[i]);
   }

   return img_barriers;
}

static void
vn_cmd_encode_memory_barriers(struct vn_command_buffer *cmd,
                              VkPipelineStageFlags src_stage_mask,
                              VkPipelineStageFlags dst_stage_mask,
                              uint32_t buf_barrier_count,
                              const VkBufferMemoryBarrier *buf_barriers,
                              uint32_t img_barrier_count,
                              const VkImageMemoryBarrier *img_barriers)
{
   const VkCommandBuffer cmd_handle = vn_command_buffer_to_handle(cmd);

   const size_t cmd_size = vn_sizeof_vkCmdPipelineBarrier(
      cmd_handle, src_stage_mask, dst_stage_mask, 0, 0, NULL,
      buf_barrier_count, buf_barriers, img_barrier_count, img_barriers);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   vn_encode_vkCmdPipelineBarrier(
      &cmd->cs, 0, cmd_handle, src_stage_mask, dst_stage_mask, 0, 0, NULL,
      buf_barrier_count, buf_barriers, img_barrier_count, img_barriers);
}

static void
vn_present_src_attachment_to_image_memory_barrier(
   const struct vn_image *img,
   const struct vn_present_src_attachment *att,
   VkImageMemoryBarrier *img_barrier)
{
   *img_barrier = (VkImageMemoryBarrier){
      .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
      .srcAccessMask = att->src_access_mask,
      .dstAccessMask = att->dst_access_mask,
      .oldLayout = att->acquire ? VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
                                : VN_PRESENT_SRC_INTERNAL_LAYOUT,
      .newLayout = att->acquire ? VN_PRESENT_SRC_INTERNAL_LAYOUT
                                : VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
      .image = vn_image_to_handle((struct vn_image *)img),
      .subresourceRange = {
         .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
         .levelCount = 1,
         .layerCount = 1,
      },
   };
}

static void
vn_cmd_transfer_present_src_images(
   struct vn_command_buffer *cmd,
   const struct vn_image *const *images,
   const struct vn_present_src_attachment *atts,
   uint32_t count)
{
   VkImageMemoryBarrier *img_barriers =
      vn_cmd_get_image_memory_barriers(cmd, count);
   if (!img_barriers) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   VkPipelineStageFlags src_stage_mask = 0;
   VkPipelineStageFlags dst_stage_mask = 0;
   for (uint32_t i = 0; i < count; i++) {
      src_stage_mask |= atts[i].src_stage_mask;
      dst_stage_mask |= atts[i].dst_stage_mask;

      vn_present_src_attachment_to_image_memory_barrier(images[i], &atts[i],
                                                        &img_barriers[i]);
      vn_cmd_fix_image_memory_barrier(cmd, &img_barriers[i],
                                      &img_barriers[i]);
   }

   if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   vn_cmd_encode_memory_barriers(cmd, src_stage_mask, dst_stage_mask, 0, NULL,
                                 count, img_barriers);
}

static void
vn_cmd_begin_render_pass(struct vn_command_buffer *cmd,
                         const struct vn_render_pass *pass,
                         const struct vn_framebuffer *fb,
                         const VkRenderPassBeginInfo *begin_info)
{
   cmd->builder.render_pass = pass;
   cmd->builder.framebuffer = fb;

   if (!pass->present_src_count ||
       cmd->level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)
      return;

   /* find fb attachments */
   const VkImageView *views;
   ASSERTED uint32_t view_count;
   if (fb->image_view_count) {
      views = fb->image_views;
      view_count = fb->image_view_count;
   } else {
      const VkRenderPassAttachmentBeginInfo *imageless_info =
         vk_find_struct_const(begin_info->pNext,
                              RENDER_PASS_ATTACHMENT_BEGIN_INFO);
      assert(imageless_info);
      views = imageless_info->pAttachments;
      view_count = imageless_info->attachmentCount;
   }

   const struct vn_image **images =
      vk_alloc(&cmd->allocator, sizeof(*images) * pass->present_src_count,
               VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!images) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   for (uint32_t i = 0; i < pass->present_src_count; i++) {
      const uint32_t index = pass->present_src_attachments[i].index;
      assert(index < view_count);
      images[i] = vn_image_view_from_handle(views[index])->image;
   }

   if (pass->acquire_count) {
      vn_cmd_transfer_present_src_images(
         cmd, images, pass->present_src_attachments, pass->acquire_count);
   }

   cmd->builder.present_src_images = images;
}

static void
vn_cmd_end_render_pass(struct vn_command_buffer *cmd)
{
   const struct vn_render_pass *pass = cmd->builder.render_pass;

   cmd->builder.render_pass = NULL;
   cmd->builder.framebuffer = NULL;

   if (!pass->present_src_count || !cmd->builder.present_src_images)
      return;

   const struct vn_image **images = cmd->builder.present_src_images;
   cmd->builder.present_src_images = NULL;

   if (pass->release_count) {
      vn_cmd_transfer_present_src_images(
         cmd, images + pass->acquire_count,
         pass->present_src_attachments + pass->acquire_count,
         pass->release_count);
   }

   vk_free(&cmd->allocator, images);
}
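
/* The begin/end helpers above assume pass->present_src_attachments is laid
 * out acquires first: entries [0, acquire_count) transition
 * VK_IMAGE_LAYOUT_PRESENT_SRC_KHR to the internal layout when the render
 * pass begins, and entries [acquire_count, acquire_count + release_count)
 * transition back when it ends.
 */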

/* command pool commands */

VkResult
vn_CreateCommandPool(VkDevice device,
                     const VkCommandPoolCreateInfo *pCreateInfo,
                     const VkAllocationCallbacks *pAllocator,
                     VkCommandPool *pCommandPool)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_command_pool *pool =
      vk_zalloc(alloc, sizeof(*pool), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&pool->base, VK_OBJECT_TYPE_COMMAND_POOL, &dev->base);

   pool->allocator = *alloc;
   pool->queue_family_index = pCreateInfo->queueFamilyIndex;
   list_inithead(&pool->command_buffers);

   VkCommandPool pool_handle = vn_command_pool_to_handle(pool);
   vn_async_vkCreateCommandPool(dev->instance, device, pCreateInfo, NULL,
                                &pool_handle);

   *pCommandPool = pool_handle;

   return VK_SUCCESS;
}

void
vn_DestroyCommandPool(VkDevice device,
                      VkCommandPool commandPool,
                      const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);
   const VkAllocationCallbacks *alloc;

   if (!pool)
      return;

   alloc = pAllocator ? pAllocator : &pool->allocator;

   /* We must emit vkDestroyCommandPool before freeing the command buffers in
    * pool->command_buffers.  Otherwise, another thread might reuse their
    * object ids while they still refer to the command buffers in the
    * renderer.
    */
   vn_async_vkDestroyCommandPool(dev->instance, device, commandPool, NULL);

   list_for_each_entry_safe(struct vn_command_buffer, cmd,
                            &pool->command_buffers, head) {
      vn_cs_encoder_fini(&cmd->cs);
      vn_object_base_fini(&cmd->base);
      vk_free(alloc, cmd);
   }

   vn_object_base_fini(&pool->base);
   vk_free(alloc, pool);
}

VkResult
vn_ResetCommandPool(VkDevice device,
                    VkCommandPool commandPool,
                    VkCommandPoolResetFlags flags)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);

   list_for_each_entry_safe(struct vn_command_buffer, cmd,
                            &pool->command_buffers, head) {
      vn_cs_encoder_reset(&cmd->cs);
      cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;
   }

   vn_async_vkResetCommandPool(dev->instance, device, commandPool, flags);

   return VK_SUCCESS;
}

void
vn_TrimCommandPool(VkDevice device,
                   VkCommandPool commandPool,
                   VkCommandPoolTrimFlags flags)
{
   struct vn_device *dev = vn_device_from_handle(device);

   vn_async_vkTrimCommandPool(dev->instance, device, commandPool, flags);
}

/* command buffer commands */

VkResult
vn_AllocateCommandBuffers(VkDevice device,
                          const VkCommandBufferAllocateInfo *pAllocateInfo,
                          VkCommandBuffer *pCommandBuffers)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool =
      vn_command_pool_from_handle(pAllocateInfo->commandPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
      struct vn_command_buffer *cmd =
         vk_zalloc(alloc, sizeof(*cmd), VN_DEFAULT_ALIGN,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (!cmd) {
         for (uint32_t j = 0; j < i; j++) {
            cmd = vn_command_buffer_from_handle(pCommandBuffers[j]);
            vn_cs_encoder_fini(&cmd->cs);
            list_del(&cmd->head);
            vk_free(alloc, cmd);
         }
         memset(pCommandBuffers, 0,
                sizeof(*pCommandBuffers) * pAllocateInfo->commandBufferCount);
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
      }

      vn_object_base_init(&cmd->base, VK_OBJECT_TYPE_COMMAND_BUFFER,
                          &dev->base);
      cmd->device = dev;
      cmd->allocator = pool->allocator;
      cmd->level = pAllocateInfo->level;
      cmd->queue_family_index = pool->queue_family_index;

      list_addtail(&cmd->head, &pool->command_buffers);

      cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;
      vn_cs_encoder_init_indirect(&cmd->cs, dev->instance, 16 * 1024);

      VkCommandBuffer cmd_handle = vn_command_buffer_to_handle(cmd);
      pCommandBuffers[i] = cmd_handle;
   }

   vn_async_vkAllocateCommandBuffers(dev->instance, device, pAllocateInfo,
                                     pCommandBuffers);

   return VK_SUCCESS;
}

void
vn_FreeCommandBuffers(VkDevice device,
                      VkCommandPool commandPool,
                      uint32_t commandBufferCount,
                      const VkCommandBuffer *pCommandBuffers)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   vn_async_vkFreeCommandBuffers(dev->instance, device, commandPool,
                                 commandBufferCount, pCommandBuffers);

   for (uint32_t i = 0; i < commandBufferCount; i++) {
      struct vn_command_buffer *cmd =
         vn_command_buffer_from_handle(pCommandBuffers[i]);

      if (!cmd)
         continue;

      if (cmd->builder.image_barriers)
         vk_free(alloc, cmd->builder.image_barriers);

      vn_cs_encoder_fini(&cmd->cs);
      list_del(&cmd->head);

      vn_object_base_fini(&cmd->base);
      vk_free(alloc, cmd);
   }
}

VkResult
vn_ResetCommandBuffer(VkCommandBuffer commandBuffer,
                      VkCommandBufferResetFlags flags)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);

   vn_cs_encoder_reset(&cmd->cs);
   cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;

   vn_async_vkResetCommandBuffer(cmd->device->instance, commandBuffer, flags);

   return VK_SUCCESS;
}

VkResult
vn_BeginCommandBuffer(VkCommandBuffer commandBuffer,
                      const VkCommandBufferBeginInfo *pBeginInfo)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   struct vn_instance *instance = cmd->device->instance;
   size_t cmd_size;

   vn_cs_encoder_reset(&cmd->cs);

   VkCommandBufferBeginInfo local_begin_info;
   if (pBeginInfo->pInheritanceInfo &&
       cmd->level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
      /* pInheritanceInfo is ignored for primary command buffers */
      local_begin_info = *pBeginInfo;
      local_begin_info.pInheritanceInfo = NULL;
      pBeginInfo = &local_begin_info;
   }

   cmd_size = vn_sizeof_vkBeginCommandBuffer(commandBuffer, pBeginInfo);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   vn_encode_vkBeginCommandBuffer(&cmd->cs, 0, commandBuffer, pBeginInfo);

   cmd->state = VN_COMMAND_BUFFER_STATE_RECORDING;

   if (cmd->level == VK_COMMAND_BUFFER_LEVEL_SECONDARY &&
       (pBeginInfo->flags &
        VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
      const VkCommandBufferInheritanceInfo *inheritance_info =
         pBeginInfo->pInheritanceInfo;
      vn_cmd_begin_render_pass(
         cmd, vn_render_pass_from_handle(inheritance_info->renderPass),
         vn_framebuffer_from_handle(inheritance_info->framebuffer), NULL);
   }

   return VK_SUCCESS;
}

VkResult
vn_EndCommandBuffer(VkCommandBuffer commandBuffer)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   struct vn_instance *instance = cmd->device->instance;
   size_t cmd_size;

   if (cmd->state != VN_COMMAND_BUFFER_STATE_RECORDING)
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   cmd_size = vn_sizeof_vkEndCommandBuffer(commandBuffer);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   vn_encode_vkEndCommandBuffer(&cmd->cs, 0, commandBuffer);
   vn_cs_encoder_commit(&cmd->cs);

   if (vn_cs_encoder_get_fatal(&cmd->cs)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   vn_instance_wait_roundtrip(instance, cmd->cs.current_buffer_roundtrip);
   VkResult result = vn_instance_ring_submit(instance, &cmd->cs);
   if (result != VK_SUCCESS) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return vn_error(instance, result);
   }

   vn_cs_encoder_reset(&cmd->cs);

   cmd->state = VN_COMMAND_BUFFER_STATE_EXECUTABLE;

   return VK_SUCCESS;
}
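
/* The vn_Cmd* entrypoints below all follow the same pattern: compute the
 * serialized size with vn_sizeof_vkCmd*(), reserve that much space in the
 * command buffer's encoder, then encode the call with vn_encode_vkCmd*() for
 * later submission to the renderer.  When the reservation fails, the command
 * is dropped here; the failure is expected to surface through the encoder
 * (see the vn_cs_encoder_get_fatal() check in vn_EndCommandBuffer).
 */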

void
vn_CmdBindPipeline(VkCommandBuffer commandBuffer,
                   VkPipelineBindPoint pipelineBindPoint,
                   VkPipeline pipeline)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size =
      vn_sizeof_vkCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdBindPipeline(&cmd->cs, 0, commandBuffer, pipelineBindPoint,
                               pipeline);
}

void
vn_CmdSetViewport(VkCommandBuffer commandBuffer,
                  uint32_t firstViewport,
                  uint32_t viewportCount,
                  const VkViewport *pViewports)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdSetViewport(commandBuffer, firstViewport,
                                         viewportCount, pViewports);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdSetViewport(&cmd->cs, 0, commandBuffer, firstViewport,
                              viewportCount, pViewports);
}

void
vn_CmdSetScissor(VkCommandBuffer commandBuffer,
                 uint32_t firstScissor,
                 uint32_t scissorCount,
                 const VkRect2D *pScissors)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdSetScissor(commandBuffer, firstScissor,
                                        scissorCount, pScissors);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdSetScissor(&cmd->cs, 0, commandBuffer, firstScissor,
                             scissorCount, pScissors);
}

void
vn_CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdSetLineWidth(commandBuffer, lineWidth);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdSetLineWidth(&cmd->cs, 0, commandBuffer, lineWidth);
}

void
vn_CmdSetDepthBias(VkCommandBuffer commandBuffer,
                   float depthBiasConstantFactor,
                   float depthBiasClamp,
                   float depthBiasSlopeFactor)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size =
      vn_sizeof_vkCmdSetDepthBias(commandBuffer, depthBiasConstantFactor,
                                  depthBiasClamp, depthBiasSlopeFactor);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdSetDepthBias(&cmd->cs, 0, commandBuffer,
                               depthBiasConstantFactor, depthBiasClamp,
                               depthBiasSlopeFactor);
}

void
vn_CmdSetBlendConstants(VkCommandBuffer commandBuffer,
                        const float blendConstants[4])
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdSetBlendConstants(commandBuffer, blendConstants);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdSetBlendConstants(&cmd->cs, 0, commandBuffer,
                                    blendConstants);
}

void
vn_CmdSetDepthBounds(VkCommandBuffer commandBuffer,
                     float minDepthBounds,
                     float maxDepthBounds)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdSetDepthBounds(commandBuffer, minDepthBounds,
                                            maxDepthBounds);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdSetDepthBounds(&cmd->cs, 0, commandBuffer, minDepthBounds,
                                 maxDepthBounds);
}

void
vn_CmdSetStencilCompareMask(VkCommandBuffer commandBuffer,
                            VkStencilFaceFlags faceMask,
                            uint32_t compareMask)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdSetStencilCompareMask(commandBuffer, faceMask,
                                                   compareMask);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdSetStencilCompareMask(&cmd->cs, 0, commandBuffer, faceMask,
                                        compareMask);
}

void
vn_CmdSetStencilWriteMask(VkCommandBuffer commandBuffer,
                          VkStencilFaceFlags faceMask,
                          uint32_t writeMask)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size =
      vn_sizeof_vkCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdSetStencilWriteMask(&cmd->cs, 0, commandBuffer, faceMask,
                                      writeMask);
}

void
vn_CmdSetStencilReference(VkCommandBuffer commandBuffer,
                          VkStencilFaceFlags faceMask,
                          uint32_t reference)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size =
      vn_sizeof_vkCmdSetStencilReference(commandBuffer, faceMask, reference);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdSetStencilReference(&cmd->cs, 0, commandBuffer, faceMask,
                                      reference);
}

void
vn_CmdBindDescriptorSets(VkCommandBuffer commandBuffer,
                         VkPipelineBindPoint pipelineBindPoint,
                         VkPipelineLayout layout,
                         uint32_t firstSet,
                         uint32_t descriptorSetCount,
                         const VkDescriptorSet *pDescriptorSets,
                         uint32_t dynamicOffsetCount,
                         const uint32_t *pDynamicOffsets)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdBindDescriptorSets(
      commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount,
      pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdBindDescriptorSets(&cmd->cs, 0, commandBuffer,
                                     pipelineBindPoint, layout, firstSet,
                                     descriptorSetCount, pDescriptorSets,
                                     dynamicOffsetCount, pDynamicOffsets);
}

void
vn_CmdBindIndexBuffer(VkCommandBuffer commandBuffer,
                      VkBuffer buffer,
                      VkDeviceSize offset,
                      VkIndexType indexType)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdBindIndexBuffer(commandBuffer, buffer, offset,
                                             indexType);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdBindIndexBuffer(&cmd->cs, 0, commandBuffer, buffer, offset,
                                  indexType);
}

void
vn_CmdBindVertexBuffers(VkCommandBuffer commandBuffer,
                        uint32_t firstBinding,
                        uint32_t bindingCount,
                        const VkBuffer *pBuffers,
                        const VkDeviceSize *pOffsets)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdBindVertexBuffers(
      commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdBindVertexBuffers(&cmd->cs, 0, commandBuffer, firstBinding,
                                    bindingCount, pBuffers, pOffsets);
}

void
vn_CmdDraw(VkCommandBuffer commandBuffer,
           uint32_t vertexCount,
           uint32_t instanceCount,
           uint32_t firstVertex,
           uint32_t firstInstance)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdDraw(commandBuffer, vertexCount, instanceCount,
                                  firstVertex, firstInstance);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdDraw(&cmd->cs, 0, commandBuffer, vertexCount, instanceCount,
                       firstVertex, firstInstance);
}

void
vn_CmdDrawIndexed(VkCommandBuffer commandBuffer,
                  uint32_t indexCount,
                  uint32_t instanceCount,
                  uint32_t firstIndex,
                  int32_t vertexOffset,
                  uint32_t firstInstance)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size =
      vn_sizeof_vkCmdDrawIndexed(commandBuffer, indexCount, instanceCount,
                                 firstIndex, vertexOffset, firstInstance);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdDrawIndexed(&cmd->cs, 0, commandBuffer, indexCount,
                              instanceCount, firstIndex, vertexOffset,
                              firstInstance);
}

void
vn_CmdDrawIndirect(VkCommandBuffer commandBuffer,
                   VkBuffer buffer,
                   VkDeviceSize offset,
                   uint32_t drawCount,
                   uint32_t stride)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdDrawIndirect(commandBuffer, buffer, offset,
                                          drawCount, stride);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdDrawIndirect(&cmd->cs, 0, commandBuffer, buffer, offset,
                               drawCount, stride);
}

void
vn_CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer,
                          VkBuffer buffer,
                          VkDeviceSize offset,
                          uint32_t drawCount,
                          uint32_t stride)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdDrawIndexedIndirect(commandBuffer, buffer,
                                                 offset, drawCount, stride);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdDrawIndexedIndirect(&cmd->cs, 0, commandBuffer, buffer,
                                      offset, drawCount, stride);
}

void
vn_CmdDrawIndirectCount(VkCommandBuffer commandBuffer,
                        VkBuffer buffer,
                        VkDeviceSize offset,
                        VkBuffer countBuffer,
                        VkDeviceSize countBufferOffset,
                        uint32_t maxDrawCount,
                        uint32_t stride)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdDrawIndirectCount(commandBuffer, buffer, offset,
                                               countBuffer, countBufferOffset,
                                               maxDrawCount, stride);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdDrawIndirectCount(&cmd->cs, 0, commandBuffer, buffer,
                                    offset, countBuffer, countBufferOffset,
                                    maxDrawCount, stride);
}

void
vn_CmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer,
                               VkBuffer buffer,
                               VkDeviceSize offset,
                               VkBuffer countBuffer,
                               VkDeviceSize countBufferOffset,
                               uint32_t maxDrawCount,
                               uint32_t stride)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdDrawIndexedIndirectCount(
      commandBuffer, buffer, offset, countBuffer, countBufferOffset,
      maxDrawCount, stride);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdDrawIndexedIndirectCount(
      &cmd->cs, 0, commandBuffer, buffer, offset, countBuffer,
      countBufferOffset, maxDrawCount, stride);
}

void
vn_CmdDispatch(VkCommandBuffer commandBuffer,
               uint32_t groupCountX,
               uint32_t groupCountY,
               uint32_t groupCountZ)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdDispatch(commandBuffer, groupCountX, groupCountY,
                                      groupCountZ);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdDispatch(&cmd->cs, 0, commandBuffer, groupCountX,
                           groupCountY, groupCountZ);
}

void
vn_CmdDispatchIndirect(VkCommandBuffer commandBuffer,
                       VkBuffer buffer,
                       VkDeviceSize offset)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdDispatchIndirect(commandBuffer, buffer, offset);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdDispatchIndirect(&cmd->cs, 0, commandBuffer, buffer,
                                   offset);
}

void
vn_CmdCopyBuffer(VkCommandBuffer commandBuffer,
                 VkBuffer srcBuffer,
                 VkBuffer dstBuffer,
                 uint32_t regionCount,
                 const VkBufferCopy *pRegions)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer,
                                        regionCount, pRegions);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdCopyBuffer(&cmd->cs, 0, commandBuffer, srcBuffer, dstBuffer,
                             regionCount, pRegions);
}

void
vn_CmdCopyImage(VkCommandBuffer commandBuffer,
                VkImage srcImage,
                VkImageLayout srcImageLayout,
                VkImage dstImage,
                VkImageLayout dstImageLayout,
                uint32_t regionCount,
                const VkImageCopy *pRegions)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdCopyImage(commandBuffer, srcImage,
                                       srcImageLayout, dstImage,
                                       dstImageLayout, regionCount, pRegions);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdCopyImage(&cmd->cs, 0, commandBuffer, srcImage,
                            srcImageLayout, dstImage, dstImageLayout,
                            regionCount, pRegions);
}

void
vn_CmdBlitImage(VkCommandBuffer commandBuffer,
                VkImage srcImage,
                VkImageLayout srcImageLayout,
                VkImage dstImage,
                VkImageLayout dstImageLayout,
                uint32_t regionCount,
                const VkImageBlit *pRegions,
                VkFilter filter)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdBlitImage(
      commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
      regionCount, pRegions, filter);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdBlitImage(&cmd->cs, 0, commandBuffer, srcImage,
                            srcImageLayout, dstImage, dstImageLayout,
                            regionCount, pRegions, filter);
}

void
vn_CmdCopyBufferToImage(VkCommandBuffer commandBuffer,
                        VkBuffer srcBuffer,
                        VkImage dstImage,
                        VkImageLayout dstImageLayout,
                        uint32_t regionCount,
                        const VkBufferImageCopy *pRegions)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size =
      vn_sizeof_vkCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage,
                                       dstImageLayout, regionCount, pRegions);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdCopyBufferToImage(&cmd->cs, 0, commandBuffer, srcBuffer,
                                    dstImage, dstImageLayout, regionCount,
                                    pRegions);
}

void
vn_CmdCopyImageToBuffer(VkCommandBuffer commandBuffer,
                        VkImage srcImage,
                        VkImageLayout srcImageLayout,
                        VkBuffer dstBuffer,
                        uint32_t regionCount,
                        const VkBufferImageCopy *pRegions)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   bool prime_blit = false;
   if (srcImageLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR &&
       VN_PRESENT_SRC_INTERNAL_LAYOUT != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
      srcImageLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      const struct vn_image *img = vn_image_from_handle(srcImage);
      prime_blit = img->is_wsi && img->prime_blit_buffer == dstBuffer;
      assert(prime_blit);
   }

   cmd_size = vn_sizeof_vkCmdCopyImageToBuffer(commandBuffer, srcImage,
                                               srcImageLayout, dstBuffer,
                                               regionCount, pRegions);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdCopyImageToBuffer(&cmd->cs, 0, commandBuffer, srcImage,
                                    srcImageLayout, dstBuffer, regionCount,
                                    pRegions);

   if (prime_blit) {
      /* release the prime blit buffer to the foreign queue family */
      const VkBufferMemoryBarrier buf_barrier = {
         .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
         .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
         .srcQueueFamilyIndex = cmd->queue_family_index,
         .dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT,
         .buffer = dstBuffer,
         .size = VK_WHOLE_SIZE,
      };
      vn_cmd_encode_memory_barriers(cmd, VK_PIPELINE_STAGE_TRANSFER_BIT,
                                    VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 1,
                                    &buf_barrier, 0, NULL);
   }
}

void
vn_CmdUpdateBuffer(VkCommandBuffer commandBuffer,
                   VkBuffer dstBuffer,
                   VkDeviceSize dstOffset,
                   VkDeviceSize dataSize,
                   const void *pData)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset,
                                          dataSize, pData);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdUpdateBuffer(&cmd->cs, 0, commandBuffer, dstBuffer,
                               dstOffset, dataSize, pData);
}

void
vn_CmdFillBuffer(VkCommandBuffer commandBuffer,
                 VkBuffer dstBuffer,
                 VkDeviceSize dstOffset,
                 VkDeviceSize size,
                 uint32_t data)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdFillBuffer(commandBuffer, dstBuffer, dstOffset,
                                        size, data);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdFillBuffer(&cmd->cs, 0, commandBuffer, dstBuffer, dstOffset,
                             size, data);
}

void
vn_CmdClearColorImage(VkCommandBuffer commandBuffer,
                      VkImage image,
                      VkImageLayout imageLayout,
                      const VkClearColorValue *pColor,
                      uint32_t rangeCount,
                      const VkImageSubresourceRange *pRanges)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdClearColorImage(
      commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdClearColorImage(&cmd->cs, 0, commandBuffer, image,
                                  imageLayout, pColor, rangeCount, pRanges);
}

void
vn_CmdClearDepthStencilImage(VkCommandBuffer commandBuffer,
                             VkImage image,
                             VkImageLayout imageLayout,
                             const VkClearDepthStencilValue *pDepthStencil,
                             uint32_t rangeCount,
                             const VkImageSubresourceRange *pRanges)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdClearDepthStencilImage(
      commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdClearDepthStencilImage(&cmd->cs, 0, commandBuffer, image,
                                         imageLayout, pDepthStencil,
                                         rangeCount, pRanges);
}

void
vn_CmdClearAttachments(VkCommandBuffer commandBuffer,
                       uint32_t attachmentCount,
                       const VkClearAttachment *pAttachments,
                       uint32_t rectCount,
                       const VkClearRect *pRects)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdClearAttachments(
      commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdClearAttachments(&cmd->cs, 0, commandBuffer,
                                   attachmentCount, pAttachments, rectCount,
                                   pRects);
}

void
vn_CmdResolveImage(VkCommandBuffer commandBuffer,
                   VkImage srcImage,
                   VkImageLayout srcImageLayout,
                   VkImage dstImage,
                   VkImageLayout dstImageLayout,
                   uint32_t regionCount,
                   const VkImageResolve *pRegions)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdResolveImage(
      commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
      regionCount, pRegions);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdResolveImage(&cmd->cs, 0, commandBuffer, srcImage,
                               srcImageLayout, dstImage, dstImageLayout,
                               regionCount, pRegions);
}

void
vn_CmdSetEvent(VkCommandBuffer commandBuffer,
               VkEvent event,
               VkPipelineStageFlags stageMask)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdSetEvent(commandBuffer, event, stageMask);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdSetEvent(&cmd->cs, 0, commandBuffer, event, stageMask);
}

void
vn_CmdResetEvent(VkCommandBuffer commandBuffer,
                 VkEvent event,
                 VkPipelineStageFlags stageMask)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdResetEvent(commandBuffer, event, stageMask);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdResetEvent(&cmd->cs, 0, commandBuffer, event, stageMask);
}

void
vn_CmdWaitEvents(VkCommandBuffer commandBuffer,
                 uint32_t eventCount,
                 const VkEvent *pEvents,
                 VkPipelineStageFlags srcStageMask,
                 VkPipelineStageFlags dstStageMask,
                 uint32_t memoryBarrierCount,
                 const VkMemoryBarrier *pMemoryBarriers,
                 uint32_t bufferMemoryBarrierCount,
                 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                 uint32_t imageMemoryBarrierCount,
                 const VkImageMemoryBarrier *pImageMemoryBarriers)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   uint32_t transfer_count;
   pImageMemoryBarriers = vn_cmd_wait_events_fix_image_memory_barriers(
      cmd, pImageMemoryBarriers, imageMemoryBarrierCount, &transfer_count);
   imageMemoryBarrierCount -= transfer_count;

   cmd_size = vn_sizeof_vkCmdWaitEvents(
      commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask,
      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
      pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdWaitEvents(&cmd->cs, 0, commandBuffer, eventCount, pEvents,
                             srcStageMask, dstStageMask, memoryBarrierCount,
                             pMemoryBarriers, bufferMemoryBarrierCount,
                             pBufferMemoryBarriers, imageMemoryBarrierCount,
                             pImageMemoryBarriers);

   if (transfer_count) {
      pImageMemoryBarriers += imageMemoryBarrierCount;
      vn_cmd_encode_memory_barriers(cmd, srcStageMask, dstStageMask, 0, NULL,
                                    transfer_count, pImageMemoryBarriers);
   }
}

void
vn_CmdPipelineBarrier(VkCommandBuffer commandBuffer,
                      VkPipelineStageFlags srcStageMask,
                      VkPipelineStageFlags dstStageMask,
                      VkDependencyFlags dependencyFlags,
                      uint32_t memoryBarrierCount,
                      const VkMemoryBarrier *pMemoryBarriers,
                      uint32_t bufferMemoryBarrierCount,
                      const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                      uint32_t imageMemoryBarrierCount,
                      const VkImageMemoryBarrier *pImageMemoryBarriers)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   pImageMemoryBarriers = vn_cmd_pipeline_barrier_fix_image_memory_barriers(
      cmd, pImageMemoryBarriers, imageMemoryBarrierCount);

   cmd_size = vn_sizeof_vkCmdPipelineBarrier(
      commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
      pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdPipelineBarrier(
      &cmd->cs, 0, commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
      pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
}

void
vn_CmdBeginQuery(VkCommandBuffer commandBuffer,
                 VkQueryPool queryPool,
                 uint32_t query,
                 VkQueryControlFlags flags)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size =
      vn_sizeof_vkCmdBeginQuery(commandBuffer, queryPool, query, flags);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdBeginQuery(&cmd->cs, 0, commandBuffer, queryPool, query,
                             flags);
}

void
vn_CmdEndQuery(VkCommandBuffer commandBuffer,
               VkQueryPool queryPool,
               uint32_t query)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdEndQuery(commandBuffer, queryPool, query);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdEndQuery(&cmd->cs, 0, commandBuffer, queryPool, query);
}

void
vn_CmdResetQueryPool(VkCommandBuffer commandBuffer,
                     VkQueryPool queryPool,
                     uint32_t firstQuery,
                     uint32_t queryCount)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdResetQueryPool(commandBuffer, queryPool,
                                            firstQuery, queryCount);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdResetQueryPool(&cmd->cs, 0, commandBuffer, queryPool,
                                 firstQuery, queryCount);
}

void
vn_CmdWriteTimestamp(VkCommandBuffer commandBuffer,
                     VkPipelineStageFlagBits pipelineStage,
                     VkQueryPool queryPool,
                     uint32_t query)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdWriteTimestamp(commandBuffer, pipelineStage,
                                            queryPool, query);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdWriteTimestamp(&cmd->cs, 0, commandBuffer, pipelineStage,
                                 queryPool, query);
}

void
vn_CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,
                           VkQueryPool queryPool,
                           uint32_t firstQuery,
                           uint32_t queryCount,
                           VkBuffer dstBuffer,
                           VkDeviceSize dstOffset,
                           VkDeviceSize stride,
                           VkQueryResultFlags flags)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdCopyQueryPoolResults(
      commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset,
      stride, flags);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdCopyQueryPoolResults(&cmd->cs, 0, commandBuffer, queryPool,
                                       firstQuery, queryCount, dstBuffer,
                                       dstOffset, stride, flags);
}

void
vn_CmdPushConstants(VkCommandBuffer commandBuffer,
                    VkPipelineLayout layout,
                    VkShaderStageFlags stageFlags,
                    uint32_t offset,
                    uint32_t size,
                    const void *pValues)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdPushConstants(commandBuffer, layout, stageFlags,
                                           offset, size, pValues);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdPushConstants(&cmd->cs, 0, commandBuffer, layout,
                                stageFlags, offset, size, pValues);
}

void
vn_CmdBeginRenderPass(VkCommandBuffer commandBuffer,
                      const VkRenderPassBeginInfo *pRenderPassBegin,
                      VkSubpassContents contents)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   vn_cmd_begin_render_pass(
      cmd, vn_render_pass_from_handle(pRenderPassBegin->renderPass),
      vn_framebuffer_from_handle(pRenderPassBegin->framebuffer),
      pRenderPassBegin);

   cmd_size = vn_sizeof_vkCmdBeginRenderPass(commandBuffer, pRenderPassBegin,
                                             contents);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdBeginRenderPass(&cmd->cs, 0, commandBuffer,
                                  pRenderPassBegin, contents);
}

void
vn_CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdNextSubpass(commandBuffer, contents);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdNextSubpass(&cmd->cs, 0, commandBuffer, contents);
}

void
vn_CmdEndRenderPass(VkCommandBuffer commandBuffer)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdEndRenderPass(commandBuffer);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdEndRenderPass(&cmd->cs, 0, commandBuffer);

   vn_cmd_end_render_pass(cmd);
}

void
vn_CmdBeginRenderPass2(VkCommandBuffer commandBuffer,
                       const VkRenderPassBeginInfo *pRenderPassBegin,
                       const VkSubpassBeginInfo *pSubpassBeginInfo)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   vn_cmd_begin_render_pass(
      cmd, vn_render_pass_from_handle(pRenderPassBegin->renderPass),
      vn_framebuffer_from_handle(pRenderPassBegin->framebuffer),
      pRenderPassBegin);

   cmd_size = vn_sizeof_vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin,
                                              pSubpassBeginInfo);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdBeginRenderPass2(&cmd->cs, 0, commandBuffer,
                                   pRenderPassBegin, pSubpassBeginInfo);
}

void
vn_CmdNextSubpass2(VkCommandBuffer commandBuffer,
                   const VkSubpassBeginInfo *pSubpassBeginInfo,
                   const VkSubpassEndInfo *pSubpassEndInfo)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdNextSubpass2(commandBuffer, pSubpassBeginInfo,
                                          pSubpassEndInfo);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdNextSubpass2(&cmd->cs, 0, commandBuffer, pSubpassBeginInfo,
                               pSubpassEndInfo);
}

void
vn_CmdEndRenderPass2(VkCommandBuffer commandBuffer,
                     const VkSubpassEndInfo *pSubpassEndInfo)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdEndRenderPass2(&cmd->cs, 0, commandBuffer, pSubpassEndInfo);

   vn_cmd_end_render_pass(cmd);
}

void
vn_CmdExecuteCommands(VkCommandBuffer commandBuffer,
                      uint32_t commandBufferCount,
                      const VkCommandBuffer *pCommandBuffers)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdExecuteCommands(
      commandBuffer, commandBufferCount, pCommandBuffers);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdExecuteCommands(&cmd->cs, 0, commandBuffer,
                                  commandBufferCount, pCommandBuffers);
}

void
vn_CmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdSetDeviceMask(commandBuffer, deviceMask);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdSetDeviceMask(&cmd->cs, 0, commandBuffer, deviceMask);
}

void
vn_CmdDispatchBase(VkCommandBuffer commandBuffer,
                   uint32_t baseGroupX,
                   uint32_t baseGroupY,
                   uint32_t baseGroupZ,
                   uint32_t groupCountX,
                   uint32_t groupCountY,
                   uint32_t groupCountZ)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdDispatchBase(commandBuffer, baseGroupX,
                                          baseGroupY, baseGroupZ, groupCountX,
                                          groupCountY, groupCountZ);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdDispatchBase(&cmd->cs, 0, commandBuffer, baseGroupX,
                               baseGroupY, baseGroupZ, groupCountX,
                               groupCountY, groupCountZ);
}

void
vn_CmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer,
                           VkQueryPool queryPool,
                           uint32_t query,
                           VkQueryControlFlags flags,
                           uint32_t index)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdBeginQueryIndexedEXT(commandBuffer, queryPool,
                                                  query, flags, index);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdBeginQueryIndexedEXT(&cmd->cs, 0, commandBuffer, queryPool,
                                       query, flags, index);
}

void
vn_CmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer,
                         VkQueryPool queryPool,
                         uint32_t query,
                         uint32_t index)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdEndQueryIndexedEXT(commandBuffer, queryPool,
                                                query, index);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdEndQueryIndexedEXT(&cmd->cs, 0, commandBuffer, queryPool,
                                     query, index);
}

void
vn_CmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer,
                                      uint32_t firstBinding,
                                      uint32_t bindingCount,
                                      const VkBuffer *pBuffers,
                                      const VkDeviceSize *pOffsets,
                                      const VkDeviceSize *pSizes)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdBindTransformFeedbackBuffersEXT(
      commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdBindTransformFeedbackBuffersEXT(&cmd->cs, 0, commandBuffer,
                                                  firstBinding, bindingCount,
                                                  pBuffers, pOffsets, pSizes);
}

void
vn_CmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer,
                                uint32_t firstCounterBuffer,
                                uint32_t counterBufferCount,
                                const VkBuffer *pCounterBuffers,
                                const VkDeviceSize *pCounterBufferOffsets)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdBeginTransformFeedbackEXT(
      commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers,
      pCounterBufferOffsets);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdBeginTransformFeedbackEXT(
      &cmd->cs, 0, commandBuffer, firstCounterBuffer, counterBufferCount,
      pCounterBuffers, pCounterBufferOffsets);
}

void
vn_CmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer,
                              uint32_t firstCounterBuffer,
                              uint32_t counterBufferCount,
                              const VkBuffer *pCounterBuffers,
                              const VkDeviceSize *pCounterBufferOffsets)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdEndTransformFeedbackEXT(
      commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers,
      pCounterBufferOffsets);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdEndTransformFeedbackEXT(
      &cmd->cs, 0, commandBuffer, firstCounterBuffer, counterBufferCount,
      pCounterBuffers, pCounterBufferOffsets);
}

void
vn_CmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer,
                               uint32_t instanceCount,
                               uint32_t firstInstance,
                               VkBuffer counterBuffer,
                               VkDeviceSize counterBufferOffset,
                               uint32_t counterOffset,
                               uint32_t vertexStride)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   cmd_size = vn_sizeof_vkCmdDrawIndirectByteCountEXT(
      commandBuffer, instanceCount, firstInstance, counterBuffer,
      counterBufferOffset, counterOffset, vertexStride);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdDrawIndirectByteCountEXT(
      &cmd->cs, 0, commandBuffer, instanceCount, firstInstance, counterBuffer,
      counterBufferOffset, counterOffset, vertexStride);
}