Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
PojavLauncherTeam
GitHub Repository: PojavLauncherTeam/mesa
Path: blob/21.2-virgl/src/virtio/venus-protocol/vn_protocol_driver_pipeline.h
4560 views
1
/* This file is generated by venus-protocol. See vn_protocol_driver.h. */
2
3
/*
4
* Copyright 2020 Google LLC
5
* SPDX-License-Identifier: MIT
6
*/
7
8
#ifndef VN_PROTOCOL_DRIVER_PIPELINE_H
9
#define VN_PROTOCOL_DRIVER_PIPELINE_H
10
11
#include "vn_device.h"
12
#include "vn_protocol_driver_structs.h"
13
14
/* struct VkSpecializationMapEntry */
15
16
static inline size_t
17
vn_sizeof_VkSpecializationMapEntry(const VkSpecializationMapEntry *val)
18
{
19
size_t size = 0;
20
size += vn_sizeof_uint32_t(&val->constantID);
21
size += vn_sizeof_uint32_t(&val->offset);
22
size += vn_sizeof_size_t(&val->size);
23
return size;
24
}
25
26
static inline void
27
vn_encode_VkSpecializationMapEntry(struct vn_cs_encoder *enc, const VkSpecializationMapEntry *val)
28
{
29
vn_encode_uint32_t(enc, &val->constantID);
30
vn_encode_uint32_t(enc, &val->offset);
31
vn_encode_size_t(enc, &val->size);
32
}
33
34
/* struct VkSpecializationInfo */
35
36
static inline size_t
37
vn_sizeof_VkSpecializationInfo(const VkSpecializationInfo *val)
38
{
39
size_t size = 0;
40
size += vn_sizeof_uint32_t(&val->mapEntryCount);
41
if (val->pMapEntries) {
42
size += vn_sizeof_array_size(val->mapEntryCount);
43
for (uint32_t i = 0; i < val->mapEntryCount; i++)
44
size += vn_sizeof_VkSpecializationMapEntry(&val->pMapEntries[i]);
45
} else {
46
size += vn_sizeof_array_size(0);
47
}
48
size += vn_sizeof_size_t(&val->dataSize);
49
if (val->pData) {
50
size += vn_sizeof_array_size(val->dataSize);
51
size += vn_sizeof_blob_array(val->pData, val->dataSize);
52
} else {
53
size += vn_sizeof_array_size(0);
54
}
55
return size;
56
}
57
58
static inline void
59
vn_encode_VkSpecializationInfo(struct vn_cs_encoder *enc, const VkSpecializationInfo *val)
60
{
61
vn_encode_uint32_t(enc, &val->mapEntryCount);
62
if (val->pMapEntries) {
63
vn_encode_array_size(enc, val->mapEntryCount);
64
for (uint32_t i = 0; i < val->mapEntryCount; i++)
65
vn_encode_VkSpecializationMapEntry(enc, &val->pMapEntries[i]);
66
} else {
67
vn_encode_array_size(enc, 0);
68
}
69
vn_encode_size_t(enc, &val->dataSize);
70
if (val->pData) {
71
vn_encode_array_size(enc, val->dataSize);
72
vn_encode_blob_array(enc, val->pData, val->dataSize);
73
} else {
74
vn_encode_array_size(enc, 0);
75
}
76
}
77
78
/* struct VkPipelineShaderStageCreateInfo chain */
79
80
static inline size_t
81
vn_sizeof_VkPipelineShaderStageCreateInfo_pnext(const void *val)
82
{
83
/* no known/supported struct */
84
return vn_sizeof_simple_pointer(NULL);
85
}
86
87
static inline size_t
88
vn_sizeof_VkPipelineShaderStageCreateInfo_self(const VkPipelineShaderStageCreateInfo *val)
89
{
90
size_t size = 0;
91
/* skip val->{sType,pNext} */
92
size += vn_sizeof_VkFlags(&val->flags);
93
size += vn_sizeof_VkShaderStageFlagBits(&val->stage);
94
size += vn_sizeof_VkShaderModule(&val->module);
95
if (val->pName) {
96
const size_t string_size = strlen(val->pName) + 1;
97
size += vn_sizeof_array_size(string_size);
98
size += vn_sizeof_blob_array(val->pName, string_size);
99
} else {
100
size += vn_sizeof_array_size(0);
101
}
102
size += vn_sizeof_simple_pointer(val->pSpecializationInfo);
103
if (val->pSpecializationInfo)
104
size += vn_sizeof_VkSpecializationInfo(val->pSpecializationInfo);
105
return size;
106
}
107
108
static inline size_t
109
vn_sizeof_VkPipelineShaderStageCreateInfo(const VkPipelineShaderStageCreateInfo *val)
110
{
111
size_t size = 0;
112
113
size += vn_sizeof_VkStructureType(&val->sType);
114
size += vn_sizeof_VkPipelineShaderStageCreateInfo_pnext(val->pNext);
115
size += vn_sizeof_VkPipelineShaderStageCreateInfo_self(val);
116
117
return size;
118
}
119
120
static inline void
121
vn_encode_VkPipelineShaderStageCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
122
{
123
/* no known/supported struct */
124
vn_encode_simple_pointer(enc, NULL);
125
}
126
127
static inline void
128
vn_encode_VkPipelineShaderStageCreateInfo_self(struct vn_cs_encoder *enc, const VkPipelineShaderStageCreateInfo *val)
129
{
130
/* skip val->{sType,pNext} */
131
vn_encode_VkFlags(enc, &val->flags);
132
vn_encode_VkShaderStageFlagBits(enc, &val->stage);
133
vn_encode_VkShaderModule(enc, &val->module);
134
if (val->pName) {
135
const size_t string_size = strlen(val->pName) + 1;
136
vn_encode_array_size(enc, string_size);
137
vn_encode_blob_array(enc, val->pName, string_size);
138
} else {
139
vn_encode_array_size(enc, 0);
140
}
141
if (vn_encode_simple_pointer(enc, val->pSpecializationInfo))
142
vn_encode_VkSpecializationInfo(enc, val->pSpecializationInfo);
143
}
144
145
static inline void
146
vn_encode_VkPipelineShaderStageCreateInfo(struct vn_cs_encoder *enc, const VkPipelineShaderStageCreateInfo *val)
147
{
148
assert(val->sType == VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO);
149
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO });
150
vn_encode_VkPipelineShaderStageCreateInfo_pnext(enc, val->pNext);
151
vn_encode_VkPipelineShaderStageCreateInfo_self(enc, val);
152
}
153
154
/* struct VkVertexInputBindingDescription */
155
156
static inline size_t
157
vn_sizeof_VkVertexInputBindingDescription(const VkVertexInputBindingDescription *val)
158
{
159
size_t size = 0;
160
size += vn_sizeof_uint32_t(&val->binding);
161
size += vn_sizeof_uint32_t(&val->stride);
162
size += vn_sizeof_VkVertexInputRate(&val->inputRate);
163
return size;
164
}
165
166
static inline void
167
vn_encode_VkVertexInputBindingDescription(struct vn_cs_encoder *enc, const VkVertexInputBindingDescription *val)
168
{
169
vn_encode_uint32_t(enc, &val->binding);
170
vn_encode_uint32_t(enc, &val->stride);
171
vn_encode_VkVertexInputRate(enc, &val->inputRate);
172
}
173
174
/* struct VkVertexInputAttributeDescription */
175
176
static inline size_t
177
vn_sizeof_VkVertexInputAttributeDescription(const VkVertexInputAttributeDescription *val)
178
{
179
size_t size = 0;
180
size += vn_sizeof_uint32_t(&val->location);
181
size += vn_sizeof_uint32_t(&val->binding);
182
size += vn_sizeof_VkFormat(&val->format);
183
size += vn_sizeof_uint32_t(&val->offset);
184
return size;
185
}
186
187
static inline void
188
vn_encode_VkVertexInputAttributeDescription(struct vn_cs_encoder *enc, const VkVertexInputAttributeDescription *val)
189
{
190
vn_encode_uint32_t(enc, &val->location);
191
vn_encode_uint32_t(enc, &val->binding);
192
vn_encode_VkFormat(enc, &val->format);
193
vn_encode_uint32_t(enc, &val->offset);
194
}
195
196
/* struct VkPipelineVertexInputStateCreateInfo chain */
197
198
static inline size_t
199
vn_sizeof_VkPipelineVertexInputStateCreateInfo_pnext(const void *val)
200
{
201
/* no known/supported struct */
202
return vn_sizeof_simple_pointer(NULL);
203
}
204
205
static inline size_t
206
vn_sizeof_VkPipelineVertexInputStateCreateInfo_self(const VkPipelineVertexInputStateCreateInfo *val)
207
{
208
size_t size = 0;
209
/* skip val->{sType,pNext} */
210
size += vn_sizeof_VkFlags(&val->flags);
211
size += vn_sizeof_uint32_t(&val->vertexBindingDescriptionCount);
212
if (val->pVertexBindingDescriptions) {
213
size += vn_sizeof_array_size(val->vertexBindingDescriptionCount);
214
for (uint32_t i = 0; i < val->vertexBindingDescriptionCount; i++)
215
size += vn_sizeof_VkVertexInputBindingDescription(&val->pVertexBindingDescriptions[i]);
216
} else {
217
size += vn_sizeof_array_size(0);
218
}
219
size += vn_sizeof_uint32_t(&val->vertexAttributeDescriptionCount);
220
if (val->pVertexAttributeDescriptions) {
221
size += vn_sizeof_array_size(val->vertexAttributeDescriptionCount);
222
for (uint32_t i = 0; i < val->vertexAttributeDescriptionCount; i++)
223
size += vn_sizeof_VkVertexInputAttributeDescription(&val->pVertexAttributeDescriptions[i]);
224
} else {
225
size += vn_sizeof_array_size(0);
226
}
227
return size;
228
}
229
230
static inline size_t
231
vn_sizeof_VkPipelineVertexInputStateCreateInfo(const VkPipelineVertexInputStateCreateInfo *val)
232
{
233
size_t size = 0;
234
235
size += vn_sizeof_VkStructureType(&val->sType);
236
size += vn_sizeof_VkPipelineVertexInputStateCreateInfo_pnext(val->pNext);
237
size += vn_sizeof_VkPipelineVertexInputStateCreateInfo_self(val);
238
239
return size;
240
}
241
242
static inline void
243
vn_encode_VkPipelineVertexInputStateCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
244
{
245
/* no known/supported struct */
246
vn_encode_simple_pointer(enc, NULL);
247
}
248
249
static inline void
250
vn_encode_VkPipelineVertexInputStateCreateInfo_self(struct vn_cs_encoder *enc, const VkPipelineVertexInputStateCreateInfo *val)
251
{
252
/* skip val->{sType,pNext} */
253
vn_encode_VkFlags(enc, &val->flags);
254
vn_encode_uint32_t(enc, &val->vertexBindingDescriptionCount);
255
if (val->pVertexBindingDescriptions) {
256
vn_encode_array_size(enc, val->vertexBindingDescriptionCount);
257
for (uint32_t i = 0; i < val->vertexBindingDescriptionCount; i++)
258
vn_encode_VkVertexInputBindingDescription(enc, &val->pVertexBindingDescriptions[i]);
259
} else {
260
vn_encode_array_size(enc, 0);
261
}
262
vn_encode_uint32_t(enc, &val->vertexAttributeDescriptionCount);
263
if (val->pVertexAttributeDescriptions) {
264
vn_encode_array_size(enc, val->vertexAttributeDescriptionCount);
265
for (uint32_t i = 0; i < val->vertexAttributeDescriptionCount; i++)
266
vn_encode_VkVertexInputAttributeDescription(enc, &val->pVertexAttributeDescriptions[i]);
267
} else {
268
vn_encode_array_size(enc, 0);
269
}
270
}
271
272
static inline void
273
vn_encode_VkPipelineVertexInputStateCreateInfo(struct vn_cs_encoder *enc, const VkPipelineVertexInputStateCreateInfo *val)
274
{
275
assert(val->sType == VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO);
276
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO });
277
vn_encode_VkPipelineVertexInputStateCreateInfo_pnext(enc, val->pNext);
278
vn_encode_VkPipelineVertexInputStateCreateInfo_self(enc, val);
279
}
280
281
/* struct VkPipelineInputAssemblyStateCreateInfo chain */
282
283
static inline size_t
284
vn_sizeof_VkPipelineInputAssemblyStateCreateInfo_pnext(const void *val)
285
{
286
/* no known/supported struct */
287
return vn_sizeof_simple_pointer(NULL);
288
}
289
290
static inline size_t
291
vn_sizeof_VkPipelineInputAssemblyStateCreateInfo_self(const VkPipelineInputAssemblyStateCreateInfo *val)
292
{
293
size_t size = 0;
294
/* skip val->{sType,pNext} */
295
size += vn_sizeof_VkFlags(&val->flags);
296
size += vn_sizeof_VkPrimitiveTopology(&val->topology);
297
size += vn_sizeof_VkBool32(&val->primitiveRestartEnable);
298
return size;
299
}
300
301
static inline size_t
302
vn_sizeof_VkPipelineInputAssemblyStateCreateInfo(const VkPipelineInputAssemblyStateCreateInfo *val)
303
{
304
size_t size = 0;
305
306
size += vn_sizeof_VkStructureType(&val->sType);
307
size += vn_sizeof_VkPipelineInputAssemblyStateCreateInfo_pnext(val->pNext);
308
size += vn_sizeof_VkPipelineInputAssemblyStateCreateInfo_self(val);
309
310
return size;
311
}
312
313
static inline void
314
vn_encode_VkPipelineInputAssemblyStateCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
315
{
316
/* no known/supported struct */
317
vn_encode_simple_pointer(enc, NULL);
318
}
319
320
static inline void
321
vn_encode_VkPipelineInputAssemblyStateCreateInfo_self(struct vn_cs_encoder *enc, const VkPipelineInputAssemblyStateCreateInfo *val)
322
{
323
/* skip val->{sType,pNext} */
324
vn_encode_VkFlags(enc, &val->flags);
325
vn_encode_VkPrimitiveTopology(enc, &val->topology);
326
vn_encode_VkBool32(enc, &val->primitiveRestartEnable);
327
}
328
329
static inline void
330
vn_encode_VkPipelineInputAssemblyStateCreateInfo(struct vn_cs_encoder *enc, const VkPipelineInputAssemblyStateCreateInfo *val)
331
{
332
assert(val->sType == VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO);
333
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO });
334
vn_encode_VkPipelineInputAssemblyStateCreateInfo_pnext(enc, val->pNext);
335
vn_encode_VkPipelineInputAssemblyStateCreateInfo_self(enc, val);
336
}
337
338
/* struct VkPipelineTessellationDomainOriginStateCreateInfo chain */
339
340
static inline size_t
341
vn_sizeof_VkPipelineTessellationDomainOriginStateCreateInfo_pnext(const void *val)
342
{
343
/* no known/supported struct */
344
return vn_sizeof_simple_pointer(NULL);
345
}
346
347
static inline size_t
348
vn_sizeof_VkPipelineTessellationDomainOriginStateCreateInfo_self(const VkPipelineTessellationDomainOriginStateCreateInfo *val)
349
{
350
size_t size = 0;
351
/* skip val->{sType,pNext} */
352
size += vn_sizeof_VkTessellationDomainOrigin(&val->domainOrigin);
353
return size;
354
}
355
356
static inline size_t
357
vn_sizeof_VkPipelineTessellationDomainOriginStateCreateInfo(const VkPipelineTessellationDomainOriginStateCreateInfo *val)
358
{
359
size_t size = 0;
360
361
size += vn_sizeof_VkStructureType(&val->sType);
362
size += vn_sizeof_VkPipelineTessellationDomainOriginStateCreateInfo_pnext(val->pNext);
363
size += vn_sizeof_VkPipelineTessellationDomainOriginStateCreateInfo_self(val);
364
365
return size;
366
}
367
368
static inline void
369
vn_encode_VkPipelineTessellationDomainOriginStateCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
370
{
371
/* no known/supported struct */
372
vn_encode_simple_pointer(enc, NULL);
373
}
374
375
static inline void
376
vn_encode_VkPipelineTessellationDomainOriginStateCreateInfo_self(struct vn_cs_encoder *enc, const VkPipelineTessellationDomainOriginStateCreateInfo *val)
377
{
378
/* skip val->{sType,pNext} */
379
vn_encode_VkTessellationDomainOrigin(enc, &val->domainOrigin);
380
}
381
382
static inline void
383
vn_encode_VkPipelineTessellationDomainOriginStateCreateInfo(struct vn_cs_encoder *enc, const VkPipelineTessellationDomainOriginStateCreateInfo *val)
384
{
385
assert(val->sType == VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO);
386
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO });
387
vn_encode_VkPipelineTessellationDomainOriginStateCreateInfo_pnext(enc, val->pNext);
388
vn_encode_VkPipelineTessellationDomainOriginStateCreateInfo_self(enc, val);
389
}
390
391
/* struct VkPipelineTessellationStateCreateInfo chain */
392
393
static inline size_t
394
vn_sizeof_VkPipelineTessellationStateCreateInfo_pnext(const void *val)
395
{
396
const VkBaseInStructure *pnext = val;
397
size_t size = 0;
398
399
while (pnext) {
400
switch ((int32_t)pnext->sType) {
401
case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO:
402
size += vn_sizeof_simple_pointer(pnext);
403
size += vn_sizeof_VkStructureType(&pnext->sType);
404
size += vn_sizeof_VkPipelineTessellationStateCreateInfo_pnext(pnext->pNext);
405
size += vn_sizeof_VkPipelineTessellationDomainOriginStateCreateInfo_self((const VkPipelineTessellationDomainOriginStateCreateInfo *)pnext);
406
return size;
407
default:
408
/* ignore unknown/unsupported struct */
409
break;
410
}
411
pnext = pnext->pNext;
412
}
413
414
return vn_sizeof_simple_pointer(NULL);
415
}
416
417
static inline size_t
418
vn_sizeof_VkPipelineTessellationStateCreateInfo_self(const VkPipelineTessellationStateCreateInfo *val)
419
{
420
size_t size = 0;
421
/* skip val->{sType,pNext} */
422
size += vn_sizeof_VkFlags(&val->flags);
423
size += vn_sizeof_uint32_t(&val->patchControlPoints);
424
return size;
425
}
426
427
static inline size_t
428
vn_sizeof_VkPipelineTessellationStateCreateInfo(const VkPipelineTessellationStateCreateInfo *val)
429
{
430
size_t size = 0;
431
432
size += vn_sizeof_VkStructureType(&val->sType);
433
size += vn_sizeof_VkPipelineTessellationStateCreateInfo_pnext(val->pNext);
434
size += vn_sizeof_VkPipelineTessellationStateCreateInfo_self(val);
435
436
return size;
437
}
438
439
static inline void
440
vn_encode_VkPipelineTessellationStateCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
441
{
442
const VkBaseInStructure *pnext = val;
443
444
while (pnext) {
445
switch ((int32_t)pnext->sType) {
446
case VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO:
447
vn_encode_simple_pointer(enc, pnext);
448
vn_encode_VkStructureType(enc, &pnext->sType);
449
vn_encode_VkPipelineTessellationStateCreateInfo_pnext(enc, pnext->pNext);
450
vn_encode_VkPipelineTessellationDomainOriginStateCreateInfo_self(enc, (const VkPipelineTessellationDomainOriginStateCreateInfo *)pnext);
451
return;
452
default:
453
/* ignore unknown/unsupported struct */
454
break;
455
}
456
pnext = pnext->pNext;
457
}
458
459
vn_encode_simple_pointer(enc, NULL);
460
}
461
462
static inline void
463
vn_encode_VkPipelineTessellationStateCreateInfo_self(struct vn_cs_encoder *enc, const VkPipelineTessellationStateCreateInfo *val)
464
{
465
/* skip val->{sType,pNext} */
466
vn_encode_VkFlags(enc, &val->flags);
467
vn_encode_uint32_t(enc, &val->patchControlPoints);
468
}
469
470
static inline void
471
vn_encode_VkPipelineTessellationStateCreateInfo(struct vn_cs_encoder *enc, const VkPipelineTessellationStateCreateInfo *val)
472
{
473
assert(val->sType == VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO);
474
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO });
475
vn_encode_VkPipelineTessellationStateCreateInfo_pnext(enc, val->pNext);
476
vn_encode_VkPipelineTessellationStateCreateInfo_self(enc, val);
477
}
478
479
/* struct VkPipelineViewportStateCreateInfo chain */
480
481
static inline size_t
482
vn_sizeof_VkPipelineViewportStateCreateInfo_pnext(const void *val)
483
{
484
/* no known/supported struct */
485
return vn_sizeof_simple_pointer(NULL);
486
}
487
488
static inline size_t
489
vn_sizeof_VkPipelineViewportStateCreateInfo_self(const VkPipelineViewportStateCreateInfo *val)
490
{
491
size_t size = 0;
492
/* skip val->{sType,pNext} */
493
size += vn_sizeof_VkFlags(&val->flags);
494
size += vn_sizeof_uint32_t(&val->viewportCount);
495
if (val->pViewports) {
496
size += vn_sizeof_array_size(val->viewportCount);
497
for (uint32_t i = 0; i < val->viewportCount; i++)
498
size += vn_sizeof_VkViewport(&val->pViewports[i]);
499
} else {
500
size += vn_sizeof_array_size(0);
501
}
502
size += vn_sizeof_uint32_t(&val->scissorCount);
503
if (val->pScissors) {
504
size += vn_sizeof_array_size(val->scissorCount);
505
for (uint32_t i = 0; i < val->scissorCount; i++)
506
size += vn_sizeof_VkRect2D(&val->pScissors[i]);
507
} else {
508
size += vn_sizeof_array_size(0);
509
}
510
return size;
511
}
512
513
static inline size_t
514
vn_sizeof_VkPipelineViewportStateCreateInfo(const VkPipelineViewportStateCreateInfo *val)
515
{
516
size_t size = 0;
517
518
size += vn_sizeof_VkStructureType(&val->sType);
519
size += vn_sizeof_VkPipelineViewportStateCreateInfo_pnext(val->pNext);
520
size += vn_sizeof_VkPipelineViewportStateCreateInfo_self(val);
521
522
return size;
523
}
524
525
static inline void
526
vn_encode_VkPipelineViewportStateCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
527
{
528
/* no known/supported struct */
529
vn_encode_simple_pointer(enc, NULL);
530
}
531
532
static inline void
533
vn_encode_VkPipelineViewportStateCreateInfo_self(struct vn_cs_encoder *enc, const VkPipelineViewportStateCreateInfo *val)
534
{
535
/* skip val->{sType,pNext} */
536
vn_encode_VkFlags(enc, &val->flags);
537
vn_encode_uint32_t(enc, &val->viewportCount);
538
if (val->pViewports) {
539
vn_encode_array_size(enc, val->viewportCount);
540
for (uint32_t i = 0; i < val->viewportCount; i++)
541
vn_encode_VkViewport(enc, &val->pViewports[i]);
542
} else {
543
vn_encode_array_size(enc, 0);
544
}
545
vn_encode_uint32_t(enc, &val->scissorCount);
546
if (val->pScissors) {
547
vn_encode_array_size(enc, val->scissorCount);
548
for (uint32_t i = 0; i < val->scissorCount; i++)
549
vn_encode_VkRect2D(enc, &val->pScissors[i]);
550
} else {
551
vn_encode_array_size(enc, 0);
552
}
553
}
554
555
static inline void
556
vn_encode_VkPipelineViewportStateCreateInfo(struct vn_cs_encoder *enc, const VkPipelineViewportStateCreateInfo *val)
557
{
558
assert(val->sType == VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO);
559
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO });
560
vn_encode_VkPipelineViewportStateCreateInfo_pnext(enc, val->pNext);
561
vn_encode_VkPipelineViewportStateCreateInfo_self(enc, val);
562
}
563
564
/* struct VkPipelineRasterizationStateStreamCreateInfoEXT chain */
565
566
static inline size_t
567
vn_sizeof_VkPipelineRasterizationStateStreamCreateInfoEXT_pnext(const void *val)
568
{
569
/* no known/supported struct */
570
return vn_sizeof_simple_pointer(NULL);
571
}
572
573
static inline size_t
574
vn_sizeof_VkPipelineRasterizationStateStreamCreateInfoEXT_self(const VkPipelineRasterizationStateStreamCreateInfoEXT *val)
575
{
576
size_t size = 0;
577
/* skip val->{sType,pNext} */
578
size += vn_sizeof_VkFlags(&val->flags);
579
size += vn_sizeof_uint32_t(&val->rasterizationStream);
580
return size;
581
}
582
583
static inline size_t
584
vn_sizeof_VkPipelineRasterizationStateStreamCreateInfoEXT(const VkPipelineRasterizationStateStreamCreateInfoEXT *val)
585
{
586
size_t size = 0;
587
588
size += vn_sizeof_VkStructureType(&val->sType);
589
size += vn_sizeof_VkPipelineRasterizationStateStreamCreateInfoEXT_pnext(val->pNext);
590
size += vn_sizeof_VkPipelineRasterizationStateStreamCreateInfoEXT_self(val);
591
592
return size;
593
}
594
595
static inline void
596
vn_encode_VkPipelineRasterizationStateStreamCreateInfoEXT_pnext(struct vn_cs_encoder *enc, const void *val)
597
{
598
/* no known/supported struct */
599
vn_encode_simple_pointer(enc, NULL);
600
}
601
602
static inline void
603
vn_encode_VkPipelineRasterizationStateStreamCreateInfoEXT_self(struct vn_cs_encoder *enc, const VkPipelineRasterizationStateStreamCreateInfoEXT *val)
604
{
605
/* skip val->{sType,pNext} */
606
vn_encode_VkFlags(enc, &val->flags);
607
vn_encode_uint32_t(enc, &val->rasterizationStream);
608
}
609
610
static inline void
611
vn_encode_VkPipelineRasterizationStateStreamCreateInfoEXT(struct vn_cs_encoder *enc, const VkPipelineRasterizationStateStreamCreateInfoEXT *val)
612
{
613
assert(val->sType == VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT);
614
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT });
615
vn_encode_VkPipelineRasterizationStateStreamCreateInfoEXT_pnext(enc, val->pNext);
616
vn_encode_VkPipelineRasterizationStateStreamCreateInfoEXT_self(enc, val);
617
}
618
619
/* struct VkPipelineRasterizationStateCreateInfo chain */
620
621
static inline size_t
622
vn_sizeof_VkPipelineRasterizationStateCreateInfo_pnext(const void *val)
623
{
624
const VkBaseInStructure *pnext = val;
625
size_t size = 0;
626
627
while (pnext) {
628
switch ((int32_t)pnext->sType) {
629
case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT:
630
size += vn_sizeof_simple_pointer(pnext);
631
size += vn_sizeof_VkStructureType(&pnext->sType);
632
size += vn_sizeof_VkPipelineRasterizationStateCreateInfo_pnext(pnext->pNext);
633
size += vn_sizeof_VkPipelineRasterizationStateStreamCreateInfoEXT_self((const VkPipelineRasterizationStateStreamCreateInfoEXT *)pnext);
634
return size;
635
default:
636
/* ignore unknown/unsupported struct */
637
break;
638
}
639
pnext = pnext->pNext;
640
}
641
642
return vn_sizeof_simple_pointer(NULL);
643
}
644
645
static inline size_t
646
vn_sizeof_VkPipelineRasterizationStateCreateInfo_self(const VkPipelineRasterizationStateCreateInfo *val)
647
{
648
size_t size = 0;
649
/* skip val->{sType,pNext} */
650
size += vn_sizeof_VkFlags(&val->flags);
651
size += vn_sizeof_VkBool32(&val->depthClampEnable);
652
size += vn_sizeof_VkBool32(&val->rasterizerDiscardEnable);
653
size += vn_sizeof_VkPolygonMode(&val->polygonMode);
654
size += vn_sizeof_VkFlags(&val->cullMode);
655
size += vn_sizeof_VkFrontFace(&val->frontFace);
656
size += vn_sizeof_VkBool32(&val->depthBiasEnable);
657
size += vn_sizeof_float(&val->depthBiasConstantFactor);
658
size += vn_sizeof_float(&val->depthBiasClamp);
659
size += vn_sizeof_float(&val->depthBiasSlopeFactor);
660
size += vn_sizeof_float(&val->lineWidth);
661
return size;
662
}
663
664
static inline size_t
665
vn_sizeof_VkPipelineRasterizationStateCreateInfo(const VkPipelineRasterizationStateCreateInfo *val)
666
{
667
size_t size = 0;
668
669
size += vn_sizeof_VkStructureType(&val->sType);
670
size += vn_sizeof_VkPipelineRasterizationStateCreateInfo_pnext(val->pNext);
671
size += vn_sizeof_VkPipelineRasterizationStateCreateInfo_self(val);
672
673
return size;
674
}
675
676
static inline void
677
vn_encode_VkPipelineRasterizationStateCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
678
{
679
const VkBaseInStructure *pnext = val;
680
681
while (pnext) {
682
switch ((int32_t)pnext->sType) {
683
case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT:
684
vn_encode_simple_pointer(enc, pnext);
685
vn_encode_VkStructureType(enc, &pnext->sType);
686
vn_encode_VkPipelineRasterizationStateCreateInfo_pnext(enc, pnext->pNext);
687
vn_encode_VkPipelineRasterizationStateStreamCreateInfoEXT_self(enc, (const VkPipelineRasterizationStateStreamCreateInfoEXT *)pnext);
688
return;
689
default:
690
/* ignore unknown/unsupported struct */
691
break;
692
}
693
pnext = pnext->pNext;
694
}
695
696
vn_encode_simple_pointer(enc, NULL);
697
}
698
699
static inline void
700
vn_encode_VkPipelineRasterizationStateCreateInfo_self(struct vn_cs_encoder *enc, const VkPipelineRasterizationStateCreateInfo *val)
701
{
702
/* skip val->{sType,pNext} */
703
vn_encode_VkFlags(enc, &val->flags);
704
vn_encode_VkBool32(enc, &val->depthClampEnable);
705
vn_encode_VkBool32(enc, &val->rasterizerDiscardEnable);
706
vn_encode_VkPolygonMode(enc, &val->polygonMode);
707
vn_encode_VkFlags(enc, &val->cullMode);
708
vn_encode_VkFrontFace(enc, &val->frontFace);
709
vn_encode_VkBool32(enc, &val->depthBiasEnable);
710
vn_encode_float(enc, &val->depthBiasConstantFactor);
711
vn_encode_float(enc, &val->depthBiasClamp);
712
vn_encode_float(enc, &val->depthBiasSlopeFactor);
713
vn_encode_float(enc, &val->lineWidth);
714
}
715
716
static inline void
717
vn_encode_VkPipelineRasterizationStateCreateInfo(struct vn_cs_encoder *enc, const VkPipelineRasterizationStateCreateInfo *val)
718
{
719
assert(val->sType == VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO);
720
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO });
721
vn_encode_VkPipelineRasterizationStateCreateInfo_pnext(enc, val->pNext);
722
vn_encode_VkPipelineRasterizationStateCreateInfo_self(enc, val);
723
}
724
725
/* struct VkPipelineMultisampleStateCreateInfo chain */
726
727
static inline size_t
728
vn_sizeof_VkPipelineMultisampleStateCreateInfo_pnext(const void *val)
729
{
730
/* no known/supported struct */
731
return vn_sizeof_simple_pointer(NULL);
732
}
733
734
static inline size_t
735
vn_sizeof_VkPipelineMultisampleStateCreateInfo_self(const VkPipelineMultisampleStateCreateInfo *val)
736
{
737
size_t size = 0;
738
/* skip val->{sType,pNext} */
739
size += vn_sizeof_VkFlags(&val->flags);
740
size += vn_sizeof_VkSampleCountFlagBits(&val->rasterizationSamples);
741
size += vn_sizeof_VkBool32(&val->sampleShadingEnable);
742
size += vn_sizeof_float(&val->minSampleShading);
743
if (val->pSampleMask) {
744
size += vn_sizeof_array_size((val->rasterizationSamples + 31) / 32);
745
size += vn_sizeof_VkSampleMask_array(val->pSampleMask, (val->rasterizationSamples + 31) / 32);
746
} else {
747
size += vn_sizeof_array_size(0);
748
}
749
size += vn_sizeof_VkBool32(&val->alphaToCoverageEnable);
750
size += vn_sizeof_VkBool32(&val->alphaToOneEnable);
751
return size;
752
}
753
754
static inline size_t
755
vn_sizeof_VkPipelineMultisampleStateCreateInfo(const VkPipelineMultisampleStateCreateInfo *val)
756
{
757
size_t size = 0;
758
759
size += vn_sizeof_VkStructureType(&val->sType);
760
size += vn_sizeof_VkPipelineMultisampleStateCreateInfo_pnext(val->pNext);
761
size += vn_sizeof_VkPipelineMultisampleStateCreateInfo_self(val);
762
763
return size;
764
}
765
766
static inline void
767
vn_encode_VkPipelineMultisampleStateCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
768
{
769
/* no known/supported struct */
770
vn_encode_simple_pointer(enc, NULL);
771
}
772
773
static inline void
774
vn_encode_VkPipelineMultisampleStateCreateInfo_self(struct vn_cs_encoder *enc, const VkPipelineMultisampleStateCreateInfo *val)
775
{
776
/* skip val->{sType,pNext} */
777
vn_encode_VkFlags(enc, &val->flags);
778
vn_encode_VkSampleCountFlagBits(enc, &val->rasterizationSamples);
779
vn_encode_VkBool32(enc, &val->sampleShadingEnable);
780
vn_encode_float(enc, &val->minSampleShading);
781
if (val->pSampleMask) {
782
vn_encode_array_size(enc, (val->rasterizationSamples + 31) / 32);
783
vn_encode_VkSampleMask_array(enc, val->pSampleMask, (val->rasterizationSamples + 31) / 32);
784
} else {
785
vn_encode_array_size(enc, 0);
786
}
787
vn_encode_VkBool32(enc, &val->alphaToCoverageEnable);
788
vn_encode_VkBool32(enc, &val->alphaToOneEnable);
789
}
790
791
static inline void
792
vn_encode_VkPipelineMultisampleStateCreateInfo(struct vn_cs_encoder *enc, const VkPipelineMultisampleStateCreateInfo *val)
793
{
794
assert(val->sType == VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO);
795
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO });
796
vn_encode_VkPipelineMultisampleStateCreateInfo_pnext(enc, val->pNext);
797
vn_encode_VkPipelineMultisampleStateCreateInfo_self(enc, val);
798
}
799
800
/* struct VkStencilOpState */
801
802
static inline size_t
803
vn_sizeof_VkStencilOpState(const VkStencilOpState *val)
804
{
805
size_t size = 0;
806
size += vn_sizeof_VkStencilOp(&val->failOp);
807
size += vn_sizeof_VkStencilOp(&val->passOp);
808
size += vn_sizeof_VkStencilOp(&val->depthFailOp);
809
size += vn_sizeof_VkCompareOp(&val->compareOp);
810
size += vn_sizeof_uint32_t(&val->compareMask);
811
size += vn_sizeof_uint32_t(&val->writeMask);
812
size += vn_sizeof_uint32_t(&val->reference);
813
return size;
814
}
815
816
static inline void
817
vn_encode_VkStencilOpState(struct vn_cs_encoder *enc, const VkStencilOpState *val)
818
{
819
vn_encode_VkStencilOp(enc, &val->failOp);
820
vn_encode_VkStencilOp(enc, &val->passOp);
821
vn_encode_VkStencilOp(enc, &val->depthFailOp);
822
vn_encode_VkCompareOp(enc, &val->compareOp);
823
vn_encode_uint32_t(enc, &val->compareMask);
824
vn_encode_uint32_t(enc, &val->writeMask);
825
vn_encode_uint32_t(enc, &val->reference);
826
}
827
828
/* struct VkPipelineDepthStencilStateCreateInfo chain */
829
830
static inline size_t
831
vn_sizeof_VkPipelineDepthStencilStateCreateInfo_pnext(const void *val)
832
{
833
/* no known/supported struct */
834
return vn_sizeof_simple_pointer(NULL);
835
}
836
837
static inline size_t
838
vn_sizeof_VkPipelineDepthStencilStateCreateInfo_self(const VkPipelineDepthStencilStateCreateInfo *val)
839
{
840
size_t size = 0;
841
/* skip val->{sType,pNext} */
842
size += vn_sizeof_VkFlags(&val->flags);
843
size += vn_sizeof_VkBool32(&val->depthTestEnable);
844
size += vn_sizeof_VkBool32(&val->depthWriteEnable);
845
size += vn_sizeof_VkCompareOp(&val->depthCompareOp);
846
size += vn_sizeof_VkBool32(&val->depthBoundsTestEnable);
847
size += vn_sizeof_VkBool32(&val->stencilTestEnable);
848
size += vn_sizeof_VkStencilOpState(&val->front);
849
size += vn_sizeof_VkStencilOpState(&val->back);
850
size += vn_sizeof_float(&val->minDepthBounds);
851
size += vn_sizeof_float(&val->maxDepthBounds);
852
return size;
853
}
854
855
static inline size_t
856
vn_sizeof_VkPipelineDepthStencilStateCreateInfo(const VkPipelineDepthStencilStateCreateInfo *val)
857
{
858
size_t size = 0;
859
860
size += vn_sizeof_VkStructureType(&val->sType);
861
size += vn_sizeof_VkPipelineDepthStencilStateCreateInfo_pnext(val->pNext);
862
size += vn_sizeof_VkPipelineDepthStencilStateCreateInfo_self(val);
863
864
return size;
865
}
866
867
static inline void
868
vn_encode_VkPipelineDepthStencilStateCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
869
{
870
/* no known/supported struct */
871
vn_encode_simple_pointer(enc, NULL);
872
}
873
874
static inline void
875
vn_encode_VkPipelineDepthStencilStateCreateInfo_self(struct vn_cs_encoder *enc, const VkPipelineDepthStencilStateCreateInfo *val)
876
{
877
/* skip val->{sType,pNext} */
878
vn_encode_VkFlags(enc, &val->flags);
879
vn_encode_VkBool32(enc, &val->depthTestEnable);
880
vn_encode_VkBool32(enc, &val->depthWriteEnable);
881
vn_encode_VkCompareOp(enc, &val->depthCompareOp);
882
vn_encode_VkBool32(enc, &val->depthBoundsTestEnable);
883
vn_encode_VkBool32(enc, &val->stencilTestEnable);
884
vn_encode_VkStencilOpState(enc, &val->front);
885
vn_encode_VkStencilOpState(enc, &val->back);
886
vn_encode_float(enc, &val->minDepthBounds);
887
vn_encode_float(enc, &val->maxDepthBounds);
888
}
889
890
static inline void
891
vn_encode_VkPipelineDepthStencilStateCreateInfo(struct vn_cs_encoder *enc, const VkPipelineDepthStencilStateCreateInfo *val)
892
{
893
assert(val->sType == VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO);
894
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO });
895
vn_encode_VkPipelineDepthStencilStateCreateInfo_pnext(enc, val->pNext);
896
vn_encode_VkPipelineDepthStencilStateCreateInfo_self(enc, val);
897
}
898
899
/* struct VkPipelineColorBlendAttachmentState */
900
901
static inline size_t
902
vn_sizeof_VkPipelineColorBlendAttachmentState(const VkPipelineColorBlendAttachmentState *val)
903
{
904
size_t size = 0;
905
size += vn_sizeof_VkBool32(&val->blendEnable);
906
size += vn_sizeof_VkBlendFactor(&val->srcColorBlendFactor);
907
size += vn_sizeof_VkBlendFactor(&val->dstColorBlendFactor);
908
size += vn_sizeof_VkBlendOp(&val->colorBlendOp);
909
size += vn_sizeof_VkBlendFactor(&val->srcAlphaBlendFactor);
910
size += vn_sizeof_VkBlendFactor(&val->dstAlphaBlendFactor);
911
size += vn_sizeof_VkBlendOp(&val->alphaBlendOp);
912
size += vn_sizeof_VkFlags(&val->colorWriteMask);
913
return size;
914
}
915
916
static inline void
917
vn_encode_VkPipelineColorBlendAttachmentState(struct vn_cs_encoder *enc, const VkPipelineColorBlendAttachmentState *val)
918
{
919
vn_encode_VkBool32(enc, &val->blendEnable);
920
vn_encode_VkBlendFactor(enc, &val->srcColorBlendFactor);
921
vn_encode_VkBlendFactor(enc, &val->dstColorBlendFactor);
922
vn_encode_VkBlendOp(enc, &val->colorBlendOp);
923
vn_encode_VkBlendFactor(enc, &val->srcAlphaBlendFactor);
924
vn_encode_VkBlendFactor(enc, &val->dstAlphaBlendFactor);
925
vn_encode_VkBlendOp(enc, &val->alphaBlendOp);
926
vn_encode_VkFlags(enc, &val->colorWriteMask);
927
}
928
929
/* struct VkPipelineColorBlendStateCreateInfo chain */
930
931
static inline size_t
932
vn_sizeof_VkPipelineColorBlendStateCreateInfo_pnext(const void *val)
933
{
934
/* no known/supported struct */
935
return vn_sizeof_simple_pointer(NULL);
936
}
937
938
static inline size_t
939
vn_sizeof_VkPipelineColorBlendStateCreateInfo_self(const VkPipelineColorBlendStateCreateInfo *val)
940
{
941
size_t size = 0;
942
/* skip val->{sType,pNext} */
943
size += vn_sizeof_VkFlags(&val->flags);
944
size += vn_sizeof_VkBool32(&val->logicOpEnable);
945
size += vn_sizeof_VkLogicOp(&val->logicOp);
946
size += vn_sizeof_uint32_t(&val->attachmentCount);
947
if (val->pAttachments) {
948
size += vn_sizeof_array_size(val->attachmentCount);
949
for (uint32_t i = 0; i < val->attachmentCount; i++)
950
size += vn_sizeof_VkPipelineColorBlendAttachmentState(&val->pAttachments[i]);
951
} else {
952
size += vn_sizeof_array_size(0);
953
}
954
size += vn_sizeof_array_size(4);
955
size += vn_sizeof_float_array(val->blendConstants, 4);
956
return size;
957
}
958
959
static inline size_t
960
vn_sizeof_VkPipelineColorBlendStateCreateInfo(const VkPipelineColorBlendStateCreateInfo *val)
961
{
962
size_t size = 0;
963
964
size += vn_sizeof_VkStructureType(&val->sType);
965
size += vn_sizeof_VkPipelineColorBlendStateCreateInfo_pnext(val->pNext);
966
size += vn_sizeof_VkPipelineColorBlendStateCreateInfo_self(val);
967
968
return size;
969
}
970
971
static inline void
972
vn_encode_VkPipelineColorBlendStateCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
973
{
974
/* no known/supported struct */
975
vn_encode_simple_pointer(enc, NULL);
976
}
977
978
static inline void
979
vn_encode_VkPipelineColorBlendStateCreateInfo_self(struct vn_cs_encoder *enc, const VkPipelineColorBlendStateCreateInfo *val)
980
{
981
/* skip val->{sType,pNext} */
982
vn_encode_VkFlags(enc, &val->flags);
983
vn_encode_VkBool32(enc, &val->logicOpEnable);
984
vn_encode_VkLogicOp(enc, &val->logicOp);
985
vn_encode_uint32_t(enc, &val->attachmentCount);
986
if (val->pAttachments) {
987
vn_encode_array_size(enc, val->attachmentCount);
988
for (uint32_t i = 0; i < val->attachmentCount; i++)
989
vn_encode_VkPipelineColorBlendAttachmentState(enc, &val->pAttachments[i]);
990
} else {
991
vn_encode_array_size(enc, 0);
992
}
993
vn_encode_array_size(enc, 4);
994
vn_encode_float_array(enc, val->blendConstants, 4);
995
}
996
997
static inline void
998
vn_encode_VkPipelineColorBlendStateCreateInfo(struct vn_cs_encoder *enc, const VkPipelineColorBlendStateCreateInfo *val)
999
{
1000
assert(val->sType == VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO);
1001
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO });
1002
vn_encode_VkPipelineColorBlendStateCreateInfo_pnext(enc, val->pNext);
1003
vn_encode_VkPipelineColorBlendStateCreateInfo_self(enc, val);
1004
}
1005
1006
/* struct VkPipelineDynamicStateCreateInfo chain */
1007
1008
static inline size_t
1009
vn_sizeof_VkPipelineDynamicStateCreateInfo_pnext(const void *val)
1010
{
1011
/* no known/supported struct */
1012
return vn_sizeof_simple_pointer(NULL);
1013
}
1014
1015
static inline size_t
1016
vn_sizeof_VkPipelineDynamicStateCreateInfo_self(const VkPipelineDynamicStateCreateInfo *val)
1017
{
1018
size_t size = 0;
1019
/* skip val->{sType,pNext} */
1020
size += vn_sizeof_VkFlags(&val->flags);
1021
size += vn_sizeof_uint32_t(&val->dynamicStateCount);
1022
if (val->pDynamicStates) {
1023
size += vn_sizeof_array_size(val->dynamicStateCount);
1024
size += vn_sizeof_VkDynamicState_array(val->pDynamicStates, val->dynamicStateCount);
1025
} else {
1026
size += vn_sizeof_array_size(0);
1027
}
1028
return size;
1029
}
1030
1031
static inline size_t
1032
vn_sizeof_VkPipelineDynamicStateCreateInfo(const VkPipelineDynamicStateCreateInfo *val)
1033
{
1034
size_t size = 0;
1035
1036
size += vn_sizeof_VkStructureType(&val->sType);
1037
size += vn_sizeof_VkPipelineDynamicStateCreateInfo_pnext(val->pNext);
1038
size += vn_sizeof_VkPipelineDynamicStateCreateInfo_self(val);
1039
1040
return size;
1041
}
1042
1043
static inline void
1044
vn_encode_VkPipelineDynamicStateCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
1045
{
1046
/* no known/supported struct */
1047
vn_encode_simple_pointer(enc, NULL);
1048
}
1049
1050
static inline void
1051
vn_encode_VkPipelineDynamicStateCreateInfo_self(struct vn_cs_encoder *enc, const VkPipelineDynamicStateCreateInfo *val)
1052
{
1053
/* skip val->{sType,pNext} */
1054
vn_encode_VkFlags(enc, &val->flags);
1055
vn_encode_uint32_t(enc, &val->dynamicStateCount);
1056
if (val->pDynamicStates) {
1057
vn_encode_array_size(enc, val->dynamicStateCount);
1058
vn_encode_VkDynamicState_array(enc, val->pDynamicStates, val->dynamicStateCount);
1059
} else {
1060
vn_encode_array_size(enc, 0);
1061
}
1062
}
1063
1064
static inline void
1065
vn_encode_VkPipelineDynamicStateCreateInfo(struct vn_cs_encoder *enc, const VkPipelineDynamicStateCreateInfo *val)
1066
{
1067
assert(val->sType == VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO);
1068
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO });
1069
vn_encode_VkPipelineDynamicStateCreateInfo_pnext(enc, val->pNext);
1070
vn_encode_VkPipelineDynamicStateCreateInfo_self(enc, val);
1071
}
1072
1073
/* struct VkGraphicsPipelineCreateInfo chain */
1074
1075
static inline size_t
1076
vn_sizeof_VkGraphicsPipelineCreateInfo_pnext(const void *val)
1077
{
1078
/* no known/supported struct */
1079
return vn_sizeof_simple_pointer(NULL);
1080
}
1081
1082
static inline size_t
1083
vn_sizeof_VkGraphicsPipelineCreateInfo_self(const VkGraphicsPipelineCreateInfo *val)
1084
{
1085
size_t size = 0;
1086
/* skip val->{sType,pNext} */
1087
size += vn_sizeof_VkFlags(&val->flags);
1088
size += vn_sizeof_uint32_t(&val->stageCount);
1089
if (val->pStages) {
1090
size += vn_sizeof_array_size(val->stageCount);
1091
for (uint32_t i = 0; i < val->stageCount; i++)
1092
size += vn_sizeof_VkPipelineShaderStageCreateInfo(&val->pStages[i]);
1093
} else {
1094
size += vn_sizeof_array_size(0);
1095
}
1096
size += vn_sizeof_simple_pointer(val->pVertexInputState);
1097
if (val->pVertexInputState)
1098
size += vn_sizeof_VkPipelineVertexInputStateCreateInfo(val->pVertexInputState);
1099
size += vn_sizeof_simple_pointer(val->pInputAssemblyState);
1100
if (val->pInputAssemblyState)
1101
size += vn_sizeof_VkPipelineInputAssemblyStateCreateInfo(val->pInputAssemblyState);
1102
size += vn_sizeof_simple_pointer(val->pTessellationState);
1103
if (val->pTessellationState)
1104
size += vn_sizeof_VkPipelineTessellationStateCreateInfo(val->pTessellationState);
1105
size += vn_sizeof_simple_pointer(val->pViewportState);
1106
if (val->pViewportState)
1107
size += vn_sizeof_VkPipelineViewportStateCreateInfo(val->pViewportState);
1108
size += vn_sizeof_simple_pointer(val->pRasterizationState);
1109
if (val->pRasterizationState)
1110
size += vn_sizeof_VkPipelineRasterizationStateCreateInfo(val->pRasterizationState);
1111
size += vn_sizeof_simple_pointer(val->pMultisampleState);
1112
if (val->pMultisampleState)
1113
size += vn_sizeof_VkPipelineMultisampleStateCreateInfo(val->pMultisampleState);
1114
size += vn_sizeof_simple_pointer(val->pDepthStencilState);
1115
if (val->pDepthStencilState)
1116
size += vn_sizeof_VkPipelineDepthStencilStateCreateInfo(val->pDepthStencilState);
1117
size += vn_sizeof_simple_pointer(val->pColorBlendState);
1118
if (val->pColorBlendState)
1119
size += vn_sizeof_VkPipelineColorBlendStateCreateInfo(val->pColorBlendState);
1120
size += vn_sizeof_simple_pointer(val->pDynamicState);
1121
if (val->pDynamicState)
1122
size += vn_sizeof_VkPipelineDynamicStateCreateInfo(val->pDynamicState);
1123
size += vn_sizeof_VkPipelineLayout(&val->layout);
1124
size += vn_sizeof_VkRenderPass(&val->renderPass);
1125
size += vn_sizeof_uint32_t(&val->subpass);
1126
size += vn_sizeof_VkPipeline(&val->basePipelineHandle);
1127
size += vn_sizeof_int32_t(&val->basePipelineIndex);
1128
return size;
1129
}
1130
1131
static inline size_t
1132
vn_sizeof_VkGraphicsPipelineCreateInfo(const VkGraphicsPipelineCreateInfo *val)
1133
{
1134
size_t size = 0;
1135
1136
size += vn_sizeof_VkStructureType(&val->sType);
1137
size += vn_sizeof_VkGraphicsPipelineCreateInfo_pnext(val->pNext);
1138
size += vn_sizeof_VkGraphicsPipelineCreateInfo_self(val);
1139
1140
return size;
1141
}
1142
1143
static inline void
1144
vn_encode_VkGraphicsPipelineCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
1145
{
1146
/* no known/supported struct */
1147
vn_encode_simple_pointer(enc, NULL);
1148
}
1149
1150
static inline void
1151
vn_encode_VkGraphicsPipelineCreateInfo_self(struct vn_cs_encoder *enc, const VkGraphicsPipelineCreateInfo *val)
1152
{
1153
/* skip val->{sType,pNext} */
1154
vn_encode_VkFlags(enc, &val->flags);
1155
vn_encode_uint32_t(enc, &val->stageCount);
1156
if (val->pStages) {
1157
vn_encode_array_size(enc, val->stageCount);
1158
for (uint32_t i = 0; i < val->stageCount; i++)
1159
vn_encode_VkPipelineShaderStageCreateInfo(enc, &val->pStages[i]);
1160
} else {
1161
vn_encode_array_size(enc, 0);
1162
}
1163
if (vn_encode_simple_pointer(enc, val->pVertexInputState))
1164
vn_encode_VkPipelineVertexInputStateCreateInfo(enc, val->pVertexInputState);
1165
if (vn_encode_simple_pointer(enc, val->pInputAssemblyState))
1166
vn_encode_VkPipelineInputAssemblyStateCreateInfo(enc, val->pInputAssemblyState);
1167
if (vn_encode_simple_pointer(enc, val->pTessellationState))
1168
vn_encode_VkPipelineTessellationStateCreateInfo(enc, val->pTessellationState);
1169
if (vn_encode_simple_pointer(enc, val->pViewportState))
1170
vn_encode_VkPipelineViewportStateCreateInfo(enc, val->pViewportState);
1171
if (vn_encode_simple_pointer(enc, val->pRasterizationState))
1172
vn_encode_VkPipelineRasterizationStateCreateInfo(enc, val->pRasterizationState);
1173
if (vn_encode_simple_pointer(enc, val->pMultisampleState))
1174
vn_encode_VkPipelineMultisampleStateCreateInfo(enc, val->pMultisampleState);
1175
if (vn_encode_simple_pointer(enc, val->pDepthStencilState))
1176
vn_encode_VkPipelineDepthStencilStateCreateInfo(enc, val->pDepthStencilState);
1177
if (vn_encode_simple_pointer(enc, val->pColorBlendState))
1178
vn_encode_VkPipelineColorBlendStateCreateInfo(enc, val->pColorBlendState);
1179
if (vn_encode_simple_pointer(enc, val->pDynamicState))
1180
vn_encode_VkPipelineDynamicStateCreateInfo(enc, val->pDynamicState);
1181
vn_encode_VkPipelineLayout(enc, &val->layout);
1182
vn_encode_VkRenderPass(enc, &val->renderPass);
1183
vn_encode_uint32_t(enc, &val->subpass);
1184
vn_encode_VkPipeline(enc, &val->basePipelineHandle);
1185
vn_encode_int32_t(enc, &val->basePipelineIndex);
1186
}
1187
1188
static inline void
1189
vn_encode_VkGraphicsPipelineCreateInfo(struct vn_cs_encoder *enc, const VkGraphicsPipelineCreateInfo *val)
1190
{
1191
assert(val->sType == VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO);
1192
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO });
1193
vn_encode_VkGraphicsPipelineCreateInfo_pnext(enc, val->pNext);
1194
vn_encode_VkGraphicsPipelineCreateInfo_self(enc, val);
1195
}
1196
1197
/* struct VkComputePipelineCreateInfo chain */
1198
1199
static inline size_t
1200
vn_sizeof_VkComputePipelineCreateInfo_pnext(const void *val)
1201
{
1202
/* no known/supported struct */
1203
return vn_sizeof_simple_pointer(NULL);
1204
}
1205
1206
static inline size_t
1207
vn_sizeof_VkComputePipelineCreateInfo_self(const VkComputePipelineCreateInfo *val)
1208
{
1209
size_t size = 0;
1210
/* skip val->{sType,pNext} */
1211
size += vn_sizeof_VkFlags(&val->flags);
1212
size += vn_sizeof_VkPipelineShaderStageCreateInfo(&val->stage);
1213
size += vn_sizeof_VkPipelineLayout(&val->layout);
1214
size += vn_sizeof_VkPipeline(&val->basePipelineHandle);
1215
size += vn_sizeof_int32_t(&val->basePipelineIndex);
1216
return size;
1217
}
1218
1219
static inline size_t
1220
vn_sizeof_VkComputePipelineCreateInfo(const VkComputePipelineCreateInfo *val)
1221
{
1222
size_t size = 0;
1223
1224
size += vn_sizeof_VkStructureType(&val->sType);
1225
size += vn_sizeof_VkComputePipelineCreateInfo_pnext(val->pNext);
1226
size += vn_sizeof_VkComputePipelineCreateInfo_self(val);
1227
1228
return size;
1229
}
1230
1231
static inline void
1232
vn_encode_VkComputePipelineCreateInfo_pnext(struct vn_cs_encoder *enc, const void *val)
1233
{
1234
/* no known/supported struct */
1235
vn_encode_simple_pointer(enc, NULL);
1236
}
1237
1238
static inline void
1239
vn_encode_VkComputePipelineCreateInfo_self(struct vn_cs_encoder *enc, const VkComputePipelineCreateInfo *val)
1240
{
1241
/* skip val->{sType,pNext} */
1242
vn_encode_VkFlags(enc, &val->flags);
1243
vn_encode_VkPipelineShaderStageCreateInfo(enc, &val->stage);
1244
vn_encode_VkPipelineLayout(enc, &val->layout);
1245
vn_encode_VkPipeline(enc, &val->basePipelineHandle);
1246
vn_encode_int32_t(enc, &val->basePipelineIndex);
1247
}
1248
1249
static inline void
1250
vn_encode_VkComputePipelineCreateInfo(struct vn_cs_encoder *enc, const VkComputePipelineCreateInfo *val)
1251
{
1252
assert(val->sType == VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO);
1253
vn_encode_VkStructureType(enc, &(VkStructureType){ VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO });
1254
vn_encode_VkComputePipelineCreateInfo_pnext(enc, val->pNext);
1255
vn_encode_VkComputePipelineCreateInfo_self(enc, val);
1256
}
1257
1258
static inline size_t vn_sizeof_vkCreateGraphicsPipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
1259
{
1260
const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkCreateGraphicsPipelines_EXT;
1261
const VkFlags cmd_flags = 0;
1262
size_t cmd_size = vn_sizeof_VkCommandTypeEXT(&cmd_type) + vn_sizeof_VkFlags(&cmd_flags);
1263
1264
cmd_size += vn_sizeof_VkDevice(&device);
1265
cmd_size += vn_sizeof_VkPipelineCache(&pipelineCache);
1266
cmd_size += vn_sizeof_uint32_t(&createInfoCount);
1267
if (pCreateInfos) {
1268
cmd_size += vn_sizeof_array_size(createInfoCount);
1269
for (uint32_t i = 0; i < createInfoCount; i++)
1270
cmd_size += vn_sizeof_VkGraphicsPipelineCreateInfo(&pCreateInfos[i]);
1271
} else {
1272
cmd_size += vn_sizeof_array_size(0);
1273
}
1274
cmd_size += vn_sizeof_simple_pointer(pAllocator);
1275
if (pAllocator)
1276
assert(false);
1277
if (pPipelines) {
1278
cmd_size += vn_sizeof_array_size(createInfoCount);
1279
for (uint32_t i = 0; i < createInfoCount; i++)
1280
cmd_size += vn_sizeof_VkPipeline(&pPipelines[i]);
1281
} else {
1282
cmd_size += vn_sizeof_array_size(0);
1283
}
1284
1285
return cmd_size;
1286
}
1287
1288
static inline void vn_encode_vkCreateGraphicsPipelines(struct vn_cs_encoder *enc, VkCommandFlagsEXT cmd_flags, VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
1289
{
1290
const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkCreateGraphicsPipelines_EXT;
1291
1292
vn_encode_VkCommandTypeEXT(enc, &cmd_type);
1293
vn_encode_VkFlags(enc, &cmd_flags);
1294
1295
vn_encode_VkDevice(enc, &device);
1296
vn_encode_VkPipelineCache(enc, &pipelineCache);
1297
vn_encode_uint32_t(enc, &createInfoCount);
1298
if (pCreateInfos) {
1299
vn_encode_array_size(enc, createInfoCount);
1300
for (uint32_t i = 0; i < createInfoCount; i++)
1301
vn_encode_VkGraphicsPipelineCreateInfo(enc, &pCreateInfos[i]);
1302
} else {
1303
vn_encode_array_size(enc, 0);
1304
}
1305
if (vn_encode_simple_pointer(enc, pAllocator))
1306
assert(false);
1307
if (pPipelines) {
1308
vn_encode_array_size(enc, createInfoCount);
1309
for (uint32_t i = 0; i < createInfoCount; i++)
1310
vn_encode_VkPipeline(enc, &pPipelines[i]);
1311
} else {
1312
vn_encode_array_size(enc, 0);
1313
}
1314
}
1315
1316
static inline size_t vn_sizeof_vkCreateGraphicsPipelines_reply(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
1317
{
1318
const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkCreateGraphicsPipelines_EXT;
1319
size_t cmd_size = vn_sizeof_VkCommandTypeEXT(&cmd_type);
1320
1321
VkResult ret;
1322
cmd_size += vn_sizeof_VkResult(&ret);
1323
/* skip device */
1324
/* skip pipelineCache */
1325
/* skip createInfoCount */
1326
/* skip pCreateInfos */
1327
/* skip pAllocator */
1328
if (pPipelines) {
1329
cmd_size += vn_sizeof_array_size(createInfoCount);
1330
for (uint32_t i = 0; i < createInfoCount; i++)
1331
cmd_size += vn_sizeof_VkPipeline(&pPipelines[i]);
1332
} else {
1333
cmd_size += vn_sizeof_array_size(0);
1334
}
1335
1336
return cmd_size;
1337
}
1338
1339
static inline VkResult vn_decode_vkCreateGraphicsPipelines_reply(struct vn_cs_decoder *dec, VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
1340
{
1341
VkCommandTypeEXT command_type;
1342
vn_decode_VkCommandTypeEXT(dec, &command_type);
1343
assert(command_type == VK_COMMAND_TYPE_vkCreateGraphicsPipelines_EXT);
1344
1345
VkResult ret;
1346
vn_decode_VkResult(dec, &ret);
1347
/* skip device */
1348
/* skip pipelineCache */
1349
/* skip createInfoCount */
1350
/* skip pCreateInfos */
1351
/* skip pAllocator */
1352
if (vn_peek_array_size(dec)) {
1353
vn_decode_array_size(dec, createInfoCount);
1354
for (uint32_t i = 0; i < createInfoCount; i++)
1355
vn_decode_VkPipeline(dec, &pPipelines[i]);
1356
} else {
1357
vn_decode_array_size(dec, 0);
1358
pPipelines = NULL;
1359
}
1360
1361
return ret;
1362
}
1363
1364
static inline size_t vn_sizeof_vkCreateComputePipelines(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
1365
{
1366
const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkCreateComputePipelines_EXT;
1367
const VkFlags cmd_flags = 0;
1368
size_t cmd_size = vn_sizeof_VkCommandTypeEXT(&cmd_type) + vn_sizeof_VkFlags(&cmd_flags);
1369
1370
cmd_size += vn_sizeof_VkDevice(&device);
1371
cmd_size += vn_sizeof_VkPipelineCache(&pipelineCache);
1372
cmd_size += vn_sizeof_uint32_t(&createInfoCount);
1373
if (pCreateInfos) {
1374
cmd_size += vn_sizeof_array_size(createInfoCount);
1375
for (uint32_t i = 0; i < createInfoCount; i++)
1376
cmd_size += vn_sizeof_VkComputePipelineCreateInfo(&pCreateInfos[i]);
1377
} else {
1378
cmd_size += vn_sizeof_array_size(0);
1379
}
1380
cmd_size += vn_sizeof_simple_pointer(pAllocator);
1381
if (pAllocator)
1382
assert(false);
1383
if (pPipelines) {
1384
cmd_size += vn_sizeof_array_size(createInfoCount);
1385
for (uint32_t i = 0; i < createInfoCount; i++)
1386
cmd_size += vn_sizeof_VkPipeline(&pPipelines[i]);
1387
} else {
1388
cmd_size += vn_sizeof_array_size(0);
1389
}
1390
1391
return cmd_size;
1392
}
1393
1394
static inline void vn_encode_vkCreateComputePipelines(struct vn_cs_encoder *enc, VkCommandFlagsEXT cmd_flags, VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
1395
{
1396
const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkCreateComputePipelines_EXT;
1397
1398
vn_encode_VkCommandTypeEXT(enc, &cmd_type);
1399
vn_encode_VkFlags(enc, &cmd_flags);
1400
1401
vn_encode_VkDevice(enc, &device);
1402
vn_encode_VkPipelineCache(enc, &pipelineCache);
1403
vn_encode_uint32_t(enc, &createInfoCount);
1404
if (pCreateInfos) {
1405
vn_encode_array_size(enc, createInfoCount);
1406
for (uint32_t i = 0; i < createInfoCount; i++)
1407
vn_encode_VkComputePipelineCreateInfo(enc, &pCreateInfos[i]);
1408
} else {
1409
vn_encode_array_size(enc, 0);
1410
}
1411
if (vn_encode_simple_pointer(enc, pAllocator))
1412
assert(false);
1413
if (pPipelines) {
1414
vn_encode_array_size(enc, createInfoCount);
1415
for (uint32_t i = 0; i < createInfoCount; i++)
1416
vn_encode_VkPipeline(enc, &pPipelines[i]);
1417
} else {
1418
vn_encode_array_size(enc, 0);
1419
}
1420
}
1421
1422
static inline size_t vn_sizeof_vkCreateComputePipelines_reply(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
1423
{
1424
const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkCreateComputePipelines_EXT;
1425
size_t cmd_size = vn_sizeof_VkCommandTypeEXT(&cmd_type);
1426
1427
VkResult ret;
1428
cmd_size += vn_sizeof_VkResult(&ret);
1429
/* skip device */
1430
/* skip pipelineCache */
1431
/* skip createInfoCount */
1432
/* skip pCreateInfos */
1433
/* skip pAllocator */
1434
if (pPipelines) {
1435
cmd_size += vn_sizeof_array_size(createInfoCount);
1436
for (uint32_t i = 0; i < createInfoCount; i++)
1437
cmd_size += vn_sizeof_VkPipeline(&pPipelines[i]);
1438
} else {
1439
cmd_size += vn_sizeof_array_size(0);
1440
}
1441
1442
return cmd_size;
1443
}
1444
1445
static inline VkResult vn_decode_vkCreateComputePipelines_reply(struct vn_cs_decoder *dec, VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
    VkCommandTypeEXT command_type;
    vn_decode_VkCommandTypeEXT(dec, &command_type);
    assert(command_type == VK_COMMAND_TYPE_vkCreateComputePipelines_EXT);

    VkResult ret;
    vn_decode_VkResult(dec, &ret);
    /* skip device */
    /* skip pipelineCache */
    /* skip createInfoCount */
    /* skip pCreateInfos */
    /* skip pAllocator */
    if (vn_peek_array_size(dec)) {
        vn_decode_array_size(dec, createInfoCount);
        for (uint32_t i = 0; i < createInfoCount; i++)
            vn_decode_VkPipeline(dec, &pPipelines[i]);
    } else {
        vn_decode_array_size(dec, 0);
        pPipelines = NULL;
    }

    return ret;
}

static inline size_t vn_sizeof_vkDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator)
{
    const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkDestroyPipeline_EXT;
    const VkFlags cmd_flags = 0;
    size_t cmd_size = vn_sizeof_VkCommandTypeEXT(&cmd_type) + vn_sizeof_VkFlags(&cmd_flags);

    cmd_size += vn_sizeof_VkDevice(&device);
    cmd_size += vn_sizeof_VkPipeline(&pipeline);
    cmd_size += vn_sizeof_simple_pointer(pAllocator);
    if (pAllocator)
        assert(false);

    return cmd_size;
}

static inline void vn_encode_vkDestroyPipeline(struct vn_cs_encoder *enc, VkCommandFlagsEXT cmd_flags, VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator)
{
    const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkDestroyPipeline_EXT;

    vn_encode_VkCommandTypeEXT(enc, &cmd_type);
    vn_encode_VkFlags(enc, &cmd_flags);

    vn_encode_VkDevice(enc, &device);
    vn_encode_VkPipeline(enc, &pipeline);
    if (vn_encode_simple_pointer(enc, pAllocator))
        assert(false);
}

static inline size_t vn_sizeof_vkDestroyPipeline_reply(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator)
{
    const VkCommandTypeEXT cmd_type = VK_COMMAND_TYPE_vkDestroyPipeline_EXT;
    size_t cmd_size = vn_sizeof_VkCommandTypeEXT(&cmd_type);

    /* skip device */
    /* skip pipeline */
    /* skip pAllocator */

    return cmd_size;
}

static inline void vn_decode_vkDestroyPipeline_reply(struct vn_cs_decoder *dec, VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator)
{
    VkCommandTypeEXT command_type;
    vn_decode_VkCommandTypeEXT(dec, &command_type);
    assert(command_type == VK_COMMAND_TYPE_vkDestroyPipeline_EXT);

    /* skip device */
    /* skip pipeline */
    /* skip pAllocator */
}

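/* The vn_submit_* helpers below size the command, encode it into a small
 * on-stack buffer (falling back to malloc when the command exceeds
 * VN_SUBMIT_LOCAL_CMD_SIZE), and hand it to the instance for submission.
 * A reply buffer is reserved only when VK_COMMAND_GENERATE_REPLY_BIT_EXT
 * is set in cmd_flags.
 */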
static inline void vn_submit_vkCreateGraphicsPipelines(struct vn_instance *vn_instance, VkCommandFlagsEXT cmd_flags, VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, struct vn_instance_submit_command *submit)
{
    uint8_t local_cmd_data[VN_SUBMIT_LOCAL_CMD_SIZE];
    void *cmd_data = local_cmd_data;
    size_t cmd_size = vn_sizeof_vkCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
    if (cmd_size > sizeof(local_cmd_data)) {
        cmd_data = malloc(cmd_size);
        if (!cmd_data)
            cmd_size = 0;
    }
    const size_t reply_size = cmd_flags & VK_COMMAND_GENERATE_REPLY_BIT_EXT ? vn_sizeof_vkCreateGraphicsPipelines_reply(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines) : 0;

    struct vn_cs_encoder *enc = vn_instance_submit_command_init(vn_instance, submit, cmd_data, cmd_size, reply_size);
    if (cmd_size) {
        vn_encode_vkCreateGraphicsPipelines(enc, cmd_flags, device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
        vn_instance_submit_command(vn_instance, submit);
        if (cmd_data != local_cmd_data)
            free(cmd_data);
    }
}

static inline void vn_submit_vkCreateComputePipelines(struct vn_instance *vn_instance, VkCommandFlagsEXT cmd_flags, VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines, struct vn_instance_submit_command *submit)
{
    uint8_t local_cmd_data[VN_SUBMIT_LOCAL_CMD_SIZE];
    void *cmd_data = local_cmd_data;
    size_t cmd_size = vn_sizeof_vkCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
    if (cmd_size > sizeof(local_cmd_data)) {
        cmd_data = malloc(cmd_size);
        if (!cmd_data)
            cmd_size = 0;
    }
    const size_t reply_size = cmd_flags & VK_COMMAND_GENERATE_REPLY_BIT_EXT ? vn_sizeof_vkCreateComputePipelines_reply(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines) : 0;

    struct vn_cs_encoder *enc = vn_instance_submit_command_init(vn_instance, submit, cmd_data, cmd_size, reply_size);
    if (cmd_size) {
        vn_encode_vkCreateComputePipelines(enc, cmd_flags, device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
        vn_instance_submit_command(vn_instance, submit);
        if (cmd_data != local_cmd_data)
            free(cmd_data);
    }
}

static inline void vn_submit_vkDestroyPipeline(struct vn_instance *vn_instance, VkCommandFlagsEXT cmd_flags, VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator, struct vn_instance_submit_command *submit)
{
    uint8_t local_cmd_data[VN_SUBMIT_LOCAL_CMD_SIZE];
    void *cmd_data = local_cmd_data;
    size_t cmd_size = vn_sizeof_vkDestroyPipeline(device, pipeline, pAllocator);
    if (cmd_size > sizeof(local_cmd_data)) {
        cmd_data = malloc(cmd_size);
        if (!cmd_data)
            cmd_size = 0;
    }
    const size_t reply_size = cmd_flags & VK_COMMAND_GENERATE_REPLY_BIT_EXT ? vn_sizeof_vkDestroyPipeline_reply(device, pipeline, pAllocator) : 0;

    struct vn_cs_encoder *enc = vn_instance_submit_command_init(vn_instance, submit, cmd_data, cmd_size, reply_size);
    if (cmd_size) {
        vn_encode_vkDestroyPipeline(enc, cmd_flags, device, pipeline, pAllocator);
        vn_instance_submit_command(vn_instance, submit);
        if (cmd_data != local_cmd_data)
            free(cmd_data);
    }
}

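/* vn_call_* submits with VK_COMMAND_GENERATE_REPLY_BIT_EXT and blocks until
 * the reply is decoded; vn_async_* submits with no reply requested and
 * returns immediately.
 */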
static inline VkResult vn_call_vkCreateGraphicsPipelines(struct vn_instance *vn_instance, VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
    struct vn_instance_submit_command submit;
    vn_submit_vkCreateGraphicsPipelines(vn_instance, VK_COMMAND_GENERATE_REPLY_BIT_EXT, device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &submit);
    struct vn_cs_decoder *dec = vn_instance_get_command_reply(vn_instance, &submit);
    if (dec) {
        const VkResult ret = vn_decode_vkCreateGraphicsPipelines_reply(dec, device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
        vn_instance_free_command_reply(vn_instance, &submit);
        return ret;
    } else {
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
}

static inline void vn_async_vkCreateGraphicsPipelines(struct vn_instance *vn_instance, VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
    struct vn_instance_submit_command submit;
    vn_submit_vkCreateGraphicsPipelines(vn_instance, 0, device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &submit);
}

static inline VkResult vn_call_vkCreateComputePipelines(struct vn_instance *vn_instance, VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
    struct vn_instance_submit_command submit;
    vn_submit_vkCreateComputePipelines(vn_instance, VK_COMMAND_GENERATE_REPLY_BIT_EXT, device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &submit);
    struct vn_cs_decoder *dec = vn_instance_get_command_reply(vn_instance, &submit);
    if (dec) {
        const VkResult ret = vn_decode_vkCreateComputePipelines_reply(dec, device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
        vn_instance_free_command_reply(vn_instance, &submit);
        return ret;
    } else {
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }
}

static inline void vn_async_vkCreateComputePipelines(struct vn_instance *vn_instance, VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines)
{
    struct vn_instance_submit_command submit;
    vn_submit_vkCreateComputePipelines(vn_instance, 0, device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines, &submit);
}

static inline void vn_call_vkDestroyPipeline(struct vn_instance *vn_instance, VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator)
{
    struct vn_instance_submit_command submit;
    vn_submit_vkDestroyPipeline(vn_instance, VK_COMMAND_GENERATE_REPLY_BIT_EXT, device, pipeline, pAllocator, &submit);
    struct vn_cs_decoder *dec = vn_instance_get_command_reply(vn_instance, &submit);
    if (dec) {
        vn_decode_vkDestroyPipeline_reply(dec, device, pipeline, pAllocator);
        vn_instance_free_command_reply(vn_instance, &submit);
    }
}

static inline void vn_async_vkDestroyPipeline(struct vn_instance *vn_instance, VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator)
{
    struct vn_instance_submit_command submit;
    vn_submit_vkDestroyPipeline(vn_instance, 0, device, pipeline, pAllocator, &submit);
}

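/* Illustrative sketch only, not part of the generated protocol: one way a
 * driver-side entry point could wrap the helpers above. The "my_" names are
 * hypothetical; only the vn_call_/vn_async_ helpers come from this header.
 * pAllocator must be NULL here, since the encoders assert on a non-NULL
 * allocator.
 */
#if 0
static VkResult
my_create_compute_pipelines(struct vn_instance *instance, VkDevice device,
                            VkPipelineCache pipelineCache,
                            uint32_t createInfoCount,
                            const VkComputePipelineCreateInfo *pCreateInfos,
                            VkPipeline *pPipelines)
{
    /* Synchronous: request a reply and return the host's VkResult along with
     * the decoded pipeline handles in pPipelines.
     */
    return vn_call_vkCreateComputePipelines(instance, device, pipelineCache,
                                            createInfoCount, pCreateInfos,
                                            NULL, pPipelines);
}

static void
my_destroy_pipeline(struct vn_instance *instance, VkDevice device,
                    VkPipeline pipeline)
{
    /* Fire-and-forget: no reply is requested or waited on. */
    vn_async_vkDestroyPipeline(instance, device, pipeline, NULL);
}
#endif /* illustrative sketch */
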
#endif /* VN_PROTOCOL_DRIVER_PIPELINE_H */