Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
godotengine
GitHub Repository: godotengine/godot
Path: blob/master/thirdparty/libktx/lib/vk_format.h
9903 views
1
/*
2
================================================================================================
3
4
Description : Vulkan format properties and conversion from OpenGL.
5
Author : J.M.P. van Waveren
6
Date : 07/17/2016
7
Language : C99
8
Format : Real tabs with the tab size equal to 4 spaces.
9
Copyright : Copyright (c) 2016 Oculus VR, LLC. All Rights reserved.
10
11
12
LICENSE
13
=======
14
15
Copyright 2016 Oculus VR, LLC.
16
SPDX-License-Identifier: Apache-2.0
17
18
19
DESCRIPTION
20
===========
21
22
This header implements several support routines to convert OpenGL formats/types
23
to Vulkan formats. These routines are particularly useful for loading file
24
formats that store OpenGL formats/types such as KTX and glTF.
25
26
The functions in this header file convert the format, internalFormat and type
27
that are used as parameters to the following OpenGL functions:
28
29
void glTexImage2D( GLenum target, GLint level, GLint internalFormat,
30
GLsizei width, GLsizei height, GLint border,
31
GLenum format, GLenum type, const GLvoid * data );
32
void glTexImage3D( GLenum target, GLint level, GLint internalFormat,
33
GLsizei width, GLsizei height, GLsizei depth, GLint border,
34
GLenum format, GLenum type, const GLvoid * data );
35
void glCompressedTexImage2D( GLenum target, GLint level, GLenum internalformat,
36
GLsizei width, GLsizei height, GLint border,
37
GLsizei imageSize, const GLvoid * data );
38
void glCompressedTexImage3D( GLenum target, GLint level, GLenum internalformat,
39
GLsizei width, GLsizei height, GLsizei depth, GLint border,
40
GLsizei imageSize, const GLvoid * data );
41
void glTexStorage2D( GLenum target, GLsizei levels, GLenum internalformat,
42
GLsizei width, GLsizei height );
43
void glTexStorage3D( GLenum target, GLsizei levels, GLenum internalformat,
44
GLsizei width, GLsizei height, GLsizei depth );
45
void glVertexAttribPointer( GLuint index, GLint size, GLenum type, GLboolean normalized,
46
GLsizei stride, const GLvoid * pointer);
47
48
49
IMPLEMENTATION
50
==============
51
52
This file does not include OpenGL / OpenGL ES headers because:
53
54
1. Including OpenGL / OpenGL ES headers is platform dependent and
55
may require a separate installation of an OpenGL SDK.
56
2. The OpenGL format/type constants are the same between extensions and core.
57
3. The OpenGL format/type constants are the same between OpenGL and OpenGL ES.
58
4. File formats like KTX and glTF may use OpenGL formats and types that
59
are not supported by the OpenGL implementation on the platform but are
60
supported by the Vulkan implementation.
61
62
63
ENTRY POINTS
64
============
65
66
static inline VkFormat vkGetFormatFromOpenGLFormat( const GLenum format, const GLenum type );
67
static inline VkFormat vkGetFormatFromOpenGLType( const GLenum type, const GLuint numComponents, const GLboolean normalized );
68
static inline VkFormat vkGetFormatFromOpenGLInternalFormat( const GLenum internalFormat );
69
static inline void vkGetFormatSize( const VkFormat format, VkFormatSize * pFormatSize );
70
71
MODIFICATIONS for use in libktx
72
===============================
73
74
2019.5.30 Use common ktxFormatSize to return results. Mark Callow, Edgewise Consulting.
75
2019.6.12 Add mapping of PVRTC formats.
76
77
================================================================================================
78
*/
79
80
#if !defined( VK_FORMAT_H )
81
#define VK_FORMAT_H
82
83
#include "gl_format.h"
84
85
/*
 * Maps an OpenGL ( format, type ) pair — as passed to glTexImage2D and
 * friends — to the equivalent VkFormat.
 *
 * Returns VK_FORMAT_UNDEFINED for pairs that have no Vulkan equivalent
 * (e.g. 3-component BGR at 16 bits per component) or that are not valid
 * OpenGL combinations.  For the packed types, the set of formats that
 * OpenGL permits with that type is checked with assert(); in builds with
 * NDEBUG the assert is compiled out and the mapping is returned regardless.
 */
static inline VkFormat vkGetFormatFromOpenGLFormat( const GLenum format, const GLenum type )
{
	switch ( type )
	{
		//
		// 8 bits per component
		//
		case GL_UNSIGNED_BYTE:
		{
			switch ( format )
			{
				case GL_RED:				return VK_FORMAT_R8_UNORM;
				case GL_RG:					return VK_FORMAT_R8G8_UNORM;
				case GL_RGB:				return VK_FORMAT_R8G8B8_UNORM;
				case GL_BGR:				return VK_FORMAT_B8G8R8_UNORM;
				case GL_RGBA:				return VK_FORMAT_R8G8B8A8_UNORM;
				case GL_BGRA:				return VK_FORMAT_B8G8R8A8_UNORM;
				case GL_RED_INTEGER:		return VK_FORMAT_R8_UINT;
				case GL_RG_INTEGER:			return VK_FORMAT_R8G8_UINT;
				case GL_RGB_INTEGER:		return VK_FORMAT_R8G8B8_UINT;
				case GL_BGR_INTEGER:		return VK_FORMAT_B8G8R8_UINT;
				case GL_RGBA_INTEGER:		return VK_FORMAT_R8G8B8A8_UINT;
				case GL_BGRA_INTEGER:		return VK_FORMAT_B8G8R8A8_UINT;
				case GL_STENCIL_INDEX:		return VK_FORMAT_S8_UINT;
				case GL_DEPTH_COMPONENT:	return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_STENCIL:		return VK_FORMAT_UNDEFINED;
			}
			break;
		}
		case GL_BYTE:
		{
			switch ( format )
			{
				case GL_RED:				return VK_FORMAT_R8_SNORM;
				case GL_RG:					return VK_FORMAT_R8G8_SNORM;
				case GL_RGB:				return VK_FORMAT_R8G8B8_SNORM;
				case GL_BGR:				return VK_FORMAT_B8G8R8_SNORM;
				case GL_RGBA:				return VK_FORMAT_R8G8B8A8_SNORM;
				case GL_BGRA:				return VK_FORMAT_B8G8R8A8_SNORM;
				case GL_RED_INTEGER:		return VK_FORMAT_R8_SINT;
				case GL_RG_INTEGER:			return VK_FORMAT_R8G8_SINT;
				case GL_RGB_INTEGER:		return VK_FORMAT_R8G8B8_SINT;
				case GL_BGR_INTEGER:		return VK_FORMAT_B8G8R8_SINT;
				case GL_RGBA_INTEGER:		return VK_FORMAT_R8G8B8A8_SINT;
				case GL_BGRA_INTEGER:		return VK_FORMAT_B8G8R8A8_SINT;
				case GL_STENCIL_INDEX:		return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_COMPONENT:	return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_STENCIL:		return VK_FORMAT_UNDEFINED;
			}
			break;
		}

		//
		// 16 bits per component
		//
		case GL_UNSIGNED_SHORT:
		{
			switch ( format )
			{
				case GL_RED:				return VK_FORMAT_R16_UNORM;
				case GL_RG:					return VK_FORMAT_R16G16_UNORM;
				case GL_RGB:				return VK_FORMAT_R16G16B16_UNORM;
				case GL_BGR:				return VK_FORMAT_UNDEFINED;
				case GL_RGBA:				return VK_FORMAT_R16G16B16A16_UNORM;
				case GL_BGRA:				return VK_FORMAT_UNDEFINED;
				case GL_RED_INTEGER:		return VK_FORMAT_R16_UINT;
				case GL_RG_INTEGER:			return VK_FORMAT_R16G16_UINT;
				case GL_RGB_INTEGER:		return VK_FORMAT_R16G16B16_UINT;
				case GL_BGR_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_RGBA_INTEGER:		return VK_FORMAT_R16G16B16A16_UINT;
				case GL_BGRA_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_STENCIL_INDEX:		return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_COMPONENT:	return VK_FORMAT_D16_UNORM;
				case GL_DEPTH_STENCIL:		return VK_FORMAT_D16_UNORM_S8_UINT;
			}
			break;
		}
		case GL_SHORT:
		{
			switch ( format )
			{
				case GL_RED:				return VK_FORMAT_R16_SNORM;
				case GL_RG:					return VK_FORMAT_R16G16_SNORM;
				case GL_RGB:				return VK_FORMAT_R16G16B16_SNORM;
				case GL_BGR:				return VK_FORMAT_UNDEFINED;
				case GL_RGBA:				return VK_FORMAT_R16G16B16A16_SNORM;
				case GL_BGRA:				return VK_FORMAT_UNDEFINED;
				case GL_RED_INTEGER:		return VK_FORMAT_R16_SINT;
				case GL_RG_INTEGER:			return VK_FORMAT_R16G16_SINT;
				case GL_RGB_INTEGER:		return VK_FORMAT_R16G16B16_SINT;
				case GL_BGR_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_RGBA_INTEGER:		return VK_FORMAT_R16G16B16A16_SINT;
				case GL_BGRA_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_STENCIL_INDEX:		return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_COMPONENT:	return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_STENCIL:		return VK_FORMAT_UNDEFINED;
			}
			break;
		}
		case GL_HALF_FLOAT:
		case GL_HALF_FLOAT_OES:
		{
			// Half-float has no integer or depth/stencil pairings.
			switch ( format )
			{
				case GL_RED:				return VK_FORMAT_R16_SFLOAT;
				case GL_RG:					return VK_FORMAT_R16G16_SFLOAT;
				case GL_RGB:				return VK_FORMAT_R16G16B16_SFLOAT;
				case GL_BGR:				return VK_FORMAT_UNDEFINED;
				case GL_RGBA:				return VK_FORMAT_R16G16B16A16_SFLOAT;
				case GL_BGRA:				return VK_FORMAT_UNDEFINED;
				case GL_RED_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_RG_INTEGER:			return VK_FORMAT_UNDEFINED;
				case GL_RGB_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_BGR_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_RGBA_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_BGRA_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_STENCIL_INDEX:		return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_COMPONENT:	return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_STENCIL:		return VK_FORMAT_UNDEFINED;
			}
			break;
		}

		//
		// 32 bits per component
		//
		case GL_UNSIGNED_INT:
		{
			switch ( format )
			{
				case GL_RED:				return VK_FORMAT_R32_UINT;
				case GL_RG:					return VK_FORMAT_R32G32_UINT;
				case GL_RGB:				return VK_FORMAT_R32G32B32_UINT;
				case GL_BGR:				return VK_FORMAT_UNDEFINED;
				case GL_RGBA:				return VK_FORMAT_R32G32B32A32_UINT;
				case GL_BGRA:				return VK_FORMAT_UNDEFINED;
				case GL_RED_INTEGER:		return VK_FORMAT_R32_UINT;
				case GL_RG_INTEGER:			return VK_FORMAT_R32G32_UINT;
				case GL_RGB_INTEGER:		return VK_FORMAT_R32G32B32_UINT;
				case GL_BGR_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_RGBA_INTEGER:		return VK_FORMAT_R32G32B32A32_UINT;
				case GL_BGRA_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_STENCIL_INDEX:		return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_COMPONENT:	return VK_FORMAT_X8_D24_UNORM_PACK32;
				case GL_DEPTH_STENCIL:		return VK_FORMAT_D24_UNORM_S8_UINT;
			}
			break;
		}
		case GL_INT:
		{
			switch ( format )
			{
				case GL_RED:				return VK_FORMAT_R32_SINT;
				case GL_RG:					return VK_FORMAT_R32G32_SINT;
				case GL_RGB:				return VK_FORMAT_R32G32B32_SINT;
				case GL_BGR:				return VK_FORMAT_UNDEFINED;
				case GL_RGBA:				return VK_FORMAT_R32G32B32A32_SINT;
				case GL_BGRA:				return VK_FORMAT_UNDEFINED;
				case GL_RED_INTEGER:		return VK_FORMAT_R32_SINT;
				case GL_RG_INTEGER:			return VK_FORMAT_R32G32_SINT;
				case GL_RGB_INTEGER:		return VK_FORMAT_R32G32B32_SINT;
				case GL_BGR_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_RGBA_INTEGER:		return VK_FORMAT_R32G32B32A32_SINT;
				case GL_BGRA_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_STENCIL_INDEX:		return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_COMPONENT:	return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_STENCIL:		return VK_FORMAT_UNDEFINED;
			}
			break;
		}
		case GL_FLOAT:
		{
			switch ( format )
			{
				case GL_RED:				return VK_FORMAT_R32_SFLOAT;
				case GL_RG:					return VK_FORMAT_R32G32_SFLOAT;
				case GL_RGB:				return VK_FORMAT_R32G32B32_SFLOAT;
				case GL_BGR:				return VK_FORMAT_UNDEFINED;
				case GL_RGBA:				return VK_FORMAT_R32G32B32A32_SFLOAT;
				case GL_BGRA:				return VK_FORMAT_UNDEFINED;
				case GL_RED_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_RG_INTEGER:			return VK_FORMAT_UNDEFINED;
				case GL_RGB_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_BGR_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_RGBA_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_BGRA_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_STENCIL_INDEX:		return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_COMPONENT:	return VK_FORMAT_D32_SFLOAT;
				case GL_DEPTH_STENCIL:		return VK_FORMAT_D32_SFLOAT_S8_UINT;
			}
			break;
		}

		//
		// 64 bits per component
		//
		case GL_UNSIGNED_INT64:
		{
			switch ( format )
			{
				case GL_RED:				return VK_FORMAT_R64_UINT;
				case GL_RG:					return VK_FORMAT_R64G64_UINT;
				case GL_RGB:				return VK_FORMAT_R64G64B64_UINT;
				case GL_BGR:				return VK_FORMAT_UNDEFINED;
				case GL_RGBA:				return VK_FORMAT_R64G64B64A64_UINT;
				case GL_BGRA:				return VK_FORMAT_UNDEFINED;
				case GL_RED_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_RG_INTEGER:			return VK_FORMAT_UNDEFINED;
				case GL_RGB_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_BGR_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_RGBA_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_BGRA_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_STENCIL_INDEX:		return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_COMPONENT:	return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_STENCIL:		return VK_FORMAT_UNDEFINED;
			}
			break;
		}
		case GL_INT64:
		{
			switch ( format )
			{
				case GL_RED:				return VK_FORMAT_R64_SINT;
				case GL_RG:					return VK_FORMAT_R64G64_SINT;
				case GL_RGB:				return VK_FORMAT_R64G64B64_SINT;
				case GL_BGR:				return VK_FORMAT_UNDEFINED;
				case GL_RGBA:				return VK_FORMAT_R64G64B64A64_SINT;
				case GL_BGRA:				return VK_FORMAT_UNDEFINED;
				case GL_RED_INTEGER:		return VK_FORMAT_R64_SINT;
				case GL_RG_INTEGER:			return VK_FORMAT_R64G64_SINT;
				case GL_RGB_INTEGER:		return VK_FORMAT_R64G64B64_SINT;
				case GL_BGR_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_RGBA_INTEGER:		return VK_FORMAT_R64G64B64A64_SINT;
				case GL_BGRA_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_STENCIL_INDEX:		return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_COMPONENT:	return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_STENCIL:		return VK_FORMAT_UNDEFINED;
			}
			break;
		}
		case GL_DOUBLE:
		{
			// NOTE(review): the *_INTEGER cases here map to SFLOAT formats,
			// mirroring the upstream table; there are no 64-bit integer
			// normalized formats in Vulkan to pair them with.
			switch ( format )
			{
				case GL_RED:				return VK_FORMAT_R64_SFLOAT;
				case GL_RG:					return VK_FORMAT_R64G64_SFLOAT;
				case GL_RGB:				return VK_FORMAT_R64G64B64_SFLOAT;
				case GL_BGR:				return VK_FORMAT_UNDEFINED;
				case GL_RGBA:				return VK_FORMAT_R64G64B64A64_SFLOAT;
				case GL_BGRA:				return VK_FORMAT_UNDEFINED;
				case GL_RED_INTEGER:		return VK_FORMAT_R64_SFLOAT;
				case GL_RG_INTEGER:			return VK_FORMAT_R64G64_SFLOAT;
				case GL_RGB_INTEGER:		return VK_FORMAT_R64G64B64_SFLOAT;
				case GL_BGR_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_RGBA_INTEGER:		return VK_FORMAT_R64G64B64A64_SFLOAT;
				case GL_BGRA_INTEGER:		return VK_FORMAT_UNDEFINED;
				case GL_STENCIL_INDEX:		return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_COMPONENT:	return VK_FORMAT_UNDEFINED;
				case GL_DEPTH_STENCIL:		return VK_FORMAT_UNDEFINED;
			}
			break;
		}

		//
		// Packed
		//
		case GL_UNSIGNED_BYTE_3_3_2:
			assert( format == GL_RGB || format == GL_RGB_INTEGER );
			return VK_FORMAT_UNDEFINED;
		case GL_UNSIGNED_BYTE_2_3_3_REV:
			assert( format == GL_BGR || format == GL_BGR_INTEGER );
			return VK_FORMAT_UNDEFINED;
		case GL_UNSIGNED_SHORT_5_6_5:
			assert( format == GL_RGB || format == GL_RGB_INTEGER );
			return VK_FORMAT_R5G6B5_UNORM_PACK16;
		case GL_UNSIGNED_SHORT_5_6_5_REV:
			assert( format == GL_BGR || format == GL_BGR_INTEGER );
			return VK_FORMAT_B5G6R5_UNORM_PACK16;
		case GL_UNSIGNED_SHORT_4_4_4_4:
			assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
			return VK_FORMAT_R4G4B4A4_UNORM_PACK16;
		case GL_UNSIGNED_SHORT_4_4_4_4_REV:
			assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
			return VK_FORMAT_B4G4R4A4_UNORM_PACK16;
		case GL_UNSIGNED_SHORT_5_5_5_1:
			assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
			return VK_FORMAT_R5G5B5A1_UNORM_PACK16;
		case GL_UNSIGNED_SHORT_1_5_5_5_REV:
			assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
			return VK_FORMAT_A1R5G5B5_UNORM_PACK16;
		case GL_UNSIGNED_INT_8_8_8_8:
			// The *_INTEGER formats select the UINT variant, otherwise UNORM.
			assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
			return ( format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER ) ? VK_FORMAT_R8G8B8A8_UINT : VK_FORMAT_R8G8B8A8_UNORM;
		case GL_UNSIGNED_INT_8_8_8_8_REV:
			assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
			return ( format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER ) ? VK_FORMAT_A8B8G8R8_UINT_PACK32 : VK_FORMAT_A8B8G8R8_UNORM_PACK32;
		case GL_UNSIGNED_INT_10_10_10_2:
			assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
			return ( format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER ) ? VK_FORMAT_A2R10G10B10_UINT_PACK32 : VK_FORMAT_A2R10G10B10_UNORM_PACK32;
		case GL_UNSIGNED_INT_2_10_10_10_REV:
			assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
			return ( format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER ) ? VK_FORMAT_A2B10G10R10_UINT_PACK32 : VK_FORMAT_A2B10G10R10_UNORM_PACK32;
		case GL_UNSIGNED_INT_10F_11F_11F_REV:
			assert( format == GL_RGB || format == GL_BGR );
			return VK_FORMAT_B10G11R11_UFLOAT_PACK32;
		case GL_UNSIGNED_INT_5_9_9_9_REV:
			assert( format == GL_RGB || format == GL_BGR );
			return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32;
		case GL_UNSIGNED_INT_24_8:
			assert( format == GL_DEPTH_STENCIL );
			return VK_FORMAT_D24_UNORM_S8_UINT;
		case GL_FLOAT_32_UNSIGNED_INT_24_8_REV:
			assert( format == GL_DEPTH_STENCIL );
			return VK_FORMAT_D32_SFLOAT_S8_UINT;
	}

	// Unknown type enumerator.
	return VK_FORMAT_UNDEFINED;
}
403
404
#if defined(NEED_VK_GET_FORMAT_FROM_OPENGL_TYPE)
405
static inline VkFormat vkGetFormatFromOpenGLType( const GLenum type, const GLuint numComponents, const GLboolean normalized )
406
{
407
switch ( type )
408
{
409
//
410
// 8 bits per component
411
//
412
case GL_UNSIGNED_BYTE:
413
{
414
switch ( numComponents )
415
{
416
case 1: return normalized ? VK_FORMAT_R8_UNORM : VK_FORMAT_R8_UINT;
417
case 2: return normalized ? VK_FORMAT_R8G8_UNORM : VK_FORMAT_R8G8_UINT;
418
case 3: return normalized ? VK_FORMAT_R8G8B8_UNORM : VK_FORMAT_R8G8B8_UINT;
419
case 4: return normalized ? VK_FORMAT_R8G8B8A8_UNORM : VK_FORMAT_R8G8B8A8_UINT;
420
}
421
break;
422
}
423
case GL_BYTE:
424
{
425
switch ( numComponents )
426
{
427
case 1: return normalized ? VK_FORMAT_R8_SNORM : VK_FORMAT_R8_SINT;
428
case 2: return normalized ? VK_FORMAT_R8G8_SNORM : VK_FORMAT_R8G8_SINT;
429
case 3: return normalized ? VK_FORMAT_R8G8B8_SNORM : VK_FORMAT_R8G8B8_SINT;
430
case 4: return normalized ? VK_FORMAT_R8G8B8A8_SNORM : VK_FORMAT_R8G8B8A8_SINT;
431
}
432
break;
433
}
434
435
//
436
// 16 bits per component
437
//
438
case GL_UNSIGNED_SHORT:
439
{
440
switch ( numComponents )
441
{
442
case 1: return normalized ? VK_FORMAT_R16_UNORM : VK_FORMAT_R16_UINT;
443
case 2: return normalized ? VK_FORMAT_R16G16_UNORM : VK_FORMAT_R16G16_UINT;
444
case 3: return normalized ? VK_FORMAT_R16G16B16_UNORM : VK_FORMAT_R16G16B16_UINT;
445
case 4: return normalized ? VK_FORMAT_R16G16B16A16_UNORM : VK_FORMAT_R16G16B16A16_UINT;
446
}
447
break;
448
}
449
case GL_SHORT:
450
{
451
switch ( numComponents )
452
{
453
case 1: return normalized ? VK_FORMAT_R16_SNORM : VK_FORMAT_R16_SINT;
454
case 2: return normalized ? VK_FORMAT_R16G16_SNORM : VK_FORMAT_R16G16_SINT;
455
case 3: return normalized ? VK_FORMAT_R16G16B16_SNORM : VK_FORMAT_R16G16B16_SINT;
456
case 4: return normalized ? VK_FORMAT_R16G16B16A16_SNORM : VK_FORMAT_R16G16B16A16_SINT;
457
}
458
break;
459
}
460
case GL_HALF_FLOAT:
461
case GL_HALF_FLOAT_OES:
462
{
463
switch ( numComponents )
464
{
465
case 1: return VK_FORMAT_R16_SFLOAT;
466
case 2: return VK_FORMAT_R16G16_SFLOAT;
467
case 3: return VK_FORMAT_R16G16B16_SFLOAT;
468
case 4: return VK_FORMAT_R16G16B16A16_SFLOAT;
469
}
470
break;
471
}
472
473
//
474
// 32 bits per component
475
//
476
case GL_UNSIGNED_INT:
477
{
478
switch ( numComponents )
479
{
480
case 1: return VK_FORMAT_R32_UINT;
481
case 2: return VK_FORMAT_R32G32_UINT;
482
case 3: return VK_FORMAT_R32G32B32_UINT;
483
case 4: return VK_FORMAT_R32G32B32A32_UINT;
484
}
485
break;
486
}
487
case GL_INT:
488
{
489
switch ( numComponents )
490
{
491
case 1: return VK_FORMAT_R32_SINT;
492
case 2: return VK_FORMAT_R32G32_SINT;
493
case 3: return VK_FORMAT_R32G32B32_SINT;
494
case 4: return VK_FORMAT_R32G32B32A32_SINT;
495
}
496
break;
497
}
498
case GL_FLOAT:
499
{
500
switch ( numComponents )
501
{
502
case 1: return VK_FORMAT_R32_SFLOAT;
503
case 2: return VK_FORMAT_R32G32_SFLOAT;
504
case 3: return VK_FORMAT_R32G32B32_SFLOAT;
505
case 4: return VK_FORMAT_R32G32B32A32_SFLOAT;
506
}
507
break;
508
}
509
510
//
511
// 64 bits per component
512
//
513
case GL_UNSIGNED_INT64:
514
{
515
switch ( numComponents )
516
{
517
case 1: return VK_FORMAT_R64_UINT;
518
case 2: return VK_FORMAT_R64G64_UINT;
519
case 3: return VK_FORMAT_R64G64B64_UINT;
520
case 4: return VK_FORMAT_R64G64B64A64_UINT;
521
}
522
break;
523
}
524
case GL_INT64:
525
{
526
switch ( numComponents )
527
{
528
case 1: return VK_FORMAT_R64_SINT;
529
case 2: return VK_FORMAT_R64G64_SINT;
530
case 3: return VK_FORMAT_R64G64B64_SINT;
531
case 4: return VK_FORMAT_R64G64B64A64_SINT;
532
}
533
break;
534
}
535
case GL_DOUBLE:
536
{
537
switch ( numComponents )
538
{
539
case 1: return VK_FORMAT_R64_SFLOAT;
540
case 2: return VK_FORMAT_R64G64_SFLOAT;
541
case 3: return VK_FORMAT_R64G64B64_SFLOAT;
542
case 4: return VK_FORMAT_R64G64B64A64_SFLOAT;
543
}
544
break;
545
}
546
547
//
548
// Packed
549
//
550
case GL_UNSIGNED_BYTE_3_3_2: return VK_FORMAT_UNDEFINED;
551
case GL_UNSIGNED_BYTE_2_3_3_REV: return VK_FORMAT_UNDEFINED;
552
case GL_UNSIGNED_SHORT_5_6_5: return VK_FORMAT_R5G6B5_UNORM_PACK16;
553
case GL_UNSIGNED_SHORT_5_6_5_REV: return VK_FORMAT_B5G6R5_UNORM_PACK16;
554
case GL_UNSIGNED_SHORT_4_4_4_4: return VK_FORMAT_R4G4B4A4_UNORM_PACK16;
555
case GL_UNSIGNED_SHORT_4_4_4_4_REV: return VK_FORMAT_B4G4R4A4_UNORM_PACK16;
556
case GL_UNSIGNED_SHORT_5_5_5_1: return VK_FORMAT_R5G5B5A1_UNORM_PACK16;
557
case GL_UNSIGNED_SHORT_1_5_5_5_REV: return VK_FORMAT_A1R5G5B5_UNORM_PACK16;
558
case GL_UNSIGNED_INT_8_8_8_8: return normalized ? VK_FORMAT_R8G8B8A8_UNORM : VK_FORMAT_R8G8B8A8_UINT;
559
case GL_UNSIGNED_INT_8_8_8_8_REV: return normalized ? VK_FORMAT_A8B8G8R8_UNORM_PACK32 : VK_FORMAT_A8B8G8R8_UINT_PACK32;
560
case GL_UNSIGNED_INT_10_10_10_2: return normalized ? VK_FORMAT_A2R10G10B10_UNORM_PACK32 : VK_FORMAT_A2R10G10B10_UINT_PACK32;
561
case GL_UNSIGNED_INT_2_10_10_10_REV: return normalized ? VK_FORMAT_A2B10G10R10_UNORM_PACK32 : VK_FORMAT_A2B10G10R10_UINT_PACK32;
562
case GL_UNSIGNED_INT_10F_11F_11F_REV: return VK_FORMAT_B10G11R11_UFLOAT_PACK32;
563
case GL_UNSIGNED_INT_5_9_9_9_REV: return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32;
564
case GL_UNSIGNED_INT_24_8: return VK_FORMAT_D24_UNORM_S8_UINT;
565
case GL_FLOAT_32_UNSIGNED_INT_24_8_REV: return VK_FORMAT_D32_SFLOAT_S8_UINT;
566
}
567
568
return VK_FORMAT_UNDEFINED;
569
}
570
#endif
571
572
static inline VkFormat vkGetFormatFromOpenGLInternalFormat( const GLenum internalFormat )
573
{
574
switch ( internalFormat )
575
{
576
//
577
// 8 bits per component
578
//
579
case GL_R8: return VK_FORMAT_R8_UNORM; // 1-component, 8-bit unsigned normalized
580
case GL_RG8: return VK_FORMAT_R8G8_UNORM; // 2-component, 8-bit unsigned normalized
581
case GL_RGB8: return VK_FORMAT_R8G8B8_UNORM; // 3-component, 8-bit unsigned normalized
582
case GL_RGBA8: return VK_FORMAT_R8G8B8A8_UNORM; // 4-component, 8-bit unsigned normalized
583
584
case GL_R8_SNORM: return VK_FORMAT_R8_SNORM; // 1-component, 8-bit signed normalized
585
case GL_RG8_SNORM: return VK_FORMAT_R8G8_SNORM; // 2-component, 8-bit signed normalized
586
case GL_RGB8_SNORM: return VK_FORMAT_R8G8B8_SNORM; // 3-component, 8-bit signed normalized
587
case GL_RGBA8_SNORM: return VK_FORMAT_R8G8B8A8_SNORM; // 4-component, 8-bit signed normalized
588
589
case GL_R8UI: return VK_FORMAT_R8_UINT; // 1-component, 8-bit unsigned integer
590
case GL_RG8UI: return VK_FORMAT_R8G8_UINT; // 2-component, 8-bit unsigned integer
591
case GL_RGB8UI: return VK_FORMAT_R8G8B8_UINT; // 3-component, 8-bit unsigned integer
592
case GL_RGBA8UI: return VK_FORMAT_R8G8B8A8_UINT; // 4-component, 8-bit unsigned integer
593
594
case GL_R8I: return VK_FORMAT_R8_SINT; // 1-component, 8-bit signed integer
595
case GL_RG8I: return VK_FORMAT_R8G8_SINT; // 2-component, 8-bit signed integer
596
case GL_RGB8I: return VK_FORMAT_R8G8B8_SINT; // 3-component, 8-bit signed integer
597
case GL_RGBA8I: return VK_FORMAT_R8G8B8A8_SINT; // 4-component, 8-bit signed integer
598
599
case GL_SR8: return VK_FORMAT_R8_SRGB; // 1-component, 8-bit sRGB
600
case GL_SRG8: return VK_FORMAT_R8G8_SRGB; // 2-component, 8-bit sRGB
601
case GL_SRGB8: return VK_FORMAT_R8G8B8_SRGB; // 3-component, 8-bit sRGB
602
case GL_SRGB8_ALPHA8: return VK_FORMAT_R8G8B8A8_SRGB; // 4-component, 8-bit sRGB
603
604
//
605
// 16 bits per component
606
//
607
case GL_R16: return VK_FORMAT_R16_UNORM; // 1-component, 16-bit unsigned normalized
608
case GL_RG16: return VK_FORMAT_R16G16_UNORM; // 2-component, 16-bit unsigned normalized
609
case GL_RGB16: return VK_FORMAT_R16G16B16_UNORM; // 3-component, 16-bit unsigned normalized
610
case GL_RGBA16: return VK_FORMAT_R16G16B16A16_UNORM; // 4-component, 16-bit unsigned normalized
611
612
case GL_R16_SNORM: return VK_FORMAT_R16_SNORM; // 1-component, 16-bit signed normalized
613
case GL_RG16_SNORM: return VK_FORMAT_R16G16_SNORM; // 2-component, 16-bit signed normalized
614
case GL_RGB16_SNORM: return VK_FORMAT_R16G16B16_SNORM; // 3-component, 16-bit signed normalized
615
case GL_RGBA16_SNORM: return VK_FORMAT_R16G16B16A16_SNORM; // 4-component, 16-bit signed normalized
616
617
case GL_R16UI: return VK_FORMAT_R16_UINT; // 1-component, 16-bit unsigned integer
618
case GL_RG16UI: return VK_FORMAT_R16G16_UINT; // 2-component, 16-bit unsigned integer
619
case GL_RGB16UI: return VK_FORMAT_R16G16B16_UINT; // 3-component, 16-bit unsigned integer
620
case GL_RGBA16UI: return VK_FORMAT_R16G16B16A16_UINT; // 4-component, 16-bit unsigned integer
621
622
case GL_R16I: return VK_FORMAT_R16_SINT; // 1-component, 16-bit signed integer
623
case GL_RG16I: return VK_FORMAT_R16G16_SINT; // 2-component, 16-bit signed integer
624
case GL_RGB16I: return VK_FORMAT_R16G16B16_SINT; // 3-component, 16-bit signed integer
625
case GL_RGBA16I: return VK_FORMAT_R16G16B16A16_SINT; // 4-component, 16-bit signed integer
626
627
case GL_R16F: return VK_FORMAT_R16_SFLOAT; // 1-component, 16-bit floating-point
628
case GL_RG16F: return VK_FORMAT_R16G16_SFLOAT; // 2-component, 16-bit floating-point
629
case GL_RGB16F: return VK_FORMAT_R16G16B16_SFLOAT; // 3-component, 16-bit floating-point
630
case GL_RGBA16F: return VK_FORMAT_R16G16B16A16_SFLOAT; // 4-component, 16-bit floating-point
631
632
//
633
// 32 bits per component
634
//
635
case GL_R32UI: return VK_FORMAT_R32_UINT; // 1-component, 32-bit unsigned integer
636
case GL_RG32UI: return VK_FORMAT_R32G32_UINT; // 2-component, 32-bit unsigned integer
637
case GL_RGB32UI: return VK_FORMAT_R32G32B32_UINT; // 3-component, 32-bit unsigned integer
638
case GL_RGBA32UI: return VK_FORMAT_R32G32B32A32_UINT; // 4-component, 32-bit unsigned integer
639
640
case GL_R32I: return VK_FORMAT_R32_SINT; // 1-component, 32-bit signed integer
641
case GL_RG32I: return VK_FORMAT_R32G32_SINT; // 2-component, 32-bit signed integer
642
case GL_RGB32I: return VK_FORMAT_R32G32B32_SINT; // 3-component, 32-bit signed integer
643
case GL_RGBA32I: return VK_FORMAT_R32G32B32A32_SINT; // 4-component, 32-bit signed integer
644
645
case GL_R32F: return VK_FORMAT_R32_SFLOAT; // 1-component, 32-bit floating-point
646
case GL_RG32F: return VK_FORMAT_R32G32_SFLOAT; // 2-component, 32-bit floating-point
647
case GL_RGB32F: return VK_FORMAT_R32G32B32_SFLOAT; // 3-component, 32-bit floating-point
648
case GL_RGBA32F: return VK_FORMAT_R32G32B32A32_SFLOAT; // 4-component, 32-bit floating-point
649
650
//
651
// Packed
652
//
653
case GL_R3_G3_B2: return VK_FORMAT_UNDEFINED; // 3-component 3:3:2, unsigned normalized
654
case GL_RGB4: return VK_FORMAT_UNDEFINED; // 3-component 4:4:4, unsigned normalized
655
case GL_RGB5: return VK_FORMAT_R5G5B5A1_UNORM_PACK16; // 3-component 5:5:5, unsigned normalized
656
case GL_RGB565: return VK_FORMAT_R5G6B5_UNORM_PACK16; // 3-component 5:6:5, unsigned normalized
657
case GL_RGB10: return VK_FORMAT_A2R10G10B10_UNORM_PACK32; // 3-component 10:10:10, unsigned normalized
658
case GL_RGB12: return VK_FORMAT_UNDEFINED; // 3-component 12:12:12, unsigned normalized
659
case GL_RGBA2: return VK_FORMAT_UNDEFINED; // 4-component 2:2:2:2, unsigned normalized
660
case GL_RGBA4: return VK_FORMAT_R4G4B4A4_UNORM_PACK16; // 4-component 4:4:4:4, unsigned normalized
661
case GL_RGBA12: return VK_FORMAT_UNDEFINED; // 4-component 12:12:12:12, unsigned normalized
662
case GL_RGB5_A1: return VK_FORMAT_A1R5G5B5_UNORM_PACK16; // 4-component 5:5:5:1, unsigned normalized
663
case GL_RGB10_A2: return VK_FORMAT_A2R10G10B10_UNORM_PACK32; // 4-component 10:10:10:2, unsigned normalized
664
case GL_RGB10_A2UI: return VK_FORMAT_A2R10G10B10_UINT_PACK32; // 4-component 10:10:10:2, unsigned integer
665
case GL_R11F_G11F_B10F: return VK_FORMAT_B10G11R11_UFLOAT_PACK32; // 3-component 11:11:10, floating-point
666
case GL_RGB9_E5: return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32; // 3-component/exp 9:9:9/5, floating-point
667
668
//
669
// S3TC/DXT/BC
670
//
671
672
case GL_COMPRESSED_RGB_S3TC_DXT1_EXT: return VK_FORMAT_BC1_RGB_UNORM_BLOCK; // line through 3D space, 4x4 blocks, unsigned normalized
673
case GL_COMPRESSED_RGBA_S3TC_DXT1_EXT: return VK_FORMAT_BC1_RGBA_UNORM_BLOCK; // line through 3D space plus 1-bit alpha, 4x4 blocks, unsigned normalized
674
case GL_COMPRESSED_RGBA_S3TC_DXT3_EXT: return VK_FORMAT_BC2_UNORM_BLOCK; // line through 3D space plus line through 1D space, 4x4 blocks, unsigned normalized
675
case GL_COMPRESSED_RGBA_S3TC_DXT5_EXT: return VK_FORMAT_BC3_UNORM_BLOCK; // line through 3D space plus 4-bit alpha, 4x4 blocks, unsigned normalized
676
677
case GL_COMPRESSED_SRGB_S3TC_DXT1_EXT: return VK_FORMAT_BC1_RGB_SRGB_BLOCK; // line through 3D space, 4x4 blocks, sRGB
678
case GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT: return VK_FORMAT_BC1_RGBA_SRGB_BLOCK; // line through 3D space plus 1-bit alpha, 4x4 blocks, sRGB
679
case GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT: return VK_FORMAT_BC2_SRGB_BLOCK; // line through 3D space plus line through 1D space, 4x4 blocks, sRGB
680
case GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT: return VK_FORMAT_BC3_SRGB_BLOCK; // line through 3D space plus 4-bit alpha, 4x4 blocks, sRGB
681
682
case GL_COMPRESSED_LUMINANCE_LATC1_EXT: return VK_FORMAT_BC4_UNORM_BLOCK; // line through 1D space, 4x4 blocks, unsigned normalized
683
case GL_COMPRESSED_LUMINANCE_ALPHA_LATC2_EXT: return VK_FORMAT_BC5_UNORM_BLOCK; // two lines through 1D space, 4x4 blocks, unsigned normalized
684
case GL_COMPRESSED_SIGNED_LUMINANCE_LATC1_EXT: return VK_FORMAT_BC4_SNORM_BLOCK; // line through 1D space, 4x4 blocks, signed normalized
685
case GL_COMPRESSED_SIGNED_LUMINANCE_ALPHA_LATC2_EXT: return VK_FORMAT_BC5_SNORM_BLOCK; // two lines through 1D space, 4x4 blocks, signed normalized
686
687
case GL_COMPRESSED_RED_RGTC1: return VK_FORMAT_BC4_UNORM_BLOCK; // line through 1D space, 4x4 blocks, unsigned normalized
688
case GL_COMPRESSED_RG_RGTC2: return VK_FORMAT_BC5_UNORM_BLOCK; // two lines through 1D space, 4x4 blocks, unsigned normalized
689
case GL_COMPRESSED_SIGNED_RED_RGTC1: return VK_FORMAT_BC4_SNORM_BLOCK; // line through 1D space, 4x4 blocks, signed normalized
690
case GL_COMPRESSED_SIGNED_RG_RGTC2: return VK_FORMAT_BC5_SNORM_BLOCK; // two lines through 1D space, 4x4 blocks, signed normalized
691
692
case GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT: return VK_FORMAT_BC6H_UFLOAT_BLOCK; // 3-component, 4x4 blocks, unsigned floating-point
693
case GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT: return VK_FORMAT_BC6H_SFLOAT_BLOCK; // 3-component, 4x4 blocks, signed floating-point
694
case GL_COMPRESSED_RGBA_BPTC_UNORM: return VK_FORMAT_BC7_UNORM_BLOCK; // 4-component, 4x4 blocks, unsigned normalized
695
case GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM: return VK_FORMAT_BC7_SRGB_BLOCK; // 4-component, 4x4 blocks, sRGB
696
697
//
698
// ETC
699
//
700
case GL_ETC1_RGB8_OES: return VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK; // 3-component ETC1, 4x4 blocks, unsigned normalized
701
702
case GL_COMPRESSED_RGB8_ETC2: return VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK; // 3-component ETC2, 4x4 blocks, unsigned normalized
703
case GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2: return VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK; // 4-component ETC2 with 1-bit alpha, 4x4 blocks, unsigned normalized
704
case GL_COMPRESSED_RGBA8_ETC2_EAC: return VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK; // 4-component ETC2, 4x4 blocks, unsigned normalized
705
706
case GL_COMPRESSED_SRGB8_ETC2: return VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK; // 3-component ETC2, 4x4 blocks, sRGB
707
case GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2: return VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK; // 4-component ETC2 with 1-bit alpha, 4x4 blocks, sRGB
708
case GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC: return VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK; // 4-component ETC2, 4x4 blocks, sRGB
709
710
case GL_COMPRESSED_R11_EAC: return VK_FORMAT_EAC_R11_UNORM_BLOCK; // 1-component ETC, 4x4 blocks, unsigned normalized
711
case GL_COMPRESSED_RG11_EAC: return VK_FORMAT_EAC_R11G11_UNORM_BLOCK; // 2-component ETC, 4x4 blocks, unsigned normalized
712
case GL_COMPRESSED_SIGNED_R11_EAC: return VK_FORMAT_EAC_R11_SNORM_BLOCK; // 1-component ETC, 4x4 blocks, signed normalized
713
case GL_COMPRESSED_SIGNED_RG11_EAC: return VK_FORMAT_EAC_R11G11_SNORM_BLOCK; // 2-component ETC, 4x4 blocks, signed normalized
714
715
//
716
// PVRTC
717
//
718
case GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG: return VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG; // 3-component PVRTC, 16x8 blocks, unsigned normalized
719
case GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG: return VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG; // 3-component PVRTC, 8x8 blocks, unsigned normalized
720
case GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG: return VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG; // 4-component PVRTC, 16x8 blocks, unsigned normalized
721
case GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG: return VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG; // 4-component PVRTC, 8x8 blocks, unsigned normalized
722
case GL_COMPRESSED_RGBA_PVRTC_2BPPV2_IMG: return VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG; // 4-component PVRTC, 8x4 blocks, unsigned normalized
723
case GL_COMPRESSED_RGBA_PVRTC_4BPPV2_IMG: return VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG; // 4-component PVRTC, 4x4 blocks, unsigned normalized
724
725
case GL_COMPRESSED_SRGB_PVRTC_2BPPV1_EXT: return VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG; // 3-component PVRTC, 16x8 blocks, sRGB
726
case GL_COMPRESSED_SRGB_PVRTC_4BPPV1_EXT: return VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG; // 3-component PVRTC, 8x8 blocks, sRGB
727
case GL_COMPRESSED_SRGB_ALPHA_PVRTC_2BPPV1_EXT: return VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG; // 4-component PVRTC, 16x8 blocks, sRGB
728
case GL_COMPRESSED_SRGB_ALPHA_PVRTC_4BPPV1_EXT: return VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG; // 4-component PVRTC, 8x8 blocks, sRGB
729
case GL_COMPRESSED_SRGB_ALPHA_PVRTC_2BPPV2_IMG: return VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG; // 4-component PVRTC, 8x4 blocks, sRGB
730
case GL_COMPRESSED_SRGB_ALPHA_PVRTC_4BPPV2_IMG: return VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG; // 4-component PVRTC, 4x4 blocks, sRGB
731
732
//
733
// ASTC
734
//
735
case GL_COMPRESSED_RGBA_ASTC_4x4_KHR: return VK_FORMAT_ASTC_4x4_UNORM_BLOCK; // 4-component ASTC, 4x4 blocks, unsigned normalized
736
case GL_COMPRESSED_RGBA_ASTC_5x4_KHR: return VK_FORMAT_ASTC_5x4_UNORM_BLOCK; // 4-component ASTC, 5x4 blocks, unsigned normalized
737
case GL_COMPRESSED_RGBA_ASTC_5x5_KHR: return VK_FORMAT_ASTC_5x5_UNORM_BLOCK; // 4-component ASTC, 5x5 blocks, unsigned normalized
738
case GL_COMPRESSED_RGBA_ASTC_6x5_KHR: return VK_FORMAT_ASTC_6x5_UNORM_BLOCK; // 4-component ASTC, 6x5 blocks, unsigned normalized
739
case GL_COMPRESSED_RGBA_ASTC_6x6_KHR: return VK_FORMAT_ASTC_6x6_UNORM_BLOCK; // 4-component ASTC, 6x6 blocks, unsigned normalized
740
case GL_COMPRESSED_RGBA_ASTC_8x5_KHR: return VK_FORMAT_ASTC_8x5_UNORM_BLOCK; // 4-component ASTC, 8x5 blocks, unsigned normalized
741
case GL_COMPRESSED_RGBA_ASTC_8x6_KHR: return VK_FORMAT_ASTC_8x6_UNORM_BLOCK; // 4-component ASTC, 8x6 blocks, unsigned normalized
742
case GL_COMPRESSED_RGBA_ASTC_8x8_KHR: return VK_FORMAT_ASTC_8x8_UNORM_BLOCK; // 4-component ASTC, 8x8 blocks, unsigned normalized
743
case GL_COMPRESSED_RGBA_ASTC_10x5_KHR: return VK_FORMAT_ASTC_10x5_UNORM_BLOCK; // 4-component ASTC, 10x5 blocks, unsigned normalized
744
case GL_COMPRESSED_RGBA_ASTC_10x6_KHR: return VK_FORMAT_ASTC_10x6_UNORM_BLOCK; // 4-component ASTC, 10x6 blocks, unsigned normalized
745
case GL_COMPRESSED_RGBA_ASTC_10x8_KHR: return VK_FORMAT_ASTC_10x8_UNORM_BLOCK; // 4-component ASTC, 10x8 blocks, unsigned normalized
746
case GL_COMPRESSED_RGBA_ASTC_10x10_KHR: return VK_FORMAT_ASTC_10x10_UNORM_BLOCK; // 4-component ASTC, 10x10 blocks, unsigned normalized
747
case GL_COMPRESSED_RGBA_ASTC_12x10_KHR: return VK_FORMAT_ASTC_12x10_UNORM_BLOCK; // 4-component ASTC, 12x10 blocks, unsigned normalized
748
case GL_COMPRESSED_RGBA_ASTC_12x12_KHR: return VK_FORMAT_ASTC_12x12_UNORM_BLOCK; // 4-component ASTC, 12x12 blocks, unsigned normalized
749
750
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR: return VK_FORMAT_ASTC_4x4_SRGB_BLOCK; // 4-component ASTC, 4x4 blocks, sRGB
751
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR: return VK_FORMAT_ASTC_5x4_SRGB_BLOCK; // 4-component ASTC, 5x4 blocks, sRGB
752
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR: return VK_FORMAT_ASTC_5x5_SRGB_BLOCK; // 4-component ASTC, 5x5 blocks, sRGB
753
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR: return VK_FORMAT_ASTC_6x5_SRGB_BLOCK; // 4-component ASTC, 6x5 blocks, sRGB
754
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR: return VK_FORMAT_ASTC_6x6_SRGB_BLOCK; // 4-component ASTC, 6x6 blocks, sRGB
755
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR: return VK_FORMAT_ASTC_8x5_SRGB_BLOCK; // 4-component ASTC, 8x5 blocks, sRGB
756
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR: return VK_FORMAT_ASTC_8x6_SRGB_BLOCK; // 4-component ASTC, 8x6 blocks, sRGB
757
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR: return VK_FORMAT_ASTC_8x8_SRGB_BLOCK; // 4-component ASTC, 8x8 blocks, sRGB
758
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR: return VK_FORMAT_ASTC_10x5_SRGB_BLOCK; // 4-component ASTC, 10x5 blocks, sRGB
759
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR: return VK_FORMAT_ASTC_10x6_SRGB_BLOCK; // 4-component ASTC, 10x6 blocks, sRGB
760
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR: return VK_FORMAT_ASTC_10x8_SRGB_BLOCK; // 4-component ASTC, 10x8 blocks, sRGB
761
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR: return VK_FORMAT_ASTC_10x10_SRGB_BLOCK; // 4-component ASTC, 10x10 blocks, sRGB
762
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR: return VK_FORMAT_ASTC_12x10_SRGB_BLOCK; // 4-component ASTC, 12x10 blocks, sRGB
763
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR: return VK_FORMAT_ASTC_12x12_SRGB_BLOCK; // 4-component ASTC, 12x12 blocks, sRGB
764
765
case GL_COMPRESSED_RGBA_ASTC_3x3x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 3x3x3 blocks, unsigned normalized
766
case GL_COMPRESSED_RGBA_ASTC_4x3x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x3x3 blocks, unsigned normalized
767
case GL_COMPRESSED_RGBA_ASTC_4x4x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x4x3 blocks, unsigned normalized
768
case GL_COMPRESSED_RGBA_ASTC_4x4x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x4x4 blocks, unsigned normalized
769
case GL_COMPRESSED_RGBA_ASTC_5x4x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x4x4 blocks, unsigned normalized
770
case GL_COMPRESSED_RGBA_ASTC_5x5x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x5x4 blocks, unsigned normalized
771
case GL_COMPRESSED_RGBA_ASTC_5x5x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x5x5 blocks, unsigned normalized
772
case GL_COMPRESSED_RGBA_ASTC_6x5x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x5x5 blocks, unsigned normalized
773
case GL_COMPRESSED_RGBA_ASTC_6x6x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x6x5 blocks, unsigned normalized
774
case GL_COMPRESSED_RGBA_ASTC_6x6x6_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x6x6 blocks, unsigned normalized
775
776
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_3x3x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 3x3x3 blocks, sRGB
777
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x3x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x3x3 blocks, sRGB
778
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x4x3 blocks, sRGB
779
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x4x4 blocks, sRGB
780
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x4x4 blocks, sRGB
781
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x5x4 blocks, sRGB
782
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x5x5 blocks, sRGB
783
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x5x5 blocks, sRGB
784
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x6x5 blocks, sRGB
785
case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6x6_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x6x6 blocks, sRGB
786
787
//
788
// ATC
789
//
790
case GL_ATC_RGB_AMD: return VK_FORMAT_UNDEFINED; // 3-component, 4x4 blocks, unsigned normalized
791
case GL_ATC_RGBA_EXPLICIT_ALPHA_AMD: return VK_FORMAT_UNDEFINED; // 4-component, 4x4 blocks, unsigned normalized
792
case GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD: return VK_FORMAT_UNDEFINED; // 4-component, 4x4 blocks, unsigned normalized
793
794
//
795
// Palletized
796
//
797
case GL_PALETTE4_RGB8_OES: return VK_FORMAT_UNDEFINED; // 3-component 8:8:8, 4-bit palette, unsigned normalized
798
case GL_PALETTE4_RGBA8_OES: return VK_FORMAT_UNDEFINED; // 4-component 8:8:8:8, 4-bit palette, unsigned normalized
799
case GL_PALETTE4_R5_G6_B5_OES: return VK_FORMAT_UNDEFINED; // 3-component 5:6:5, 4-bit palette, unsigned normalized
800
case GL_PALETTE4_RGBA4_OES: return VK_FORMAT_UNDEFINED; // 4-component 4:4:4:4, 4-bit palette, unsigned normalized
801
case GL_PALETTE4_RGB5_A1_OES: return VK_FORMAT_UNDEFINED; // 4-component 5:5:5:1, 4-bit palette, unsigned normalized
802
case GL_PALETTE8_RGB8_OES: return VK_FORMAT_UNDEFINED; // 3-component 8:8:8, 8-bit palette, unsigned normalized
803
case GL_PALETTE8_RGBA8_OES: return VK_FORMAT_UNDEFINED; // 4-component 8:8:8:8, 8-bit palette, unsigned normalized
804
case GL_PALETTE8_R5_G6_B5_OES: return VK_FORMAT_UNDEFINED; // 3-component 5:6:5, 8-bit palette, unsigned normalized
805
case GL_PALETTE8_RGBA4_OES: return VK_FORMAT_UNDEFINED; // 4-component 4:4:4:4, 8-bit palette, unsigned normalized
806
case GL_PALETTE8_RGB5_A1_OES: return VK_FORMAT_UNDEFINED; // 4-component 5:5:5:1, 8-bit palette, unsigned normalized
807
808
//
809
// Depth/stencil
810
//
811
case GL_DEPTH_COMPONENT16: return VK_FORMAT_D16_UNORM;
812
case GL_DEPTH_COMPONENT24: return VK_FORMAT_X8_D24_UNORM_PACK32;
813
case GL_DEPTH_COMPONENT32: return VK_FORMAT_UNDEFINED;
814
case GL_DEPTH_COMPONENT32F: return VK_FORMAT_D32_SFLOAT;
815
case GL_DEPTH_COMPONENT32F_NV: return VK_FORMAT_D32_SFLOAT;
816
case GL_STENCIL_INDEX1: return VK_FORMAT_UNDEFINED;
817
case GL_STENCIL_INDEX4: return VK_FORMAT_UNDEFINED;
818
case GL_STENCIL_INDEX8: return VK_FORMAT_S8_UINT;
819
case GL_STENCIL_INDEX16: return VK_FORMAT_UNDEFINED;
820
case GL_DEPTH24_STENCIL8: return VK_FORMAT_D24_UNORM_S8_UINT;
821
case GL_DEPTH32F_STENCIL8: return VK_FORMAT_D32_SFLOAT_S8_UINT;
822
case GL_DEPTH32F_STENCIL8_NV: return VK_FORMAT_D32_SFLOAT_S8_UINT;
823
824
default: return VK_FORMAT_UNDEFINED;
825
}
826
}
827
828
#if defined(NEED_VK_GET_FORMAT_SIZE)
// Fill in *pFormatSize with the texel-block geometry of 'format':
// block size in bits, block extent (width/height/depth in texels), palette
// size, minimum block counts, and KTX_FORMAT_SIZE_* classification flags
// (PACKED / COMPRESSED / DEPTH / STENCIL).
//
// format:      the Vulkan format to describe.
// pFormatSize: out parameter; fully written for every recognized format.
//              Unrecognized formats get blockSizeInBits == 0 so callers can
//              detect "unknown".
static inline void vkGetFormatSize( const VkFormat format, ktxFormatSize * pFormatSize )
{
	// Most formats have no minimum image size in blocks; PVRTC1 overrides this.
	pFormatSize->minBlocksX = pFormatSize->minBlocksY = 1;
	switch ( format )
	{
		case VK_FORMAT_R4G4_UNORM_PACK8:
			pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 1 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
		case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
		case VK_FORMAT_R5G6B5_UNORM_PACK16:
		case VK_FORMAT_B5G6R5_UNORM_PACK16:
		case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
		case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
		case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
			pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 2 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R8_UNORM:
		case VK_FORMAT_R8_SNORM:
		case VK_FORMAT_R8_USCALED:
		case VK_FORMAT_R8_SSCALED:
		case VK_FORMAT_R8_UINT:
		case VK_FORMAT_R8_SINT:
		case VK_FORMAT_R8_SRGB:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 1 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R8G8_UNORM:
		case VK_FORMAT_R8G8_SNORM:
		case VK_FORMAT_R8G8_USCALED:
		case VK_FORMAT_R8G8_SSCALED:
		case VK_FORMAT_R8G8_UINT:
		case VK_FORMAT_R8G8_SINT:
		case VK_FORMAT_R8G8_SRGB:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 2 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R8G8B8_UNORM:
		case VK_FORMAT_R8G8B8_SNORM:
		case VK_FORMAT_R8G8B8_USCALED:
		case VK_FORMAT_R8G8B8_SSCALED:
		case VK_FORMAT_R8G8B8_UINT:
		case VK_FORMAT_R8G8B8_SINT:
		case VK_FORMAT_R8G8B8_SRGB:
		case VK_FORMAT_B8G8R8_UNORM:
		case VK_FORMAT_B8G8R8_SNORM:
		case VK_FORMAT_B8G8R8_USCALED:
		case VK_FORMAT_B8G8R8_SSCALED:
		case VK_FORMAT_B8G8R8_UINT:
		case VK_FORMAT_B8G8R8_SINT:
		case VK_FORMAT_B8G8R8_SRGB:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 3 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R8G8B8A8_UNORM:
		case VK_FORMAT_R8G8B8A8_SNORM:
		case VK_FORMAT_R8G8B8A8_USCALED:
		case VK_FORMAT_R8G8B8A8_SSCALED:
		case VK_FORMAT_R8G8B8A8_UINT:
		case VK_FORMAT_R8G8B8A8_SINT:
		case VK_FORMAT_R8G8B8A8_SRGB:
		case VK_FORMAT_B8G8R8A8_UNORM:
		case VK_FORMAT_B8G8R8A8_SNORM:
		case VK_FORMAT_B8G8R8A8_USCALED:
		case VK_FORMAT_B8G8R8A8_SSCALED:
		case VK_FORMAT_B8G8R8A8_UINT:
		case VK_FORMAT_B8G8R8A8_SINT:
		case VK_FORMAT_B8G8R8A8_SRGB:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 4 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
		case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
		case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
		case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
		case VK_FORMAT_A8B8G8R8_UINT_PACK32:
		case VK_FORMAT_A8B8G8R8_SINT_PACK32:
		case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
			pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 4 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
		case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
		case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
		case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
		case VK_FORMAT_A2R10G10B10_UINT_PACK32:
		case VK_FORMAT_A2R10G10B10_SINT_PACK32:
		case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
		case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
		case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
		case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
		case VK_FORMAT_A2B10G10R10_UINT_PACK32:
		case VK_FORMAT_A2B10G10R10_SINT_PACK32:
			pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 4 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R16_UNORM:
		case VK_FORMAT_R16_SNORM:
		case VK_FORMAT_R16_USCALED:
		case VK_FORMAT_R16_SSCALED:
		case VK_FORMAT_R16_UINT:
		case VK_FORMAT_R16_SINT:
		case VK_FORMAT_R16_SFLOAT:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 2 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R16G16_UNORM:
		case VK_FORMAT_R16G16_SNORM:
		case VK_FORMAT_R16G16_USCALED:
		case VK_FORMAT_R16G16_SSCALED:
		case VK_FORMAT_R16G16_UINT:
		case VK_FORMAT_R16G16_SINT:
		case VK_FORMAT_R16G16_SFLOAT:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 4 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R16G16B16_UNORM:
		case VK_FORMAT_R16G16B16_SNORM:
		case VK_FORMAT_R16G16B16_USCALED:
		case VK_FORMAT_R16G16B16_SSCALED:
		case VK_FORMAT_R16G16B16_UINT:
		case VK_FORMAT_R16G16B16_SINT:
		case VK_FORMAT_R16G16B16_SFLOAT:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 6 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R16G16B16A16_UNORM:
		case VK_FORMAT_R16G16B16A16_SNORM:
		case VK_FORMAT_R16G16B16A16_USCALED:
		case VK_FORMAT_R16G16B16A16_SSCALED:
		case VK_FORMAT_R16G16B16A16_UINT:
		case VK_FORMAT_R16G16B16A16_SINT:
		case VK_FORMAT_R16G16B16A16_SFLOAT:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 8 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R32_UINT:
		case VK_FORMAT_R32_SINT:
		case VK_FORMAT_R32_SFLOAT:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 4 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R32G32_UINT:
		case VK_FORMAT_R32G32_SINT:
		case VK_FORMAT_R32G32_SFLOAT:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 8 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R32G32B32_UINT:
		case VK_FORMAT_R32G32B32_SINT:
		case VK_FORMAT_R32G32B32_SFLOAT:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 12 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R32G32B32A32_UINT:
		case VK_FORMAT_R32G32B32A32_SINT:
		case VK_FORMAT_R32G32B32A32_SFLOAT:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R64_UINT:
		case VK_FORMAT_R64_SINT:
		case VK_FORMAT_R64_SFLOAT:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 8 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R64G64_UINT:
		case VK_FORMAT_R64G64_SINT:
		case VK_FORMAT_R64G64_SFLOAT:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R64G64B64_UINT:
		case VK_FORMAT_R64G64B64_SINT:
		case VK_FORMAT_R64G64B64_SFLOAT:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 24 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_R64G64B64A64_UINT:
		case VK_FORMAT_R64G64B64A64_SINT:
		case VK_FORMAT_R64G64B64A64_SFLOAT:
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 32 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
		case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
			pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 4 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		//
		// Depth/stencil formats
		//
		case VK_FORMAT_D16_UNORM:
			pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 2 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_X8_D24_UNORM_PACK32:
			pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT | KTX_FORMAT_SIZE_DEPTH_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 4 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_D32_SFLOAT:
			pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 4 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_S8_UINT:
			pFormatSize->flags = KTX_FORMAT_SIZE_STENCIL_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 1 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_D16_UNORM_S8_UINT:
			pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT | KTX_FORMAT_SIZE_STENCIL_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 3 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_D24_UNORM_S8_UINT:
			pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT | KTX_FORMAT_SIZE_STENCIL_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 4 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_D32_SFLOAT_S8_UINT:
			pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT | KTX_FORMAT_SIZE_STENCIL_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 8 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
		//
		// Block-compressed formats: blockWidth/blockHeight are the texel
		// extent of one compressed block.
		//
		case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
		case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
		case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
		case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
		case VK_FORMAT_BC4_UNORM_BLOCK:
		case VK_FORMAT_BC4_SNORM_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 8 * 8;
			pFormatSize->blockWidth = 4;
			pFormatSize->blockHeight = 4;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_BC2_UNORM_BLOCK:
		case VK_FORMAT_BC2_SRGB_BLOCK:
		case VK_FORMAT_BC3_UNORM_BLOCK:
		case VK_FORMAT_BC3_SRGB_BLOCK:
		case VK_FORMAT_BC5_UNORM_BLOCK:
		case VK_FORMAT_BC5_SNORM_BLOCK:
		case VK_FORMAT_BC6H_UFLOAT_BLOCK:
		case VK_FORMAT_BC6H_SFLOAT_BLOCK:
		case VK_FORMAT_BC7_UNORM_BLOCK:
		case VK_FORMAT_BC7_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 4;
			pFormatSize->blockHeight = 4;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
		case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
		case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
		case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 8 * 8;
			pFormatSize->blockWidth = 4;
			pFormatSize->blockHeight = 4;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
		case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
		case VK_FORMAT_EAC_R11_UNORM_BLOCK:
		case VK_FORMAT_EAC_R11_SNORM_BLOCK:
		case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
		case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 4;
			pFormatSize->blockHeight = 4;
			pFormatSize->blockDepth = 1;
			break;
		// PVRTC1 images must be at least 2 blocks in each dimension,
		// hence minBlocksX/Y = 2 below; PVRTC2 has no such requirement.
		case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
		case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 8 * 8;
			pFormatSize->blockWidth = 8;
			pFormatSize->blockHeight = 4;
			pFormatSize->blockDepth = 1;
			pFormatSize->minBlocksX = 2;
			pFormatSize->minBlocksY = 2;
			break;
		case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
		case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 8 * 8;
			pFormatSize->blockWidth = 8;
			pFormatSize->blockHeight = 4;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
		case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 8 * 8;
			pFormatSize->blockWidth = 4;
			pFormatSize->blockHeight = 4;
			pFormatSize->blockDepth = 1;
			pFormatSize->minBlocksX = 2;
			pFormatSize->minBlocksY = 2;
			break;
		case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
		case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 8 * 8;
			pFormatSize->blockWidth = 4;
			pFormatSize->blockHeight = 4;
			pFormatSize->blockDepth = 1;
			break;
		// ASTC: every block is 128 bits; only the texel footprint varies.
		case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
		case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 4;
			pFormatSize->blockHeight = 4;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
		case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 5;
			pFormatSize->blockHeight = 4;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
		case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 5;
			pFormatSize->blockHeight = 5;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
		case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 6;
			pFormatSize->blockHeight = 5;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
		case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 6;
			pFormatSize->blockHeight = 6;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
		case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 8;
			pFormatSize->blockHeight = 5;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
		case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 8;
			pFormatSize->blockHeight = 6;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
		case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 8;
			pFormatSize->blockHeight = 8;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
		case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 10;
			pFormatSize->blockHeight = 5;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
		case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 10;
			pFormatSize->blockHeight = 6;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
		case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 10;
			pFormatSize->blockHeight = 8;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
		case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 10;
			pFormatSize->blockHeight = 10;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
		case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 12;
			pFormatSize->blockHeight = 10;
			pFormatSize->blockDepth = 1;
			break;
		case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
		case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
			pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 16 * 8;
			pFormatSize->blockWidth = 12;
			pFormatSize->blockHeight = 12;
			pFormatSize->blockDepth = 1;
			break;
		default:
			// Unknown format: report a zero-sized 1x1x1 block so callers
			// can detect the failure via blockSizeInBits == 0.
			pFormatSize->flags = 0;
			pFormatSize->paletteSizeInBits = 0;
			pFormatSize->blockSizeInBits = 0 * 8;
			pFormatSize->blockWidth = 1;
			pFormatSize->blockHeight = 1;
			pFormatSize->blockDepth = 1;
			break;
	}
}
#endif
1391
1392
#endif // !VK_FORMAT_H
1393
1394