1/*
2================================================================================================
3
4Description : Vulkan format properties and conversion from OpenGL.
5Author : J.M.P. van Waveren
6Date : 07/17/2016
7Language : C99
8Format : Real tabs with the tab size equal to 4 spaces.
9Copyright : Copyright (c) 2016 Oculus VR, LLC. All Rights reserved.
10
11
12LICENSE
13=======
14
15Copyright 2016 Oculus VR, LLC.
16SPDX-License-Identifier: Apache-2.0
17
18
19DESCRIPTION
20===========
21
22This header implements several support routines to convert OpenGL formats/types
23to Vulkan formats. These routines are particularly useful for loading file
24formats that store OpenGL formats/types such as KTX and glTF.
25
26The functions in this header file convert the format, internalFormat and type
27that are used as parameters to the following OpenGL functions:
28
29void glTexImage2D( GLenum target, GLint level, GLint internalFormat,
30 GLsizei width, GLsizei height, GLint border,
31 GLenum format, GLenum type, const GLvoid * data );
32void glTexImage3D( GLenum target, GLint level, GLint internalFormat,
33 GLsizei width, GLsizei height, GLsizei depth, GLint border,
34 GLenum format, GLenum type, const GLvoid * data );
35void glCompressedTexImage2D( GLenum target, GLint level, GLenum internalformat,
36 GLsizei width, GLsizei height, GLint border,
37 GLsizei imageSize, const GLvoid * data );
38void glCompressedTexImage3D( GLenum target, GLint level, GLenum internalformat,
39 GLsizei width, GLsizei height, GLsizei depth, GLint border,
40 GLsizei imageSize, const GLvoid * data );
41void glTexStorage2D( GLenum target, GLsizei levels, GLenum internalformat,
42 GLsizei width, GLsizei height );
43void glTexStorage3D( GLenum target, GLsizei levels, GLenum internalformat,
44 GLsizei width, GLsizei height, GLsizei depth );
45void glVertexAttribPointer( GLuint index, GLint size, GLenum type, GLboolean normalized,
46 GLsizei stride, const GLvoid * pointer);
47
48
49IMPLEMENTATION
50==============
51
52This file does not include OpenGL / OpenGL ES headers because:
53
54 1. Including OpenGL / OpenGL ES headers is platform dependent and
55 may require a separate installation of an OpenGL SDK.
56 2. The OpenGL format/type constants are the same between extensions and core.
57 3. The OpenGL format/type constants are the same between OpenGL and OpenGL ES.
58 4. File formats like KTX and glTF may use OpenGL formats and types that
59 are not supported by the OpenGL implementation on the platform but are
60 supported by the Vulkan implementation.
61
62
63ENTRY POINTS
64============
65
66static inline VkFormat vkGetFormatFromOpenGLFormat( const GLenum format, const GLenum type );
67static inline VkFormat vkGetFormatFromOpenGLType( const GLenum type, const GLuint numComponents, const GLboolean normalized );
68static inline VkFormat vkGetFormatFromOpenGLInternalFormat( const GLenum internalFormat );
69static inline void vkGetFormatSize( const VkFormat format, VkFormatSize * pFormatSize );
70
71MODIFICATIONS for use in libktx
72===============================
73
742019.5.30 Use common ktxFormatSize to return results. Mark Callow, Edgewise Consulting.
2019.6.12 Add mapping of PVRTC formats. Mark Callow, Edgewise Consulting.
76
77================================================================================================
78*/
79
80#if !defined( VK_FORMAT_H )
81#define VK_FORMAT_H
82
83#include "gl_format.h"
84
85static inline VkFormat vkGetFormatFromOpenGLFormat( const GLenum format, const GLenum type )
86{
87 switch ( type )
88 {
89 //
90 // 8 bits per component
91 //
92 case GL_UNSIGNED_BYTE:
93 {
94 switch ( format )
95 {
96 case GL_RED: return VK_FORMAT_R8_UNORM;
97 case GL_RG: return VK_FORMAT_R8G8_UNORM;
98 case GL_RGB: return VK_FORMAT_R8G8B8_UNORM;
99 case GL_BGR: return VK_FORMAT_B8G8R8_UNORM;
100 case GL_RGBA: return VK_FORMAT_R8G8B8A8_UNORM;
101 case GL_BGRA: return VK_FORMAT_B8G8R8A8_UNORM;
102 case GL_RED_INTEGER: return VK_FORMAT_R8_UINT;
103 case GL_RG_INTEGER: return VK_FORMAT_R8G8_UINT;
104 case GL_RGB_INTEGER: return VK_FORMAT_R8G8B8_UINT;
105 case GL_BGR_INTEGER: return VK_FORMAT_B8G8R8_UINT;
106 case GL_RGBA_INTEGER: return VK_FORMAT_R8G8B8A8_UINT;
107 case GL_BGRA_INTEGER: return VK_FORMAT_B8G8R8A8_UINT;
108 case GL_STENCIL_INDEX: return VK_FORMAT_S8_UINT;
109 case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
110 case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
111 }
112 break;
113 }
114 case GL_BYTE:
115 {
116 switch ( format )
117 {
118 case GL_RED: return VK_FORMAT_R8_SNORM;
119 case GL_RG: return VK_FORMAT_R8G8_SNORM;
120 case GL_RGB: return VK_FORMAT_R8G8B8_SNORM;
121 case GL_BGR: return VK_FORMAT_B8G8R8_SNORM;
122 case GL_RGBA: return VK_FORMAT_R8G8B8A8_SNORM;
123 case GL_BGRA: return VK_FORMAT_B8G8R8A8_SNORM;
124 case GL_RED_INTEGER: return VK_FORMAT_R8_SINT;
125 case GL_RG_INTEGER: return VK_FORMAT_R8G8_SINT;
126 case GL_RGB_INTEGER: return VK_FORMAT_R8G8B8_SINT;
127 case GL_BGR_INTEGER: return VK_FORMAT_B8G8R8_SINT;
128 case GL_RGBA_INTEGER: return VK_FORMAT_R8G8B8A8_SINT;
129 case GL_BGRA_INTEGER: return VK_FORMAT_B8G8R8A8_SINT;
130 case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
131 case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
132 case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
133 }
134 break;
135 }
136
137 //
138 // 16 bits per component
139 //
140 case GL_UNSIGNED_SHORT:
141 {
142 switch ( format )
143 {
144 case GL_RED: return VK_FORMAT_R16_UNORM;
145 case GL_RG: return VK_FORMAT_R16G16_UNORM;
146 case GL_RGB: return VK_FORMAT_R16G16B16_UNORM;
147 case GL_BGR: return VK_FORMAT_UNDEFINED;
148 case GL_RGBA: return VK_FORMAT_R16G16B16A16_UNORM;
149 case GL_BGRA: return VK_FORMAT_UNDEFINED;
150 case GL_RED_INTEGER: return VK_FORMAT_R16_UINT;
151 case GL_RG_INTEGER: return VK_FORMAT_R16G16_UINT;
152 case GL_RGB_INTEGER: return VK_FORMAT_R16G16B16_UINT;
153 case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
154 case GL_RGBA_INTEGER: return VK_FORMAT_R16G16B16A16_UINT;
155 case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
156 case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
157 case GL_DEPTH_COMPONENT: return VK_FORMAT_D16_UNORM;
158 case GL_DEPTH_STENCIL: return VK_FORMAT_D16_UNORM_S8_UINT;
159 }
160 break;
161 }
162 case GL_SHORT:
163 {
164 switch ( format )
165 {
166 case GL_RED: return VK_FORMAT_R16_SNORM;
167 case GL_RG: return VK_FORMAT_R16G16_SNORM;
168 case GL_RGB: return VK_FORMAT_R16G16B16_SNORM;
169 case GL_BGR: return VK_FORMAT_UNDEFINED;
170 case GL_RGBA: return VK_FORMAT_R16G16B16A16_SNORM;
171 case GL_BGRA: return VK_FORMAT_UNDEFINED;
172 case GL_RED_INTEGER: return VK_FORMAT_R16_SINT;
173 case GL_RG_INTEGER: return VK_FORMAT_R16G16_SINT;
174 case GL_RGB_INTEGER: return VK_FORMAT_R16G16B16_SINT;
175 case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
176 case GL_RGBA_INTEGER: return VK_FORMAT_R16G16B16A16_SINT;
177 case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
178 case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
179 case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
180 case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
181 }
182 break;
183 }
184 case GL_HALF_FLOAT:
185 case GL_HALF_FLOAT_OES:
186 {
187 switch ( format )
188 {
189 case GL_RED: return VK_FORMAT_R16_SFLOAT;
190 case GL_RG: return VK_FORMAT_R16G16_SFLOAT;
191 case GL_RGB: return VK_FORMAT_R16G16B16_SFLOAT;
192 case GL_BGR: return VK_FORMAT_UNDEFINED;
193 case GL_RGBA: return VK_FORMAT_R16G16B16A16_SFLOAT;
194 case GL_BGRA: return VK_FORMAT_UNDEFINED;
195 case GL_RED_INTEGER: return VK_FORMAT_UNDEFINED;
196 case GL_RG_INTEGER: return VK_FORMAT_UNDEFINED;
197 case GL_RGB_INTEGER: return VK_FORMAT_UNDEFINED;
198 case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
199 case GL_RGBA_INTEGER: return VK_FORMAT_UNDEFINED;
200 case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
201 case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
202 case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
203 case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
204 }
205 break;
206 }
207
208 //
209 // 32 bits per component
210 //
211 case GL_UNSIGNED_INT:
212 {
213 switch ( format )
214 {
215 case GL_RED: return VK_FORMAT_R32_UINT;
216 case GL_RG: return VK_FORMAT_R32G32_UINT;
217 case GL_RGB: return VK_FORMAT_R32G32B32_UINT;
218 case GL_BGR: return VK_FORMAT_UNDEFINED;
219 case GL_RGBA: return VK_FORMAT_R32G32B32A32_UINT;
220 case GL_BGRA: return VK_FORMAT_UNDEFINED;
221 case GL_RED_INTEGER: return VK_FORMAT_R32_UINT;
222 case GL_RG_INTEGER: return VK_FORMAT_R32G32_UINT;
223 case GL_RGB_INTEGER: return VK_FORMAT_R32G32B32_UINT;
224 case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
225 case GL_RGBA_INTEGER: return VK_FORMAT_R32G32B32A32_UINT;
226 case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
227 case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
228 case GL_DEPTH_COMPONENT: return VK_FORMAT_X8_D24_UNORM_PACK32;
229 case GL_DEPTH_STENCIL: return VK_FORMAT_D24_UNORM_S8_UINT;
230 }
231 break;
232 }
233 case GL_INT:
234 {
235 switch ( format )
236 {
237 case GL_RED: return VK_FORMAT_R32_SINT;
238 case GL_RG: return VK_FORMAT_R32G32_SINT;
239 case GL_RGB: return VK_FORMAT_R32G32B32_SINT;
240 case GL_BGR: return VK_FORMAT_UNDEFINED;
241 case GL_RGBA: return VK_FORMAT_R32G32B32A32_SINT;
242 case GL_BGRA: return VK_FORMAT_UNDEFINED;
243 case GL_RED_INTEGER: return VK_FORMAT_R32_SINT;
244 case GL_RG_INTEGER: return VK_FORMAT_R32G32_SINT;
245 case GL_RGB_INTEGER: return VK_FORMAT_R32G32B32_SINT;
246 case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
247 case GL_RGBA_INTEGER: return VK_FORMAT_R32G32B32A32_SINT;
248 case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
249 case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
250 case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
251 case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
252 }
253 break;
254 }
255 case GL_FLOAT:
256 {
257 switch ( format )
258 {
259 case GL_RED: return VK_FORMAT_R32_SFLOAT;
260 case GL_RG: return VK_FORMAT_R32G32_SFLOAT;
261 case GL_RGB: return VK_FORMAT_R32G32B32_SFLOAT;
262 case GL_BGR: return VK_FORMAT_UNDEFINED;
263 case GL_RGBA: return VK_FORMAT_R32G32B32A32_SFLOAT;
264 case GL_BGRA: return VK_FORMAT_UNDEFINED;
265 case GL_RED_INTEGER: return VK_FORMAT_UNDEFINED;
266 case GL_RG_INTEGER: return VK_FORMAT_UNDEFINED;
267 case GL_RGB_INTEGER: return VK_FORMAT_UNDEFINED;
268 case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
269 case GL_RGBA_INTEGER: return VK_FORMAT_UNDEFINED;
270 case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
271 case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
272 case GL_DEPTH_COMPONENT: return VK_FORMAT_D32_SFLOAT;
273 case GL_DEPTH_STENCIL: return VK_FORMAT_D32_SFLOAT_S8_UINT;
274 }
275 break;
276 }
277
278 //
279 // 64 bits per component
280 //
281 case GL_UNSIGNED_INT64:
282 {
283 switch ( format )
284 {
285 case GL_RED: return VK_FORMAT_R64_UINT;
286 case GL_RG: return VK_FORMAT_R64G64_UINT;
287 case GL_RGB: return VK_FORMAT_R64G64B64_UINT;
288 case GL_BGR: return VK_FORMAT_UNDEFINED;
289 case GL_RGBA: return VK_FORMAT_R64G64B64A64_UINT;
290 case GL_BGRA: return VK_FORMAT_UNDEFINED;
291 case GL_RED_INTEGER: return VK_FORMAT_UNDEFINED;
292 case GL_RG_INTEGER: return VK_FORMAT_UNDEFINED;
293 case GL_RGB_INTEGER: return VK_FORMAT_UNDEFINED;
294 case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
295 case GL_RGBA_INTEGER: return VK_FORMAT_UNDEFINED;
296 case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
297 case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
298 case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
299 case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
300 }
301 break;
302 }
303 case GL_INT64:
304 {
305 switch ( format )
306 {
307 case GL_RED: return VK_FORMAT_R64_SINT;
308 case GL_RG: return VK_FORMAT_R64G64_SINT;
309 case GL_RGB: return VK_FORMAT_R64G64B64_SINT;
310 case GL_BGR: return VK_FORMAT_UNDEFINED;
311 case GL_RGBA: return VK_FORMAT_R64G64B64A64_SINT;
312 case GL_BGRA: return VK_FORMAT_UNDEFINED;
313 case GL_RED_INTEGER: return VK_FORMAT_R64_SINT;
314 case GL_RG_INTEGER: return VK_FORMAT_R64G64_SINT;
315 case GL_RGB_INTEGER: return VK_FORMAT_R64G64B64_SINT;
316 case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
317 case GL_RGBA_INTEGER: return VK_FORMAT_R64G64B64A64_SINT;
318 case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
319 case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
320 case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
321 case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
322 }
323 break;
324 }
325 case GL_DOUBLE:
326 {
327 switch ( format )
328 {
329 case GL_RED: return VK_FORMAT_R64_SFLOAT;
330 case GL_RG: return VK_FORMAT_R64G64_SFLOAT;
331 case GL_RGB: return VK_FORMAT_R64G64B64_SFLOAT;
332 case GL_BGR: return VK_FORMAT_UNDEFINED;
333 case GL_RGBA: return VK_FORMAT_R64G64B64A64_SFLOAT;
334 case GL_BGRA: return VK_FORMAT_UNDEFINED;
335 case GL_RED_INTEGER: return VK_FORMAT_R64_SFLOAT;
336 case GL_RG_INTEGER: return VK_FORMAT_R64G64_SFLOAT;
337 case GL_RGB_INTEGER: return VK_FORMAT_R64G64B64_SFLOAT;
338 case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
339 case GL_RGBA_INTEGER: return VK_FORMAT_R64G64B64A64_SFLOAT;
340 case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
341 case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
342 case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
343 case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
344 }
345 break;
346 }
347
348 //
349 // Packed
350 //
351 case GL_UNSIGNED_BYTE_3_3_2:
352 assert( format == GL_RGB || format == GL_RGB_INTEGER );
353 return VK_FORMAT_UNDEFINED;
354 case GL_UNSIGNED_BYTE_2_3_3_REV:
355 assert( format == GL_BGR || format == GL_BGR_INTEGER );
356 return VK_FORMAT_UNDEFINED;
357 case GL_UNSIGNED_SHORT_5_6_5:
358 assert( format == GL_RGB || format == GL_RGB_INTEGER );
359 return VK_FORMAT_R5G6B5_UNORM_PACK16;
360 case GL_UNSIGNED_SHORT_5_6_5_REV:
361 assert( format == GL_BGR || format == GL_BGR_INTEGER );
362 return VK_FORMAT_B5G6R5_UNORM_PACK16;
363 case GL_UNSIGNED_SHORT_4_4_4_4:
364 assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
365 return VK_FORMAT_R4G4B4A4_UNORM_PACK16;
366 case GL_UNSIGNED_SHORT_4_4_4_4_REV:
367 assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
368 return VK_FORMAT_B4G4R4A4_UNORM_PACK16;
369 case GL_UNSIGNED_SHORT_5_5_5_1:
370 assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
371 return VK_FORMAT_R5G5B5A1_UNORM_PACK16;
372 case GL_UNSIGNED_SHORT_1_5_5_5_REV:
373 assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
374 return VK_FORMAT_A1R5G5B5_UNORM_PACK16;
375 case GL_UNSIGNED_INT_8_8_8_8:
376 assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
377 return ( format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER ) ? VK_FORMAT_R8G8B8A8_UINT : VK_FORMAT_R8G8B8A8_UNORM;
378 case GL_UNSIGNED_INT_8_8_8_8_REV:
379 assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
380 return ( format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER ) ? VK_FORMAT_A8B8G8R8_UINT_PACK32 : VK_FORMAT_A8B8G8R8_UNORM_PACK32;
381 case GL_UNSIGNED_INT_10_10_10_2:
382 assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
383 return ( format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER ) ? VK_FORMAT_A2R10G10B10_UINT_PACK32 : VK_FORMAT_A2R10G10B10_UNORM_PACK32;
384 case GL_UNSIGNED_INT_2_10_10_10_REV:
385 assert( format == GL_RGB || format == GL_BGRA || format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER );
386 return ( format == GL_RGB_INTEGER || format == GL_BGRA_INTEGER ) ? VK_FORMAT_A2B10G10R10_UINT_PACK32 : VK_FORMAT_A2B10G10R10_UNORM_PACK32;
387 case GL_UNSIGNED_INT_10F_11F_11F_REV:
388 assert( format == GL_RGB || format == GL_BGR );
389 return VK_FORMAT_B10G11R11_UFLOAT_PACK32;
390 case GL_UNSIGNED_INT_5_9_9_9_REV:
391 assert( format == GL_RGB || format == GL_BGR );
392 return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32;
393 case GL_UNSIGNED_INT_24_8:
394 assert( format == GL_DEPTH_STENCIL );
395 return VK_FORMAT_D24_UNORM_S8_UINT;
396 case GL_FLOAT_32_UNSIGNED_INT_24_8_REV:
397 assert( format == GL_DEPTH_STENCIL );
398 return VK_FORMAT_D32_SFLOAT_S8_UINT;
399 }
400
401 return VK_FORMAT_UNDEFINED;
402}
403
/**
 * Convert an OpenGL type (as passed to glVertexAttribPointer) together with a
 * component count and a normalized flag to the equivalent Vulkan format.
 *
 * @param type          OpenGL component type (e.g. GL_UNSIGNED_BYTE, GL_FLOAT,
 *                      or a packed type such as GL_UNSIGNED_INT_2_10_10_10_REV).
 * @param numComponents Number of components, 1-4 (the glVertexAttribPointer
 *                      'size' parameter). Ignored for packed types, which
 *                      encode their component layout in the type itself.
 * @param normalized    Whether integer data should be treated as normalized
 *                      ([US]NORM) rather than as plain integers ([US]INT).
 *                      Ignored for floating-point types.
 * @return The corresponding VkFormat, or VK_FORMAT_UNDEFINED for unknown or
 *         unsupported combinations.
 */
static inline VkFormat vkGetFormatFromOpenGLType( const GLenum type, const GLuint numComponents, const GLboolean normalized )
{
	switch ( type )
	{
		//
		// 8 bits per component
		//
		case GL_UNSIGNED_BYTE:
		{
			switch ( numComponents )
			{
				case 1: return normalized ? VK_FORMAT_R8_UNORM : VK_FORMAT_R8_UINT;
				case 2: return normalized ? VK_FORMAT_R8G8_UNORM : VK_FORMAT_R8G8_UINT;
				case 3: return normalized ? VK_FORMAT_R8G8B8_UNORM : VK_FORMAT_R8G8B8_UINT;
				case 4: return normalized ? VK_FORMAT_R8G8B8A8_UNORM : VK_FORMAT_R8G8B8A8_UINT;
			}
			break;
		}
		case GL_BYTE:
		{
			switch ( numComponents )
			{
				case 1: return normalized ? VK_FORMAT_R8_SNORM : VK_FORMAT_R8_SINT;
				case 2: return normalized ? VK_FORMAT_R8G8_SNORM : VK_FORMAT_R8G8_SINT;
				case 3: return normalized ? VK_FORMAT_R8G8B8_SNORM : VK_FORMAT_R8G8B8_SINT;
				case 4: return normalized ? VK_FORMAT_R8G8B8A8_SNORM : VK_FORMAT_R8G8B8A8_SINT;
			}
			break;
		}

		//
		// 16 bits per component
		//
		case GL_UNSIGNED_SHORT:
		{
			switch ( numComponents )
			{
				case 1: return normalized ? VK_FORMAT_R16_UNORM : VK_FORMAT_R16_UINT;
				case 2: return normalized ? VK_FORMAT_R16G16_UNORM : VK_FORMAT_R16G16_UINT;
				case 3: return normalized ? VK_FORMAT_R16G16B16_UNORM : VK_FORMAT_R16G16B16_UINT;
				case 4: return normalized ? VK_FORMAT_R16G16B16A16_UNORM : VK_FORMAT_R16G16B16A16_UINT;
			}
			break;
		}
		case GL_SHORT:
		{
			switch ( numComponents )
			{
				case 1: return normalized ? VK_FORMAT_R16_SNORM : VK_FORMAT_R16_SINT;
				case 2: return normalized ? VK_FORMAT_R16G16_SNORM : VK_FORMAT_R16G16_SINT;
				case 3: return normalized ? VK_FORMAT_R16G16B16_SNORM : VK_FORMAT_R16G16B16_SINT;
				case 4: return normalized ? VK_FORMAT_R16G16B16A16_SNORM : VK_FORMAT_R16G16B16A16_SINT;
			}
			break;
		}
		case GL_HALF_FLOAT:
		case GL_HALF_FLOAT_OES:
		{
			// Floating-point types have no normalized/integer distinction.
			switch ( numComponents )
			{
				case 1: return VK_FORMAT_R16_SFLOAT;
				case 2: return VK_FORMAT_R16G16_SFLOAT;
				case 3: return VK_FORMAT_R16G16B16_SFLOAT;
				case 4: return VK_FORMAT_R16G16B16A16_SFLOAT;
			}
			break;
		}

		//
		// 32 bits per component
		//
		case GL_UNSIGNED_INT:
		{
			switch ( numComponents )
			{
				case 1: return VK_FORMAT_R32_UINT;
				case 2: return VK_FORMAT_R32G32_UINT;
				case 3: return VK_FORMAT_R32G32B32_UINT;
				case 4: return VK_FORMAT_R32G32B32A32_UINT;
			}
			break;
		}
		case GL_INT:
		{
			switch ( numComponents )
			{
				case 1: return VK_FORMAT_R32_SINT;
				case 2: return VK_FORMAT_R32G32_SINT;
				case 3: return VK_FORMAT_R32G32B32_SINT;
				case 4: return VK_FORMAT_R32G32B32A32_SINT;
			}
			break;
		}
		case GL_FLOAT:
		{
			switch ( numComponents )
			{
				case 1: return VK_FORMAT_R32_SFLOAT;
				case 2: return VK_FORMAT_R32G32_SFLOAT;
				case 3: return VK_FORMAT_R32G32B32_SFLOAT;
				case 4: return VK_FORMAT_R32G32B32A32_SFLOAT;
			}
			break;
		}

		//
		// 64 bits per component
		//
		case GL_UNSIGNED_INT64:
		{
			switch ( numComponents )
			{
				case 1: return VK_FORMAT_R64_UINT;
				case 2: return VK_FORMAT_R64G64_UINT;
				case 3: return VK_FORMAT_R64G64B64_UINT;
				case 4: return VK_FORMAT_R64G64B64A64_UINT;
			}
			break;
		}
		case GL_INT64:
		{
			switch ( numComponents )
			{
				case 1: return VK_FORMAT_R64_SINT;
				case 2: return VK_FORMAT_R64G64_SINT;
				case 3: return VK_FORMAT_R64G64B64_SINT;
				case 4: return VK_FORMAT_R64G64B64A64_SINT;
			}
			break;
		}
		case GL_DOUBLE:
		{
			switch ( numComponents )
			{
				case 1: return VK_FORMAT_R64_SFLOAT;
				case 2: return VK_FORMAT_R64G64_SFLOAT;
				case 3: return VK_FORMAT_R64G64B64_SFLOAT;
				case 4: return VK_FORMAT_R64G64B64A64_SFLOAT;
			}
			break;
		}

		//
		// Packed
		//
		// numComponents is implied by the packed layout; only the normalized
		// flag matters (and only for the integer-capable packed types).
		case GL_UNSIGNED_BYTE_3_3_2:			return VK_FORMAT_UNDEFINED;
		case GL_UNSIGNED_BYTE_2_3_3_REV:		return VK_FORMAT_UNDEFINED;
		case GL_UNSIGNED_SHORT_5_6_5:			return VK_FORMAT_R5G6B5_UNORM_PACK16;
		case GL_UNSIGNED_SHORT_5_6_5_REV:		return VK_FORMAT_B5G6R5_UNORM_PACK16;
		case GL_UNSIGNED_SHORT_4_4_4_4:			return VK_FORMAT_R4G4B4A4_UNORM_PACK16;
		case GL_UNSIGNED_SHORT_4_4_4_4_REV:		return VK_FORMAT_B4G4R4A4_UNORM_PACK16;
		case GL_UNSIGNED_SHORT_5_5_5_1:			return VK_FORMAT_R5G5B5A1_UNORM_PACK16;
		case GL_UNSIGNED_SHORT_1_5_5_5_REV:		return VK_FORMAT_A1R5G5B5_UNORM_PACK16;
		case GL_UNSIGNED_INT_8_8_8_8:			return normalized ? VK_FORMAT_R8G8B8A8_UNORM : VK_FORMAT_R8G8B8A8_UINT;
		case GL_UNSIGNED_INT_8_8_8_8_REV:		return normalized ? VK_FORMAT_A8B8G8R8_UNORM_PACK32 : VK_FORMAT_A8B8G8R8_UINT_PACK32;
		case GL_UNSIGNED_INT_10_10_10_2:		return normalized ? VK_FORMAT_A2R10G10B10_UNORM_PACK32 : VK_FORMAT_A2R10G10B10_UINT_PACK32;
		case GL_UNSIGNED_INT_2_10_10_10_REV:	return normalized ? VK_FORMAT_A2B10G10R10_UNORM_PACK32 : VK_FORMAT_A2B10G10R10_UINT_PACK32;
		case GL_UNSIGNED_INT_10F_11F_11F_REV:	return VK_FORMAT_B10G11R11_UFLOAT_PACK32;
		case GL_UNSIGNED_INT_5_9_9_9_REV:		return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32;
		case GL_UNSIGNED_INT_24_8:				return VK_FORMAT_D24_UNORM_S8_UINT;
		case GL_FLOAT_32_UNSIGNED_INT_24_8_REV:	return VK_FORMAT_D32_SFLOAT_S8_UINT;
	}

	return VK_FORMAT_UNDEFINED;
}
569
570static inline VkFormat vkGetFormatFromOpenGLInternalFormat( const GLenum internalFormat )
571{
572 switch ( internalFormat )
573 {
574 //
575 // 8 bits per component
576 //
577 case GL_R8: return VK_FORMAT_R8_UNORM; // 1-component, 8-bit unsigned normalized
578 case GL_RG8: return VK_FORMAT_R8G8_UNORM; // 2-component, 8-bit unsigned normalized
579 case GL_RGB8: return VK_FORMAT_R8G8B8_UNORM; // 3-component, 8-bit unsigned normalized
580 case GL_RGBA8: return VK_FORMAT_R8G8B8A8_UNORM; // 4-component, 8-bit unsigned normalized
581
582 case GL_R8_SNORM: return VK_FORMAT_R8_SNORM; // 1-component, 8-bit signed normalized
583 case GL_RG8_SNORM: return VK_FORMAT_R8G8_SNORM; // 2-component, 8-bit signed normalized
584 case GL_RGB8_SNORM: return VK_FORMAT_R8G8B8_SNORM; // 3-component, 8-bit signed normalized
585 case GL_RGBA8_SNORM: return VK_FORMAT_R8G8B8A8_SNORM; // 4-component, 8-bit signed normalized
586
587 case GL_R8UI: return VK_FORMAT_R8_UINT; // 1-component, 8-bit unsigned integer
588 case GL_RG8UI: return VK_FORMAT_R8G8_UINT; // 2-component, 8-bit unsigned integer
589 case GL_RGB8UI: return VK_FORMAT_R8G8B8_UINT; // 3-component, 8-bit unsigned integer
590 case GL_RGBA8UI: return VK_FORMAT_R8G8B8A8_UINT; // 4-component, 8-bit unsigned integer
591
592 case GL_R8I: return VK_FORMAT_R8_SINT; // 1-component, 8-bit signed integer
593 case GL_RG8I: return VK_FORMAT_R8G8_SINT; // 2-component, 8-bit signed integer
594 case GL_RGB8I: return VK_FORMAT_R8G8B8_SINT; // 3-component, 8-bit signed integer
595 case GL_RGBA8I: return VK_FORMAT_R8G8B8A8_SINT; // 4-component, 8-bit signed integer
596
597 case GL_SR8: return VK_FORMAT_R8_SRGB; // 1-component, 8-bit sRGB
598 case GL_SRG8: return VK_FORMAT_R8G8_SRGB; // 2-component, 8-bit sRGB
599 case GL_SRGB8: return VK_FORMAT_R8G8B8_SRGB; // 3-component, 8-bit sRGB
600 case GL_SRGB8_ALPHA8: return VK_FORMAT_R8G8B8A8_SRGB; // 4-component, 8-bit sRGB
601
602 //
603 // 16 bits per component
604 //
605 case GL_R16: return VK_FORMAT_R16_UNORM; // 1-component, 16-bit unsigned normalized
606 case GL_RG16: return VK_FORMAT_R16G16_UNORM; // 2-component, 16-bit unsigned normalized
607 case GL_RGB16: return VK_FORMAT_R16G16B16_UNORM; // 3-component, 16-bit unsigned normalized
608 case GL_RGBA16: return VK_FORMAT_R16G16B16A16_UNORM; // 4-component, 16-bit unsigned normalized
609
610 case GL_R16_SNORM: return VK_FORMAT_R16_SNORM; // 1-component, 16-bit signed normalized
611 case GL_RG16_SNORM: return VK_FORMAT_R16G16_SNORM; // 2-component, 16-bit signed normalized
612 case GL_RGB16_SNORM: return VK_FORMAT_R16G16B16_SNORM; // 3-component, 16-bit signed normalized
613 case GL_RGBA16_SNORM: return VK_FORMAT_R16G16B16A16_SNORM; // 4-component, 16-bit signed normalized
614
615 case GL_R16UI: return VK_FORMAT_R16_UINT; // 1-component, 16-bit unsigned integer
616 case GL_RG16UI: return VK_FORMAT_R16G16_UINT; // 2-component, 16-bit unsigned integer
617 case GL_RGB16UI: return VK_FORMAT_R16G16B16_UINT; // 3-component, 16-bit unsigned integer
618 case GL_RGBA16UI: return VK_FORMAT_R16G16B16A16_UINT; // 4-component, 16-bit unsigned integer
619
620 case GL_R16I: return VK_FORMAT_R16_SINT; // 1-component, 16-bit signed integer
621 case GL_RG16I: return VK_FORMAT_R16G16_SINT; // 2-component, 16-bit signed integer
622 case GL_RGB16I: return VK_FORMAT_R16G16B16_SINT; // 3-component, 16-bit signed integer
623 case GL_RGBA16I: return VK_FORMAT_R16G16B16A16_SINT; // 4-component, 16-bit signed integer
624
625 case GL_R16F: return VK_FORMAT_R16_SFLOAT; // 1-component, 16-bit floating-point
626 case GL_RG16F: return VK_FORMAT_R16G16_SFLOAT; // 2-component, 16-bit floating-point
627 case GL_RGB16F: return VK_FORMAT_R16G16B16_SFLOAT; // 3-component, 16-bit floating-point
628 case GL_RGBA16F: return VK_FORMAT_R16G16B16A16_SFLOAT; // 4-component, 16-bit floating-point
629
630 //
631 // 32 bits per component
632 //
633 case GL_R32UI: return VK_FORMAT_R32_UINT; // 1-component, 32-bit unsigned integer
634 case GL_RG32UI: return VK_FORMAT_R32G32_UINT; // 2-component, 32-bit unsigned integer
635 case GL_RGB32UI: return VK_FORMAT_R32G32B32_UINT; // 3-component, 32-bit unsigned integer
636 case GL_RGBA32UI: return VK_FORMAT_R32G32B32A32_UINT; // 4-component, 32-bit unsigned integer
637
638 case GL_R32I: return VK_FORMAT_R32_SINT; // 1-component, 32-bit signed integer
639 case GL_RG32I: return VK_FORMAT_R32G32_SINT; // 2-component, 32-bit signed integer
640 case GL_RGB32I: return VK_FORMAT_R32G32B32_SINT; // 3-component, 32-bit signed integer
641 case GL_RGBA32I: return VK_FORMAT_R32G32B32A32_SINT; // 4-component, 32-bit signed integer
642
643 case GL_R32F: return VK_FORMAT_R32_SFLOAT; // 1-component, 32-bit floating-point
644 case GL_RG32F: return VK_FORMAT_R32G32_SFLOAT; // 2-component, 32-bit floating-point
645 case GL_RGB32F: return VK_FORMAT_R32G32B32_SFLOAT; // 3-component, 32-bit floating-point
646 case GL_RGBA32F: return VK_FORMAT_R32G32B32A32_SFLOAT; // 4-component, 32-bit floating-point
647
648 //
649 // Packed
650 //
651 case GL_R3_G3_B2: return VK_FORMAT_UNDEFINED; // 3-component 3:3:2, unsigned normalized
652 case GL_RGB4: return VK_FORMAT_UNDEFINED; // 3-component 4:4:4, unsigned normalized
653 case GL_RGB5: return VK_FORMAT_R5G5B5A1_UNORM_PACK16; // 3-component 5:5:5, unsigned normalized
654 case GL_RGB565: return VK_FORMAT_R5G6B5_UNORM_PACK16; // 3-component 5:6:5, unsigned normalized
655 case GL_RGB10: return VK_FORMAT_A2R10G10B10_UNORM_PACK32; // 3-component 10:10:10, unsigned normalized
656 case GL_RGB12: return VK_FORMAT_UNDEFINED; // 3-component 12:12:12, unsigned normalized
657 case GL_RGBA2: return VK_FORMAT_UNDEFINED; // 4-component 2:2:2:2, unsigned normalized
658 case GL_RGBA4: return VK_FORMAT_R4G4B4A4_UNORM_PACK16; // 4-component 4:4:4:4, unsigned normalized
659 case GL_RGBA12: return VK_FORMAT_UNDEFINED; // 4-component 12:12:12:12, unsigned normalized
660 case GL_RGB5_A1: return VK_FORMAT_A1R5G5B5_UNORM_PACK16; // 4-component 5:5:5:1, unsigned normalized
661 case GL_RGB10_A2: return VK_FORMAT_A2R10G10B10_UNORM_PACK32; // 4-component 10:10:10:2, unsigned normalized
662 case GL_RGB10_A2UI: return VK_FORMAT_A2R10G10B10_UINT_PACK32; // 4-component 10:10:10:2, unsigned integer
663 case GL_R11F_G11F_B10F: return VK_FORMAT_B10G11R11_UFLOAT_PACK32; // 3-component 11:11:10, floating-point
664 case GL_RGB9_E5: return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32; // 3-component/exp 9:9:9/5, floating-point
665
666 //
667 // S3TC/DXT/BC
668 //
669
670 case GL_COMPRESSED_RGB_S3TC_DXT1_EXT: return VK_FORMAT_BC1_RGB_UNORM_BLOCK; // line through 3D space, 4x4 blocks, unsigned normalized
671 case GL_COMPRESSED_RGBA_S3TC_DXT1_EXT: return VK_FORMAT_BC1_RGBA_UNORM_BLOCK; // line through 3D space plus 1-bit alpha, 4x4 blocks, unsigned normalized
672 case GL_COMPRESSED_RGBA_S3TC_DXT3_EXT: return VK_FORMAT_BC2_UNORM_BLOCK; // line through 3D space plus line through 1D space, 4x4 blocks, unsigned normalized
673 case GL_COMPRESSED_RGBA_S3TC_DXT5_EXT: return VK_FORMAT_BC3_UNORM_BLOCK; // line through 3D space plus 4-bit alpha, 4x4 blocks, unsigned normalized
674
675 case GL_COMPRESSED_SRGB_S3TC_DXT1_EXT: return VK_FORMAT_BC1_RGB_SRGB_BLOCK; // line through 3D space, 4x4 blocks, sRGB
676 case GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT: return VK_FORMAT_BC1_RGBA_SRGB_BLOCK; // line through 3D space plus 1-bit alpha, 4x4 blocks, sRGB
677 case GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT: return VK_FORMAT_BC2_SRGB_BLOCK; // line through 3D space plus line through 1D space, 4x4 blocks, sRGB
678 case GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT: return VK_FORMAT_BC3_SRGB_BLOCK; // line through 3D space plus 4-bit alpha, 4x4 blocks, sRGB
679
680 case GL_COMPRESSED_LUMINANCE_LATC1_EXT: return VK_FORMAT_BC4_UNORM_BLOCK; // line through 1D space, 4x4 blocks, unsigned normalized
681 case GL_COMPRESSED_LUMINANCE_ALPHA_LATC2_EXT: return VK_FORMAT_BC5_UNORM_BLOCK; // two lines through 1D space, 4x4 blocks, unsigned normalized
682 case GL_COMPRESSED_SIGNED_LUMINANCE_LATC1_EXT: return VK_FORMAT_BC4_SNORM_BLOCK; // line through 1D space, 4x4 blocks, signed normalized
683 case GL_COMPRESSED_SIGNED_LUMINANCE_ALPHA_LATC2_EXT: return VK_FORMAT_BC5_SNORM_BLOCK; // two lines through 1D space, 4x4 blocks, signed normalized
684
685 case GL_COMPRESSED_RED_RGTC1: return VK_FORMAT_BC4_UNORM_BLOCK; // line through 1D space, 4x4 blocks, unsigned normalized
686 case GL_COMPRESSED_RG_RGTC2: return VK_FORMAT_BC5_UNORM_BLOCK; // two lines through 1D space, 4x4 blocks, unsigned normalized
687 case GL_COMPRESSED_SIGNED_RED_RGTC1: return VK_FORMAT_BC4_SNORM_BLOCK; // line through 1D space, 4x4 blocks, signed normalized
688 case GL_COMPRESSED_SIGNED_RG_RGTC2: return VK_FORMAT_BC5_SNORM_BLOCK; // two lines through 1D space, 4x4 blocks, signed normalized
689
690 case GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT: return VK_FORMAT_BC6H_UFLOAT_BLOCK; // 3-component, 4x4 blocks, unsigned floating-point
691 case GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT: return VK_FORMAT_BC6H_SFLOAT_BLOCK; // 3-component, 4x4 blocks, signed floating-point
692 case GL_COMPRESSED_RGBA_BPTC_UNORM: return VK_FORMAT_BC7_UNORM_BLOCK; // 4-component, 4x4 blocks, unsigned normalized
693 case GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM: return VK_FORMAT_BC7_SRGB_BLOCK; // 4-component, 4x4 blocks, sRGB
694
695 //
696 // ETC
697 //
698 case GL_ETC1_RGB8_OES: return VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK; // 3-component ETC1, 4x4 blocks, unsigned normalized
699
700 case GL_COMPRESSED_RGB8_ETC2: return VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK; // 3-component ETC2, 4x4 blocks, unsigned normalized
701 case GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2: return VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK; // 4-component ETC2 with 1-bit alpha, 4x4 blocks, unsigned normalized
702 case GL_COMPRESSED_RGBA8_ETC2_EAC: return VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK; // 4-component ETC2, 4x4 blocks, unsigned normalized
703
704 case GL_COMPRESSED_SRGB8_ETC2: return VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK; // 3-component ETC2, 4x4 blocks, sRGB
705 case GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2: return VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK; // 4-component ETC2 with 1-bit alpha, 4x4 blocks, sRGB
706 case GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC: return VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK; // 4-component ETC2, 4x4 blocks, sRGB
707
708 case GL_COMPRESSED_R11_EAC: return VK_FORMAT_EAC_R11_UNORM_BLOCK; // 1-component ETC, 4x4 blocks, unsigned normalized
709 case GL_COMPRESSED_RG11_EAC: return VK_FORMAT_EAC_R11G11_UNORM_BLOCK; // 2-component ETC, 4x4 blocks, unsigned normalized
710 case GL_COMPRESSED_SIGNED_R11_EAC: return VK_FORMAT_EAC_R11_SNORM_BLOCK; // 1-component ETC, 4x4 blocks, signed normalized
711 case GL_COMPRESSED_SIGNED_RG11_EAC: return VK_FORMAT_EAC_R11G11_SNORM_BLOCK; // 2-component ETC, 4x4 blocks, signed normalized
712
713 //
714 // PVRTC
715 //
716 case GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG: return VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG; // 3-component PVRTC, 16x8 blocks, unsigned normalized
717 case GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG: return VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG; // 3-component PVRTC, 8x8 blocks, unsigned normalized
718 case GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG: return VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG; // 4-component PVRTC, 16x8 blocks, unsigned normalized
719 case GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG: return VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG; // 4-component PVRTC, 8x8 blocks, unsigned normalized
720 case GL_COMPRESSED_RGBA_PVRTC_2BPPV2_IMG: return VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG; // 4-component PVRTC, 8x4 blocks, unsigned normalized
721 case GL_COMPRESSED_RGBA_PVRTC_4BPPV2_IMG: return VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG; // 4-component PVRTC, 4x4 blocks, unsigned normalized
722
723 case GL_COMPRESSED_SRGB_PVRTC_2BPPV1_EXT: return VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG; // 3-component PVRTC, 16x8 blocks, sRGB
724 case GL_COMPRESSED_SRGB_PVRTC_4BPPV1_EXT: return VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG; // 3-component PVRTC, 8x8 blocks, sRGB
725 case GL_COMPRESSED_SRGB_ALPHA_PVRTC_2BPPV1_EXT: return VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG; // 4-component PVRTC, 16x8 blocks, sRGB
726 case GL_COMPRESSED_SRGB_ALPHA_PVRTC_4BPPV1_EXT: return VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG; // 4-component PVRTC, 8x8 blocks, sRGB
727 case GL_COMPRESSED_SRGB_ALPHA_PVRTC_2BPPV2_IMG: return VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG; // 4-component PVRTC, 8x4 blocks, sRGB
728 case GL_COMPRESSED_SRGB_ALPHA_PVRTC_4BPPV2_IMG: return VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG; // 4-component PVRTC, 4x4 blocks, sRGB
729
730 //
731 // ASTC
732 //
733 case GL_COMPRESSED_RGBA_ASTC_4x4_KHR: return VK_FORMAT_ASTC_4x4_UNORM_BLOCK; // 4-component ASTC, 4x4 blocks, unsigned normalized
734 case GL_COMPRESSED_RGBA_ASTC_5x4_KHR: return VK_FORMAT_ASTC_5x4_UNORM_BLOCK; // 4-component ASTC, 5x4 blocks, unsigned normalized
735 case GL_COMPRESSED_RGBA_ASTC_5x5_KHR: return VK_FORMAT_ASTC_5x5_UNORM_BLOCK; // 4-component ASTC, 5x5 blocks, unsigned normalized
736 case GL_COMPRESSED_RGBA_ASTC_6x5_KHR: return VK_FORMAT_ASTC_6x5_UNORM_BLOCK; // 4-component ASTC, 6x5 blocks, unsigned normalized
737 case GL_COMPRESSED_RGBA_ASTC_6x6_KHR: return VK_FORMAT_ASTC_6x6_UNORM_BLOCK; // 4-component ASTC, 6x6 blocks, unsigned normalized
738 case GL_COMPRESSED_RGBA_ASTC_8x5_KHR: return VK_FORMAT_ASTC_8x5_UNORM_BLOCK; // 4-component ASTC, 8x5 blocks, unsigned normalized
739 case GL_COMPRESSED_RGBA_ASTC_8x6_KHR: return VK_FORMAT_ASTC_8x6_UNORM_BLOCK; // 4-component ASTC, 8x6 blocks, unsigned normalized
740 case GL_COMPRESSED_RGBA_ASTC_8x8_KHR: return VK_FORMAT_ASTC_8x8_UNORM_BLOCK; // 4-component ASTC, 8x8 blocks, unsigned normalized
741 case GL_COMPRESSED_RGBA_ASTC_10x5_KHR: return VK_FORMAT_ASTC_10x5_UNORM_BLOCK; // 4-component ASTC, 10x5 blocks, unsigned normalized
742 case GL_COMPRESSED_RGBA_ASTC_10x6_KHR: return VK_FORMAT_ASTC_10x6_UNORM_BLOCK; // 4-component ASTC, 10x6 blocks, unsigned normalized
743 case GL_COMPRESSED_RGBA_ASTC_10x8_KHR: return VK_FORMAT_ASTC_10x8_UNORM_BLOCK; // 4-component ASTC, 10x8 blocks, unsigned normalized
744 case GL_COMPRESSED_RGBA_ASTC_10x10_KHR: return VK_FORMAT_ASTC_10x10_UNORM_BLOCK; // 4-component ASTC, 10x10 blocks, unsigned normalized
745 case GL_COMPRESSED_RGBA_ASTC_12x10_KHR: return VK_FORMAT_ASTC_12x10_UNORM_BLOCK; // 4-component ASTC, 12x10 blocks, unsigned normalized
746 case GL_COMPRESSED_RGBA_ASTC_12x12_KHR: return VK_FORMAT_ASTC_12x12_UNORM_BLOCK; // 4-component ASTC, 12x12 blocks, unsigned normalized
747
748 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR: return VK_FORMAT_ASTC_4x4_SRGB_BLOCK; // 4-component ASTC, 4x4 blocks, sRGB
749 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR: return VK_FORMAT_ASTC_5x4_SRGB_BLOCK; // 4-component ASTC, 5x4 blocks, sRGB
750 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR: return VK_FORMAT_ASTC_5x5_SRGB_BLOCK; // 4-component ASTC, 5x5 blocks, sRGB
751 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR: return VK_FORMAT_ASTC_6x5_SRGB_BLOCK; // 4-component ASTC, 6x5 blocks, sRGB
752 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR: return VK_FORMAT_ASTC_6x6_SRGB_BLOCK; // 4-component ASTC, 6x6 blocks, sRGB
753 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR: return VK_FORMAT_ASTC_8x5_SRGB_BLOCK; // 4-component ASTC, 8x5 blocks, sRGB
754 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR: return VK_FORMAT_ASTC_8x6_SRGB_BLOCK; // 4-component ASTC, 8x6 blocks, sRGB
755 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR: return VK_FORMAT_ASTC_8x8_SRGB_BLOCK; // 4-component ASTC, 8x8 blocks, sRGB
756 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR: return VK_FORMAT_ASTC_10x5_SRGB_BLOCK; // 4-component ASTC, 10x5 blocks, sRGB
757 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR: return VK_FORMAT_ASTC_10x6_SRGB_BLOCK; // 4-component ASTC, 10x6 blocks, sRGB
758 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR: return VK_FORMAT_ASTC_10x8_SRGB_BLOCK; // 4-component ASTC, 10x8 blocks, sRGB
759 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR: return VK_FORMAT_ASTC_10x10_SRGB_BLOCK; // 4-component ASTC, 10x10 blocks, sRGB
760 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR: return VK_FORMAT_ASTC_12x10_SRGB_BLOCK; // 4-component ASTC, 12x10 blocks, sRGB
761 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR: return VK_FORMAT_ASTC_12x12_SRGB_BLOCK; // 4-component ASTC, 12x12 blocks, sRGB
762
763 case GL_COMPRESSED_RGBA_ASTC_3x3x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 3x3x3 blocks, unsigned normalized
764 case GL_COMPRESSED_RGBA_ASTC_4x3x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x3x3 blocks, unsigned normalized
765 case GL_COMPRESSED_RGBA_ASTC_4x4x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x4x3 blocks, unsigned normalized
766 case GL_COMPRESSED_RGBA_ASTC_4x4x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x4x4 blocks, unsigned normalized
767 case GL_COMPRESSED_RGBA_ASTC_5x4x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x4x4 blocks, unsigned normalized
768 case GL_COMPRESSED_RGBA_ASTC_5x5x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x5x4 blocks, unsigned normalized
769 case GL_COMPRESSED_RGBA_ASTC_5x5x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x5x5 blocks, unsigned normalized
770 case GL_COMPRESSED_RGBA_ASTC_6x5x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x5x5 blocks, unsigned normalized
771 case GL_COMPRESSED_RGBA_ASTC_6x6x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x6x5 blocks, unsigned normalized
772 case GL_COMPRESSED_RGBA_ASTC_6x6x6_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x6x6 blocks, unsigned normalized
773
774 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_3x3x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 3x3x3 blocks, sRGB
775 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x3x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x3x3 blocks, sRGB
776 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x4x3 blocks, sRGB
777 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x4x4 blocks, sRGB
778 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x4x4 blocks, sRGB
779 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x5x4 blocks, sRGB
780 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x5x5 blocks, sRGB
781 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x5x5 blocks, sRGB
782 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x6x5 blocks, sRGB
783 case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6x6_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x6x6 blocks, sRGB
784
785 //
786 // ATC
787 //
788 case GL_ATC_RGB_AMD: return VK_FORMAT_UNDEFINED; // 3-component, 4x4 blocks, unsigned normalized
789 case GL_ATC_RGBA_EXPLICIT_ALPHA_AMD: return VK_FORMAT_UNDEFINED; // 4-component, 4x4 blocks, unsigned normalized
790 case GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD: return VK_FORMAT_UNDEFINED; // 4-component, 4x4 blocks, unsigned normalized
791
792 //
	// Palettized
794 //
795 case GL_PALETTE4_RGB8_OES: return VK_FORMAT_UNDEFINED; // 3-component 8:8:8, 4-bit palette, unsigned normalized
796 case GL_PALETTE4_RGBA8_OES: return VK_FORMAT_UNDEFINED; // 4-component 8:8:8:8, 4-bit palette, unsigned normalized
797 case GL_PALETTE4_R5_G6_B5_OES: return VK_FORMAT_UNDEFINED; // 3-component 5:6:5, 4-bit palette, unsigned normalized
798 case GL_PALETTE4_RGBA4_OES: return VK_FORMAT_UNDEFINED; // 4-component 4:4:4:4, 4-bit palette, unsigned normalized
799 case GL_PALETTE4_RGB5_A1_OES: return VK_FORMAT_UNDEFINED; // 4-component 5:5:5:1, 4-bit palette, unsigned normalized
800 case GL_PALETTE8_RGB8_OES: return VK_FORMAT_UNDEFINED; // 3-component 8:8:8, 8-bit palette, unsigned normalized
801 case GL_PALETTE8_RGBA8_OES: return VK_FORMAT_UNDEFINED; // 4-component 8:8:8:8, 8-bit palette, unsigned normalized
802 case GL_PALETTE8_R5_G6_B5_OES: return VK_FORMAT_UNDEFINED; // 3-component 5:6:5, 8-bit palette, unsigned normalized
803 case GL_PALETTE8_RGBA4_OES: return VK_FORMAT_UNDEFINED; // 4-component 4:4:4:4, 8-bit palette, unsigned normalized
804 case GL_PALETTE8_RGB5_A1_OES: return VK_FORMAT_UNDEFINED; // 4-component 5:5:5:1, 8-bit palette, unsigned normalized
805
806 //
807 // Depth/stencil
808 //
809 case GL_DEPTH_COMPONENT16: return VK_FORMAT_D16_UNORM;
810 case GL_DEPTH_COMPONENT24: return VK_FORMAT_X8_D24_UNORM_PACK32;
811 case GL_DEPTH_COMPONENT32: return VK_FORMAT_UNDEFINED;
812 case GL_DEPTH_COMPONENT32F: return VK_FORMAT_D32_SFLOAT;
813 case GL_DEPTH_COMPONENT32F_NV: return VK_FORMAT_D32_SFLOAT;
814 case GL_STENCIL_INDEX1: return VK_FORMAT_UNDEFINED;
815 case GL_STENCIL_INDEX4: return VK_FORMAT_UNDEFINED;
816 case GL_STENCIL_INDEX8: return VK_FORMAT_S8_UINT;
817 case GL_STENCIL_INDEX16: return VK_FORMAT_UNDEFINED;
818 case GL_DEPTH24_STENCIL8: return VK_FORMAT_D24_UNORM_S8_UINT;
819 case GL_DEPTH32F_STENCIL8: return VK_FORMAT_D32_SFLOAT_S8_UINT;
820 case GL_DEPTH32F_STENCIL8_NV: return VK_FORMAT_D32_SFLOAT_S8_UINT;
821
822 default: return VK_FORMAT_UNDEFINED;
823 }
824}
825
826static inline void vkGetFormatSize( const VkFormat format, ktxFormatSize * pFormatSize )
827{
828 pFormatSize->minBlocksX = pFormatSize->minBlocksY = 1;
829 switch ( format )
830 {
831 case VK_FORMAT_R4G4_UNORM_PACK8:
832 pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
833 pFormatSize->paletteSizeInBits = 0;
834 pFormatSize->blockSizeInBits = 1 * 8;
835 pFormatSize->blockWidth = 1;
836 pFormatSize->blockHeight = 1;
837 pFormatSize->blockDepth = 1;
838 break;
839 case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
840 case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
841 case VK_FORMAT_R5G6B5_UNORM_PACK16:
842 case VK_FORMAT_B5G6R5_UNORM_PACK16:
843 case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
844 case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
845 case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
846 pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
847 pFormatSize->paletteSizeInBits = 0;
848 pFormatSize->blockSizeInBits = 2 * 8;
849 pFormatSize->blockWidth = 1;
850 pFormatSize->blockHeight = 1;
851 pFormatSize->blockDepth = 1;
852 break;
853 case VK_FORMAT_R8_UNORM:
854 case VK_FORMAT_R8_SNORM:
855 case VK_FORMAT_R8_USCALED:
856 case VK_FORMAT_R8_SSCALED:
857 case VK_FORMAT_R8_UINT:
858 case VK_FORMAT_R8_SINT:
859 case VK_FORMAT_R8_SRGB:
860 pFormatSize->flags = 0;
861 pFormatSize->paletteSizeInBits = 0;
862 pFormatSize->blockSizeInBits = 1 * 8;
863 pFormatSize->blockWidth = 1;
864 pFormatSize->blockHeight = 1;
865 pFormatSize->blockDepth = 1;
866 break;
867 case VK_FORMAT_R8G8_UNORM:
868 case VK_FORMAT_R8G8_SNORM:
869 case VK_FORMAT_R8G8_USCALED:
870 case VK_FORMAT_R8G8_SSCALED:
871 case VK_FORMAT_R8G8_UINT:
872 case VK_FORMAT_R8G8_SINT:
873 case VK_FORMAT_R8G8_SRGB:
874 pFormatSize->flags = 0;
875 pFormatSize->paletteSizeInBits = 0;
876 pFormatSize->blockSizeInBits = 2 * 8;
877 pFormatSize->blockWidth = 1;
878 pFormatSize->blockHeight = 1;
879 pFormatSize->blockDepth = 1;
880 break;
881 case VK_FORMAT_R8G8B8_UNORM:
882 case VK_FORMAT_R8G8B8_SNORM:
883 case VK_FORMAT_R8G8B8_USCALED:
884 case VK_FORMAT_R8G8B8_SSCALED:
885 case VK_FORMAT_R8G8B8_UINT:
886 case VK_FORMAT_R8G8B8_SINT:
887 case VK_FORMAT_R8G8B8_SRGB:
888 case VK_FORMAT_B8G8R8_UNORM:
889 case VK_FORMAT_B8G8R8_SNORM:
890 case VK_FORMAT_B8G8R8_USCALED:
891 case VK_FORMAT_B8G8R8_SSCALED:
892 case VK_FORMAT_B8G8R8_UINT:
893 case VK_FORMAT_B8G8R8_SINT:
894 case VK_FORMAT_B8G8R8_SRGB:
895 pFormatSize->flags = 0;
896 pFormatSize->paletteSizeInBits = 0;
897 pFormatSize->blockSizeInBits = 3 * 8;
898 pFormatSize->blockWidth = 1;
899 pFormatSize->blockHeight = 1;
900 pFormatSize->blockDepth = 1;
901 break;
902 case VK_FORMAT_R8G8B8A8_UNORM:
903 case VK_FORMAT_R8G8B8A8_SNORM:
904 case VK_FORMAT_R8G8B8A8_USCALED:
905 case VK_FORMAT_R8G8B8A8_SSCALED:
906 case VK_FORMAT_R8G8B8A8_UINT:
907 case VK_FORMAT_R8G8B8A8_SINT:
908 case VK_FORMAT_R8G8B8A8_SRGB:
909 case VK_FORMAT_B8G8R8A8_UNORM:
910 case VK_FORMAT_B8G8R8A8_SNORM:
911 case VK_FORMAT_B8G8R8A8_USCALED:
912 case VK_FORMAT_B8G8R8A8_SSCALED:
913 case VK_FORMAT_B8G8R8A8_UINT:
914 case VK_FORMAT_B8G8R8A8_SINT:
915 case VK_FORMAT_B8G8R8A8_SRGB:
916 pFormatSize->flags = 0;
917 pFormatSize->paletteSizeInBits = 0;
918 pFormatSize->blockSizeInBits = 4 * 8;
919 pFormatSize->blockWidth = 1;
920 pFormatSize->blockHeight = 1;
921 pFormatSize->blockDepth = 1;
922 break;
923 case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
924 case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
925 case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
926 case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
927 case VK_FORMAT_A8B8G8R8_UINT_PACK32:
928 case VK_FORMAT_A8B8G8R8_SINT_PACK32:
929 case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
930 pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
931 pFormatSize->paletteSizeInBits = 0;
932 pFormatSize->blockSizeInBits = 4 * 8;
933 pFormatSize->blockWidth = 1;
934 pFormatSize->blockHeight = 1;
935 pFormatSize->blockDepth = 1;
936 break;
937 case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
938 case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
939 case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
940 case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
941 case VK_FORMAT_A2R10G10B10_UINT_PACK32:
942 case VK_FORMAT_A2R10G10B10_SINT_PACK32:
943 case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
944 case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
945 case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
946 case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
947 case VK_FORMAT_A2B10G10R10_UINT_PACK32:
948 case VK_FORMAT_A2B10G10R10_SINT_PACK32:
949 pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
950 pFormatSize->paletteSizeInBits = 0;
951 pFormatSize->blockSizeInBits = 4 * 8;
952 pFormatSize->blockWidth = 1;
953 pFormatSize->blockHeight = 1;
954 pFormatSize->blockDepth = 1;
955 break;
956 case VK_FORMAT_R16_UNORM:
957 case VK_FORMAT_R16_SNORM:
958 case VK_FORMAT_R16_USCALED:
959 case VK_FORMAT_R16_SSCALED:
960 case VK_FORMAT_R16_UINT:
961 case VK_FORMAT_R16_SINT:
962 case VK_FORMAT_R16_SFLOAT:
963 pFormatSize->flags = 0;
964 pFormatSize->paletteSizeInBits = 0;
965 pFormatSize->blockSizeInBits = 2 * 8;
966 pFormatSize->blockWidth = 1;
967 pFormatSize->blockHeight = 1;
968 pFormatSize->blockDepth = 1;
969 break;
970 case VK_FORMAT_R16G16_UNORM:
971 case VK_FORMAT_R16G16_SNORM:
972 case VK_FORMAT_R16G16_USCALED:
973 case VK_FORMAT_R16G16_SSCALED:
974 case VK_FORMAT_R16G16_UINT:
975 case VK_FORMAT_R16G16_SINT:
976 case VK_FORMAT_R16G16_SFLOAT:
977 pFormatSize->flags = 0;
978 pFormatSize->paletteSizeInBits = 0;
979 pFormatSize->blockSizeInBits = 4 * 8;
980 pFormatSize->blockWidth = 1;
981 pFormatSize->blockHeight = 1;
982 pFormatSize->blockDepth = 1;
983 break;
984 case VK_FORMAT_R16G16B16_UNORM:
985 case VK_FORMAT_R16G16B16_SNORM:
986 case VK_FORMAT_R16G16B16_USCALED:
987 case VK_FORMAT_R16G16B16_SSCALED:
988 case VK_FORMAT_R16G16B16_UINT:
989 case VK_FORMAT_R16G16B16_SINT:
990 case VK_FORMAT_R16G16B16_SFLOAT:
991 pFormatSize->flags = 0;
992 pFormatSize->paletteSizeInBits = 0;
993 pFormatSize->blockSizeInBits = 6 * 8;
994 pFormatSize->blockWidth = 1;
995 pFormatSize->blockHeight = 1;
996 pFormatSize->blockDepth = 1;
997 break;
998 case VK_FORMAT_R16G16B16A16_UNORM:
999 case VK_FORMAT_R16G16B16A16_SNORM:
1000 case VK_FORMAT_R16G16B16A16_USCALED:
1001 case VK_FORMAT_R16G16B16A16_SSCALED:
1002 case VK_FORMAT_R16G16B16A16_UINT:
1003 case VK_FORMAT_R16G16B16A16_SINT:
1004 case VK_FORMAT_R16G16B16A16_SFLOAT:
1005 pFormatSize->flags = 0;
1006 pFormatSize->paletteSizeInBits = 0;
1007 pFormatSize->blockSizeInBits = 8 * 8;
1008 pFormatSize->blockWidth = 1;
1009 pFormatSize->blockHeight = 1;
1010 pFormatSize->blockDepth = 1;
1011 break;
1012 case VK_FORMAT_R32_UINT:
1013 case VK_FORMAT_R32_SINT:
1014 case VK_FORMAT_R32_SFLOAT:
1015 pFormatSize->flags = 0;
1016 pFormatSize->paletteSizeInBits = 0;
1017 pFormatSize->blockSizeInBits = 4 * 8;
1018 pFormatSize->blockWidth = 1;
1019 pFormatSize->blockHeight = 1;
1020 pFormatSize->blockDepth = 1;
1021 break;
1022 case VK_FORMAT_R32G32_UINT:
1023 case VK_FORMAT_R32G32_SINT:
1024 case VK_FORMAT_R32G32_SFLOAT:
1025 pFormatSize->flags = 0;
1026 pFormatSize->paletteSizeInBits = 0;
1027 pFormatSize->blockSizeInBits = 8 * 8;
1028 pFormatSize->blockWidth = 1;
1029 pFormatSize->blockHeight = 1;
1030 pFormatSize->blockDepth = 1;
1031 break;
1032 case VK_FORMAT_R32G32B32_UINT:
1033 case VK_FORMAT_R32G32B32_SINT:
1034 case VK_FORMAT_R32G32B32_SFLOAT:
1035 pFormatSize->flags = 0;
1036 pFormatSize->paletteSizeInBits = 0;
1037 pFormatSize->blockSizeInBits = 12 * 8;
1038 pFormatSize->blockWidth = 1;
1039 pFormatSize->blockHeight = 1;
1040 pFormatSize->blockDepth = 1;
1041 break;
1042 case VK_FORMAT_R32G32B32A32_UINT:
1043 case VK_FORMAT_R32G32B32A32_SINT:
1044 case VK_FORMAT_R32G32B32A32_SFLOAT:
1045 pFormatSize->flags = 0;
1046 pFormatSize->paletteSizeInBits = 0;
1047 pFormatSize->blockSizeInBits = 16 * 8;
1048 pFormatSize->blockWidth = 1;
1049 pFormatSize->blockHeight = 1;
1050 pFormatSize->blockDepth = 1;
1051 break;
1052 case VK_FORMAT_R64_UINT:
1053 case VK_FORMAT_R64_SINT:
1054 case VK_FORMAT_R64_SFLOAT:
1055 pFormatSize->flags = 0;
1056 pFormatSize->paletteSizeInBits = 0;
1057 pFormatSize->blockSizeInBits = 8 * 8;
1058 pFormatSize->blockWidth = 1;
1059 pFormatSize->blockHeight = 1;
1060 pFormatSize->blockDepth = 1;
1061 break;
1062 case VK_FORMAT_R64G64_UINT:
1063 case VK_FORMAT_R64G64_SINT:
1064 case VK_FORMAT_R64G64_SFLOAT:
1065 pFormatSize->flags = 0;
1066 pFormatSize->paletteSizeInBits = 0;
1067 pFormatSize->blockSizeInBits = 16 * 8;
1068 pFormatSize->blockWidth = 1;
1069 pFormatSize->blockHeight = 1;
1070 pFormatSize->blockDepth = 1;
1071 break;
1072 case VK_FORMAT_R64G64B64_UINT:
1073 case VK_FORMAT_R64G64B64_SINT:
1074 case VK_FORMAT_R64G64B64_SFLOAT:
1075 pFormatSize->flags = 0;
1076 pFormatSize->paletteSizeInBits = 0;
1077 pFormatSize->blockSizeInBits = 24 * 8;
1078 pFormatSize->blockWidth = 1;
1079 pFormatSize->blockHeight = 1;
1080 pFormatSize->blockDepth = 1;
1081 break;
1082 case VK_FORMAT_R64G64B64A64_UINT:
1083 case VK_FORMAT_R64G64B64A64_SINT:
1084 case VK_FORMAT_R64G64B64A64_SFLOAT:
1085 pFormatSize->flags = 0;
1086 pFormatSize->paletteSizeInBits = 0;
1087 pFormatSize->blockSizeInBits = 32 * 8;
1088 pFormatSize->blockWidth = 1;
1089 pFormatSize->blockHeight = 1;
1090 pFormatSize->blockDepth = 1;
1091 break;
1092 case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
1093 case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
1094 pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
1095 pFormatSize->paletteSizeInBits = 0;
1096 pFormatSize->blockSizeInBits = 4 * 8;
1097 pFormatSize->blockWidth = 1;
1098 pFormatSize->blockHeight = 1;
1099 pFormatSize->blockDepth = 1;
1100 break;
1101 case VK_FORMAT_D16_UNORM:
1102 pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT;
1103 pFormatSize->paletteSizeInBits = 0;
1104 pFormatSize->blockSizeInBits = 2 * 8;
1105 pFormatSize->blockWidth = 1;
1106 pFormatSize->blockHeight = 1;
1107 pFormatSize->blockDepth = 1;
1108 break;
1109 case VK_FORMAT_X8_D24_UNORM_PACK32:
1110 pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT | KTX_FORMAT_SIZE_DEPTH_BIT;
1111 pFormatSize->paletteSizeInBits = 0;
1112 pFormatSize->blockSizeInBits = 4 * 8;
1113 pFormatSize->blockWidth = 1;
1114 pFormatSize->blockHeight = 1;
1115 pFormatSize->blockDepth = 1;
1116 break;
1117 case VK_FORMAT_D32_SFLOAT:
1118 pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT;
1119 pFormatSize->paletteSizeInBits = 0;
1120 pFormatSize->blockSizeInBits = 4 * 8;
1121 pFormatSize->blockWidth = 1;
1122 pFormatSize->blockHeight = 1;
1123 pFormatSize->blockDepth = 1;
1124 break;
1125 case VK_FORMAT_S8_UINT:
1126 pFormatSize->flags = KTX_FORMAT_SIZE_STENCIL_BIT;
1127 pFormatSize->paletteSizeInBits = 0;
1128 pFormatSize->blockSizeInBits = 1 * 8;
1129 pFormatSize->blockWidth = 1;
1130 pFormatSize->blockHeight = 1;
1131 pFormatSize->blockDepth = 1;
1132 break;
1133 case VK_FORMAT_D16_UNORM_S8_UINT:
1134 pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT | KTX_FORMAT_SIZE_STENCIL_BIT;
1135 pFormatSize->paletteSizeInBits = 0;
1136 pFormatSize->blockSizeInBits = 3 * 8;
1137 pFormatSize->blockWidth = 1;
1138 pFormatSize->blockHeight = 1;
1139 pFormatSize->blockDepth = 1;
1140 break;
1141 case VK_FORMAT_D24_UNORM_S8_UINT:
1142 pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT | KTX_FORMAT_SIZE_STENCIL_BIT;
1143 pFormatSize->paletteSizeInBits = 0;
1144 pFormatSize->blockSizeInBits = 4 * 8;
1145 pFormatSize->blockWidth = 1;
1146 pFormatSize->blockHeight = 1;
1147 pFormatSize->blockDepth = 1;
1148 break;
1149 case VK_FORMAT_D32_SFLOAT_S8_UINT:
1150 pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT | KTX_FORMAT_SIZE_STENCIL_BIT;
1151 pFormatSize->paletteSizeInBits = 0;
1152 pFormatSize->blockSizeInBits = 8 * 8;
1153 pFormatSize->blockWidth = 1;
1154 pFormatSize->blockHeight = 1;
1155 pFormatSize->blockDepth = 1;
1156 break;
1157 case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
1158 case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
1159 case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
1160 case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
1161 case VK_FORMAT_BC4_UNORM_BLOCK:
1162 case VK_FORMAT_BC4_SNORM_BLOCK:
1163 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1164 pFormatSize->paletteSizeInBits = 0;
1165 pFormatSize->blockSizeInBits = 8 * 8;
1166 pFormatSize->blockWidth = 4;
1167 pFormatSize->blockHeight = 4;
1168 pFormatSize->blockDepth = 1;
1169 break;
1170 case VK_FORMAT_BC2_UNORM_BLOCK:
1171 case VK_FORMAT_BC2_SRGB_BLOCK:
1172 case VK_FORMAT_BC3_UNORM_BLOCK:
1173 case VK_FORMAT_BC3_SRGB_BLOCK:
1174 case VK_FORMAT_BC5_UNORM_BLOCK:
1175 case VK_FORMAT_BC5_SNORM_BLOCK:
1176 case VK_FORMAT_BC6H_UFLOAT_BLOCK:
1177 case VK_FORMAT_BC6H_SFLOAT_BLOCK:
1178 case VK_FORMAT_BC7_UNORM_BLOCK:
1179 case VK_FORMAT_BC7_SRGB_BLOCK:
1180 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1181 pFormatSize->paletteSizeInBits = 0;
1182 pFormatSize->blockSizeInBits = 16 * 8;
1183 pFormatSize->blockWidth = 4;
1184 pFormatSize->blockHeight = 4;
1185 pFormatSize->blockDepth = 1;
1186 break;
1187 case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
1188 case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
1189 case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
1190 case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
1191 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1192 pFormatSize->paletteSizeInBits = 0;
1193 pFormatSize->blockSizeInBits = 8 * 8;
1194 pFormatSize->blockWidth = 4;
1195 pFormatSize->blockHeight = 4;
1196 pFormatSize->blockDepth = 1;
1197 break;
1198 case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
1199 case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
1200 case VK_FORMAT_EAC_R11_UNORM_BLOCK:
1201 case VK_FORMAT_EAC_R11_SNORM_BLOCK:
1202 case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
1203 case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
1204 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1205 pFormatSize->paletteSizeInBits = 0;
1206 pFormatSize->blockSizeInBits = 16 * 8;
1207 pFormatSize->blockWidth = 4;
1208 pFormatSize->blockHeight = 4;
1209 pFormatSize->blockDepth = 1;
1210 break;
1211 case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
1212 case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
1213 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1214 pFormatSize->paletteSizeInBits = 0;
1215 pFormatSize->blockSizeInBits = 8 * 8;
1216 pFormatSize->blockWidth = 8;
1217 pFormatSize->blockHeight = 4;
1218 pFormatSize->blockDepth = 1;
1219 pFormatSize->minBlocksX = 2;
1220 pFormatSize->minBlocksY = 2;
1221 break;
1222 case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
1223 case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
1224 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1225 pFormatSize->paletteSizeInBits = 0;
1226 pFormatSize->blockSizeInBits = 8 * 8;
1227 pFormatSize->blockWidth = 8;
1228 pFormatSize->blockHeight = 4;
1229 pFormatSize->blockDepth = 1;
1230 break;
1231 case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
1232 case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
1233 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1234 pFormatSize->paletteSizeInBits = 0;
1235 pFormatSize->blockSizeInBits = 8 * 8;
1236 pFormatSize->blockWidth = 4;
1237 pFormatSize->blockHeight = 4;
1238 pFormatSize->blockDepth = 1;
1239 pFormatSize->minBlocksX = 2;
1240 pFormatSize->minBlocksY = 2;
1241 break;
1242 case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
1243 case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
1244 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1245 pFormatSize->paletteSizeInBits = 0;
1246 pFormatSize->blockSizeInBits = 8 * 8;
1247 pFormatSize->blockWidth = 4;
1248 pFormatSize->blockHeight = 4;
1249 pFormatSize->blockDepth = 1;
1250 break;
1251 case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
1252 case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
1253 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1254 pFormatSize->paletteSizeInBits = 0;
1255 pFormatSize->blockSizeInBits = 16 * 8;
1256 pFormatSize->blockWidth = 4;
1257 pFormatSize->blockHeight = 4;
1258 pFormatSize->blockDepth = 1;
1259 break;
1260 case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
1261 case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
1262 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1263 pFormatSize->paletteSizeInBits = 0;
1264 pFormatSize->blockSizeInBits = 16 * 8;
1265 pFormatSize->blockWidth = 5;
1266 pFormatSize->blockHeight = 4;
1267 pFormatSize->blockDepth = 1;
1268 break;
1269 case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
1270 case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
1271 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1272 pFormatSize->paletteSizeInBits = 0;
1273 pFormatSize->blockSizeInBits = 16 * 8;
1274 pFormatSize->blockWidth = 5;
1275 pFormatSize->blockHeight = 5;
1276 pFormatSize->blockDepth = 1;
1277 break;
1278 case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
1279 case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
1280 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1281 pFormatSize->paletteSizeInBits = 0;
1282 pFormatSize->blockSizeInBits = 16 * 8;
1283 pFormatSize->blockWidth = 6;
1284 pFormatSize->blockHeight = 5;
1285 pFormatSize->blockDepth = 1;
1286 break;
1287 case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
1288 case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
1289 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1290 pFormatSize->paletteSizeInBits = 0;
1291 pFormatSize->blockSizeInBits = 16 * 8;
1292 pFormatSize->blockWidth = 6;
1293 pFormatSize->blockHeight = 6;
1294 pFormatSize->blockDepth = 1;
1295 break;
1296 case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
1297 case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
1298 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1299 pFormatSize->paletteSizeInBits = 0;
1300 pFormatSize->blockSizeInBits = 16 * 8;
1301 pFormatSize->blockWidth = 8;
1302 pFormatSize->blockHeight = 5;
1303 pFormatSize->blockDepth = 1;
1304 break;
1305 case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
1306 case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
1307 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1308 pFormatSize->paletteSizeInBits = 0;
1309 pFormatSize->blockSizeInBits = 16 * 8;
1310 pFormatSize->blockWidth = 8;
1311 pFormatSize->blockHeight = 6;
1312 pFormatSize->blockDepth = 1;
1313 break;
1314 case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
1315 case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
1316 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1317 pFormatSize->paletteSizeInBits = 0;
1318 pFormatSize->blockSizeInBits = 16 * 8;
1319 pFormatSize->blockWidth = 8;
1320 pFormatSize->blockHeight = 8;
1321 pFormatSize->blockDepth = 1;
1322 break;
1323 case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
1324 case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
1325 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1326 pFormatSize->paletteSizeInBits = 0;
1327 pFormatSize->blockSizeInBits = 16 * 8;
1328 pFormatSize->blockWidth = 10;
1329 pFormatSize->blockHeight = 5;
1330 pFormatSize->blockDepth = 1;
1331 break;
1332 case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
1333 case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
1334 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1335 pFormatSize->paletteSizeInBits = 0;
1336 pFormatSize->blockSizeInBits = 16 * 8;
1337 pFormatSize->blockWidth = 10;
1338 pFormatSize->blockHeight = 6;
1339 pFormatSize->blockDepth = 1;
1340 break;
1341 case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
1342 case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
1343 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1344 pFormatSize->paletteSizeInBits = 0;
1345 pFormatSize->blockSizeInBits = 16 * 8;
1346 pFormatSize->blockWidth = 10;
1347 pFormatSize->blockHeight = 8;
1348 pFormatSize->blockDepth = 1;
1349 break;
1350 case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
1351 case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
1352 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1353 pFormatSize->paletteSizeInBits = 0;
1354 pFormatSize->blockSizeInBits = 16 * 8;
1355 pFormatSize->blockWidth = 10;
1356 pFormatSize->blockHeight = 10;
1357 pFormatSize->blockDepth = 1;
1358 break;
1359 case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
1360 case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
1361 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1362 pFormatSize->paletteSizeInBits = 0;
1363 pFormatSize->blockSizeInBits = 16 * 8;
1364 pFormatSize->blockWidth = 12;
1365 pFormatSize->blockHeight = 10;
1366 pFormatSize->blockDepth = 1;
1367 break;
1368 case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
1369 case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
1370 pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
1371 pFormatSize->paletteSizeInBits = 0;
1372 pFormatSize->blockSizeInBits = 16 * 8;
1373 pFormatSize->blockWidth = 12;
1374 pFormatSize->blockHeight = 12;
1375 pFormatSize->blockDepth = 1;
1376 break;
1377 default:
1378 pFormatSize->flags = 0;
1379 pFormatSize->paletteSizeInBits = 0;
1380 pFormatSize->blockSizeInBits = 0 * 8;
1381 pFormatSize->blockWidth = 1;
1382 pFormatSize->blockHeight = 1;
1383 pFormatSize->blockDepth = 1;
1384 break;
1385 }
1386}
1387
1388#endif // !VK_FORMAT_H
1389