- /*
- ================================================================================================
- Description : Vulkan format properties and conversion from OpenGL.
- Author : J.M.P. van Waveren
- Date : 07/17/2016
- Language : C99
- Format : Real tabs with the tab size equal to 4 spaces.
- Copyright : Copyright (c) 2016 Oculus VR, LLC. All Rights reserved.
- LICENSE
- =======
- Copyright 2016 Oculus VR, LLC.
- SPDX-License-Identifier: Apache-2.0
- DESCRIPTION
- ===========
- This header implements several support routines to convert OpenGL formats/types
- to Vulkan formats. These routines are particularly useful for loading file
- formats that store OpenGL formats/types such as KTX and glTF.
- The functions in this header file convert the format, internalFormat and type
- that are used as parameters to the following OpenGL functions:
- void glTexImage2D( GLenum target, GLint level, GLint internalFormat,
- GLsizei width, GLsizei height, GLint border,
- GLenum format, GLenum type, const GLvoid * data );
- void glTexImage3D( GLenum target, GLint level, GLint internalFormat,
- GLsizei width, GLsizei height, GLsizei depth, GLint border,
- GLenum format, GLenum type, const GLvoid * data );
- void glCompressedTexImage2D( GLenum target, GLint level, GLenum internalformat,
- GLsizei width, GLsizei height, GLint border,
- GLsizei imageSize, const GLvoid * data );
- void glCompressedTexImage3D( GLenum target, GLint level, GLenum internalformat,
- GLsizei width, GLsizei height, GLsizei depth, GLint border,
- GLsizei imageSize, const GLvoid * data );
- void glTexStorage2D( GLenum target, GLsizei levels, GLenum internalformat,
- GLsizei width, GLsizei height );
- void glTexStorage3D( GLenum target, GLsizei levels, GLenum internalformat,
- GLsizei width, GLsizei height, GLsizei depth );
- void glVertexAttribPointer( GLuint index, GLint size, GLenum type, GLboolean normalized,
- GLsizei stride, const GLvoid * pointer);
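- For example (an illustrative sketch only; the variable names are hypothetical and not part of this
- header), the format/type pair of a glTexImage2D call and the size/type/normalized arguments of a
- glVertexAttribPointer call map to Vulkan formats as follows:
- VkFormat texFormat = vkGetFormatFromOpenGLFormat( GL_RGBA, GL_UNSIGNED_BYTE ); // VK_FORMAT_R8G8B8A8_UNORM
- VkFormat attribFormat = vkGetFormatFromOpenGLType( GL_FLOAT, 3, GL_FALSE ); // VK_FORMAT_R32G32B32_SFLOAT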
- IMPLEMENTATION
- ==============
- This file does not include OpenGL / OpenGL ES headers because:
- 1. Including OpenGL / OpenGL ES headers is platform dependent and
- may require a separate installation of an OpenGL SDK.
- 2. The OpenGL format/type constants are the same between extensions and core.
- 3. The OpenGL format/type constants are the same between OpenGL and OpenGL ES.
- 4. File formats like KTX and glTF may use OpenGL formats and types that
- are not supported by the OpenGL implementation on the platform but are
- supported by the Vulkan implementation.
- ENTRY POINTS
- ============
- static inline VkFormat vkGetFormatFromOpenGLFormat( const GLenum format, const GLenum type );
- static inline VkFormat vkGetFormatFromOpenGLType( const GLenum type, const GLuint numComponents, const GLboolean normalized );
- static inline VkFormat vkGetFormatFromOpenGLInternalFormat( const GLenum internalFormat );
- static inline void vkGetFormatSize( const VkFormat format, ktxFormatSize * pFormatSize );
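- USAGE SKETCH
- ============
- The sketch below is illustrative only and is not part of the API; the variable names are hypothetical.
- It shows the typical flow when loading a file that stores an OpenGL internalformat, such as a KTX file:
- // Map the file's internalformat (here GL_RGBA8) to a Vulkan format.
- // VK_FORMAT_UNDEFINED is returned when no Vulkan equivalent exists.
- VkFormat vkFormat = vkGetFormatFromOpenGLInternalFormat( GL_RGBA8 );
- if ( vkFormat != VK_FORMAT_UNDEFINED )
- {
- // Query the block layout, e.g. blockSizeInBits == 32 and blockWidth/Height/Depth == 1
- // for VK_FORMAT_R8G8B8A8_UNORM.
- ktxFormatSize formatSize;
- vkGetFormatSize( vkFormat, &formatSize );
- }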
- MODIFICATIONS for use in libktx
- ===============================
- 2019.5.30 Use common ktxFormatSize to return results. Mark Callow, Edgewise Consulting.
- 2019.6.12 Add mapping of PVRTC formats. Mark Callow, Edgewise Consulting.
- ================================================================================================
- */
- #if !defined( VK_FORMAT_H )
- #define VK_FORMAT_H
- #include "gl_format.h"
- static inline VkFormat vkGetFormatFromOpenGLFormat( const GLenum format, const GLenum type )
- {
- switch ( type )
- {
- //
- // 8 bits per component
- //
- case GL_UNSIGNED_BYTE:
- {
- switch ( format )
- {
- case GL_RED: return VK_FORMAT_R8_UNORM;
- case GL_RG: return VK_FORMAT_R8G8_UNORM;
- case GL_RGB: return VK_FORMAT_R8G8B8_UNORM;
- case GL_BGR: return VK_FORMAT_B8G8R8_UNORM;
- case GL_RGBA: return VK_FORMAT_R8G8B8A8_UNORM;
- case GL_BGRA: return VK_FORMAT_B8G8R8A8_UNORM;
- case GL_RED_INTEGER: return VK_FORMAT_R8_UINT;
- case GL_RG_INTEGER: return VK_FORMAT_R8G8_UINT;
- case GL_RGB_INTEGER: return VK_FORMAT_R8G8B8_UINT;
- case GL_BGR_INTEGER: return VK_FORMAT_B8G8R8_UINT;
- case GL_RGBA_INTEGER: return VK_FORMAT_R8G8B8A8_UINT;
- case GL_BGRA_INTEGER: return VK_FORMAT_B8G8R8A8_UINT;
- case GL_STENCIL_INDEX: return VK_FORMAT_S8_UINT;
- case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
- }
- break;
- }
- case GL_BYTE:
- {
- switch ( format )
- {
- case GL_RED: return VK_FORMAT_R8_SNORM;
- case GL_RG: return VK_FORMAT_R8G8_SNORM;
- case GL_RGB: return VK_FORMAT_R8G8B8_SNORM;
- case GL_BGR: return VK_FORMAT_B8G8R8_SNORM;
- case GL_RGBA: return VK_FORMAT_R8G8B8A8_SNORM;
- case GL_BGRA: return VK_FORMAT_B8G8R8A8_SNORM;
- case GL_RED_INTEGER: return VK_FORMAT_R8_SINT;
- case GL_RG_INTEGER: return VK_FORMAT_R8G8_SINT;
- case GL_RGB_INTEGER: return VK_FORMAT_R8G8B8_SINT;
- case GL_BGR_INTEGER: return VK_FORMAT_B8G8R8_SINT;
- case GL_RGBA_INTEGER: return VK_FORMAT_R8G8B8A8_SINT;
- case GL_BGRA_INTEGER: return VK_FORMAT_B8G8R8A8_SINT;
- case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
- }
- break;
- }
- //
- // 16 bits per component
- //
- case GL_UNSIGNED_SHORT:
- {
- switch ( format )
- {
- case GL_RED: return VK_FORMAT_R16_UNORM;
- case GL_RG: return VK_FORMAT_R16G16_UNORM;
- case GL_RGB: return VK_FORMAT_R16G16B16_UNORM;
- case GL_BGR: return VK_FORMAT_UNDEFINED;
- case GL_RGBA: return VK_FORMAT_R16G16B16A16_UNORM;
- case GL_BGRA: return VK_FORMAT_UNDEFINED;
- case GL_RED_INTEGER: return VK_FORMAT_R16_UINT;
- case GL_RG_INTEGER: return VK_FORMAT_R16G16_UINT;
- case GL_RGB_INTEGER: return VK_FORMAT_R16G16B16_UINT;
- case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RGBA_INTEGER: return VK_FORMAT_R16G16B16A16_UINT;
- case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_COMPONENT: return VK_FORMAT_D16_UNORM;
- case GL_DEPTH_STENCIL: return VK_FORMAT_D16_UNORM_S8_UINT;
- }
- break;
- }
- case GL_SHORT:
- {
- switch ( format )
- {
- case GL_RED: return VK_FORMAT_R16_SNORM;
- case GL_RG: return VK_FORMAT_R16G16_SNORM;
- case GL_RGB: return VK_FORMAT_R16G16B16_SNORM;
- case GL_BGR: return VK_FORMAT_UNDEFINED;
- case GL_RGBA: return VK_FORMAT_R16G16B16A16_SNORM;
- case GL_BGRA: return VK_FORMAT_UNDEFINED;
- case GL_RED_INTEGER: return VK_FORMAT_R16_SINT;
- case GL_RG_INTEGER: return VK_FORMAT_R16G16_SINT;
- case GL_RGB_INTEGER: return VK_FORMAT_R16G16B16_SINT;
- case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RGBA_INTEGER: return VK_FORMAT_R16G16B16A16_SINT;
- case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
- }
- break;
- }
- case GL_HALF_FLOAT:
- case GL_HALF_FLOAT_OES:
- {
- switch ( format )
- {
- case GL_RED: return VK_FORMAT_R16_SFLOAT;
- case GL_RG: return VK_FORMAT_R16G16_SFLOAT;
- case GL_RGB: return VK_FORMAT_R16G16B16_SFLOAT;
- case GL_BGR: return VK_FORMAT_UNDEFINED;
- case GL_RGBA: return VK_FORMAT_R16G16B16A16_SFLOAT;
- case GL_BGRA: return VK_FORMAT_UNDEFINED;
- case GL_RED_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RG_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RGB_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RGBA_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
- }
- break;
- }
- //
- // 32 bits per component
- //
- case GL_UNSIGNED_INT:
- {
- switch ( format )
- {
- case GL_RED: return VK_FORMAT_R32_UINT;
- case GL_RG: return VK_FORMAT_R32G32_UINT;
- case GL_RGB: return VK_FORMAT_R32G32B32_UINT;
- case GL_BGR: return VK_FORMAT_UNDEFINED;
- case GL_RGBA: return VK_FORMAT_R32G32B32A32_UINT;
- case GL_BGRA: return VK_FORMAT_UNDEFINED;
- case GL_RED_INTEGER: return VK_FORMAT_R32_UINT;
- case GL_RG_INTEGER: return VK_FORMAT_R32G32_UINT;
- case GL_RGB_INTEGER: return VK_FORMAT_R32G32B32_UINT;
- case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RGBA_INTEGER: return VK_FORMAT_R32G32B32A32_UINT;
- case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_COMPONENT: return VK_FORMAT_X8_D24_UNORM_PACK32;
- case GL_DEPTH_STENCIL: return VK_FORMAT_D24_UNORM_S8_UINT;
- }
- break;
- }
- case GL_INT:
- {
- switch ( format )
- {
- case GL_RED: return VK_FORMAT_R32_SINT;
- case GL_RG: return VK_FORMAT_R32G32_SINT;
- case GL_RGB: return VK_FORMAT_R32G32B32_SINT;
- case GL_BGR: return VK_FORMAT_UNDEFINED;
- case GL_RGBA: return VK_FORMAT_R32G32B32A32_SINT;
- case GL_BGRA: return VK_FORMAT_UNDEFINED;
- case GL_RED_INTEGER: return VK_FORMAT_R32_SINT;
- case GL_RG_INTEGER: return VK_FORMAT_R32G32_SINT;
- case GL_RGB_INTEGER: return VK_FORMAT_R32G32B32_SINT;
- case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RGBA_INTEGER: return VK_FORMAT_R32G32B32A32_SINT;
- case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
- }
- break;
- }
- case GL_FLOAT:
- {
- switch ( format )
- {
- case GL_RED: return VK_FORMAT_R32_SFLOAT;
- case GL_RG: return VK_FORMAT_R32G32_SFLOAT;
- case GL_RGB: return VK_FORMAT_R32G32B32_SFLOAT;
- case GL_BGR: return VK_FORMAT_UNDEFINED;
- case GL_RGBA: return VK_FORMAT_R32G32B32A32_SFLOAT;
- case GL_BGRA: return VK_FORMAT_UNDEFINED;
- case GL_RED_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RG_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RGB_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RGBA_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_COMPONENT: return VK_FORMAT_D32_SFLOAT;
- case GL_DEPTH_STENCIL: return VK_FORMAT_D32_SFLOAT_S8_UINT;
- }
- break;
- }
- //
- // 64 bits per component
- //
- case GL_UNSIGNED_INT64:
- {
- switch ( format )
- {
- case GL_RED: return VK_FORMAT_R64_UINT;
- case GL_RG: return VK_FORMAT_R64G64_UINT;
- case GL_RGB: return VK_FORMAT_R64G64B64_UINT;
- case GL_BGR: return VK_FORMAT_UNDEFINED;
- case GL_RGBA: return VK_FORMAT_R64G64B64A64_UINT;
- case GL_BGRA: return VK_FORMAT_UNDEFINED;
- case GL_RED_INTEGER: return VK_FORMAT_R64_UINT;
- case GL_RG_INTEGER: return VK_FORMAT_R64G64_UINT;
- case GL_RGB_INTEGER: return VK_FORMAT_R64G64B64_UINT;
- case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RGBA_INTEGER: return VK_FORMAT_R64G64B64A64_UINT;
- case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
- }
- break;
- }
- case GL_INT64:
- {
- switch ( format )
- {
- case GL_RED: return VK_FORMAT_R64_SINT;
- case GL_RG: return VK_FORMAT_R64G64_SINT;
- case GL_RGB: return VK_FORMAT_R64G64B64_SINT;
- case GL_BGR: return VK_FORMAT_UNDEFINED;
- case GL_RGBA: return VK_FORMAT_R64G64B64A64_SINT;
- case GL_BGRA: return VK_FORMAT_UNDEFINED;
- case GL_RED_INTEGER: return VK_FORMAT_R64_SINT;
- case GL_RG_INTEGER: return VK_FORMAT_R64G64_SINT;
- case GL_RGB_INTEGER: return VK_FORMAT_R64G64B64_SINT;
- case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RGBA_INTEGER: return VK_FORMAT_R64G64B64A64_SINT;
- case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
- }
- break;
- }
- case GL_DOUBLE:
- {
- switch ( format )
- {
- case GL_RED: return VK_FORMAT_R64_SFLOAT;
- case GL_RG: return VK_FORMAT_R64G64_SFLOAT;
- case GL_RGB: return VK_FORMAT_R64G64B64_SFLOAT;
- case GL_BGR: return VK_FORMAT_UNDEFINED;
- case GL_RGBA: return VK_FORMAT_R64G64B64A64_SFLOAT;
- case GL_BGRA: return VK_FORMAT_UNDEFINED;
- case GL_RED_INTEGER: return VK_FORMAT_R64_SFLOAT;
- case GL_RG_INTEGER: return VK_FORMAT_R64G64_SFLOAT;
- case GL_RGB_INTEGER: return VK_FORMAT_R64G64B64_SFLOAT;
- case GL_BGR_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_RGBA_INTEGER: return VK_FORMAT_R64G64B64A64_SFLOAT;
- case GL_BGRA_INTEGER: return VK_FORMAT_UNDEFINED;
- case GL_STENCIL_INDEX: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_COMPONENT: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_STENCIL: return VK_FORMAT_UNDEFINED;
- }
- break;
- }
- //
- // Packed
- //
- case GL_UNSIGNED_BYTE_3_3_2:
- assert( format == GL_RGB || format == GL_RGB_INTEGER );
- return VK_FORMAT_UNDEFINED;
- case GL_UNSIGNED_BYTE_2_3_3_REV:
- assert( format == GL_BGR || format == GL_BGR_INTEGER );
- return VK_FORMAT_UNDEFINED;
- case GL_UNSIGNED_SHORT_5_6_5:
- assert( format == GL_RGB || format == GL_RGB_INTEGER );
- return VK_FORMAT_R5G6B5_UNORM_PACK16;
- case GL_UNSIGNED_SHORT_5_6_5_REV:
- assert( format == GL_BGR || format == GL_BGR_INTEGER );
- return VK_FORMAT_B5G6R5_UNORM_PACK16;
- case GL_UNSIGNED_SHORT_4_4_4_4:
- assert( format == GL_RGBA || format == GL_BGRA || format == GL_RGBA_INTEGER || format == GL_BGRA_INTEGER );
- return VK_FORMAT_R4G4B4A4_UNORM_PACK16;
- case GL_UNSIGNED_SHORT_4_4_4_4_REV:
- assert( format == GL_RGBA || format == GL_BGRA || format == GL_RGBA_INTEGER || format == GL_BGRA_INTEGER );
- return VK_FORMAT_B4G4R4A4_UNORM_PACK16;
- case GL_UNSIGNED_SHORT_5_5_5_1:
- assert( format == GL_RGBA || format == GL_BGRA || format == GL_RGBA_INTEGER || format == GL_BGRA_INTEGER );
- return VK_FORMAT_R5G5B5A1_UNORM_PACK16;
- case GL_UNSIGNED_SHORT_1_5_5_5_REV:
- assert( format == GL_RGBA || format == GL_BGRA || format == GL_RGBA_INTEGER || format == GL_BGRA_INTEGER );
- return VK_FORMAT_A1R5G5B5_UNORM_PACK16;
- case GL_UNSIGNED_INT_8_8_8_8:
- assert( format == GL_RGBA || format == GL_BGRA || format == GL_RGBA_INTEGER || format == GL_BGRA_INTEGER );
- return ( format == GL_RGBA_INTEGER || format == GL_BGRA_INTEGER ) ? VK_FORMAT_R8G8B8A8_UINT : VK_FORMAT_R8G8B8A8_UNORM;
- case GL_UNSIGNED_INT_8_8_8_8_REV:
- assert( format == GL_RGBA || format == GL_BGRA || format == GL_RGBA_INTEGER || format == GL_BGRA_INTEGER );
- return ( format == GL_RGBA_INTEGER || format == GL_BGRA_INTEGER ) ? VK_FORMAT_A8B8G8R8_UINT_PACK32 : VK_FORMAT_A8B8G8R8_UNORM_PACK32;
- case GL_UNSIGNED_INT_10_10_10_2:
- assert( format == GL_RGBA || format == GL_BGRA || format == GL_RGBA_INTEGER || format == GL_BGRA_INTEGER );
- return ( format == GL_RGBA_INTEGER || format == GL_BGRA_INTEGER ) ? VK_FORMAT_A2R10G10B10_UINT_PACK32 : VK_FORMAT_A2R10G10B10_UNORM_PACK32;
- case GL_UNSIGNED_INT_2_10_10_10_REV:
- assert( format == GL_RGBA || format == GL_BGRA || format == GL_RGBA_INTEGER || format == GL_BGRA_INTEGER );
- return ( format == GL_RGBA_INTEGER || format == GL_BGRA_INTEGER ) ? VK_FORMAT_A2B10G10R10_UINT_PACK32 : VK_FORMAT_A2B10G10R10_UNORM_PACK32;
- case GL_UNSIGNED_INT_10F_11F_11F_REV:
- assert( format == GL_RGB || format == GL_BGR );
- return VK_FORMAT_B10G11R11_UFLOAT_PACK32;
- case GL_UNSIGNED_INT_5_9_9_9_REV:
- assert( format == GL_RGB || format == GL_BGR );
- return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32;
- case GL_UNSIGNED_INT_24_8:
- assert( format == GL_DEPTH_STENCIL );
- return VK_FORMAT_D24_UNORM_S8_UINT;
- case GL_FLOAT_32_UNSIGNED_INT_24_8_REV:
- assert( format == GL_DEPTH_STENCIL );
- return VK_FORMAT_D32_SFLOAT_S8_UINT;
- }
- return VK_FORMAT_UNDEFINED;
- }
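- // Illustrative example (not part of the API): the common OpenGL depth-stencil upload combination
- // maps as vkGetFormatFromOpenGLFormat( GL_DEPTH_STENCIL, GL_UNSIGNED_INT_24_8 ) == VK_FORMAT_D24_UNORM_S8_UINT,
- // while any format/type pair without a Vulkan equivalent yields VK_FORMAT_UNDEFINED.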
- static inline VkFormat vkGetFormatFromOpenGLType( const GLenum type, const GLuint numComponents, const GLboolean normalized )
- {
- switch ( type )
- {
- //
- // 8 bits per component
- //
- case GL_UNSIGNED_BYTE:
- {
- switch ( numComponents )
- {
- case 1: return normalized ? VK_FORMAT_R8_UNORM : VK_FORMAT_R8_UINT;
- case 2: return normalized ? VK_FORMAT_R8G8_UNORM : VK_FORMAT_R8G8_UINT;
- case 3: return normalized ? VK_FORMAT_R8G8B8_UNORM : VK_FORMAT_R8G8B8_UINT;
- case 4: return normalized ? VK_FORMAT_R8G8B8A8_UNORM : VK_FORMAT_R8G8B8A8_UINT;
- }
- break;
- }
- case GL_BYTE:
- {
- switch ( numComponents )
- {
- case 1: return normalized ? VK_FORMAT_R8_SNORM : VK_FORMAT_R8_SINT;
- case 2: return normalized ? VK_FORMAT_R8G8_SNORM : VK_FORMAT_R8G8_SINT;
- case 3: return normalized ? VK_FORMAT_R8G8B8_SNORM : VK_FORMAT_R8G8B8_SINT;
- case 4: return normalized ? VK_FORMAT_R8G8B8A8_SNORM : VK_FORMAT_R8G8B8A8_SINT;
- }
- break;
- }
- //
- // 16 bits per component
- //
- case GL_UNSIGNED_SHORT:
- {
- switch ( numComponents )
- {
- case 1: return normalized ? VK_FORMAT_R16_UNORM : VK_FORMAT_R16_UINT;
- case 2: return normalized ? VK_FORMAT_R16G16_UNORM : VK_FORMAT_R16G16_UINT;
- case 3: return normalized ? VK_FORMAT_R16G16B16_UNORM : VK_FORMAT_R16G16B16_UINT;
- case 4: return normalized ? VK_FORMAT_R16G16B16A16_UNORM : VK_FORMAT_R16G16B16A16_UINT;
- }
- break;
- }
- case GL_SHORT:
- {
- switch ( numComponents )
- {
- case 1: return normalized ? VK_FORMAT_R16_SNORM : VK_FORMAT_R16_SINT;
- case 2: return normalized ? VK_FORMAT_R16G16_SNORM : VK_FORMAT_R16G16_SINT;
- case 3: return normalized ? VK_FORMAT_R16G16B16_SNORM : VK_FORMAT_R16G16B16_SINT;
- case 4: return normalized ? VK_FORMAT_R16G16B16A16_SNORM : VK_FORMAT_R16G16B16A16_SINT;
- }
- break;
- }
- case GL_HALF_FLOAT:
- case GL_HALF_FLOAT_OES:
- {
- switch ( numComponents )
- {
- case 1: return VK_FORMAT_R16_SFLOAT;
- case 2: return VK_FORMAT_R16G16_SFLOAT;
- case 3: return VK_FORMAT_R16G16B16_SFLOAT;
- case 4: return VK_FORMAT_R16G16B16A16_SFLOAT;
- }
- break;
- }
- //
- // 32 bits per component
- //
- case GL_UNSIGNED_INT:
- {
- switch ( numComponents )
- {
- case 1: return VK_FORMAT_R32_UINT;
- case 2: return VK_FORMAT_R32G32_UINT;
- case 3: return VK_FORMAT_R32G32B32_UINT;
- case 4: return VK_FORMAT_R32G32B32A32_UINT;
- }
- break;
- }
- case GL_INT:
- {
- switch ( numComponents )
- {
- case 1: return VK_FORMAT_R32_SINT;
- case 2: return VK_FORMAT_R32G32_SINT;
- case 3: return VK_FORMAT_R32G32B32_SINT;
- case 4: return VK_FORMAT_R32G32B32A32_SINT;
- }
- break;
- }
- case GL_FLOAT:
- {
- switch ( numComponents )
- {
- case 1: return VK_FORMAT_R32_SFLOAT;
- case 2: return VK_FORMAT_R32G32_SFLOAT;
- case 3: return VK_FORMAT_R32G32B32_SFLOAT;
- case 4: return VK_FORMAT_R32G32B32A32_SFLOAT;
- }
- break;
- }
- //
- // 64 bits per component
- //
- case GL_UNSIGNED_INT64:
- {
- switch ( numComponents )
- {
- case 1: return VK_FORMAT_R64_UINT;
- case 2: return VK_FORMAT_R64G64_UINT;
- case 3: return VK_FORMAT_R64G64B64_UINT;
- case 4: return VK_FORMAT_R64G64B64A64_UINT;
- }
- break;
- }
- case GL_INT64:
- {
- switch ( numComponents )
- {
- case 1: return VK_FORMAT_R64_SINT;
- case 2: return VK_FORMAT_R64G64_SINT;
- case 3: return VK_FORMAT_R64G64B64_SINT;
- case 4: return VK_FORMAT_R64G64B64A64_SINT;
- }
- break;
- }
- case GL_DOUBLE:
- {
- switch ( numComponents )
- {
- case 1: return VK_FORMAT_R64_SFLOAT;
- case 2: return VK_FORMAT_R64G64_SFLOAT;
- case 3: return VK_FORMAT_R64G64B64_SFLOAT;
- case 4: return VK_FORMAT_R64G64B64A64_SFLOAT;
- }
- break;
- }
- //
- // Packed
- //
- case GL_UNSIGNED_BYTE_3_3_2: return VK_FORMAT_UNDEFINED;
- case GL_UNSIGNED_BYTE_2_3_3_REV: return VK_FORMAT_UNDEFINED;
- case GL_UNSIGNED_SHORT_5_6_5: return VK_FORMAT_R5G6B5_UNORM_PACK16;
- case GL_UNSIGNED_SHORT_5_6_5_REV: return VK_FORMAT_B5G6R5_UNORM_PACK16;
- case GL_UNSIGNED_SHORT_4_4_4_4: return VK_FORMAT_R4G4B4A4_UNORM_PACK16;
- case GL_UNSIGNED_SHORT_4_4_4_4_REV: return VK_FORMAT_B4G4R4A4_UNORM_PACK16;
- case GL_UNSIGNED_SHORT_5_5_5_1: return VK_FORMAT_R5G5B5A1_UNORM_PACK16;
- case GL_UNSIGNED_SHORT_1_5_5_5_REV: return VK_FORMAT_A1R5G5B5_UNORM_PACK16;
- case GL_UNSIGNED_INT_8_8_8_8: return normalized ? VK_FORMAT_R8G8B8A8_UNORM : VK_FORMAT_R8G8B8A8_UINT;
- case GL_UNSIGNED_INT_8_8_8_8_REV: return normalized ? VK_FORMAT_A8B8G8R8_UNORM_PACK32 : VK_FORMAT_A8B8G8R8_UINT_PACK32;
- case GL_UNSIGNED_INT_10_10_10_2: return normalized ? VK_FORMAT_A2R10G10B10_UNORM_PACK32 : VK_FORMAT_A2R10G10B10_UINT_PACK32;
- case GL_UNSIGNED_INT_2_10_10_10_REV: return normalized ? VK_FORMAT_A2B10G10R10_UNORM_PACK32 : VK_FORMAT_A2B10G10R10_UINT_PACK32;
- case GL_UNSIGNED_INT_10F_11F_11F_REV: return VK_FORMAT_B10G11R11_UFLOAT_PACK32;
- case GL_UNSIGNED_INT_5_9_9_9_REV: return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32;
- case GL_UNSIGNED_INT_24_8: return VK_FORMAT_D24_UNORM_S8_UINT;
- case GL_FLOAT_32_UNSIGNED_INT_24_8_REV: return VK_FORMAT_D32_SFLOAT_S8_UINT;
- }
- return VK_FORMAT_UNDEFINED;
- }
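- // Illustrative example (not part of the API): a vertex attribute declared with
- // glVertexAttribPointer( index, 2, GL_UNSIGNED_SHORT, GL_TRUE, stride, pointer ) maps as
- // vkGetFormatFromOpenGLType( GL_UNSIGNED_SHORT, 2, GL_TRUE ) == VK_FORMAT_R16G16_UNORM,
- // and with normalized == GL_FALSE it maps to VK_FORMAT_R16G16_UINT instead.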
- static inline VkFormat vkGetFormatFromOpenGLInternalFormat( const GLenum internalFormat )
- {
- switch ( internalFormat )
- {
- //
- // 8 bits per component
- //
- case GL_R8: return VK_FORMAT_R8_UNORM; // 1-component, 8-bit unsigned normalized
- case GL_RG8: return VK_FORMAT_R8G8_UNORM; // 2-component, 8-bit unsigned normalized
- case GL_RGB8: return VK_FORMAT_R8G8B8_UNORM; // 3-component, 8-bit unsigned normalized
- case GL_RGBA8: return VK_FORMAT_R8G8B8A8_UNORM; // 4-component, 8-bit unsigned normalized
- case GL_R8_SNORM: return VK_FORMAT_R8_SNORM; // 1-component, 8-bit signed normalized
- case GL_RG8_SNORM: return VK_FORMAT_R8G8_SNORM; // 2-component, 8-bit signed normalized
- case GL_RGB8_SNORM: return VK_FORMAT_R8G8B8_SNORM; // 3-component, 8-bit signed normalized
- case GL_RGBA8_SNORM: return VK_FORMAT_R8G8B8A8_SNORM; // 4-component, 8-bit signed normalized
- case GL_R8UI: return VK_FORMAT_R8_UINT; // 1-component, 8-bit unsigned integer
- case GL_RG8UI: return VK_FORMAT_R8G8_UINT; // 2-component, 8-bit unsigned integer
- case GL_RGB8UI: return VK_FORMAT_R8G8B8_UINT; // 3-component, 8-bit unsigned integer
- case GL_RGBA8UI: return VK_FORMAT_R8G8B8A8_UINT; // 4-component, 8-bit unsigned integer
- case GL_R8I: return VK_FORMAT_R8_SINT; // 1-component, 8-bit signed integer
- case GL_RG8I: return VK_FORMAT_R8G8_SINT; // 2-component, 8-bit signed integer
- case GL_RGB8I: return VK_FORMAT_R8G8B8_SINT; // 3-component, 8-bit signed integer
- case GL_RGBA8I: return VK_FORMAT_R8G8B8A8_SINT; // 4-component, 8-bit signed integer
- case GL_SR8: return VK_FORMAT_R8_SRGB; // 1-component, 8-bit sRGB
- case GL_SRG8: return VK_FORMAT_R8G8_SRGB; // 2-component, 8-bit sRGB
- case GL_SRGB8: return VK_FORMAT_R8G8B8_SRGB; // 3-component, 8-bit sRGB
- case GL_SRGB8_ALPHA8: return VK_FORMAT_R8G8B8A8_SRGB; // 4-component, 8-bit sRGB
- //
- // 16 bits per component
- //
- case GL_R16: return VK_FORMAT_R16_UNORM; // 1-component, 16-bit unsigned normalized
- case GL_RG16: return VK_FORMAT_R16G16_UNORM; // 2-component, 16-bit unsigned normalized
- case GL_RGB16: return VK_FORMAT_R16G16B16_UNORM; // 3-component, 16-bit unsigned normalized
- case GL_RGBA16: return VK_FORMAT_R16G16B16A16_UNORM; // 4-component, 16-bit unsigned normalized
- case GL_R16_SNORM: return VK_FORMAT_R16_SNORM; // 1-component, 16-bit signed normalized
- case GL_RG16_SNORM: return VK_FORMAT_R16G16_SNORM; // 2-component, 16-bit signed normalized
- case GL_RGB16_SNORM: return VK_FORMAT_R16G16B16_SNORM; // 3-component, 16-bit signed normalized
- case GL_RGBA16_SNORM: return VK_FORMAT_R16G16B16A16_SNORM; // 4-component, 16-bit signed normalized
- case GL_R16UI: return VK_FORMAT_R16_UINT; // 1-component, 16-bit unsigned integer
- case GL_RG16UI: return VK_FORMAT_R16G16_UINT; // 2-component, 16-bit unsigned integer
- case GL_RGB16UI: return VK_FORMAT_R16G16B16_UINT; // 3-component, 16-bit unsigned integer
- case GL_RGBA16UI: return VK_FORMAT_R16G16B16A16_UINT; // 4-component, 16-bit unsigned integer
- case GL_R16I: return VK_FORMAT_R16_SINT; // 1-component, 16-bit signed integer
- case GL_RG16I: return VK_FORMAT_R16G16_SINT; // 2-component, 16-bit signed integer
- case GL_RGB16I: return VK_FORMAT_R16G16B16_SINT; // 3-component, 16-bit signed integer
- case GL_RGBA16I: return VK_FORMAT_R16G16B16A16_SINT; // 4-component, 16-bit signed integer
- case GL_R16F: return VK_FORMAT_R16_SFLOAT; // 1-component, 16-bit floating-point
- case GL_RG16F: return VK_FORMAT_R16G16_SFLOAT; // 2-component, 16-bit floating-point
- case GL_RGB16F: return VK_FORMAT_R16G16B16_SFLOAT; // 3-component, 16-bit floating-point
- case GL_RGBA16F: return VK_FORMAT_R16G16B16A16_SFLOAT; // 4-component, 16-bit floating-point
- //
- // 32 bits per component
- //
- case GL_R32UI: return VK_FORMAT_R32_UINT; // 1-component, 32-bit unsigned integer
- case GL_RG32UI: return VK_FORMAT_R32G32_UINT; // 2-component, 32-bit unsigned integer
- case GL_RGB32UI: return VK_FORMAT_R32G32B32_UINT; // 3-component, 32-bit unsigned integer
- case GL_RGBA32UI: return VK_FORMAT_R32G32B32A32_UINT; // 4-component, 32-bit unsigned integer
- case GL_R32I: return VK_FORMAT_R32_SINT; // 1-component, 32-bit signed integer
- case GL_RG32I: return VK_FORMAT_R32G32_SINT; // 2-component, 32-bit signed integer
- case GL_RGB32I: return VK_FORMAT_R32G32B32_SINT; // 3-component, 32-bit signed integer
- case GL_RGBA32I: return VK_FORMAT_R32G32B32A32_SINT; // 4-component, 32-bit signed integer
- case GL_R32F: return VK_FORMAT_R32_SFLOAT; // 1-component, 32-bit floating-point
- case GL_RG32F: return VK_FORMAT_R32G32_SFLOAT; // 2-component, 32-bit floating-point
- case GL_RGB32F: return VK_FORMAT_R32G32B32_SFLOAT; // 3-component, 32-bit floating-point
- case GL_RGBA32F: return VK_FORMAT_R32G32B32A32_SFLOAT; // 4-component, 32-bit floating-point
- //
- // Packed
- //
- case GL_R3_G3_B2: return VK_FORMAT_UNDEFINED; // 3-component 3:3:2, unsigned normalized
- case GL_RGB4: return VK_FORMAT_UNDEFINED; // 3-component 4:4:4, unsigned normalized
- case GL_RGB5: return VK_FORMAT_R5G5B5A1_UNORM_PACK16; // 3-component 5:5:5, unsigned normalized
- case GL_RGB565: return VK_FORMAT_R5G6B5_UNORM_PACK16; // 3-component 5:6:5, unsigned normalized
- case GL_RGB10: return VK_FORMAT_A2R10G10B10_UNORM_PACK32; // 3-component 10:10:10, unsigned normalized
- case GL_RGB12: return VK_FORMAT_UNDEFINED; // 3-component 12:12:12, unsigned normalized
- case GL_RGBA2: return VK_FORMAT_UNDEFINED; // 4-component 2:2:2:2, unsigned normalized
- case GL_RGBA4: return VK_FORMAT_R4G4B4A4_UNORM_PACK16; // 4-component 4:4:4:4, unsigned normalized
- case GL_RGBA12: return VK_FORMAT_UNDEFINED; // 4-component 12:12:12:12, unsigned normalized
- case GL_RGB5_A1: return VK_FORMAT_A1R5G5B5_UNORM_PACK16; // 4-component 5:5:5:1, unsigned normalized
- case GL_RGB10_A2: return VK_FORMAT_A2R10G10B10_UNORM_PACK32; // 4-component 10:10:10:2, unsigned normalized
- case GL_RGB10_A2UI: return VK_FORMAT_A2R10G10B10_UINT_PACK32; // 4-component 10:10:10:2, unsigned integer
- case GL_R11F_G11F_B10F: return VK_FORMAT_B10G11R11_UFLOAT_PACK32; // 3-component 11:11:10, floating-point
- case GL_RGB9_E5: return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32; // 3-component/exp 9:9:9/5, floating-point
- //
- // S3TC/DXT/BC
- //
- case GL_COMPRESSED_RGB_S3TC_DXT1_EXT: return VK_FORMAT_BC1_RGB_UNORM_BLOCK; // line through 3D space, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_S3TC_DXT1_EXT: return VK_FORMAT_BC1_RGBA_UNORM_BLOCK; // line through 3D space plus 1-bit alpha, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_S3TC_DXT3_EXT: return VK_FORMAT_BC2_UNORM_BLOCK; // line through 3D space plus line through 1D space, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_S3TC_DXT5_EXT: return VK_FORMAT_BC3_UNORM_BLOCK; // line through 3D space plus 4-bit alpha, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_SRGB_S3TC_DXT1_EXT: return VK_FORMAT_BC1_RGB_SRGB_BLOCK; // line through 3D space, 4x4 blocks, sRGB
- case GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT1_EXT: return VK_FORMAT_BC1_RGBA_SRGB_BLOCK; // line through 3D space plus 1-bit alpha, 4x4 blocks, sRGB
- case GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT3_EXT: return VK_FORMAT_BC2_SRGB_BLOCK; // line through 3D space plus line through 1D space, 4x4 blocks, sRGB
- case GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT: return VK_FORMAT_BC3_SRGB_BLOCK; // line through 3D space plus 4-bit alpha, 4x4 blocks, sRGB
- case GL_COMPRESSED_LUMINANCE_LATC1_EXT: return VK_FORMAT_BC4_UNORM_BLOCK; // line through 1D space, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_LUMINANCE_ALPHA_LATC2_EXT: return VK_FORMAT_BC5_UNORM_BLOCK; // two lines through 1D space, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_SIGNED_LUMINANCE_LATC1_EXT: return VK_FORMAT_BC4_SNORM_BLOCK; // line through 1D space, 4x4 blocks, signed normalized
- case GL_COMPRESSED_SIGNED_LUMINANCE_ALPHA_LATC2_EXT: return VK_FORMAT_BC5_SNORM_BLOCK; // two lines through 1D space, 4x4 blocks, signed normalized
- case GL_COMPRESSED_RED_RGTC1: return VK_FORMAT_BC4_UNORM_BLOCK; // line through 1D space, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_RG_RGTC2: return VK_FORMAT_BC5_UNORM_BLOCK; // two lines through 1D space, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_SIGNED_RED_RGTC1: return VK_FORMAT_BC4_SNORM_BLOCK; // line through 1D space, 4x4 blocks, signed normalized
- case GL_COMPRESSED_SIGNED_RG_RGTC2: return VK_FORMAT_BC5_SNORM_BLOCK; // two lines through 1D space, 4x4 blocks, signed normalized
- case GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT: return VK_FORMAT_BC6H_UFLOAT_BLOCK; // 3-component, 4x4 blocks, unsigned floating-point
- case GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT: return VK_FORMAT_BC6H_SFLOAT_BLOCK; // 3-component, 4x4 blocks, signed floating-point
- case GL_COMPRESSED_RGBA_BPTC_UNORM: return VK_FORMAT_BC7_UNORM_BLOCK; // 4-component, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM: return VK_FORMAT_BC7_SRGB_BLOCK; // 4-component, 4x4 blocks, sRGB
- //
- // ETC
- //
- case GL_ETC1_RGB8_OES: return VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK; // 3-component ETC1, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_RGB8_ETC2: return VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK; // 3-component ETC2, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2: return VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK; // 4-component ETC2 with 1-bit alpha, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA8_ETC2_EAC: return VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK; // 4-component ETC2, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_SRGB8_ETC2: return VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK; // 3-component ETC2, 4x4 blocks, sRGB
- case GL_COMPRESSED_SRGB8_PUNCHTHROUGH_ALPHA1_ETC2: return VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK; // 4-component ETC2 with 1-bit alpha, 4x4 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC: return VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK; // 4-component ETC2, 4x4 blocks, sRGB
- case GL_COMPRESSED_R11_EAC: return VK_FORMAT_EAC_R11_UNORM_BLOCK; // 1-component ETC, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_RG11_EAC: return VK_FORMAT_EAC_R11G11_UNORM_BLOCK; // 2-component ETC, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_SIGNED_R11_EAC: return VK_FORMAT_EAC_R11_SNORM_BLOCK; // 1-component ETC, 4x4 blocks, signed normalized
- case GL_COMPRESSED_SIGNED_RG11_EAC: return VK_FORMAT_EAC_R11G11_SNORM_BLOCK; // 2-component ETC, 4x4 blocks, signed normalized
- //
- // PVRTC
- //
- case GL_COMPRESSED_RGB_PVRTC_2BPPV1_IMG: return VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG; // 3-component PVRTC, 16x8 blocks, unsigned normalized
- case GL_COMPRESSED_RGB_PVRTC_4BPPV1_IMG: return VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG; // 3-component PVRTC, 8x8 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_PVRTC_2BPPV1_IMG: return VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG; // 4-component PVRTC, 16x8 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_PVRTC_4BPPV1_IMG: return VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG; // 4-component PVRTC, 8x8 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_PVRTC_2BPPV2_IMG: return VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG; // 4-component PVRTC, 8x4 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_PVRTC_4BPPV2_IMG: return VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG; // 4-component PVRTC, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_SRGB_PVRTC_2BPPV1_EXT: return VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG; // 3-component PVRTC, 16x8 blocks, sRGB
- case GL_COMPRESSED_SRGB_PVRTC_4BPPV1_EXT: return VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG; // 3-component PVRTC, 8x8 blocks, sRGB
- case GL_COMPRESSED_SRGB_ALPHA_PVRTC_2BPPV1_EXT: return VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG; // 4-component PVRTC, 16x8 blocks, sRGB
- case GL_COMPRESSED_SRGB_ALPHA_PVRTC_4BPPV1_EXT: return VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG; // 4-component PVRTC, 8x8 blocks, sRGB
- case GL_COMPRESSED_SRGB_ALPHA_PVRTC_2BPPV2_IMG: return VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG; // 4-component PVRTC, 8x4 blocks, sRGB
- case GL_COMPRESSED_SRGB_ALPHA_PVRTC_4BPPV2_IMG: return VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG; // 4-component PVRTC, 4x4 blocks, sRGB
- //
- // ASTC
- //
- case GL_COMPRESSED_RGBA_ASTC_4x4_KHR: return VK_FORMAT_ASTC_4x4_UNORM_BLOCK; // 4-component ASTC, 4x4 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_5x4_KHR: return VK_FORMAT_ASTC_5x4_UNORM_BLOCK; // 4-component ASTC, 5x4 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_5x5_KHR: return VK_FORMAT_ASTC_5x5_UNORM_BLOCK; // 4-component ASTC, 5x5 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_6x5_KHR: return VK_FORMAT_ASTC_6x5_UNORM_BLOCK; // 4-component ASTC, 6x5 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_6x6_KHR: return VK_FORMAT_ASTC_6x6_UNORM_BLOCK; // 4-component ASTC, 6x6 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_8x5_KHR: return VK_FORMAT_ASTC_8x5_UNORM_BLOCK; // 4-component ASTC, 8x5 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_8x6_KHR: return VK_FORMAT_ASTC_8x6_UNORM_BLOCK; // 4-component ASTC, 8x6 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_8x8_KHR: return VK_FORMAT_ASTC_8x8_UNORM_BLOCK; // 4-component ASTC, 8x8 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_10x5_KHR: return VK_FORMAT_ASTC_10x5_UNORM_BLOCK; // 4-component ASTC, 10x5 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_10x6_KHR: return VK_FORMAT_ASTC_10x6_UNORM_BLOCK; // 4-component ASTC, 10x6 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_10x8_KHR: return VK_FORMAT_ASTC_10x8_UNORM_BLOCK; // 4-component ASTC, 10x8 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_10x10_KHR: return VK_FORMAT_ASTC_10x10_UNORM_BLOCK; // 4-component ASTC, 10x10 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_12x10_KHR: return VK_FORMAT_ASTC_12x10_UNORM_BLOCK; // 4-component ASTC, 12x10 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_12x12_KHR: return VK_FORMAT_ASTC_12x12_UNORM_BLOCK; // 4-component ASTC, 12x12 blocks, unsigned normalized
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR: return VK_FORMAT_ASTC_4x4_SRGB_BLOCK; // 4-component ASTC, 4x4 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4_KHR: return VK_FORMAT_ASTC_5x4_SRGB_BLOCK; // 4-component ASTC, 5x4 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5_KHR: return VK_FORMAT_ASTC_5x5_SRGB_BLOCK; // 4-component ASTC, 5x5 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5_KHR: return VK_FORMAT_ASTC_6x5_SRGB_BLOCK; // 4-component ASTC, 6x5 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6_KHR: return VK_FORMAT_ASTC_6x6_SRGB_BLOCK; // 4-component ASTC, 6x6 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x5_KHR: return VK_FORMAT_ASTC_8x5_SRGB_BLOCK; // 4-component ASTC, 8x5 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x6_KHR: return VK_FORMAT_ASTC_8x6_SRGB_BLOCK; // 4-component ASTC, 8x6 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_8x8_KHR: return VK_FORMAT_ASTC_8x8_SRGB_BLOCK; // 4-component ASTC, 8x8 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x5_KHR: return VK_FORMAT_ASTC_10x5_SRGB_BLOCK; // 4-component ASTC, 10x5 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x6_KHR: return VK_FORMAT_ASTC_10x6_SRGB_BLOCK; // 4-component ASTC, 10x6 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x8_KHR: return VK_FORMAT_ASTC_10x8_SRGB_BLOCK; // 4-component ASTC, 10x8 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_10x10_KHR: return VK_FORMAT_ASTC_10x10_SRGB_BLOCK; // 4-component ASTC, 10x10 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x10_KHR: return VK_FORMAT_ASTC_12x10_SRGB_BLOCK; // 4-component ASTC, 12x10 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_12x12_KHR: return VK_FORMAT_ASTC_12x12_SRGB_BLOCK; // 4-component ASTC, 12x12 blocks, sRGB
- case GL_COMPRESSED_RGBA_ASTC_3x3x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 3x3x3 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_4x3x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x3x3 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_4x4x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x4x3 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_4x4x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x4x4 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_5x4x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x4x4 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_5x5x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x5x4 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_5x5x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x5x5 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_6x5x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x5x5 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_6x6x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x6x5 blocks, unsigned normalized
- case GL_COMPRESSED_RGBA_ASTC_6x6x6_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x6x6 blocks, unsigned normalized
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_3x3x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 3x3x3 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x3x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x3x3 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4x3_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x4x3 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 4x4x4 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x4x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x4x4 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5x4_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x5x4 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_5x5x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 5x5x5 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x5x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x5x5 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6x5_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x6x5 blocks, sRGB
- case GL_COMPRESSED_SRGB8_ALPHA8_ASTC_6x6x6_OES: return VK_FORMAT_UNDEFINED; // 4-component ASTC, 6x6x6 blocks, sRGB
- //
- // ATC
- //
- case GL_ATC_RGB_AMD: return VK_FORMAT_UNDEFINED; // 3-component, 4x4 blocks, unsigned normalized
- case GL_ATC_RGBA_EXPLICIT_ALPHA_AMD: return VK_FORMAT_UNDEFINED; // 4-component, 4x4 blocks, unsigned normalized
- case GL_ATC_RGBA_INTERPOLATED_ALPHA_AMD: return VK_FORMAT_UNDEFINED; // 4-component, 4x4 blocks, unsigned normalized
- //
- // Palletized
- //
- case GL_PALETTE4_RGB8_OES: return VK_FORMAT_UNDEFINED; // 3-component 8:8:8, 4-bit palette, unsigned normalized
- case GL_PALETTE4_RGBA8_OES: return VK_FORMAT_UNDEFINED; // 4-component 8:8:8:8, 4-bit palette, unsigned normalized
- case GL_PALETTE4_R5_G6_B5_OES: return VK_FORMAT_UNDEFINED; // 3-component 5:6:5, 4-bit palette, unsigned normalized
- case GL_PALETTE4_RGBA4_OES: return VK_FORMAT_UNDEFINED; // 4-component 4:4:4:4, 4-bit palette, unsigned normalized
- case GL_PALETTE4_RGB5_A1_OES: return VK_FORMAT_UNDEFINED; // 4-component 5:5:5:1, 4-bit palette, unsigned normalized
- case GL_PALETTE8_RGB8_OES: return VK_FORMAT_UNDEFINED; // 3-component 8:8:8, 8-bit palette, unsigned normalized
- case GL_PALETTE8_RGBA8_OES: return VK_FORMAT_UNDEFINED; // 4-component 8:8:8:8, 8-bit palette, unsigned normalized
- case GL_PALETTE8_R5_G6_B5_OES: return VK_FORMAT_UNDEFINED; // 3-component 5:6:5, 8-bit palette, unsigned normalized
- case GL_PALETTE8_RGBA4_OES: return VK_FORMAT_UNDEFINED; // 4-component 4:4:4:4, 8-bit palette, unsigned normalized
- case GL_PALETTE8_RGB5_A1_OES: return VK_FORMAT_UNDEFINED; // 4-component 5:5:5:1, 8-bit palette, unsigned normalized
- //
- // Depth/stencil
- //
- case GL_DEPTH_COMPONENT16: return VK_FORMAT_D16_UNORM;
- case GL_DEPTH_COMPONENT24: return VK_FORMAT_X8_D24_UNORM_PACK32;
- case GL_DEPTH_COMPONENT32: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH_COMPONENT32F: return VK_FORMAT_D32_SFLOAT;
- case GL_DEPTH_COMPONENT32F_NV: return VK_FORMAT_D32_SFLOAT;
- case GL_STENCIL_INDEX1: return VK_FORMAT_UNDEFINED;
- case GL_STENCIL_INDEX4: return VK_FORMAT_UNDEFINED;
- case GL_STENCIL_INDEX8: return VK_FORMAT_S8_UINT;
- case GL_STENCIL_INDEX16: return VK_FORMAT_UNDEFINED;
- case GL_DEPTH24_STENCIL8: return VK_FORMAT_D24_UNORM_S8_UINT;
- case GL_DEPTH32F_STENCIL8: return VK_FORMAT_D32_SFLOAT_S8_UINT;
- case GL_DEPTH32F_STENCIL8_NV: return VK_FORMAT_D32_SFLOAT_S8_UINT;
- default: return VK_FORMAT_UNDEFINED;
- }
- }
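- // Illustrative example (not part of the API): a KTX file whose glInternalformat is GL_COMPRESSED_RGBA8_ETC2_EAC
- // maps as vkGetFormatFromOpenGLInternalFormat( GL_COMPRESSED_RGBA8_ETC2_EAC ) == VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
- // internal formats without a Vulkan equivalent (e.g. the palettized formats above) yield VK_FORMAT_UNDEFINED.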
- static inline void vkGetFormatSize( const VkFormat format, ktxFormatSize * pFormatSize )
- {
- pFormatSize->minBlocksX = pFormatSize->minBlocksY = 1;
- switch ( format )
- {
- case VK_FORMAT_R4G4_UNORM_PACK8:
- pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 1 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
- case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
- case VK_FORMAT_R5G6B5_UNORM_PACK16:
- case VK_FORMAT_B5G6R5_UNORM_PACK16:
- case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
- case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
- case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
- pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 2 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R8_UNORM:
- case VK_FORMAT_R8_SNORM:
- case VK_FORMAT_R8_USCALED:
- case VK_FORMAT_R8_SSCALED:
- case VK_FORMAT_R8_UINT:
- case VK_FORMAT_R8_SINT:
- case VK_FORMAT_R8_SRGB:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 1 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R8G8_UNORM:
- case VK_FORMAT_R8G8_SNORM:
- case VK_FORMAT_R8G8_USCALED:
- case VK_FORMAT_R8G8_SSCALED:
- case VK_FORMAT_R8G8_UINT:
- case VK_FORMAT_R8G8_SINT:
- case VK_FORMAT_R8G8_SRGB:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 2 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R8G8B8_UNORM:
- case VK_FORMAT_R8G8B8_SNORM:
- case VK_FORMAT_R8G8B8_USCALED:
- case VK_FORMAT_R8G8B8_SSCALED:
- case VK_FORMAT_R8G8B8_UINT:
- case VK_FORMAT_R8G8B8_SINT:
- case VK_FORMAT_R8G8B8_SRGB:
- case VK_FORMAT_B8G8R8_UNORM:
- case VK_FORMAT_B8G8R8_SNORM:
- case VK_FORMAT_B8G8R8_USCALED:
- case VK_FORMAT_B8G8R8_SSCALED:
- case VK_FORMAT_B8G8R8_UINT:
- case VK_FORMAT_B8G8R8_SINT:
- case VK_FORMAT_B8G8R8_SRGB:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 3 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R8G8B8A8_UNORM:
- case VK_FORMAT_R8G8B8A8_SNORM:
- case VK_FORMAT_R8G8B8A8_USCALED:
- case VK_FORMAT_R8G8B8A8_SSCALED:
- case VK_FORMAT_R8G8B8A8_UINT:
- case VK_FORMAT_R8G8B8A8_SINT:
- case VK_FORMAT_R8G8B8A8_SRGB:
- case VK_FORMAT_B8G8R8A8_UNORM:
- case VK_FORMAT_B8G8R8A8_SNORM:
- case VK_FORMAT_B8G8R8A8_USCALED:
- case VK_FORMAT_B8G8R8A8_SSCALED:
- case VK_FORMAT_B8G8R8A8_UINT:
- case VK_FORMAT_B8G8R8A8_SINT:
- case VK_FORMAT_B8G8R8A8_SRGB:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 4 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
- case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
- case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
- case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
- case VK_FORMAT_A8B8G8R8_UINT_PACK32:
- case VK_FORMAT_A8B8G8R8_SINT_PACK32:
- case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
- pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 4 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
- case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
- case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
- case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
- case VK_FORMAT_A2R10G10B10_UINT_PACK32:
- case VK_FORMAT_A2R10G10B10_SINT_PACK32:
- case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
- case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
- case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
- case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
- case VK_FORMAT_A2B10G10R10_UINT_PACK32:
- case VK_FORMAT_A2B10G10R10_SINT_PACK32:
- pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 4 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R16_UNORM:
- case VK_FORMAT_R16_SNORM:
- case VK_FORMAT_R16_USCALED:
- case VK_FORMAT_R16_SSCALED:
- case VK_FORMAT_R16_UINT:
- case VK_FORMAT_R16_SINT:
- case VK_FORMAT_R16_SFLOAT:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 2 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R16G16_UNORM:
- case VK_FORMAT_R16G16_SNORM:
- case VK_FORMAT_R16G16_USCALED:
- case VK_FORMAT_R16G16_SSCALED:
- case VK_FORMAT_R16G16_UINT:
- case VK_FORMAT_R16G16_SINT:
- case VK_FORMAT_R16G16_SFLOAT:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 4 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R16G16B16_UNORM:
- case VK_FORMAT_R16G16B16_SNORM:
- case VK_FORMAT_R16G16B16_USCALED:
- case VK_FORMAT_R16G16B16_SSCALED:
- case VK_FORMAT_R16G16B16_UINT:
- case VK_FORMAT_R16G16B16_SINT:
- case VK_FORMAT_R16G16B16_SFLOAT:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 6 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R16G16B16A16_UNORM:
- case VK_FORMAT_R16G16B16A16_SNORM:
- case VK_FORMAT_R16G16B16A16_USCALED:
- case VK_FORMAT_R16G16B16A16_SSCALED:
- case VK_FORMAT_R16G16B16A16_UINT:
- case VK_FORMAT_R16G16B16A16_SINT:
- case VK_FORMAT_R16G16B16A16_SFLOAT:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 8 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R32_UINT:
- case VK_FORMAT_R32_SINT:
- case VK_FORMAT_R32_SFLOAT:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 4 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R32G32_UINT:
- case VK_FORMAT_R32G32_SINT:
- case VK_FORMAT_R32G32_SFLOAT:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 8 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R32G32B32_UINT:
- case VK_FORMAT_R32G32B32_SINT:
- case VK_FORMAT_R32G32B32_SFLOAT:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 12 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R32G32B32A32_UINT:
- case VK_FORMAT_R32G32B32A32_SINT:
- case VK_FORMAT_R32G32B32A32_SFLOAT:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R64_UINT:
- case VK_FORMAT_R64_SINT:
- case VK_FORMAT_R64_SFLOAT:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 8 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R64G64_UINT:
- case VK_FORMAT_R64G64_SINT:
- case VK_FORMAT_R64G64_SFLOAT:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R64G64B64_UINT:
- case VK_FORMAT_R64G64B64_SINT:
- case VK_FORMAT_R64G64B64_SFLOAT:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 24 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_R64G64B64A64_UINT:
- case VK_FORMAT_R64G64B64A64_SINT:
- case VK_FORMAT_R64G64B64A64_SFLOAT:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 32 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
- case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
- pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 4 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_D16_UNORM:
- pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 2 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_X8_D24_UNORM_PACK32:
- pFormatSize->flags = KTX_FORMAT_SIZE_PACKED_BIT | KTX_FORMAT_SIZE_DEPTH_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 4 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_D32_SFLOAT:
- pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 4 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_S8_UINT:
- pFormatSize->flags = KTX_FORMAT_SIZE_STENCIL_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 1 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_D16_UNORM_S8_UINT:
- pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT | KTX_FORMAT_SIZE_STENCIL_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 3 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_D24_UNORM_S8_UINT:
- pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT | KTX_FORMAT_SIZE_STENCIL_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 4 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_D32_SFLOAT_S8_UINT:
- pFormatSize->flags = KTX_FORMAT_SIZE_DEPTH_BIT | KTX_FORMAT_SIZE_STENCIL_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 8 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
- case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
- case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
- case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
- case VK_FORMAT_BC4_UNORM_BLOCK:
- case VK_FORMAT_BC4_SNORM_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 8 * 8;
- pFormatSize->blockWidth = 4;
- pFormatSize->blockHeight = 4;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_BC2_UNORM_BLOCK:
- case VK_FORMAT_BC2_SRGB_BLOCK:
- case VK_FORMAT_BC3_UNORM_BLOCK:
- case VK_FORMAT_BC3_SRGB_BLOCK:
- case VK_FORMAT_BC5_UNORM_BLOCK:
- case VK_FORMAT_BC5_SNORM_BLOCK:
- case VK_FORMAT_BC6H_UFLOAT_BLOCK:
- case VK_FORMAT_BC6H_SFLOAT_BLOCK:
- case VK_FORMAT_BC7_UNORM_BLOCK:
- case VK_FORMAT_BC7_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 4;
- pFormatSize->blockHeight = 4;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 8 * 8;
- pFormatSize->blockWidth = 4;
- pFormatSize->blockHeight = 4;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
- case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
- case VK_FORMAT_EAC_R11_UNORM_BLOCK:
- case VK_FORMAT_EAC_R11_SNORM_BLOCK:
- case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
- case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 4;
- pFormatSize->blockHeight = 4;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG:
- case VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 8 * 8;
- pFormatSize->blockWidth = 8;
- pFormatSize->blockHeight = 4;
- pFormatSize->blockDepth = 1;
- pFormatSize->minBlocksX = 2;
- pFormatSize->minBlocksY = 2;
- break;
- case VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG:
- case VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 8 * 8;
- pFormatSize->blockWidth = 8;
- pFormatSize->blockHeight = 4;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG:
- case VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 8 * 8;
- pFormatSize->blockWidth = 4;
- pFormatSize->blockHeight = 4;
- pFormatSize->blockDepth = 1;
- pFormatSize->minBlocksX = 2;
- pFormatSize->minBlocksY = 2;
- break;
- case VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG:
- case VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 8 * 8;
- pFormatSize->blockWidth = 4;
- pFormatSize->blockHeight = 4;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
- case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 4;
- pFormatSize->blockHeight = 4;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
- case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 5;
- pFormatSize->blockHeight = 4;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 5;
- pFormatSize->blockHeight = 5;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 6;
- pFormatSize->blockHeight = 5;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 6;
- pFormatSize->blockHeight = 6;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 8;
- pFormatSize->blockHeight = 5;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 8;
- pFormatSize->blockHeight = 6;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
- case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 8;
- pFormatSize->blockHeight = 8;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 10;
- pFormatSize->blockHeight = 5;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 10;
- pFormatSize->blockHeight = 6;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 10;
- pFormatSize->blockHeight = 8;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
- case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 10;
- pFormatSize->blockHeight = 10;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
- case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 12;
- pFormatSize->blockHeight = 10;
- pFormatSize->blockDepth = 1;
- break;
- case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
- case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
- pFormatSize->flags = KTX_FORMAT_SIZE_COMPRESSED_BIT;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 16 * 8;
- pFormatSize->blockWidth = 12;
- pFormatSize->blockHeight = 12;
- pFormatSize->blockDepth = 1;
- break;
- default:
- pFormatSize->flags = 0;
- pFormatSize->paletteSizeInBits = 0;
- pFormatSize->blockSizeInBits = 0 * 8;
- pFormatSize->blockWidth = 1;
- pFormatSize->blockHeight = 1;
- pFormatSize->blockDepth = 1;
- break;
- }
- }
- #endif // !VK_FORMAT_H
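For reference, here is a minimal sketch (not part of the removed header) of how the block parameters filled in by the switch above are typically consumed to compute the byte size of one mip level. The struct and function names below (FormatSizeSketch, levelSizeInBytes) are illustrative stand-ins that reuse the field names from the removed code; the real library type and API may differ.

    #include <stdint.h>

    /* Stand-in for the structure populated by the removed switch. */
    typedef struct {
        uint32_t flags;
        uint32_t paletteSizeInBits;
        uint32_t blockSizeInBits;   /* bits per texel block */
        uint32_t blockWidth;        /* block footprint in texels */
        uint32_t blockHeight;
        uint32_t blockDepth;
        uint32_t minBlocksX;        /* e.g. PVRTC1 pads to at least 2x2 blocks */
        uint32_t minBlocksY;
    } FormatSizeSketch;

    static uint64_t
    levelSizeInBytes(const FormatSizeSketch* fs,
                     uint32_t width, uint32_t height, uint32_t depth)
    {
        /* Round the image up to whole blocks in each dimension. */
        uint64_t blocksX = (width  + fs->blockWidth  - 1) / fs->blockWidth;
        uint64_t blocksY = (height + fs->blockHeight - 1) / fs->blockHeight;
        uint64_t blocksZ = (depth  + fs->blockDepth  - 1) / fs->blockDepth;

        /* Honor the minimum block counts set for PVRTC1 formats. */
        if (fs->minBlocksX && blocksX < fs->minBlocksX) blocksX = fs->minBlocksX;
        if (fs->minBlocksY && blocksY < fs->minBlocksY) blocksY = fs->minBlocksY;

        return blocksX * blocksY * blocksZ * (fs->blockSizeInBits / 8);
    }

With the values from the switch, a 16x16 BC1 image is 4x4 blocks of 8 bytes (128 bytes), and a 4x4 PVRTC1 2BPP image is clamped to 2x2 blocks of 8 bytes (32 bytes).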