Add support for rendering more formats

- On LE machines, A8B8G8R8_*_PACK32 are the same as R8G8B8A8
- Add support for B8G8R8A8_SRGB in various places

Bug: b/118386749
Test: dEQP-VK.renderpass.*
Change-Id: I16d587b72317a05277fe2c8b337af5670eb51310
Reviewed-on: https://swiftshader-review.googlesource.com/c/SwiftShader/+/29228
Tested-by: Chris Forbes <chrisforbes@google.com>
Reviewed-by: Nicolas Capens <nicolascapens@google.com>
diff --git a/src/Pipeline/PixelProgram.cpp b/src/Pipeline/PixelProgram.cpp
index 127a148..ac9c792 100644
--- a/src/Pipeline/PixelProgram.cpp
+++ b/src/Pipeline/PixelProgram.cpp
@@ -112,12 +112,15 @@
 			{
 			case VK_FORMAT_R5G6B5_UNORM_PACK16:
 			case VK_FORMAT_B8G8R8A8_UNORM:
+			case VK_FORMAT_B8G8R8A8_SRGB:
 			case VK_FORMAT_R8G8B8A8_UNORM:
 			case VK_FORMAT_R8G8B8A8_SRGB:
 			case VK_FORMAT_R8G8_UNORM:
 			case VK_FORMAT_R8_UNORM:
 			case VK_FORMAT_R16G16_UNORM:
 			case VK_FORMAT_R16G16B16A16_UNORM:
+			case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+			case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
 				for(unsigned int q = 0; q < state.multiSample; q++)
 				{
 					Pointer<Byte> buffer = cBuffer[index] + q * *Pointer<Int>(data + OFFSET(DrawData, colorSliceB[index]));
@@ -166,6 +169,8 @@
 			case VK_FORMAT_R8_UINT:
 			case VK_FORMAT_R8G8_UINT:
 			case VK_FORMAT_R8G8B8A8_UINT:
+			case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+			case VK_FORMAT_A8B8G8R8_SINT_PACK32:
 				for(unsigned int q = 0; q < state.multiSample; q++)
 				{
 					Pointer<Byte> buffer = cBuffer[index] + q * *Pointer<Int>(data + OFFSET(DrawData, colorSliceB[index]));
@@ -199,12 +204,15 @@
 				break;
 			case VK_FORMAT_R5G6B5_UNORM_PACK16:
 			case VK_FORMAT_B8G8R8A8_UNORM:
+			case VK_FORMAT_B8G8R8A8_SRGB:
 			case VK_FORMAT_R8G8B8A8_UNORM:
 			case VK_FORMAT_R8G8B8A8_SRGB:
 			case VK_FORMAT_R8G8_UNORM:
 			case VK_FORMAT_R8_UNORM:
 			case VK_FORMAT_R16G16_UNORM:
 			case VK_FORMAT_R16G16B16A16_UNORM:
+			case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+			case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
 				oC[index].x = Max(oC[index].x, Float4(0.0f)); oC[index].x = Min(oC[index].x, Float4(1.0f));
 				oC[index].y = Max(oC[index].y, Float4(0.0f)); oC[index].y = Min(oC[index].y, Float4(1.0f));
 				oC[index].z = Max(oC[index].z, Float4(0.0f)); oC[index].z = Min(oC[index].z, Float4(1.0f));
@@ -231,6 +239,8 @@
 			case VK_FORMAT_R8_UINT:
 			case VK_FORMAT_R8G8_UINT:
 			case VK_FORMAT_R8G8B8A8_UINT:
+			case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+			case VK_FORMAT_A8B8G8R8_SINT_PACK32:
 				break;
 			default:
 				UNIMPLEMENTED("VkFormat: %d", int(state.targetFormat[index]));
diff --git a/src/Pipeline/PixelRoutine.cpp b/src/Pipeline/PixelRoutine.cpp
index b2f669a..9aac2e6 100644
--- a/src/Pipeline/PixelRoutine.cpp
+++ b/src/Pipeline/PixelRoutine.cpp
@@ -1017,6 +1017,7 @@
 			pixel.w = Short4(0xFFFFu);
 			break;
 		case VK_FORMAT_B8G8R8A8_UNORM:
+		case VK_FORMAT_B8G8R8A8_SRGB:
 			buffer = cBuffer + 4 * x;
 			c01 = *Pointer<Short4>(buffer);
 			buffer += *Pointer<Int>(data + OFFSET(DrawData, colorPitchB[index]));
@@ -1244,10 +1245,13 @@
 				current.z = AddSat(As<UShort4>(current.z), UShort4(0x0400));
 				break;
 			case VK_FORMAT_B8G8R8A8_UNORM:
+			case VK_FORMAT_B8G8R8A8_SRGB:
 			case VK_FORMAT_R8G8B8A8_UNORM:
 			case VK_FORMAT_R8G8B8A8_SRGB:
 			case VK_FORMAT_R8G8_UNORM:
 			case VK_FORMAT_R8_UNORM:
+			case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+			case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
 				current.x = current.x - As<Short4>(As<UShort4>(current.x) >> 8) + Short4(0x0080);
 				current.y = current.y - As<Short4>(As<UShort4>(current.y) >> 8) + Short4(0x0080);
 				current.z = current.z - As<Short4>(As<UShort4>(current.z) >> 8) + Short4(0x0080);
@@ -1273,6 +1277,7 @@
 			}
 			break;
 		case VK_FORMAT_B8G8R8A8_UNORM:
+		case VK_FORMAT_B8G8R8A8_SRGB:
 			if(rgbaWriteMask == 0x7)
 			{
 				current.x = As<Short4>(As<UShort4>(current.x) >> 8);
@@ -1309,6 +1314,8 @@
 			break;
 		case VK_FORMAT_R8G8B8A8_UNORM:
 		case VK_FORMAT_R8G8B8A8_SRGB:
+		case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+		case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
 			if(rgbaWriteMask == 0x7)
 			{
 				current.x = As<Short4>(As<UShort4>(current.x) >> 8);
@@ -1428,11 +1435,12 @@
 			}
 			break;
 		case VK_FORMAT_B8G8R8A8_UNORM:
+		case VK_FORMAT_B8G8R8A8_SRGB:
 			{
 				Pointer<Byte> buffer = cBuffer + x * 4;
 				Short4 value = *Pointer<Short4>(buffer);
 
-				if(state.targetFormat[index] == VK_FORMAT_B8G8R8A8_UNORM && bgraWriteMask != 0x0000000F)   // FIXME: Need for masking when XRGB && Fh?
+				if(bgraWriteMask != 0x0000000F)   // FIXME: Need for masking when XRGB && Fh?
 				{
 					Short4 masked = value;
 					c01 &= *Pointer<Short4>(constants + OFFSET(Constants,maskB4Q[bgraWriteMask][0]));
@@ -1448,7 +1456,7 @@
 				buffer += *Pointer<Int>(data + OFFSET(DrawData,colorPitchB[index]));
 				value = *Pointer<Short4>(buffer);
 
-				if(state.targetFormat[index] == VK_FORMAT_B8G8R8A8_UNORM && bgraWriteMask != 0x0000000F)   // FIXME: Need for masking when XRGB && Fh?
+				if(bgraWriteMask != 0x0000000F)   // FIXME: Need for masking when XRGB && Fh?
 				{
 					Short4 masked = value;
 					c23 &= *Pointer<Short4>(constants + OFFSET(Constants,maskB4Q[bgraWriteMask][0]));
@@ -1464,11 +1472,13 @@
 			break;
 		case VK_FORMAT_R8G8B8A8_UNORM:
 		case VK_FORMAT_R8G8B8A8_SRGB:
+		case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+		case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
 			{
 				Pointer<Byte> buffer = cBuffer + x * 4;
 				Short4 value = *Pointer<Short4>(buffer);
 
-				bool masked = ((state.targetFormat[index] == VK_FORMAT_R8G8B8A8_UNORM || state.targetFormat[index] == VK_FORMAT_R8G8B8A8_SRGB) && rgbaWriteMask != 0x0000000F); // FIXME: Need for masking when XBGR && Fh?
+				bool masked = (rgbaWriteMask != 0x0000000F); // FIXME: Need for masking when XBGR && Fh?
 
 				if(masked)
 				{
@@ -1994,6 +2004,8 @@
 		case VK_FORMAT_R16G16B16A16_UINT:
 		case VK_FORMAT_R8G8B8A8_SINT:
 		case VK_FORMAT_R8G8B8A8_UINT:
+		case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+		case VK_FORMAT_A8B8G8R8_SINT_PACK32:
 			transpose4x4(oC.x, oC.y, oC.z, oC.w);
 			break;
 		default:
@@ -2346,13 +2358,17 @@
 			break;
 		case VK_FORMAT_R8G8B8A8_SINT:
 		case VK_FORMAT_R8G8B8A8_UINT:
+		case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+		case VK_FORMAT_A8B8G8R8_SINT_PACK32:
 			if((rgbaWriteMask & 0x0000000F) != 0x0)
 			{
 				UInt2 value, packedCol, mergedMask;
 
 				buffer = cBuffer + 4 * x;
 
-				if(state.targetFormat[index] == VK_FORMAT_R8G8B8A8_SINT)
+				bool isSigned = state.targetFormat[index] == VK_FORMAT_R8G8B8A8_SINT || state.targetFormat[index] == VK_FORMAT_A8B8G8R8_SINT_PACK32;
+
+				if(isSigned)
 				{
 					packedCol = As<UInt2>(PackSigned(Short4(As<Int4>(oC.x)), Short4(As<Int4>(oC.y))));
 				}
@@ -2370,7 +2386,7 @@
 
 				buffer += *Pointer<Int>(data + OFFSET(DrawData, colorPitchB[index]));
 
-				if(state.targetFormat[index] == VK_FORMAT_R8G8B8A8_SINT)
+				if(isSigned)
 				{
 					packedCol = As<UInt2>(PackSigned(Short4(As<Int4>(oC.z)), Short4(As<Int4>(oC.w))));
 				}
diff --git a/src/Vulkan/VkFormat.cpp b/src/Vulkan/VkFormat.cpp
index d3a48c7..5a77526 100644
--- a/src/Vulkan/VkFormat.cpp
+++ b/src/Vulkan/VkFormat.cpp
@@ -147,6 +147,7 @@
 	switch(format)
 	{
 	case VK_FORMAT_B8G8R8A8_UNORM:
+	case VK_FORMAT_B8G8R8A8_SRGB:
 	case VK_FORMAT_R8G8B8A8_UNORM:
 	case VK_FORMAT_R8G8B8A8_SRGB:
 	case VK_FORMAT_R5G6B5_UNORM_PACK16:
@@ -163,8 +164,10 @@
 	{
 	case VK_FORMAT_UNDEFINED:
 	case VK_FORMAT_B8G8R8A8_UNORM:
+	case VK_FORMAT_B8G8R8A8_SRGB:
 	case VK_FORMAT_R8G8B8A8_UNORM:
 	case VK_FORMAT_R8G8B8A8_SRGB:
+	case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
 	case VK_FORMAT_R5G6B5_UNORM_PACK16:
 		return true;
 	default:
@@ -394,295 +397,295 @@
 	// Return a single format per group of compatible formats, for quick comparison
 	switch(format)
 	{
-	// 8 - bit, Block size 1 byte, 1 texel / block
-	case VK_FORMAT_R4G4_UNORM_PACK8:
-	case VK_FORMAT_R8_UNORM:
-	case VK_FORMAT_R8_SNORM:
-	case VK_FORMAT_R8_USCALED:
-	case VK_FORMAT_R8_SSCALED:
-	case VK_FORMAT_R8_UINT:
-	case VK_FORMAT_R8_SINT:
-	case VK_FORMAT_R8_SRGB:
-		return VK_FORMAT_R8_UNORM;
-
-	// 16 - bit, Block size 2 bytes, 1 texel / block
-	case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
-	case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
-	case VK_FORMAT_R5G6B5_UNORM_PACK16:
-	case VK_FORMAT_B5G6R5_UNORM_PACK16:
-	case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
-	case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
-	case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
-	case VK_FORMAT_R8G8_UNORM:
-	case VK_FORMAT_R8G8_SNORM:
-	case VK_FORMAT_R8G8_USCALED:
-	case VK_FORMAT_R8G8_SSCALED:
-	case VK_FORMAT_R8G8_UINT:
-	case VK_FORMAT_R8G8_SINT:
-	case VK_FORMAT_R8G8_SRGB:
-	case VK_FORMAT_R16_UNORM:
-	case VK_FORMAT_R16_SNORM:
-	case VK_FORMAT_R16_USCALED:
-	case VK_FORMAT_R16_SSCALED:
-	case VK_FORMAT_R16_UINT:
-	case VK_FORMAT_R16_SINT:
-	case VK_FORMAT_R16_SFLOAT:
-	case VK_FORMAT_R10X6_UNORM_PACK16:
-	case VK_FORMAT_R12X4_UNORM_PACK16:
-		return VK_FORMAT_R16_UNORM;
-
-	// 24 - bit, Block size 3 bytes, 1 texel / block
-	case VK_FORMAT_R8G8B8_UNORM:
-	case VK_FORMAT_R8G8B8_SNORM:
-	case VK_FORMAT_R8G8B8_USCALED:
-	case VK_FORMAT_R8G8B8_SSCALED:
-	case VK_FORMAT_R8G8B8_UINT:
-	case VK_FORMAT_R8G8B8_SINT:
-	case VK_FORMAT_R8G8B8_SRGB:
-	case VK_FORMAT_B8G8R8_UNORM:
-	case VK_FORMAT_B8G8R8_SNORM:
-	case VK_FORMAT_B8G8R8_USCALED:
-	case VK_FORMAT_B8G8R8_SSCALED:
-	case VK_FORMAT_B8G8R8_UINT:
-	case VK_FORMAT_B8G8R8_SINT:
-	case VK_FORMAT_B8G8R8_SRGB:
-		return VK_FORMAT_R8G8B8_UNORM;
-
-	// 32 - bit, Block size 4 bytes, 1 texel / block
-	case VK_FORMAT_R8G8B8A8_UNORM:
-	case VK_FORMAT_R8G8B8A8_SNORM:
-	case VK_FORMAT_R8G8B8A8_USCALED:
-	case VK_FORMAT_R8G8B8A8_SSCALED:
-	case VK_FORMAT_R8G8B8A8_UINT:
-	case VK_FORMAT_R8G8B8A8_SINT:
-	case VK_FORMAT_R8G8B8A8_SRGB:
-	case VK_FORMAT_B8G8R8A8_UNORM:
-	case VK_FORMAT_B8G8R8A8_SNORM:
-	case VK_FORMAT_B8G8R8A8_USCALED:
-	case VK_FORMAT_B8G8R8A8_SSCALED:
-	case VK_FORMAT_B8G8R8A8_UINT:
-	case VK_FORMAT_B8G8R8A8_SINT:
-	case VK_FORMAT_B8G8R8A8_SRGB:
-	case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
-	case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
-	case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
-	case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
-	case VK_FORMAT_A8B8G8R8_UINT_PACK32:
-	case VK_FORMAT_A8B8G8R8_SINT_PACK32:
-	case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
-	case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
-	case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
-	case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
-	case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
-	case VK_FORMAT_A2R10G10B10_UINT_PACK32:
-	case VK_FORMAT_A2R10G10B10_SINT_PACK32:
-	case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
-	case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
-	case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
-	case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
-	case VK_FORMAT_A2B10G10R10_UINT_PACK32:
-	case VK_FORMAT_A2B10G10R10_SINT_PACK32:
-	case VK_FORMAT_R16G16_UNORM:
-	case VK_FORMAT_R16G16_SNORM:
-	case VK_FORMAT_R16G16_USCALED:
-	case VK_FORMAT_R16G16_SSCALED:
-	case VK_FORMAT_R16G16_UINT:
-	case VK_FORMAT_R16G16_SINT:
-	case VK_FORMAT_R16G16_SFLOAT:
-	case VK_FORMAT_R32_UINT:
-	case VK_FORMAT_R32_SINT:
-	case VK_FORMAT_R32_SFLOAT:
-	case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
-	case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
-	case VK_FORMAT_R10X6G10X6_UNORM_2PACK16:
-	case VK_FORMAT_R12X4G12X4_UNORM_2PACK16:
-		return VK_FORMAT_R8G8B8A8_UNORM;
-
-	// 48 - bit, Block size 6 bytes, 1 texel / block
-	case VK_FORMAT_R16G16B16_UNORM:
-	case VK_FORMAT_R16G16B16_SNORM:
-	case VK_FORMAT_R16G16B16_USCALED:
-	case VK_FORMAT_R16G16B16_SSCALED:
-	case VK_FORMAT_R16G16B16_UINT:
-	case VK_FORMAT_R16G16B16_SINT:
-	case VK_FORMAT_R16G16B16_SFLOAT:
-		return VK_FORMAT_R16G16B16_UNORM;
-
-	// 64 - bit, Block size 8 bytes, 1 texel / block
-	case VK_FORMAT_R16G16B16A16_UNORM:
-	case VK_FORMAT_R16G16B16A16_SNORM:
-	case VK_FORMAT_R16G16B16A16_USCALED:
-	case VK_FORMAT_R16G16B16A16_SSCALED:
-	case VK_FORMAT_R16G16B16A16_UINT:
-	case VK_FORMAT_R16G16B16A16_SINT:
-	case VK_FORMAT_R16G16B16A16_SFLOAT:
-	case VK_FORMAT_R32G32_UINT:
-	case VK_FORMAT_R32G32_SINT:
-	case VK_FORMAT_R32G32_SFLOAT:
-	case VK_FORMAT_R64_UINT:
-	case VK_FORMAT_R64_SINT:
-	case VK_FORMAT_R64_SFLOAT:
-		return VK_FORMAT_R16G16B16A16_UNORM;
-
-	// 96 - bit, Block size 12 bytes, 1 texel / block
-	case VK_FORMAT_R32G32B32_UINT:
-	case VK_FORMAT_R32G32B32_SINT:
-	case VK_FORMAT_R32G32B32_SFLOAT:
-		return VK_FORMAT_R32G32B32_UINT;
-
-	// 128 - bit, Block size 16 bytes, 1 texel / block
-	case VK_FORMAT_R32G32B32A32_UINT:
-	case VK_FORMAT_R32G32B32A32_SINT:
-	case VK_FORMAT_R32G32B32A32_SFLOAT:
-	case VK_FORMAT_R64G64_UINT:
-	case VK_FORMAT_R64G64_SINT:
-	case VK_FORMAT_R64G64_SFLOAT:
-		return VK_FORMAT_R32G32B32A32_UINT;
-
-	// 192 - bit, Block size 24 bytes, 1 texel / block
-	case VK_FORMAT_R64G64B64_UINT:
-	case VK_FORMAT_R64G64B64_SINT:
-	case VK_FORMAT_R64G64B64_SFLOAT:
-		return VK_FORMAT_R64G64B64_UINT;
-
-	// 256 - bit, Block size 32 bytes, 1 texel / block
-	case VK_FORMAT_R64G64B64A64_UINT:
-	case VK_FORMAT_R64G64B64A64_SINT:
-	case VK_FORMAT_R64G64B64A64_SFLOAT:
-		return VK_FORMAT_R64G64B64A64_UINT;
-
-	// BC1_RGB(64 bit), Block size 8 bytes, 16 texels / block
-	case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
-	case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
-		return VK_FORMAT_BC1_RGB_UNORM_BLOCK;
-
-	// BC1_RGBA(64 bit), Block size 8 bytes, 16 texels / block
-	case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
-	case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
-		return VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
-
-	// BC2(128 bit), Block size 16 bytes, 16 texels / block
-	case VK_FORMAT_BC2_UNORM_BLOCK:
-	case VK_FORMAT_BC2_SRGB_BLOCK:
-		return VK_FORMAT_BC2_UNORM_BLOCK;
-
-	// BC3(128 bit), Block size 16 bytes, 16 texels / block
-	case VK_FORMAT_BC3_UNORM_BLOCK:
-	case VK_FORMAT_BC3_SRGB_BLOCK:
-		return VK_FORMAT_BC3_UNORM_BLOCK;
-
-	// BC4(64 bit), Block size 8 bytes, 16 texels / block
-	case VK_FORMAT_BC4_UNORM_BLOCK:
-	case VK_FORMAT_BC4_SNORM_BLOCK:
-		return VK_FORMAT_BC4_UNORM_BLOCK;
-
-	// BC5(128 bit), Block size 16 bytes, 16 texels / block
-	case VK_FORMAT_BC5_UNORM_BLOCK:
-	case VK_FORMAT_BC5_SNORM_BLOCK:
-		return VK_FORMAT_BC5_UNORM_BLOCK;
-
-	// BC6H(128 bit), Block size 16 bytes, 16 texels / block
-	case VK_FORMAT_BC6H_UFLOAT_BLOCK:
-	case VK_FORMAT_BC6H_SFLOAT_BLOCK:
-		return VK_FORMAT_BC6H_UFLOAT_BLOCK;
-
-	// BC7(128 bit), Block size 16 bytes, 16 texels / block
-	case VK_FORMAT_BC7_UNORM_BLOCK:
-	case VK_FORMAT_BC7_SRGB_BLOCK:
-		return VK_FORMAT_BC7_UNORM_BLOCK;
-
-	// ETC2_RGB(64 bit), Block size 8 bytes, 16 texels / block
-	case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
-	case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
-		return VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
-
-	// ETC2_RGBA(64 bit), Block size 8 bytes, 16 texels / block
-	case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
-	case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
-		return VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
-
-	// ETC2_EAC_RGBA(64 bit), Block size 8 bytes, 16 texels / block
-	case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
-	case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
-		return VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
-
-	// EAC_R(64 bit), Block size 8 bytes, 16 texels / block
-	case VK_FORMAT_EAC_R11_UNORM_BLOCK:
-	case VK_FORMAT_EAC_R11_SNORM_BLOCK:
-		return VK_FORMAT_EAC_R11_UNORM_BLOCK;
-
-	// EAC_RG(128 bit), Block size 16 bytes, 16 texels / block
-	case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
-	case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
-		return VK_FORMAT_EAC_R11G11_UNORM_BLOCK;
-
-	// ASTC_4x4(128 bit), Block size 16 bytes, 16 texels / block
-	case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
-
-	// ASTC_5x4(128 bit), Block size 16 bytes, 20 texels / block
-	case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_5x4_UNORM_BLOCK;
-
-	// ASTC_5x5(128 bit), Block size 16 bytes, 25 texels / block
-	case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_5x5_UNORM_BLOCK;
-
-	// ASTC_6x5(128 bit), Block size 16 bytes, 30 texels / block
-	case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_6x5_UNORM_BLOCK;
-
-	// ASTC_6x6(128 bit), Block size 16 bytes, 36 texels / block
-	case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_6x6_UNORM_BLOCK;
-
-	// ASTC_8x5(128 bit), Block size 16 bytes, 40 texels / block
-	case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_8x5_UNORM_BLOCK;
-
-	// ASTC_8x6(128 bit), Block size 16 bytes, 48 texels / block
-	case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_8x6_UNORM_BLOCK;
-
-	// ASTC_8x8(128 bit), Block size 16 bytes, 64 texels / block
-	case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
-
-	// ASTC_10x5(128 bit), Block size 16 bytes, 50 texels / block
-	case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_10x5_UNORM_BLOCK;
-
-	// ASTC_10x6(128 bit), Block size 16 bytes, 60 texels / block
-	case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_10x6_UNORM_BLOCK;
-
-	// ASTC_10x8(128 bit), Block size 16 bytes, 80 texels / block
-	case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_10x8_UNORM_BLOCK;
-
-	// ASTC_10x10(128 bit), Block size 16 bytes, 100 texels / block
-	case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_10x10_UNORM_BLOCK;
-
-	// ASTC_12x10(128 bit), Block size 16 bytes, 120 texels / block
-	case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
-	case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
-		return VK_FORMAT_ASTC_12x10_UNORM_BLOCK;
-
-	// ASTC_12x12(128 bit), Block size 16 bytes, 144 texels / block
-	case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+	// 8 - bit, Block size 1 byte, 1 texel / block
+	case VK_FORMAT_R4G4_UNORM_PACK8:
+	case VK_FORMAT_R8_UNORM:
+	case VK_FORMAT_R8_SNORM:
+	case VK_FORMAT_R8_USCALED:
+	case VK_FORMAT_R8_SSCALED:
+	case VK_FORMAT_R8_UINT:
+	case VK_FORMAT_R8_SINT:
+	case VK_FORMAT_R8_SRGB:
+		return VK_FORMAT_R8_UNORM;
+
+	// 16 - bit, Block size 2 bytes, 1 texel / block
+	case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
+	case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
+	case VK_FORMAT_R5G6B5_UNORM_PACK16:
+	case VK_FORMAT_B5G6R5_UNORM_PACK16:
+	case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+	case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
+	case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
+	case VK_FORMAT_R8G8_UNORM:
+	case VK_FORMAT_R8G8_SNORM:
+	case VK_FORMAT_R8G8_USCALED:
+	case VK_FORMAT_R8G8_SSCALED:
+	case VK_FORMAT_R8G8_UINT:
+	case VK_FORMAT_R8G8_SINT:
+	case VK_FORMAT_R8G8_SRGB:
+	case VK_FORMAT_R16_UNORM:
+	case VK_FORMAT_R16_SNORM:
+	case VK_FORMAT_R16_USCALED:
+	case VK_FORMAT_R16_SSCALED:
+	case VK_FORMAT_R16_UINT:
+	case VK_FORMAT_R16_SINT:
+	case VK_FORMAT_R16_SFLOAT:
+	case VK_FORMAT_R10X6_UNORM_PACK16:
+	case VK_FORMAT_R12X4_UNORM_PACK16:
+		return VK_FORMAT_R16_UNORM;
+
+	// 24 - bit, Block size 3 bytes, 1 texel / block
+	case VK_FORMAT_R8G8B8_UNORM:
+	case VK_FORMAT_R8G8B8_SNORM:
+	case VK_FORMAT_R8G8B8_USCALED:
+	case VK_FORMAT_R8G8B8_SSCALED:
+	case VK_FORMAT_R8G8B8_UINT:
+	case VK_FORMAT_R8G8B8_SINT:
+	case VK_FORMAT_R8G8B8_SRGB:
+	case VK_FORMAT_B8G8R8_UNORM:
+	case VK_FORMAT_B8G8R8_SNORM:
+	case VK_FORMAT_B8G8R8_USCALED:
+	case VK_FORMAT_B8G8R8_SSCALED:
+	case VK_FORMAT_B8G8R8_UINT:
+	case VK_FORMAT_B8G8R8_SINT:
+	case VK_FORMAT_B8G8R8_SRGB:
+		return VK_FORMAT_R8G8B8_UNORM;
+
+	// 32 - bit, Block size 4 bytes, 1 texel / block
+	case VK_FORMAT_R8G8B8A8_UNORM:
+	case VK_FORMAT_R8G8B8A8_SNORM:
+	case VK_FORMAT_R8G8B8A8_USCALED:
+	case VK_FORMAT_R8G8B8A8_SSCALED:
+	case VK_FORMAT_R8G8B8A8_UINT:
+	case VK_FORMAT_R8G8B8A8_SINT:
+	case VK_FORMAT_R8G8B8A8_SRGB:
+	case VK_FORMAT_B8G8R8A8_UNORM:
+	case VK_FORMAT_B8G8R8A8_SNORM:
+	case VK_FORMAT_B8G8R8A8_USCALED:
+	case VK_FORMAT_B8G8R8A8_SSCALED:
+	case VK_FORMAT_B8G8R8A8_UINT:
+	case VK_FORMAT_B8G8R8A8_SINT:
+	case VK_FORMAT_B8G8R8A8_SRGB:
+	case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+	case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+	case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
+	case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
+	case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+	case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+	case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
+	case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
+	case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
+	case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
+	case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
+	case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+	case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+	case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+	case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+	case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
+	case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
+	case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+	case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+	case VK_FORMAT_R16G16_UNORM:
+	case VK_FORMAT_R16G16_SNORM:
+	case VK_FORMAT_R16G16_USCALED:
+	case VK_FORMAT_R16G16_SSCALED:
+	case VK_FORMAT_R16G16_UINT:
+	case VK_FORMAT_R16G16_SINT:
+	case VK_FORMAT_R16G16_SFLOAT:
+	case VK_FORMAT_R32_UINT:
+	case VK_FORMAT_R32_SINT:
+	case VK_FORMAT_R32_SFLOAT:
+	case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
+	case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
+	case VK_FORMAT_R10X6G10X6_UNORM_2PACK16:
+	case VK_FORMAT_R12X4G12X4_UNORM_2PACK16:
+		return VK_FORMAT_R8G8B8A8_UNORM;
+
+	// 48 - bit, Block size 6 bytes, 1 texel / block
+	case VK_FORMAT_R16G16B16_UNORM:
+	case VK_FORMAT_R16G16B16_SNORM:
+	case VK_FORMAT_R16G16B16_USCALED:
+	case VK_FORMAT_R16G16B16_SSCALED:
+	case VK_FORMAT_R16G16B16_UINT:
+	case VK_FORMAT_R16G16B16_SINT:
+	case VK_FORMAT_R16G16B16_SFLOAT:
+		return VK_FORMAT_R16G16B16_UNORM;
+
+	// 64 - bit, Block size 8 bytes, 1 texel / block
+	case VK_FORMAT_R16G16B16A16_UNORM:
+	case VK_FORMAT_R16G16B16A16_SNORM:
+	case VK_FORMAT_R16G16B16A16_USCALED:
+	case VK_FORMAT_R16G16B16A16_SSCALED:
+	case VK_FORMAT_R16G16B16A16_UINT:
+	case VK_FORMAT_R16G16B16A16_SINT:
+	case VK_FORMAT_R16G16B16A16_SFLOAT:
+	case VK_FORMAT_R32G32_UINT:
+	case VK_FORMAT_R32G32_SINT:
+	case VK_FORMAT_R32G32_SFLOAT:
+	case VK_FORMAT_R64_UINT:
+	case VK_FORMAT_R64_SINT:
+	case VK_FORMAT_R64_SFLOAT:
+		return VK_FORMAT_R16G16B16A16_UNORM;
+
+	// 96 - bit, Block size 12 bytes, 1 texel / block
+	case VK_FORMAT_R32G32B32_UINT:
+	case VK_FORMAT_R32G32B32_SINT:
+	case VK_FORMAT_R32G32B32_SFLOAT:
+		return VK_FORMAT_R32G32B32_UINT;
+
+	// 128 - bit, Block size 16 bytes, 1 texel / block
+	case VK_FORMAT_R32G32B32A32_UINT:
+	case VK_FORMAT_R32G32B32A32_SINT:
+	case VK_FORMAT_R32G32B32A32_SFLOAT:
+	case VK_FORMAT_R64G64_UINT:
+	case VK_FORMAT_R64G64_SINT:
+	case VK_FORMAT_R64G64_SFLOAT:
+		return VK_FORMAT_R32G32B32A32_UINT;
+
+	// 192 - bit, Block size 24 bytes, 1 texel / block
+	case VK_FORMAT_R64G64B64_UINT:
+	case VK_FORMAT_R64G64B64_SINT:
+	case VK_FORMAT_R64G64B64_SFLOAT:
+		return VK_FORMAT_R64G64B64_UINT;
+
+	// 256 - bit, Block size 32 bytes, 1 texel / block
+	case VK_FORMAT_R64G64B64A64_UINT:
+	case VK_FORMAT_R64G64B64A64_SINT:
+	case VK_FORMAT_R64G64B64A64_SFLOAT:
+		return VK_FORMAT_R64G64B64A64_UINT;
+
+	// BC1_RGB(64 bit), Block size 8 bytes, 16 texels / block
+	case VK_FORMAT_BC1_RGB_UNORM_BLOCK:
+	case VK_FORMAT_BC1_RGB_SRGB_BLOCK:
+		return VK_FORMAT_BC1_RGB_UNORM_BLOCK;
+
+	// BC1_RGBA(64 bit), Block size 8 bytes, 16 texels / block
+	case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
+	case VK_FORMAT_BC1_RGBA_SRGB_BLOCK:
+		return VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
+
+	// BC2(128 bit), Block size 16 bytes, 16 texels / block
+	case VK_FORMAT_BC2_UNORM_BLOCK:
+	case VK_FORMAT_BC2_SRGB_BLOCK:
+		return VK_FORMAT_BC2_UNORM_BLOCK;
+
+	// BC3(128 bit), Block size 16 bytes, 16 texels / block
+	case VK_FORMAT_BC3_UNORM_BLOCK:
+	case VK_FORMAT_BC3_SRGB_BLOCK:
+		return VK_FORMAT_BC3_UNORM_BLOCK;
+
+	// BC4(64 bit), Block size 8 bytes, 16 texels / block
+	case VK_FORMAT_BC4_UNORM_BLOCK:
+	case VK_FORMAT_BC4_SNORM_BLOCK:
+		return VK_FORMAT_BC4_UNORM_BLOCK;
+
+	// BC5(128 bit), Block size 16 bytes, 16 texels / block
+	case VK_FORMAT_BC5_UNORM_BLOCK:
+	case VK_FORMAT_BC5_SNORM_BLOCK:
+		return VK_FORMAT_BC5_UNORM_BLOCK;
+
+	// BC6H(128 bit), Block size 16 bytes, 16 texels / block
+	case VK_FORMAT_BC6H_UFLOAT_BLOCK:
+	case VK_FORMAT_BC6H_SFLOAT_BLOCK:
+		return VK_FORMAT_BC6H_UFLOAT_BLOCK;
+
+	// BC7(128 bit), Block size 16 bytes, 16 texels / block
+	case VK_FORMAT_BC7_UNORM_BLOCK:
+	case VK_FORMAT_BC7_SRGB_BLOCK:
+		return VK_FORMAT_BC7_UNORM_BLOCK;
+
+	// ETC2_RGB(64 bit), Block size 8 bytes, 16 texels / block
+	case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+	case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+		return VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
+
+	// ETC2_RGBA(64 bit), Block size 8 bytes, 16 texels / block
+	case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+	case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+		return VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
+
+	// ETC2_EAC_RGBA(64 bit), Block size 8 bytes, 16 texels / block
+	case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+	case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+		return VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
+
+	// EAC_R(64 bit), Block size 8 bytes, 16 texels / block
+	case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+	case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+		return VK_FORMAT_EAC_R11_UNORM_BLOCK;
+
+	// EAC_RG(128 bit), Block size 16 bytes, 16 texels / block
+	case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+	case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+		return VK_FORMAT_EAC_R11G11_UNORM_BLOCK;
+
+	// ASTC_4x4(128 bit), Block size 16 bytes, 16 texels / block
+	case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
+
+	// ASTC_5x4(128 bit), Block size 16 bytes, 20 texels / block
+	case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_5x4_UNORM_BLOCK;
+
+	// ASTC_5x5(128 bit), Block size 16 bytes, 25 texels / block
+	case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_5x5_UNORM_BLOCK;
+
+	// ASTC_6x5(128 bit), Block size 16 bytes, 30 texels / block
+	case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_6x5_UNORM_BLOCK;
+
+	// ASTC_6x6(128 bit), Block size 16 bytes, 36 texels / block
+	case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_6x6_UNORM_BLOCK;
+
+	// ASTC_8x5(128 bit), Block size 16 bytes, 40 texels / block
+	case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_8x5_UNORM_BLOCK;
+
+	// ASTC_8x6(128 bit), Block size 16 bytes, 48 texels / block
+	case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_8x6_UNORM_BLOCK;
+
+	// ASTC_8x8(128 bit), Block size 16 bytes, 64 texels / block
+	case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_8x8_UNORM_BLOCK;
+
+	// ASTC_10x5(128 bit), Block size 16 bytes, 50 texels / block
+	case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_10x5_UNORM_BLOCK;
+
+	// ASTC_10x6(128 bit), Block size 16 bytes, 60 texels / block
+	case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_10x6_UNORM_BLOCK;
+
+	// ASTC_10x8(128 bit), Block size 16 bytes, 80 texels / block
+	case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_10x8_UNORM_BLOCK;
+
+	// ASTC_10x10(128 bit), Block size 16 bytes, 100 texels / block
+	case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_10x10_UNORM_BLOCK;
+
+	// ASTC_12x10(128 bit), Block size 16 bytes, 120 texels / block
+	case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+	case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+		return VK_FORMAT_ASTC_12x10_UNORM_BLOCK;
+
+	// ASTC_12x12(128 bit), Block size 16 bytes, 144 texels / block
+	case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
 	case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
 		return VK_FORMAT_ASTC_12x12_UNORM_BLOCK;