Expose the minimal subset of required supported formats

This CL exposes the bare minimum in terms of format requirements
in order to support Vulkan 1.1.

Fixes all failures in: dEQP-VK.api.info.format_properties.*

Bug b/117974925

Change-Id: Ia84b1b13430faffeb840e118b09b9f3e352c8cc9
Reviewed-on: https://swiftshader-review.googlesource.com/c/22888
Tested-by: Alexis Hétu <sugoi@google.com>
Reviewed-by: Nicolas Capens <nicolascapens@google.com>
diff --git a/src/Vulkan/VkPhysicalDevice.cpp b/src/Vulkan/VkPhysicalDevice.cpp
index 3a18d97..c002daf 100644
--- a/src/Vulkan/VkPhysicalDevice.cpp
+++ b/src/Vulkan/VkPhysicalDevice.cpp
@@ -28,7 +28,7 @@
 {
 	static const VkPhysicalDeviceFeatures features
 	{
-		true, // robustBufferAccess
+		true,  // robustBufferAccess
 		false, // fullDrawIndexUint32
 		false, // imageCubeArray
 		false, // independentBlend
@@ -48,7 +48,7 @@
 		false, // alphaToOne
 		false, // multiViewport
 		false, // samplerAnisotropy
-		false, // textureCompressionETC2
+		true,  // textureCompressionETC2
 		false, // textureCompressionASTC_LDR
 		false, // textureCompressionBC
 		false, // occlusionQueryPrecise
@@ -332,6 +332,265 @@
 	pFormatProperties->linearTilingFeatures = 0; // Unsupported format
 	pFormatProperties->optimalTilingFeatures = 0; // Unsupported format
 	pFormatProperties->bufferFeatures = 0; // Unsupported format
+
+	switch(format)
+	{
+	case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
+	case VK_FORMAT_R5G6B5_UNORM_PACK16:
+	case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
+	case VK_FORMAT_R8_UNORM:
+	case VK_FORMAT_R8_SNORM:
+	case VK_FORMAT_R8G8_UNORM:
+	case VK_FORMAT_R8G8_SNORM:
+	case VK_FORMAT_R8G8B8A8_UNORM:
+	case VK_FORMAT_R8G8B8A8_SNORM:
+	case VK_FORMAT_R8G8B8A8_SRGB:
+	case VK_FORMAT_B8G8R8A8_UNORM:
+	case VK_FORMAT_B8G8R8A8_SRGB:
+	case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+	case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+	case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
+	case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+	case VK_FORMAT_R16_SFLOAT:
+	case VK_FORMAT_R16G16_SFLOAT:
+	case VK_FORMAT_R16G16B16A16_SFLOAT:
+	case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
+	case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32:
+		pFormatProperties->optimalTilingFeatures |=
+			VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT;
+		// Fall through
+	case VK_FORMAT_R8_UINT:
+	case VK_FORMAT_R8_SINT:
+	case VK_FORMAT_R8G8_UINT:
+	case VK_FORMAT_R8G8_SINT:
+	case VK_FORMAT_R8G8B8A8_UINT:
+	case VK_FORMAT_R8G8B8A8_SINT:
+	case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+	case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+	case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+	case VK_FORMAT_R16_UINT:
+	case VK_FORMAT_R16_SINT:
+	case VK_FORMAT_R16G16_UINT:
+	case VK_FORMAT_R16G16_SINT:
+	case VK_FORMAT_R16G16B16A16_UINT:
+	case VK_FORMAT_R16G16B16A16_SINT:
+	case VK_FORMAT_R32_UINT:
+	case VK_FORMAT_R32_SINT:
+	case VK_FORMAT_R32_SFLOAT:
+	case VK_FORMAT_R32G32_UINT:
+	case VK_FORMAT_R32G32_SINT:
+	case VK_FORMAT_R32G32_SFLOAT:
+	case VK_FORMAT_R32G32B32A32_UINT:
+	case VK_FORMAT_R32G32B32A32_SINT:
+	case VK_FORMAT_R32G32B32A32_SFLOAT:
+	case VK_FORMAT_D16_UNORM:
+	case VK_FORMAT_D32_SFLOAT:
+		pFormatProperties->optimalTilingFeatures |=
+			VK_FORMAT_FEATURE_BLIT_SRC_BIT |
+			VK_FORMAT_FEATURE_TRANSFER_SRC_BIT |
+			VK_FORMAT_FEATURE_TRANSFER_DST_BIT;
+		// Fall through
+	case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+	case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+	case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+	case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+	case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+	case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+	case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+	case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+	case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+	case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+		pFormatProperties->optimalTilingFeatures |=
+			VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
+		break;
+	}
+
+	switch(format)
+	{
+	case VK_FORMAT_R32_UINT:
+	case VK_FORMAT_R32_SINT:
+		pFormatProperties->optimalTilingFeatures |=
+			VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT;
+		pFormatProperties->bufferFeatures |=
+			VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT;
+		// Fall through
+	case VK_FORMAT_R8G8B8A8_UNORM:
+	case VK_FORMAT_R8G8B8A8_SNORM:
+	case VK_FORMAT_R8G8B8A8_UINT:
+	case VK_FORMAT_R8G8B8A8_SINT:
+	case VK_FORMAT_R16G16B16A16_UINT:
+	case VK_FORMAT_R16G16B16A16_SINT:
+	case VK_FORMAT_R16G16B16A16_SFLOAT:
+	case VK_FORMAT_R32_SFLOAT:
+	case VK_FORMAT_R32G32_UINT:
+	case VK_FORMAT_R32G32_SINT:
+	case VK_FORMAT_R32G32_SFLOAT:
+	case VK_FORMAT_R32G32B32A32_UINT:
+	case VK_FORMAT_R32G32B32A32_SINT:
+	case VK_FORMAT_R32G32B32A32_SFLOAT:
+		pFormatProperties->optimalTilingFeatures |=
+			VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT;
+		// Fall through
+	case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+	case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+	case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+	case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+		pFormatProperties->bufferFeatures |=
+			VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT;
+		break;
+	}
+
+	switch(format)
+	{
+	case VK_FORMAT_R5G6B5_UNORM_PACK16:
+	case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
+	case VK_FORMAT_R8_UNORM:
+	case VK_FORMAT_R8G8_UNORM:
+	case VK_FORMAT_R8G8B8A8_UNORM:
+	case VK_FORMAT_R8G8B8A8_SRGB:
+	case VK_FORMAT_B8G8R8A8_UNORM:
+	case VK_FORMAT_B8G8R8A8_SRGB:
+	case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+	case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
+	case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+	case VK_FORMAT_R16_SFLOAT:
+	case VK_FORMAT_R16G16_SFLOAT:
+	case VK_FORMAT_R16G16B16A16_SFLOAT:
+		pFormatProperties->optimalTilingFeatures |=
+			VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT;
+		// Fall through
+	case VK_FORMAT_R8_UINT:
+	case VK_FORMAT_R8_SINT:
+	case VK_FORMAT_R8G8_UINT:
+	case VK_FORMAT_R8G8_SINT:
+	case VK_FORMAT_R8G8B8A8_UINT:
+	case VK_FORMAT_R8G8B8A8_SINT:
+	case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+	case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+	case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+	case VK_FORMAT_R16_UINT:
+	case VK_FORMAT_R16_SINT:
+	case VK_FORMAT_R16G16_UINT:
+	case VK_FORMAT_R16G16_SINT:
+	case VK_FORMAT_R16G16B16A16_UINT:
+	case VK_FORMAT_R16G16B16A16_SINT:
+	case VK_FORMAT_R32_UINT:
+	case VK_FORMAT_R32_SINT:
+	case VK_FORMAT_R32_SFLOAT:
+	case VK_FORMAT_R32G32_UINT:
+	case VK_FORMAT_R32G32_SINT:
+	case VK_FORMAT_R32G32_SFLOAT:
+	case VK_FORMAT_R32G32B32A32_UINT:
+	case VK_FORMAT_R32G32B32A32_SINT:
+	case VK_FORMAT_R32G32B32A32_SFLOAT:
+		pFormatProperties->optimalTilingFeatures |=
+			VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT |
+			VK_FORMAT_FEATURE_BLIT_DST_BIT;
+		break;
+	case VK_FORMAT_D16_UNORM:
+	case VK_FORMAT_D32_SFLOAT: // Note: either VK_FORMAT_D32_SFLOAT or VK_FORMAT_X8_D24_UNORM_PACK32 must be supported
+	case VK_FORMAT_D32_SFLOAT_S8_UINT: // Note: either VK_FORMAT_D24_UNORM_S8_UINT or VK_FORMAT_D32_SFLOAT_S8_UINT must be supported
+		pFormatProperties->optimalTilingFeatures |=
+			VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;
+		break;
+	}
+
+	switch(format)
+	{
+	case VK_FORMAT_R8_UNORM:
+	case VK_FORMAT_R8_SNORM:
+	case VK_FORMAT_R8_UINT:
+	case VK_FORMAT_R8_SINT:
+	case VK_FORMAT_R8G8_UNORM:
+	case VK_FORMAT_R8G8_SNORM:
+	case VK_FORMAT_R8G8_UINT:
+	case VK_FORMAT_R8G8_SINT:
+	case VK_FORMAT_R8G8B8A8_UNORM:
+	case VK_FORMAT_R8G8B8A8_SNORM:
+	case VK_FORMAT_R8G8B8A8_UINT:
+	case VK_FORMAT_R8G8B8A8_SINT:
+	case VK_FORMAT_B8G8R8A8_UNORM:
+	case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+	case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+	case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+	case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+	case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+	case VK_FORMAT_R16_UNORM:
+	case VK_FORMAT_R16_SNORM:
+	case VK_FORMAT_R16_UINT:
+	case VK_FORMAT_R16_SINT:
+	case VK_FORMAT_R16_SFLOAT:
+	case VK_FORMAT_R16G16_UNORM:
+	case VK_FORMAT_R16G16_SNORM:
+	case VK_FORMAT_R16G16_UINT:
+	case VK_FORMAT_R16G16_SINT:
+	case VK_FORMAT_R16G16_SFLOAT:
+	case VK_FORMAT_R16G16B16A16_UNORM:
+	case VK_FORMAT_R16G16B16A16_SNORM:
+	case VK_FORMAT_R16G16B16A16_UINT:
+	case VK_FORMAT_R16G16B16A16_SINT:
+	case VK_FORMAT_R16G16B16A16_SFLOAT:
+	case VK_FORMAT_R32_UINT:
+	case VK_FORMAT_R32_SINT:
+	case VK_FORMAT_R32_SFLOAT:
+	case VK_FORMAT_R32G32_UINT:
+	case VK_FORMAT_R32G32_SINT:
+	case VK_FORMAT_R32G32_SFLOAT:
+	case VK_FORMAT_R32G32B32_UINT:
+	case VK_FORMAT_R32G32B32_SINT:
+	case VK_FORMAT_R32G32B32_SFLOAT:
+	case VK_FORMAT_R32G32B32A32_UINT:
+	case VK_FORMAT_R32G32B32A32_SINT:
+	case VK_FORMAT_R32G32B32A32_SFLOAT:
+		pFormatProperties->bufferFeatures |=
+			VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT;
+		break;
+	}
+
+	switch(format)
+	{
+	case VK_FORMAT_R8_UNORM:
+	case VK_FORMAT_R8_SNORM:
+	case VK_FORMAT_R8_UINT:
+	case VK_FORMAT_R8_SINT:
+	case VK_FORMAT_R8G8_UNORM:
+	case VK_FORMAT_R8G8_SNORM:
+	case VK_FORMAT_R8G8_UINT:
+	case VK_FORMAT_R8G8_SINT:
+	case VK_FORMAT_R8G8B8A8_UNORM:
+	case VK_FORMAT_R8G8B8A8_SNORM:
+	case VK_FORMAT_R8G8B8A8_UINT:
+	case VK_FORMAT_R8G8B8A8_SINT:
+	case VK_FORMAT_B8G8R8A8_UNORM:
+	case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+	case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+	case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+	case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+	case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+	case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+	case VK_FORMAT_R16_UINT:
+	case VK_FORMAT_R16_SINT:
+	case VK_FORMAT_R16_SFLOAT:
+	case VK_FORMAT_R16G16_UINT:
+	case VK_FORMAT_R16G16_SINT:
+	case VK_FORMAT_R16G16_SFLOAT:
+	case VK_FORMAT_R16G16B16A16_UINT:
+	case VK_FORMAT_R16G16B16A16_SINT:
+	case VK_FORMAT_R16G16B16A16_SFLOAT:
+	case VK_FORMAT_R32_UINT:
+	case VK_FORMAT_R32_SINT:
+	case VK_FORMAT_R32_SFLOAT:
+	case VK_FORMAT_R32G32_UINT:
+	case VK_FORMAT_R32G32_SINT:
+	case VK_FORMAT_R32G32_SFLOAT:
+	case VK_FORMAT_R32G32B32A32_UINT:
+	case VK_FORMAT_R32G32B32A32_SINT:
+	case VK_FORMAT_R32G32B32A32_SFLOAT:
+	case VK_FORMAT_B10G11R11_UFLOAT_PACK32:
+		pFormatProperties->bufferFeatures |=
+			VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT;
+		break;
+	}
 }
 
 void PhysicalDevice::getImageFormatProperties(VkFormat format, VkImageType type, VkImageTiling tiling,