Add utility functions from sw::Surface to vk::Format
In order to eventually remove sw::Surface, some utility
functions used by vk::Image were copied to vk::Format.
Bug b/126883332
Change-Id: Ie8404b70adc3336d536dcd5c0ec26b63e46c5174
Reviewed-on: https://swiftshader-review.googlesource.com/c/SwiftShader/+/26872
Tested-by: Alexis Hétu <sugoi@google.com>
Reviewed-by: Nicolas Capens <nicolascapens@google.com>
diff --git a/src/Vulkan/VkFormat.cpp b/src/Vulkan/VkFormat.cpp
new file mode 100644
index 0000000..6c0fa10
--- /dev/null
+++ b/src/Vulkan/VkFormat.cpp
@@ -0,0 +1,463 @@
+// Copyright 2019 The SwiftShader Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "VkFormat.h"
+#include "VkDebug.hpp"
+#include "System/Math.hpp"
+
+namespace vk
+{
+
+bool Format::isSignedNonNormalizedInteger() const // True for *_SINT formats: signed integers not normalized to [-1,1]
+{
+ switch(format)
+ {
+ case VK_FORMAT_R8_SINT:
+ case VK_FORMAT_R8G8_SINT:
+ case VK_FORMAT_R8G8B8_SINT:
+ case VK_FORMAT_B8G8R8_SINT:
+ case VK_FORMAT_R8G8B8A8_SINT:
+ case VK_FORMAT_B8G8R8A8_SINT:
+ case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+ case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+ case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+ case VK_FORMAT_R16_SINT:
+ case VK_FORMAT_R16G16_SINT:
+ case VK_FORMAT_R16G16B16_SINT:
+ case VK_FORMAT_R16G16B16A16_SINT:
+ case VK_FORMAT_R32_SINT:
+ case VK_FORMAT_R32G32_SINT:
+ case VK_FORMAT_R32G32B32_SINT:
+ case VK_FORMAT_R32G32B32A32_SINT:
+ case VK_FORMAT_R64_SINT:
+ case VK_FORMAT_R64G64_SINT:
+ case VK_FORMAT_R64G64B64_SINT:
+ case VK_FORMAT_R64G64B64A64_SINT:
+ return true;
+ default: // SSCALED/SNORM and all non-integer formats are not "non-normalized integer"
+ return false;
+ }
+}
+
+bool Format::isUnsignedNonNormalizedInteger() const // True for *_UINT formats: unsigned integers not normalized to [0,1]
+{
+ switch(format)
+ {
+ case VK_FORMAT_R8_UINT:
+ case VK_FORMAT_R8G8_UINT:
+ case VK_FORMAT_R8G8B8_UINT:
+ case VK_FORMAT_B8G8R8_UINT:
+ case VK_FORMAT_R8G8B8A8_UINT:
+ case VK_FORMAT_B8G8R8A8_UINT:
+ case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+ case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+ case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+ case VK_FORMAT_R16_UINT:
+ case VK_FORMAT_R16G16_UINT:
+ case VK_FORMAT_R16G16B16_UINT:
+ case VK_FORMAT_R16G16B16A16_UINT:
+ case VK_FORMAT_R32_UINT:
+ case VK_FORMAT_R32G32_UINT:
+ case VK_FORMAT_R32G32B32_UINT:
+ case VK_FORMAT_R32G32B32A32_UINT:
+ case VK_FORMAT_R64_UINT:
+ case VK_FORMAT_R64G64_UINT:
+ case VK_FORMAT_R64G64B64_UINT:
+ case VK_FORMAT_R64G64B64A64_UINT:
+ return true;
+ default: // USCALED/UNORM and all non-integer formats are not "non-normalized integer"
+ return false;
+ }
+}
+
+bool Format::isStencil() const // True if the format carries a stencil aspect (an S8 component)
+{
+ switch(format)
+ {
+ case VK_FORMAT_D16_UNORM_S8_UINT:
+ case VK_FORMAT_D24_UNORM_S8_UINT:
+ case VK_FORMAT_S8_UINT:
+ case VK_FORMAT_D32_SFLOAT_S8_UINT:
+ return true;
+ case VK_FORMAT_D16_UNORM: // Depth-only formats listed explicitly for documentation; fall through to false
+ case VK_FORMAT_X8_D24_UNORM_PACK32:
+ case VK_FORMAT_D32_SFLOAT:
+ default:
+ return false;
+ }
+}
+
+bool Format::isDepth() const // True if the format carries a depth aspect (a D16/D24/D32 component)
+{
+ switch(format)
+ {
+ case VK_FORMAT_D16_UNORM:
+ case VK_FORMAT_D16_UNORM_S8_UINT:
+ case VK_FORMAT_X8_D24_UNORM_PACK32:
+ case VK_FORMAT_D24_UNORM_S8_UINT:
+ case VK_FORMAT_D32_SFLOAT:
+ case VK_FORMAT_D32_SFLOAT_S8_UINT:
+ return true;
+ case VK_FORMAT_S8_UINT: // Stencil-only format listed explicitly for documentation; falls through to false
+ default:
+ return false;
+ }
+}
+
+int Format::bytes() const // Bytes per texel; for compressed formats, bytes per single-texel column of a block row (see note below)
+{
+ switch(format)
+ {
+ case VK_FORMAT_UNDEFINED:
+ return 0;
+ case VK_FORMAT_R4G4_UNORM_PACK8:
+ return 1;
+ case VK_FORMAT_R4G4B4A4_UNORM_PACK16:
+ case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
+ case VK_FORMAT_R5G6B5_UNORM_PACK16:
+ case VK_FORMAT_B5G6R5_UNORM_PACK16:
+ case VK_FORMAT_R5G5B5A1_UNORM_PACK16:
+ case VK_FORMAT_B5G5R5A1_UNORM_PACK16:
+ case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
+ return 2;
+ case VK_FORMAT_R8_UNORM:
+ case VK_FORMAT_R8_SNORM:
+ case VK_FORMAT_R8_USCALED:
+ case VK_FORMAT_R8_SSCALED:
+ case VK_FORMAT_R8_UINT:
+ case VK_FORMAT_R8_SINT:
+ case VK_FORMAT_R8_SRGB:
+ return 1;
+ case VK_FORMAT_R8G8_UNORM:
+ case VK_FORMAT_R8G8_SNORM:
+ case VK_FORMAT_R8G8_USCALED:
+ case VK_FORMAT_R8G8_SSCALED:
+ case VK_FORMAT_R8G8_UINT:
+ case VK_FORMAT_R8G8_SINT:
+ case VK_FORMAT_R8G8_SRGB:
+ return 2;
+ case VK_FORMAT_R8G8B8_UNORM:
+ case VK_FORMAT_R8G8B8_SNORM:
+ case VK_FORMAT_R8G8B8_USCALED:
+ case VK_FORMAT_R8G8B8_SSCALED:
+ case VK_FORMAT_R8G8B8_UINT:
+ case VK_FORMAT_R8G8B8_SINT:
+ case VK_FORMAT_R8G8B8_SRGB:
+ case VK_FORMAT_B8G8R8_UNORM:
+ case VK_FORMAT_B8G8R8_SNORM:
+ case VK_FORMAT_B8G8R8_USCALED:
+ case VK_FORMAT_B8G8R8_SSCALED:
+ case VK_FORMAT_B8G8R8_UINT:
+ case VK_FORMAT_B8G8R8_SINT:
+ case VK_FORMAT_B8G8R8_SRGB:
+ return 3;
+ case VK_FORMAT_R8G8B8A8_UNORM:
+ case VK_FORMAT_R8G8B8A8_SNORM:
+ case VK_FORMAT_R8G8B8A8_USCALED:
+ case VK_FORMAT_R8G8B8A8_SSCALED:
+ case VK_FORMAT_R8G8B8A8_UINT:
+ case VK_FORMAT_R8G8B8A8_SINT:
+ case VK_FORMAT_R8G8B8A8_SRGB:
+ case VK_FORMAT_B8G8R8A8_UNORM:
+ case VK_FORMAT_B8G8R8A8_SNORM:
+ case VK_FORMAT_B8G8R8A8_USCALED:
+ case VK_FORMAT_B8G8R8A8_SSCALED:
+ case VK_FORMAT_B8G8R8A8_UINT:
+ case VK_FORMAT_B8G8R8A8_SINT:
+ case VK_FORMAT_B8G8R8A8_SRGB:
+ case VK_FORMAT_A8B8G8R8_UNORM_PACK32:
+ case VK_FORMAT_A8B8G8R8_SNORM_PACK32:
+ case VK_FORMAT_A8B8G8R8_USCALED_PACK32:
+ case VK_FORMAT_A8B8G8R8_SSCALED_PACK32:
+ case VK_FORMAT_A8B8G8R8_UINT_PACK32:
+ case VK_FORMAT_A8B8G8R8_SINT_PACK32:
+ case VK_FORMAT_A8B8G8R8_SRGB_PACK32:
+ case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
+ case VK_FORMAT_A2R10G10B10_SNORM_PACK32:
+ case VK_FORMAT_A2R10G10B10_USCALED_PACK32:
+ case VK_FORMAT_A2R10G10B10_SSCALED_PACK32:
+ case VK_FORMAT_A2R10G10B10_UINT_PACK32:
+ case VK_FORMAT_A2R10G10B10_SINT_PACK32:
+ case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
+ case VK_FORMAT_A2B10G10R10_SNORM_PACK32:
+ case VK_FORMAT_A2B10G10R10_USCALED_PACK32:
+ case VK_FORMAT_A2B10G10R10_SSCALED_PACK32:
+ case VK_FORMAT_A2B10G10R10_UINT_PACK32:
+ case VK_FORMAT_A2B10G10R10_SINT_PACK32:
+ return 4;
+ case VK_FORMAT_R16_UNORM:
+ case VK_FORMAT_R16_SNORM:
+ case VK_FORMAT_R16_USCALED:
+ case VK_FORMAT_R16_SSCALED:
+ case VK_FORMAT_R16_UINT:
+ case VK_FORMAT_R16_SINT:
+ case VK_FORMAT_R16_SFLOAT:
+ return 2;
+ case VK_FORMAT_R16G16_UNORM:
+ case VK_FORMAT_R16G16_SNORM:
+ case VK_FORMAT_R16G16_USCALED:
+ case VK_FORMAT_R16G16_SSCALED:
+ case VK_FORMAT_R16G16_UINT:
+ case VK_FORMAT_R16G16_SINT:
+ case VK_FORMAT_R16G16_SFLOAT:
+ return 4;
+ case VK_FORMAT_R16G16B16_UNORM:
+ case VK_FORMAT_R16G16B16_SNORM:
+ case VK_FORMAT_R16G16B16_USCALED:
+ case VK_FORMAT_R16G16B16_SSCALED:
+ case VK_FORMAT_R16G16B16_UINT:
+ case VK_FORMAT_R16G16B16_SINT:
+ case VK_FORMAT_R16G16B16_SFLOAT:
+ return 6;
+ case VK_FORMAT_R16G16B16A16_UNORM:
+ case VK_FORMAT_R16G16B16A16_SNORM:
+ case VK_FORMAT_R16G16B16A16_USCALED:
+ case VK_FORMAT_R16G16B16A16_SSCALED:
+ case VK_FORMAT_R16G16B16A16_UINT:
+ case VK_FORMAT_R16G16B16A16_SINT:
+ case VK_FORMAT_R16G16B16A16_SFLOAT:
+ return 8;
+ case VK_FORMAT_R32_UINT:
+ case VK_FORMAT_R32_SINT:
+ case VK_FORMAT_R32_SFLOAT:
+ return 4;
+ case VK_FORMAT_R32G32_UINT:
+ case VK_FORMAT_R32G32_SINT:
+ case VK_FORMAT_R32G32_SFLOAT:
+ return 8;
+ case VK_FORMAT_R32G32B32_UINT:
+ case VK_FORMAT_R32G32B32_SINT:
+ case VK_FORMAT_R32G32B32_SFLOAT:
+ return 12;
+ case VK_FORMAT_R32G32B32A32_UINT:
+ case VK_FORMAT_R32G32B32A32_SINT:
+ case VK_FORMAT_R32G32B32A32_SFLOAT:
+ return 16;
+ case VK_FORMAT_R64_UINT:
+ case VK_FORMAT_R64_SINT:
+ case VK_FORMAT_R64_SFLOAT:
+ return 8;
+ case VK_FORMAT_R64G64_UINT:
+ case VK_FORMAT_R64G64_SINT:
+ case VK_FORMAT_R64G64_SFLOAT:
+ return 16;
+ case VK_FORMAT_R64G64B64_UINT:
+ case VK_FORMAT_R64G64B64_SINT:
+ case VK_FORMAT_R64G64B64_SFLOAT:
+ return 24;
+ case VK_FORMAT_R64G64B64A64_UINT:
+ case VK_FORMAT_R64G64B64A64_SINT:
+ case VK_FORMAT_R64G64B64A64_SFLOAT:
+ return 32;
+ case VK_FORMAT_B10G11R11_UFLOAT_PACK32: return 4;
+ case VK_FORMAT_E5B9G9R9_UFLOAT_PACK32: return 4;
+ case VK_FORMAT_D16_UNORM: return 2;
+ case VK_FORMAT_X8_D24_UNORM_PACK32: return 4;
+ case VK_FORMAT_D32_SFLOAT: return 4;
+ case VK_FORMAT_S8_UINT: return 1;
+ case VK_FORMAT_D16_UNORM_S8_UINT: return 2; // Separate depth and stencil planes
+ case VK_FORMAT_D24_UNORM_S8_UINT: return 4; // Combined depth and stencil planes
+ case VK_FORMAT_D32_SFLOAT_S8_UINT: return 4; // Separate depth and stencil planes
+ // Note: Compressed formats don't return bytes per pixel,
+ // since these would be fractional. The returned value
+ // is bytes per pixel for 1 column, so 2 for 64 bit 4x4
+ // blocks and 4 for 128 bit 4x4 blocks.
+ case VK_FORMAT_BC1_RGB_UNORM_BLOCK: return 2;
+ case VK_FORMAT_BC1_RGB_SRGB_BLOCK: return 2;
+ case VK_FORMAT_BC1_RGBA_UNORM_BLOCK: return 2;
+ case VK_FORMAT_BC1_RGBA_SRGB_BLOCK: return 2;
+ case VK_FORMAT_BC2_UNORM_BLOCK: return 4;
+ case VK_FORMAT_BC2_SRGB_BLOCK: return 4;
+ case VK_FORMAT_BC3_UNORM_BLOCK: return 4;
+ case VK_FORMAT_BC3_SRGB_BLOCK: return 4;
+ case VK_FORMAT_BC4_UNORM_BLOCK: return 2;
+ case VK_FORMAT_BC4_SNORM_BLOCK: return 2;
+ case VK_FORMAT_BC5_UNORM_BLOCK: return 4;
+ case VK_FORMAT_BC5_SNORM_BLOCK: return 4;
+ case VK_FORMAT_BC6H_UFLOAT_BLOCK: return 4;
+ case VK_FORMAT_BC6H_SFLOAT_BLOCK: return 4;
+ case VK_FORMAT_BC7_UNORM_BLOCK: return 4;
+ case VK_FORMAT_BC7_SRGB_BLOCK: return 4;
+ case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK: return 2;
+ case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK: return 2;
+ case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK: return 2;
+ case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK: return 2;
+ case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK: return 4;
+ case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK: return 4;
+ case VK_FORMAT_EAC_R11_UNORM_BLOCK: return 2;
+ case VK_FORMAT_EAC_R11_SNORM_BLOCK: return 2;
+ case VK_FORMAT_EAC_R11G11_UNORM_BLOCK: return 4;
+ case VK_FORMAT_EAC_R11G11_SNORM_BLOCK: return 4;
+ case VK_FORMAT_ASTC_4x4_UNORM_BLOCK: return 4;
+ case VK_FORMAT_ASTC_4x4_SRGB_BLOCK: return 4;
+ case VK_FORMAT_ASTC_5x4_UNORM_BLOCK: // Non-4x4 ASTC block sizes are not yet supported here
+ case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x12_SRGB_BLOCK: return 0; // FIXME
+ case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM: return 1; // Y plane only
+ default:
+ ASSERT(false);
+ }
+
+ return 0; // Unreachable when ASSERT aborts; silences missing-return warnings
+}
+
+int Format::pitchB(int width, int border, bool target) const // Row pitch in bytes for 'width' texels plus 'border' texels on each side
+{
+ width += 2 * border;
+
+ // Render targets require 2x2 quads
+ if(target || isDepth() || isStencil())
+ {
+ width = sw::align<2>(width);
+ }
+
+ switch(format)
+ {
+ case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+ return 8 * ((width + 3) / 4); // 64 bit per 4x4 block, computed per 4 rows
+ case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+ return 16 * ((width + 3) / 4); // 128 bit per 4x4 block, computed per 4 rows
+ case VK_FORMAT_ASTC_5x4_UNORM_BLOCK: // All ASTC blocks below are 128 bit; pitch is 16 bytes per block-width column
+ case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+ return 16 * ((width + 4) / 5);
+ case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+ return 16 * ((width + 5) / 6);
+ case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+ return 16 * ((width + 7) / 8);
+ case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+ return 16 * ((width + 9) / 10);
+ case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
+ return 16 * ((width + 11) / 12);
+ case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
+ return sw::align<16>(width); // Y plane pitch, 16-byte aligned (bytes() returns 1 for the Y plane)
+ default:
+ return bytes() * width; // Uncompressed: simple bytes-per-texel times width
+ }
+}
+
+int Format::sliceB(int width, int height, int border, bool target) const // Size in bytes of one 2D slice (layer); rows are counted per block row for compressed formats
+{
+ height += 2 * border;
+
+ // Render targets require 2x2 quads
+ if(target || isDepth() || isStencil())
+ {
+ height = sw::align<2>(height);
+ }
+
+ switch(format)
+ {
+ case VK_FORMAT_EAC_R11_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11_SNORM_BLOCK:
+ case VK_FORMAT_EAC_R11G11_UNORM_BLOCK:
+ case VK_FORMAT_EAC_R11G11_SNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK:
+ case VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_4x4_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_4x4_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_5x4_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_5x4_SRGB_BLOCK:
+ return pitchB(width, border, target) * ((height + 3) / 4); // Pitch computed per 4 rows
+ case VK_FORMAT_ASTC_5x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_5x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_6x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x5_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x5_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x5_SRGB_BLOCK:
+ return pitchB(width, border, target) * ((height + 4) / 5); // Pitch computed per 5 rows
+ case VK_FORMAT_ASTC_6x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_6x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_8x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x6_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x6_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x6_SRGB_BLOCK:
+ return pitchB(width, border, target) * ((height + 5) / 6); // Pitch computed per 6 rows
+ case VK_FORMAT_ASTC_8x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_8x8_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_10x8_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x8_SRGB_BLOCK:
+ return pitchB(width, border, target) * ((height + 7) / 8); // Pitch computed per 8 rows
+ case VK_FORMAT_ASTC_10x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_10x10_SRGB_BLOCK:
+ case VK_FORMAT_ASTC_12x10_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x10_SRGB_BLOCK:
+ return pitchB(width, border, target) * ((height + 9) / 10); // Pitch computed per 10 rows
+ case VK_FORMAT_ASTC_12x12_UNORM_BLOCK:
+ case VK_FORMAT_ASTC_12x12_SRGB_BLOCK:
+ return pitchB(width, border, target) * ((height + 11) / 12); // Pitch computed per 12 rows
+ default:
+ return pitchB(width, border, target) * height; // Pitch computed per row
+ }
+}
+
+} // namespace vk
diff --git a/src/Vulkan/VkFormat.h b/src/Vulkan/VkFormat.h
new file mode 100644
index 0000000..53f0abb
--- /dev/null
+++ b/src/Vulkan/VkFormat.h
@@ -0,0 +1,44 @@
+// Copyright 2019 The SwiftShader Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#ifndef VK_FORMAT_UTILS_HPP_
+#define VK_FORMAT_UTILS_HPP_
+
+#include <vulkan/vulkan_core.h>
+
+namespace vk
+{
+
+class Format // Lightweight wrapper around VkFormat providing format-introspection utilities (ported from sw::Surface)
+{
+public:
+ Format(VkFormat format) : format(format) {} // NOTE(review): non-explicit looks intentional, to allow implicit VkFormat -> Format conversion at call sites -- confirm
+ operator VkFormat() const { return format; } // Implicit conversion back, so a Format can be used wherever a VkFormat is expected
+
+ bool isSignedNonNormalizedInteger() const;
+ bool isUnsignedNonNormalizedInteger() const;
+
+ bool isStencil() const;
+ bool isDepth() const;
+
+ int bytes() const;
+ int pitchB(int width, int border, bool target) const;
+ int sliceB(int width, int height, int border, bool target) const;
+private:
+ VkFormat format;
+};
+
+} // namespace vk
+
+#endif // VK_FORMAT_UTILS_HPP_
diff --git a/src/Vulkan/VkImage.cpp b/src/Vulkan/VkImage.cpp
index 4d2d71a..90581e4 100644
--- a/src/Vulkan/VkImage.cpp
+++ b/src/Vulkan/VkImage.cpp
@@ -17,19 +17,18 @@
#include "VkDevice.hpp"
#include "VkImage.hpp"
#include "Device/Blitter.hpp"
-#include "Device/Surface.hpp"
#include <cstring>
namespace
{
- VkImageAspectFlags GetAspects(VkFormat format)
+ VkImageAspectFlags GetAspects(vk::Format format)
{
// TODO: probably just flatten this out to a full format list, and alter
// isDepth / isStencil etc to check for their aspect
VkImageAspectFlags aspects = 0;
- if (sw::Surface::isDepth(format)) aspects |= VK_IMAGE_ASPECT_DEPTH_BIT;
- if (sw::Surface::isStencil(format)) aspects |= VK_IMAGE_ASPECT_STENCIL_BIT;
+ if (format.isDepth()) aspects |= VK_IMAGE_ASPECT_DEPTH_BIT;
+ if (format.isStencil()) aspects |= VK_IMAGE_ASPECT_STENCIL_BIT;
// TODO: YCbCr planar formats have different aspects
@@ -337,7 +336,7 @@
// Depth and Stencil pitch should be computed separately
ASSERT((aspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) !=
(VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT));
- return sw::Surface::pitchB(getMipLevelExtent(mipLevel).width, isCube() ? 1 : 0, getFormat(aspect), false);
+ return getFormat(aspect).pitchB(getMipLevelExtent(mipLevel).width, isCube() ? 1 : 0, false);
}
int Image::slicePitchBytes(VkImageAspectFlagBits aspect, uint32_t mipLevel) const
@@ -346,7 +345,7 @@
ASSERT((aspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) !=
(VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT));
VkExtent3D mipLevelExtent = getMipLevelExtent(mipLevel);
- return sw::Surface::sliceB(mipLevelExtent.width, mipLevelExtent.height, isCube() ? 1 : 0, getFormat(aspect), false);
+ return getFormat(aspect).sliceB(mipLevelExtent.width, mipLevelExtent.height, isCube() ? 1 : 0, false);
}
int Image::bytesPerTexel(VkImageAspectFlagBits aspect) const
@@ -354,10 +353,10 @@
// Depth and Stencil bytes should be computed separately
ASSERT((aspect & (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)) !=
(VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT));
- return sw::Surface::bytes(getFormat(aspect));
+ return getFormat(aspect).bytes();
}
-VkFormat Image::getFormat(VkImageAspectFlagBits aspect) const
+Format Image::getFormat(VkImageAspectFlagBits aspect) const
{
switch(aspect)
{
@@ -467,11 +466,11 @@
{
// Set the proper format for the clear value, as described here:
// https://www.khronos.org/registry/vulkan/specs/1.1-extensions/html/vkspec.html#clears-values
- if(sw::Surface::isSignedNonNormalizedInteger(format))
+ if(format.isSignedNonNormalizedInteger())
{
return VK_FORMAT_R32G32B32A32_SINT;
}
- else if(sw::Surface::isUnsignedNonNormalizedInteger(format))
+ else if(format.isUnsignedNonNormalizedInteger())
{
return VK_FORMAT_R32G32B32A32_UINT;
}
diff --git a/src/Vulkan/VkImage.hpp b/src/Vulkan/VkImage.hpp
index 2aec28a..01bc517 100644
--- a/src/Vulkan/VkImage.hpp
+++ b/src/Vulkan/VkImage.hpp
@@ -16,6 +16,7 @@
#define VK_IMAGE_HPP_
#include "VkObject.hpp"
+#include "VkFormat.h"
namespace vk
{
@@ -51,8 +52,8 @@
void clear(const VkClearDepthStencilValue& color, const VkImageSubresourceRange& subresourceRange);
VkImageType getImageType() const { return imageType; }
- VkFormat getFormat() const { return format; }
- VkFormat getFormat(VkImageAspectFlagBits aspect) const;
+ const Format& getFormat() const { return format; }
+ Format getFormat(VkImageAspectFlagBits aspect) const;
uint32_t getArrayLayers() const { return arrayLayers; }
uint32_t getMipLevels() const { return mipLevels; }
uint32_t getLastLayerIndex(const VkImageSubresourceRange& subresourceRange) const;
@@ -82,7 +83,7 @@
VkDeviceSize memoryOffset = 0;
VkImageCreateFlags flags = 0;
VkImageType imageType = VK_IMAGE_TYPE_2D;
- VkFormat format = VK_FORMAT_UNDEFINED;
+ Format format = VK_FORMAT_UNDEFINED;
VkExtent3D extent = {0, 0, 0};
uint32_t mipLevels = 0;
uint32_t arrayLayers = 0;
diff --git a/src/Vulkan/vulkan.vcxproj b/src/Vulkan/vulkan.vcxproj
index 7eaae3f..a16b838 100644
--- a/src/Vulkan/vulkan.vcxproj
+++ b/src/Vulkan/vulkan.vcxproj
@@ -109,6 +109,7 @@
<ClCompile Include="VkDescriptorUpdateTemplate.cpp" />
<ClCompile Include="VkDevice.cpp" />
<ClCompile Include="VkDeviceMemory.cpp" />
+ <ClCompile Include="VkFormat.cpp" />
<ClCompile Include="VkFramebuffer.cpp" />
<ClCompile Include="VkGetProcAddress.cpp" />
<ClCompile Include="VkImage.cpp" />
@@ -210,6 +211,7 @@
<ClInclude Include="VkDeviceMemory.hpp" />
<ClInclude Include="VkEvent.hpp" />
<ClInclude Include="VkFence.hpp" />
+ <ClInclude Include="VkFormat.h" />
<ClInclude Include="VkFramebuffer.hpp" />
<ClInclude Include="VkGetProcAddress.h" />
<ClInclude Include="VkImage.hpp" />
diff --git a/src/Vulkan/vulkan.vcxproj.filters b/src/Vulkan/vulkan.vcxproj.filters
index 2139b9e..bd77af9 100644
--- a/src/Vulkan/vulkan.vcxproj.filters
+++ b/src/Vulkan/vulkan.vcxproj.filters
@@ -222,9 +222,21 @@
<ClCompile Include="VkDeviceMemory.cpp">
<Filter>Source Files\Vulkan</Filter>
</ClCompile>
+ <ClCompile Include="VkFormat.cpp">
+ <Filter>Source Files\Vulkan</Filter>
+ </ClCompile>
+ <ClCompile Include="VkFramebuffer.cpp">
+ <Filter>Source Files\Vulkan</Filter>
+ </ClCompile>
<ClCompile Include="VkGetProcAddress.cpp">
<Filter>Source Files\Vulkan</Filter>
</ClCompile>
+ <ClCompile Include="VkImage.cpp">
+ <Filter>Source Files\Vulkan</Filter>
+ </ClCompile>
+ <ClCompile Include="VkImageView.cpp">
+ <Filter>Source Files\Vulkan</Filter>
+ </ClCompile>
<ClCompile Include="VkInstance.cpp">
<Filter>Source Files\Vulkan</Filter>
</ClCompile>
@@ -240,19 +252,13 @@
<ClCompile Include="VkPipelineLayout.cpp">
<Filter>Source Files\Vulkan</Filter>
</ClCompile>
- <ClCompile Include="VkImage.cpp">
- <Filter>Source Files\Vulkan</Filter>
- </ClCompile>
- <ClCompile Include="VkImageView.cpp">
- <Filter>Source Files\Vulkan</Filter>
- </ClCompile>
<ClCompile Include="VkPromotedExtensions.cpp">
<Filter>Source Files\Vulkan</Filter>
</ClCompile>
- <ClCompile Include="VkQueue.cpp">
+ <ClCompile Include="VkQueryPool.cpp">
<Filter>Source Files\Vulkan</Filter>
</ClCompile>
- <ClCompile Include="VkFramebuffer.cpp">
+ <ClCompile Include="VkQueue.cpp">
<Filter>Source Files\Vulkan</Filter>
</ClCompile>
<ClCompile Include="VkRenderPass.cpp">
@@ -261,9 +267,6 @@
<ClCompile Include="VkShaderModule.cpp">
<Filter>Source Files\Vulkan</Filter>
</ClCompile>
- <ClCompile Include="VkQueryPool.cpp">
- <Filter>Source Files\Vulkan</Filter>
- </ClCompile>
<ClCompile Include="..\Pipeline\SpirvShader.cpp">
<Filter>Source Files\Pipeline</Filter>
</ClCompile>
@@ -311,6 +314,9 @@
<ClInclude Include="VkFence.hpp">
<Filter>Header Files\Vulkan</Filter>
</ClInclude>
+ <ClInclude Include="VkFormat.h">
+ <Filter>Header Files\Vulkan</Filter>
+ </ClInclude>
<ClInclude Include="VkFramebuffer.hpp">
<Filter>Header Files\Vulkan</Filter>
</ClInclude>