Update Marl to 9c689c9a8

Changes:
    9c689c9a8 Add MARL_USE_PTHREAD_THREAD_LOCAL option
    abd033696 Explicitly #include <utility> in finally.h
    2e82e6999 Fix build issue with GetModuleHandle when using UNICODE
    40b9a1c47 gcc -Wshadow warning fixes
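
The new option can be enabled when configuring a standalone marl build; an
illustrative invocation (build directory and paths are examples only):

    cmake -S third_party/marl -B out/marl -DMARL_USE_PTHREAD_THREAD_LOCAL=ON
    cmake --build out/marl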

Commands:
    ./third_party/update-marl.sh

Bug: b/140546382
Change-Id: I0fe71df4ca087650fd1d9cb81c816c2d8d41dcfb
diff --git a/.clang-format b/.clang-format
index 08178fe..5a7a390 100644
--- a/.clang-format
+++ b/.clang-format
@@ -1,5 +1,118 @@
-# http://clang.llvm.org/docs/ClangFormatStyleOptions.html
-BasedOnStyle: Chromium
-
 ---
 Language:        Cpp
+# BasedOnStyle:  Google
+AccessModifierOffset: -4
+AlignAfterOpenBracket: Align
+AlignConsecutiveAssignments: false
+AlignConsecutiveDeclarations: false
+AlignEscapedNewlines: Left
+AlignOperands:   true
+AlignTrailingComments: true
+AllowAllParametersOfDeclarationOnNextLine: true
+AllowShortBlocksOnASingleLine: true
+AllowShortCaseLabelsOnASingleLine: true
+AllowShortFunctionsOnASingleLine: Inline
+AllowShortIfStatementsOnASingleLine: true
+AllowShortLoopsOnASingleLine: true
+AlwaysBreakAfterDefinitionReturnType: None
+AlwaysBreakAfterReturnType: None
+AlwaysBreakBeforeMultilineStrings: true
+AlwaysBreakTemplateDeclarations: true
+BinPackArguments: true
+BinPackParameters: true
+BraceWrapping:
+  AfterCaseLabel: true
+  AfterClass: true
+  AfterControlStatement: true
+  AfterEnum: true
+  AfterExternBlock: false
+  AfterFunction: true
+  AfterNamespace: false
+  AfterStruct: true
+  AfterUnion: true
+  BeforeCatch: true
+  BeforeElse: true
+  IndentBraces: false
+  SplitEmptyFunction: false
+  SplitEmptyNamespace: false
+  SplitEmptyRecord: false
+BreakBeforeBinaryOperators: None
+BreakBeforeBraces: Custom
+BreakBeforeInheritanceComma: false
+BreakBeforeTernaryOperators: true
+BreakConstructorInitializersBeforeComma: false
+BreakConstructorInitializers: BeforeComma
+BreakAfterJavaFieldAnnotations: false
+BreakStringLiterals: true
+ColumnLimit: 0
+CommentPragmas:  '^ IWYU pragma:'
+CompactNamespaces: false
+ConstructorInitializerAllOnOneLineOrOnePerLine: false
+ConstructorInitializerIndentWidth: 4
+ContinuationIndentWidth: 4
+Cpp11BracedListStyle: false
+DerivePointerAlignment: false
+DisableFormat:   false
+ExperimentalAutoDetectBinPacking: false
+FixNamespaceComments: true
+ForEachMacros:
+  - Q_FOREACH
+  - BOOST_FOREACH
+IncludeBlocks:   Preserve
+IncludeCategories:
+  - Regex:           '^"[^/]*"'
+    Priority:        1
+  - Regex:           '^".*/.*"'
+    Priority:        2
+  - Regex:           '^<.*\..*>'
+    Priority:        3
+  - Regex:           '^<[^.]*>'
+    Priority:        4
+  - Regex:           '.*'
+    Priority:        5
+IncludeIsMainRegex: '([-_](test|unittest))?$'
+IndentCaseBlocks: true
+IndentCaseLabels: false
+IndentPPDirectives: AfterHash
+IndentWidth:     4
+IndentWrappedFunctionNames: false
+JavaScriptQuotes: Leave
+JavaScriptWrapImports: true
+KeepEmptyLinesAtTheStartOfBlocks: true
+MacroBlockBegin: ''
+MacroBlockEnd:   ''
+MaxEmptyLinesToKeep: 1
+NamespaceIndentation: None
+ObjCBlockIndentWidth: 2
+ObjCSpaceAfterProperty: false
+ObjCSpaceBeforeProtocolList: false
+PenaltyBreakAssignment: 2
+PenaltyBreakBeforeFirstCallParameter: 1
+PenaltyBreakComment: 300
+PenaltyBreakFirstLessLess: 120
+PenaltyBreakString: 1000
+PenaltyExcessCharacter: 1000000
+PenaltyReturnTypeOnItsOwnLine: 200
+PointerAlignment: Right
+RawStringFormats:
+  - Language:        TextProto
+    BasedOnStyle:    google
+ReflowComments:  true
+SortIncludes:    true
+SortUsingDeclarations: true
+SpaceAfterCStyleCast: false
+SpaceAfterTemplateKeyword: false
+SpaceBeforeAssignmentOperators: true
+SpaceBeforeParens: Never
+SpaceInEmptyParentheses: false
+SpacesBeforeTrailingComments: 2
+SpacesInAngles:  false
+SpacesInContainerLiterals: true
+SpacesInCStyleCastParentheses: false
+SpacesInParentheses: false
+SpacesInSquareBrackets: false
+Standard:        Auto
+TabWidth:        4
+UseTab:          ForIndentation
+...
+
diff --git a/.dir-locals.el b/.dir-locals.el
new file mode 100644
index 0000000..fe208a4
--- /dev/null
+++ b/.dir-locals.el
@@ -0,0 +1,165 @@
+;;; Directory Local Variables
+;;; See Info node `(emacs) Directory Variables' for more information.
+
+((c++-mode
+  (tab-width . 4)
+  (indent-tabs-mode . t)
+  (c-basic-offset . 4)
+  (show-trailing-whitespace . t)
+  (indicate-empty-lines . t)
+  (c-offsets-alist
+   (inexpr-class . +)
+   (inexpr-statement . +)
+   (lambda-intro-cont . +)
+   (inlambda . c-lineup-inexpr-block)
+   (template-args-cont c-lineup-template-args +)
+   (incomposition . +)
+   (inmodule . +)
+   (innamespace . +)
+   (inextern-lang . 0)
+   (composition-close . 0)
+   (module-close . 0)
+   (namespace-close . 0)
+   (extern-lang-close . 0)
+   (composition-open . 0)
+   (module-open . 0)
+   (namespace-open . 0)
+   (extern-lang-open . 0)
+   (objc-method-call-cont c-lineup-ObjC-method-call-colons c-lineup-ObjC-method-call +)
+   (objc-method-args-cont . c-lineup-ObjC-method-args)
+   (objc-method-intro .
+                      [0])
+   (friend . 0)
+   (cpp-define-intro c-lineup-cpp-define +)
+   (cpp-macro-cont . +)
+   (cpp-macro .
+              [0])
+   (inclass . +)
+   (stream-op . c-lineup-streamop)
+   (arglist-cont-nonempty c-lineup-gcc-asm-reg c-lineup-arglist)
+   (arglist-cont c-lineup-gcc-asm-reg 0)
+   (comment-intro . 0)
+   (catch-clause . 0)
+   (else-clause . 0)
+   (do-while-closure . 0)
+   (access-label . -)
+   (case-label . +)
+   (substatement . +)
+   (statement-case-intro . +)
+   (statement . 0)
+   (brace-entry-open . 0)
+   (brace-list-entry . 0)
+   (brace-list-intro . +)
+   (brace-list-close . 0)
+   (block-close . 0)
+   (block-open . 0)
+   (inher-cont . c-lineup-multi-inher)
+   (inher-intro . ++)
+   (member-init-cont . c-lineup-multi-inher)
+   (member-init-intro . ++)
+   (annotation-var-cont . +)
+   (annotation-top-cont . 0)
+   (topmost-intro . 0)
+   (knr-argdecl . 0)
+   (func-decl-cont . ++)
+   (inline-close . 0)
+   (class-close . 0)
+   (class-open . 0)
+   (defun-block-intro . +)
+   (defun-close . 0)
+   (defun-open . 0)
+   (c . c-lineup-C-comments)
+   (string . c-lineup-dont-change)
+   (topmost-intro-cont . c-lineup-topmost-intro-cont)
+   (brace-list-open . 0)
+   (inline-open . 0)
+   (arglist-close . c-lineup-arglist)
+   (arglist-intro google-c-lineup-expression-plus-4)
+   (statement-cont nil nil ++)
+   (statement-case-open . +)
+   (label . /)
+   (substatement-label . 2)
+   (substatement-open . 0)
+   (knr-argdecl-intro . +)
+   (statement-block-intro . +)))
+(c-mode
+  (tab-width . 4)
+  (indent-tabs-mode . t)
+  (c-basic-offset . 4)
+  (show-trailing-whitespace . t)
+  (indicate-empty-lines . t)
+  (c-offsets-alist
+   (inexpr-class . +)
+   (inexpr-statement . +)
+   (lambda-intro-cont . +)
+   (inlambda . c-lineup-inexpr-block)
+   (template-args-cont c-lineup-template-args +)
+   (incomposition . +)
+   (inmodule . +)
+   (innamespace . +)
+   (inextern-lang . 0)
+   (composition-close . 0)
+   (module-close . 0)
+   (namespace-close . 0)
+   (extern-lang-close . 0)
+   (composition-open . 0)
+   (module-open . 0)
+   (namespace-open . 0)
+   (extern-lang-open . 0)
+   (objc-method-call-cont c-lineup-ObjC-method-call-colons c-lineup-ObjC-method-call +)
+   (objc-method-args-cont . c-lineup-ObjC-method-args)
+   (objc-method-intro .
+                      [0])
+   (friend . 0)
+   (cpp-define-intro c-lineup-cpp-define +)
+   (cpp-macro-cont . +)
+   (cpp-macro .
+              [0])
+   (inclass . +)
+   (stream-op . c-lineup-streamop)
+   (arglist-cont-nonempty c-lineup-gcc-asm-reg c-lineup-arglist)
+   (arglist-cont c-lineup-gcc-asm-reg 0)
+   (comment-intro . 0)
+   (catch-clause . 0)
+   (else-clause . 0)
+   (do-while-closure . 0)
+   (access-label . -)
+   (case-label . +)
+   (substatement . +)
+   (statement-case-intro . +)
+   (statement . 0)
+   (brace-entry-open . 0)
+   (brace-list-entry . 0)
+   (brace-list-intro . +)
+   (brace-list-close . 0)
+   (block-close . 0)
+   (block-open . 0)
+   (inher-cont . c-lineup-multi-inher)
+   (inher-intro . ++)
+   (member-init-cont . c-lineup-multi-inher)
+   (member-init-intro . ++)
+   (annotation-var-cont . +)
+   (annotation-top-cont . 0)
+   (topmost-intro . 0)
+   (knr-argdecl . 0)
+   (func-decl-cont . ++)
+   (inline-close . 0)
+   (class-close . 0)
+   (class-open . 0)
+   (defun-block-intro . +)
+   (defun-close . 0)
+   (defun-open . 0)
+   (c . c-lineup-C-comments)
+   (string . c-lineup-dont-change)
+   (topmost-intro-cont . c-lineup-topmost-intro-cont)
+   (brace-list-open . 0)
+   (inline-open . 0)
+   (arglist-close . c-lineup-arglist)
+   (arglist-intro google-c-lineup-expression-plus-4)
+   (statement-cont nil nil ++)
+   (statement-case-open . +)
+   (label . /)
+   (substatement-label . 2)
+   (substatement-open . 0)
+   (knr-argdecl-intro . +)
+   (statement-block-intro . +))))
diff --git a/.gitignore b/.gitignore
index 4c7dc22..065adea 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,8 +1,47 @@
-/.vs/
-/.vscode/
-/build/
-/cmake-build-*/
+# Ignored folders #
+/cache/
+/lib/
+/obj/
+/bin/
 /out/
-bazel-*
-CMakeSettings.json
-/.idea/
+.vs
+.vscode/ipch
+CMakeFiles/
+.idea/
+cmake-build-debug/
+
+# Per user vscode config files.
+.vscode/launch.json
+.vscode/settings.json
+
+# The /build/ directory is recommended for CMake build output
+!/build
+/build/*
+
+
+# Ignored files #
+*.obj
+*.lib
+*.log
+*.tlog
+*.exe
+*.ilk
+*.pdb
+*.sbr
+*.bsc
+*.dll
+*.res
+*.idb
+*.sdf
+*.suo
+*.o
+*.depend
+*.layout
+*.opensdf
+*.aps
+*.opendb
+*.db
+*~
+.*.sw*
+.sw*
+CMakeCache.txt
diff --git a/.gitmodules b/.gitmodules
index 335eee4..cb86c5c 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,6 +1,27 @@
+[submodule "third_party/cppdap"]
+	path = third_party/cppdap
+	url = https://github.com/google/cppdap
 [submodule "third_party/googletest"]
 	path = third_party/googletest
 	url = https://github.com/google/googletest.git
+[submodule "third_party/json"]
+	path = third_party/json
+	url = https://github.com/nlohmann/json.git
+[submodule "third_party/libbacktrace/src"]
+	path = third_party/libbacktrace/src
+	url = https://github.com/ianlancetaylor/libbacktrace.git
+[submodule "third_party/PowerVR_Examples"]
+	path = third_party/PowerVR_Examples
+	url = https://github.com/powervr-graphics/Native_SDK.git
 [submodule "third_party/benchmark"]
 	path = third_party/benchmark
 	url = https://github.com/google/benchmark.git
+[submodule "third_party/glslang"]
+	path = third_party/glslang
+	url = https://github.com/KhronosGroup/glslang.git
+[submodule "third_party/git-hooks"]
+	path = third_party/git-hooks
+	url = https://swiftshader.googlesource.com/git-hooks
+[submodule "third_party/llvm-project"]
+	path = third_party/llvm-project
+	url = https://github.com/llvm/llvm-project.git
diff --git a/.vscode/c_cpp_properties.json b/.vscode/c_cpp_properties.json
new file mode 100644
index 0000000..a0faf9e
--- /dev/null
+++ b/.vscode/c_cpp_properties.json
@@ -0,0 +1,73 @@
+{
+    "configurations": [
+        {
+            "name": "Linux",
+            "defines": [
+                "ENABLE_VK_DEBUGGER=1",
+                "ENABLE_RR_DEBUG_INFO=1",
+                "VERIFY_LLVM_IR=1"
+            ],
+            "includePath": [
+                "${workspaceFolder}/build/spirv-tools-ext/include",
+                "${workspaceFolder}/include",
+                "${workspaceFolder}/src",
+                "${workspaceFolder}/third_party/benchmark/include",
+                "${workspaceFolder}/third_party/cppdap/include",
+                "${workspaceFolder}/third_party/llvm-10.0/configs/common/include",
+                "${workspaceFolder}/third_party/llvm-10.0/configs/windows/include",
+                "${workspaceFolder}/third_party/llvm-10.0/llvm/include",
+                "${workspaceFolder}/third_party/marl/include",
+                "${workspaceFolder}/third_party/SPIRV-Headers/include",
+                "${workspaceFolder}/third_party/SPIRV-Tools/include"
+            ],
+            "cStandard": "c11",
+            "cppStandard": "c++17"
+        },
+        {
+            "name": "Mac",
+            "defines": [
+                "ENABLE_VK_DEBUGGER=1",
+                "ENABLE_RR_DEBUG_INFO=1",
+                "VERIFY_LLVM_IR=1"
+            ],
+            "includePath": [
+                "${workspaceFolder}/build/spirv-tools-ext/include",
+                "${workspaceFolder}/include",
+                "${workspaceFolder}/src",
+                "${workspaceFolder}/third_party/benchmark/include",
+                "${workspaceFolder}/third_party/cppdap/include",
+                "${workspaceFolder}/third_party/llvm-10.0/configs/common/include",
+                "${workspaceFolder}/third_party/llvm-10.0/configs/windows/include",
+                "${workspaceFolder}/third_party/llvm-10.0/llvm/include",
+                "${workspaceFolder}/third_party/marl/include",
+                "${workspaceFolder}/third_party/SPIRV-Headers/include",
+                "${workspaceFolder}/third_party/SPIRV-Tools/include"
+            ],
+            "cStandard": "c11",
+            "cppStandard": "c++17"
+        },
+        {
+            "name": "Win32",
+            "defines": [
+                "ENABLE_VK_DEBUGGER=1",
+                "ENABLE_RR_DEBUG_INFO=1",
+                "VERIFY_LLVM_IR=1"
+            ],
+            "includePath": [
+                "${workspaceFolder}/include",
+                "${workspaceFolder}/src",
+                "${workspaceFolder}/third_party/benchmark/include",
+                "${workspaceFolder}/third_party/cppdap/include",
+                "${workspaceFolder}/third_party/llvm-10.0/configs/common/include",
+                "${workspaceFolder}/third_party/llvm-10.0/configs/windows/include",
+                "${workspaceFolder}/third_party/llvm-10.0/llvm/include",
+                "${workspaceFolder}/third_party/marl/include",
+                "${workspaceFolder}/third_party/SPIRV-Headers/include",
+                "${workspaceFolder}/third_party/SPIRV-Tools/include"
+            ],
+            "cStandard": "c11",
+            "cppStandard": "c++17"
+        }
+    ],
+    "version": 4
+}
\ No newline at end of file
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
new file mode 100644
index 0000000..11a1dc3
--- /dev/null
+++ b/.vscode/tasks.json
@@ -0,0 +1,109 @@
+{
+    // See https://go.microsoft.com/fwlink/?LinkId=733558
+    // for the documentation about the tasks.json format
+    // Available variables which can be used inside of strings.
+    // ${workspaceRoot}: the root folder of the workspace
+    // ${file}: the current opened file
+    // ${fileBasename}: the current opened file's basename
+    // ${fileDirname}: the current opened file's dirname
+    // ${fileExtname}: the current opened file's extension
+    // ${cwd}: the current working directory of the spawned process
+    "version": "2.0.0",
+    "tasks": [
+        {
+            "label": "make",
+            "group": {
+                "kind": "build",
+                "isDefault": true
+            },
+            "type": "shell",
+            "command": "sh",
+            "osx": {
+                "args": [
+                    "-c",
+                    "cmake --build . && echo Done"
+                ]
+            },
+            "linux": {
+                "args": [
+                    "-c",
+                    "cmake --build . && echo Done"
+                ]
+            },
+            "windows": {
+                "args": [
+                    "-c",
+                    "cmake --build . && echo Done"
+                ]
+            },
+            "options": {
+                "cwd": "${workspaceRoot}/build",
+            },
+            "presentation": {
+                "echo": false,
+                "reveal": "always",
+                "focus": false,
+                "panel": "shared",
+                "showReuseMessage": false,
+                "clear": true,
+            },
+            "problemMatcher": {
+                "owner": "cpp",
+                "fileLocation": "absolute",
+                "pattern": {
+                    "regexp": "^(.*):(\\d+):(\\d+):\\s+(warning|error):\\s+(.*)$",
+                    "file": 1,
+                    "line": 2,
+                    "column": 3,
+                    "severity": 4,
+                    "message": 5
+                }
+            }
+        },
+        {
+            "label": "cmake",
+            "type": "shell",
+            "command": "cmake",
+            "args": [
+                "..",
+                "-GNinja",
+                "-DCMAKE_BUILD_TYPE=${input:buildType}",
+                "-DSWIFTSHADER_WARNINGS_AS_ERRORS=1",
+                "-DSWIFTSHADER_DCHECK_ALWAYS_ON=1",
+                "-DREACTOR_VERIFY_LLVM_IR=1",
+            ],
+            "options": {
+                "cwd": "${workspaceRoot}/build"
+            },
+            "problemMatcher": [],
+        },
+        {
+            "label": "Push branch for review",
+            "type": "shell",
+            "command": "git",
+            "args": [
+                "push",
+                "origin",
+                "HEAD:refs/for/master"
+            ],
+            "options": {
+                "cwd": "${workspaceRoot}"
+            },
+            "problemMatcher": [],
+        }
+    ],
+    "inputs": [
+        {
+            "id": "buildType",
+            "type": "pickString",
+            "options": [
+                "Debug",
+                "Release",
+                "MinSizeRel",
+                "RelWithDebInfo",
+            ],
+            "default": "Debug",
+            "description": "The type of build",
+        },
+    ]
+}
diff --git a/AUTHORS.txt b/AUTHORS.txt
new file mode 100644
index 0000000..faa8822
--- /dev/null
+++ b/AUTHORS.txt
@@ -0,0 +1,9 @@
+# This is the official list of SwiftShader authors for copyright purposes.
+# This file is distinct from the CONTRIBUTORS files.
+# See the latter for an explanation.
+# Names should be added to this file as:
+# Name or Organization <email address>
+# The email address is not required for organizations.
+
+Google Inc.
+The ANGLE Project Authors <angleproject@googlegroups.com>
\ No newline at end of file
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..6931936
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,138 @@
+//
+// Copyright (C) 2018 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+package {
+    default_applicable_licenses: ["external_swiftshader_license"],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'filegroup' with "//visibility:private"
+// to attach the license to, and including a comment whether the files may be
+// used in the current project.
+//
+// large-scale-change filtered out the below license kinds as false-positives:
+//   SPDX-license-identifier-GPL
+//   SPDX-license-identifier-GPL-3.0
+// http://go/android-license-faq
+license {
+    name: "external_swiftshader_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-BSD",
+        "SPDX-license-identifier-MIT",
+        "SPDX-license-identifier-NCSA",
+        "legacy_unencumbered",
+    ],
+    license_text: [
+        "LICENSE.txt",
+    ],
+}
+
+cc_defaults {
+    name: "swiftshader_common",
+
+    gnu_extensions: false,
+
+    cflags: [
+        "-Werror",
+        "-Wwrite-strings",
+    ],
+
+    cppflags: [
+        "-Woverloaded-virtual",
+        "-DVK_USE_PLATFORM_ANDROID_KHR",
+        "-DVK_EXPORT= ",
+    ],
+    cpp_std: "c++17",
+
+    arch: {
+        x86: {
+            cflags: [
+                "-msse2",
+            ],
+        },
+        x86_64: {
+            cflags: [
+                "-msse2",
+            ],
+        },
+    },
+
+    target: {
+        host: {
+            cppflags: [
+                "-fno-rtti",
+                "-fno-exceptions",
+            ],
+            compile_multilib: "64",
+        },
+
+        // We don't need Darwin host-side builds
+        darwin: {
+            enabled: false,
+        },
+    },
+}
+
+cc_defaults {
+    name: "swiftshader_common_release",
+
+    defaults: [ "swiftshader_common" ],
+
+    cflags: [
+        "-Os",
+        "-fomit-frame-pointer",
+        "-ffunction-sections",
+        "-fdata-sections",
+    ],
+}
+
+cc_defaults {
+    name: "swiftshader_common_debug",
+
+    defaults: [ "swiftshader_common" ],
+
+    cflags: [
+        "-O0",
+        "-g",
+        "-UNDEBUG",
+    ],
+}
+
+cc_library_headers {
+    name: "swiftshader_platform_headers",
+    host_supported: true,
+    device_supported: true,
+    vendor_available: true,
+    export_include_dirs: ["include"],
+}
+
+cc_library_headers {
+    name: "swiftshader_host_headers",
+    device_supported: false,
+    host_supported: true,
+    export_include_dirs: ["include/Android"],
+}
diff --git a/BUILD.gn b/BUILD.gn
new file mode 100644
index 0000000..b405f51
--- /dev/null
+++ b/BUILD.gn
@@ -0,0 +1,91 @@
+# Copyright 2016 The SwiftShader Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import("src/Reactor/reactor.gni")
+
+config("swiftshader_config") {
+  cflags = []
+  defines = []
+  asmflags = []
+
+  if (is_clang) {
+    cflags += [ "-Wno-shadow" ]
+  }
+
+  if (is_debug) {
+    if (swiftshader_startup_dialog) {
+      defines += [ "DEBUGGER_WAIT_DIALOG" ]
+    }
+  }
+  if (is_win) {
+    # Disable MSVC warnings about std::aligned_storage being broken before
+    # VS 2017 15.8
+    defines += [ "_ENABLE_EXTENDED_ALIGNED_STORAGE" ]
+
+    # Disable some MSVC warnings.
+    if (!is_clang) {
+      cflags += [
+        "/wd4065",  # switch statement contains 'default' but no 'case' labels
+        "/wd4309",  # Truncation of constant value. See PixelRoutine.cpp casts
+                    # of signed shorts.
+      ]
+    }
+  } else if (!is_debug) {
+    cflags += [ "-Os" ]
+  }
+
+  if (build_with_chromium) {
+    if (is_clang) {
+      if (current_cpu == "arm64") {
+        import("//build/config/arm.gni")
+
+        if (arm_control_flow_integrity == "standard") {
+          cflags += [ "-mbranch-protection=standard" ]
+          asmflags += [ "-mbranch-protection=standard" ]
+        } else if (arm_control_flow_integrity == "pac") {
+          cflags += [ "-mbranch-protection=pac-ret" ]
+          asmflags += [ "-mbranch-protection=pac-ret" ]
+        } else {
+          assert(arm_control_flow_integrity == "none",
+                 "Invalid branch protection option!")
+        }
+      }
+    }
+  }
+}
+
+group("swiftshader") {
+  data_deps = [
+    "src/Vulkan:icd_file",
+    "src/Vulkan:swiftshader_libvulkan",
+  ]
+}
+
+if (build_with_chromium) {
+  group("swiftshader_tests") {
+    testonly = true
+
+    data_deps = [ "tests/SystemUnitTests:swiftshader_system_unittests" ]
+
+    if (supports_llvm) {
+      data_deps +=
+          [ "tests/ReactorUnitTests:swiftshader_reactor_llvm_unittests" ]
+    }
+
+    if (supports_subzero) {
+      data_deps +=
+          [ "tests/ReactorUnitTests:swiftshader_reactor_subzero_unittests" ]
+    }
+  }
+}
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 5f17ee8..fc252f8 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,10 +1,10 @@
-# Copyright 2019 The Marl Authors.
+# Copyright 2020 The SwiftShader Authors. All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#     https://www.apache.org/licenses/LICENSE-2.0
+#    http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
@@ -12,395 +12,956 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-cmake_minimum_required(VERSION 3.0)
+cmake_minimum_required(VERSION 3.13)
 
-include(cmake/parse_version.cmake)
-parse_version("${CMAKE_CURRENT_SOURCE_DIR}/CHANGES.md" MARL)
+project(SwiftShader C CXX ASM)
 
-set(CMAKE_CXX_STANDARD 11)
-
-project(Marl
-    VERSION   "${MARL_VERSION_MAJOR}.${MARL_VERSION_MINOR}.${MARL_VERSION_PATCH}"
-    LANGUAGES C CXX ASM
-)
-
-include(CheckCXXSourceCompiles)
-
-# MARL_IS_SUBPROJECT is 1 if added via add_subdirectory() from another project.
-get_directory_property(MARL_IS_SUBPROJECT PARENT_DIRECTORY)
-if(MARL_IS_SUBPROJECT)
-    set(MARL_IS_SUBPROJECT 1)
+set(CMAKE_CXX_STANDARD 17)
+set(CXX_STANDARD_REQUIRED ON)
+# MSVC doesn't define __cplusplus by default
+if(MSVC)
+    string(APPEND CMAKE_CXX_FLAGS " /Zc:__cplusplus")
 endif()
 
 ###########################################################
+# Detect system
+###########################################################
+
+if(CMAKE_SYSTEM_NAME MATCHES "Linux")
+    set(LINUX TRUE)
+elseif(CMAKE_SYSTEM_NAME MATCHES "Android")
+    set(ANDROID TRUE)
+    set(CMAKE_CXX_FLAGS "-DANDROID_NDK_BUILD")
+elseif(WIN32)
+elseif(APPLE)
+elseif(FUCHSIA)
+    # NOTE: Building for Fuchsia requires a Fuchsia CMake-based SDK.
+    # See https://fuchsia-review.googlesource.com/c/fuchsia/+/379673
+    find_package(FuchsiaLibraries)
+else()
+    message(FATAL_ERROR "Platform is not supported")
+endif()
+
+if(CMAKE_SYSTEM_PROCESSOR MATCHES "arm" OR CMAKE_SYSTEM_PROCESSOR MATCHES "aarch")
+    if(CMAKE_SIZEOF_VOID_P EQUAL 8)
+        set(ARCH "aarch64")
+    else()
+        set(ARCH "arm")
+    endif()
+elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^mips.*")
+    if(CMAKE_SIZEOF_VOID_P EQUAL 8)
+        set(ARCH "mips64el")
+    else()
+        set(ARCH "mipsel")
+    endif()
+elseif(CMAKE_SYSTEM_PROCESSOR MATCHES "^ppc.*")
+    if(CMAKE_SIZEOF_VOID_P EQUAL 8)
+        set(ARCH "ppc64le")
+    else()
+        message(FATAL_ERROR "Architecture is not supported")
+    endif()
+else()
+    if(CMAKE_SIZEOF_VOID_P EQUAL 8)
+        set(ARCH "x86_64")
+    else()
+        set(ARCH "x86")
+    endif()
+endif()
+
+# Cross compiling on macOS. The cross compiling architecture should override
+# auto-detected system architecture settings.
+if(CMAKE_OSX_ARCHITECTURES)
+    if(CMAKE_OSX_ARCHITECTURES MATCHES "arm64")
+        set(ARCH "aarch64")
+    elseif(CMAKE_OSX_ARCHITECTURES MATCHES "x86_64")
+        set(ARCH "x86_64")
+    elseif(CMAKE_OSX_ARCHITECTURES MATCHES "i386")
+        set(ARCH "x86")
+    else()
+        message(FATAL_ERROR "Architecture ${CMAKE_OSX_ARCHITECTURES} is not "
+                            "supported. Only one architecture (arm64, x86_64 "
+                            "or i386) can be specified at build time.")
+    endif()
+endif()
+
+set(CMAKE_MACOSX_RPATH TRUE)
+
+if ((CMAKE_GENERATOR MATCHES "Visual Studio") AND (CMAKE_GENERATOR_TOOLSET STREQUAL ""))
+  message(WARNING "Visual Studio generators use the x86 host compiler by "
+                  "default, even for 64-bit targets. This can result in linker "
+                  "instability and out of memory errors. To use the 64-bit "
+                  "host compiler, pass -Thost=x64 on the CMake command line.")
+endif()
+
+# Use CCache if available
+find_program(CCACHE_FOUND ccache)
+if(CCACHE_FOUND)
+    message(STATUS "Using ccache")
+    set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache)
+    set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache)
+endif()
+
+###########################################################
+# Install Gerrit commit hook
+###########################################################
+
+if(NOT EXISTS ${CMAKE_SOURCE_DIR}/.git/hooks/commit-msg)
+    message(WARNING "
+        .git/hooks/commit-msg was not found.
+        Downloading from https://gerrit-review.googlesource.com/tools/hooks/commit-msg...
+    ")
+
+    file(DOWNLOAD https://gerrit-review.googlesource.com/tools/hooks/commit-msg ${CMAKE_SOURCE_DIR}/commit-msg)
+
+    file(COPY ${CMAKE_SOURCE_DIR}/commit-msg
+         DESTINATION ${CMAKE_SOURCE_DIR}/.git/hooks/
+         FILE_PERMISSIONS
+           OWNER_READ OWNER_WRITE OWNER_EXECUTE
+           GROUP_READ GROUP_WRITE GROUP_EXECUTE
+           WORLD_READ WORLD_EXECUTE)
+    file(REMOVE ${CMAKE_SOURCE_DIR}/commit-msg)
+endif()
+
+###########################################################
+# Host libraries
+###########################################################
+
+if(LINUX)
+    include(CheckSymbolExists)
+    check_symbol_exists(mallinfo malloc.h HAVE_MALLINFO)
+    check_symbol_exists(mallinfo2 malloc.h HAVE_MALLINFO2)
+endif()
+
+if(SWIFTSHADER_BUILD_WSI_DIRECTFB)
+    find_library(DIRECTFB directfb)
+    find_path(DIRECTFB_INCLUDE_DIR directfb/directfb.h)
+endif(SWIFTSHADER_BUILD_WSI_DIRECTFB)
+if(SWIFTSHADER_BUILD_WSI_D2D)
+    find_library(D2D drm)
+    find_path(D2D_INCLUDE_DIR libdrm/drm.h)
+endif(SWIFTSHADER_BUILD_WSI_D2D)
+
+###########################################################
 # Options
 ###########################################################
+
+if(NOT CMAKE_BUILD_TYPE)
+    set(CMAKE_BUILD_TYPE "Release" CACHE STRING "The type of build: Debug Release MinSizeRel RelWithDebInfo." FORCE)
+    set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS Debug Release MinSizeRel RelWithDebInfo)
+endif()
+
 function(option_if_not_defined name description default)
     if(NOT DEFINED ${name})
         option(${name} ${description} ${default})
     endif()
 endfunction()
 
-option_if_not_defined(MARL_WARNINGS_AS_ERRORS "Treat warnings as errors" OFF)
-option_if_not_defined(MARL_BUILD_EXAMPLES "Build example applications" OFF)
-option_if_not_defined(MARL_BUILD_TESTS "Build tests" OFF)
-option_if_not_defined(MARL_BUILD_BENCHMARKS "Build benchmarks" OFF)
-option_if_not_defined(MARL_BUILD_SHARED "Build marl as a shared / dynamic library (default static)" OFF)
-option_if_not_defined(MARL_USE_PTHREAD_THREAD_LOCAL "Use pthreads for thread local storage" OFF)
-option_if_not_defined(MARL_ASAN "Build marl with address sanitizer" OFF)
-option_if_not_defined(MARL_MSAN "Build marl with memory sanitizer" OFF)
-option_if_not_defined(MARL_TSAN "Build marl with thread sanitizer" OFF)
-option_if_not_defined(MARL_INSTALL "Create marl install target" OFF)
-option_if_not_defined(MARL_FULL_BENCHMARK "Run benchmarks for [0 .. numLogicalCPUs] with no stepping" OFF)
-option_if_not_defined(MARL_FIBERS_USE_UCONTEXT "Use ucontext instead of assembly for fibers (ignored for platforms that do not support ucontext)" OFF)
-option_if_not_defined(MARL_DEBUG_ENABLED "Enable debug checks even in release builds" OFF)
+if(LINUX)
+    option_if_not_defined(SWIFTSHADER_BUILD_WSI_XCB "Build the XCB WSI support" TRUE)
+    option_if_not_defined(SWIFTSHADER_BUILD_WSI_WAYLAND "Build the Wayland WSI support" TRUE)
+    option_if_not_defined(SWIFTSHADER_BUILD_WSI_DIRECTFB "Build the DirectFB WSI support" FALSE)
+    option_if_not_defined(SWIFTSHADER_BUILD_WSI_D2D "Build the Direct-to-Display WSI support" FALSE)
+endif()
+
+option_if_not_defined(SWIFTSHADER_BUILD_PVR "Build the PowerVR examples" FALSE)
+option_if_not_defined(SWIFTSHADER_BUILD_TESTS "Build unit tests" TRUE)
+option_if_not_defined(SWIFTSHADER_BUILD_BENCHMARKS "Build benchmarks" FALSE)
+
+option_if_not_defined(SWIFTSHADER_USE_GROUP_SOURCES "Group the source files in a folder tree for Visual Studio" TRUE)
+
+option_if_not_defined(SWIFTSHADER_MSAN "Build with memory sanitizer" FALSE)
+option_if_not_defined(SWIFTSHADER_ASAN "Build with address sanitizer" FALSE)
+option_if_not_defined(SWIFTSHADER_TSAN "Build with thread sanitizer" FALSE)
+option_if_not_defined(SWIFTSHADER_UBSAN "Build with undefined behavior sanitizer" FALSE)
+option_if_not_defined(SWIFTSHADER_EMIT_COVERAGE "Emit code coverage information" FALSE)
+option_if_not_defined(SWIFTSHADER_WARNINGS_AS_ERRORS "Treat all warnings as errors" TRUE)
+option_if_not_defined(SWIFTSHADER_DCHECK_ALWAYS_ON "Check validation macros even in release builds" FALSE)
+option_if_not_defined(REACTOR_EMIT_DEBUG_INFO "Emit debug info for JIT functions" FALSE)
+option_if_not_defined(REACTOR_EMIT_PRINT_LOCATION "Emit printing of location info for JIT functions" FALSE)
+option_if_not_defined(REACTOR_EMIT_ASM_FILE "Emit asm files for JIT functions" FALSE)
+option_if_not_defined(REACTOR_ENABLE_PRINT "Enable RR_PRINT macros" FALSE)
+option_if_not_defined(REACTOR_VERIFY_LLVM_IR "Check reactor-generated LLVM IR is valid even in release builds" FALSE)
+option_if_not_defined(SWIFTSHADER_LESS_DEBUG_INFO "Generate less debug info to reduce file size" FALSE)
+# option_if_not_defined(SWIFTSHADER_ENABLE_VULKAN_DEBUGGER "Enable Vulkan debugger support" FALSE)  # TODO(b/251802301)
+option_if_not_defined(SWIFTSHADER_ENABLE_ASTC "Enable ASTC compressed textures support" TRUE)  # TODO(b/150130101)
+
+if(SWIFTSHADER_ENABLE_VULKAN_DEBUGGER)
+    set(SWIFTSHADER_BUILD_CPPDAP TRUE)
+endif()
+
+set(DEFAULT_REACTOR_BACKEND "LLVM")
+set(REACTOR_BACKEND ${DEFAULT_REACTOR_BACKEND} CACHE STRING "JIT compiler back-end used by Reactor")
+set_property(CACHE REACTOR_BACKEND PROPERTY STRINGS LLVM LLVM-Submodule Subzero)
+
+set(DEFAULT_SWIFTSHADER_LLVM_VERSION "10.0")
+set(SWIFTSHADER_LLVM_VERSION ${DEFAULT_SWIFTSHADER_LLVM_VERSION} CACHE STRING "LLVM version to use")
+set_property(CACHE SWIFTSHADER_LLVM_VERSION PROPERTY STRINGS "10.0")
+
+# If defined, overrides the default optimization level of the current reactor backend.
+# Set to one of the rr::Optimization::Level enum values.
+set(REACTOR_DEFAULT_OPT_LEVEL "" CACHE STRING "Reactor default optimization level")
+set_property(CACHE REACTOR_DEFAULT_OPT_LEVEL PROPERTY STRINGS "None" "Less" "Default" "Aggressive")
+
+if(NOT DEFINED SWIFTSHADER_LOGGING_LEVEL)
+    set(SWIFTSHADER_LOGGING_LEVEL "Info" CACHE STRING "SwiftShader logging level")
+    set_property(CACHE SWIFTSHADER_LOGGING_LEVEL PROPERTY STRINGS "Verbose" "Debug" "Info" "Warn" "Error" "Fatal" "Disabled")
+endif()
+
+# LLVM disallows calling cmake . from the main LLVM dir; the reason is that
+# it builds header files that could overwrite the original ones. Here we
+# want to include LLVM as a subdirectory, and even though that wouldn't cause
+# the problem, the condition LLVM checks ("CMAKE_CURRENT_SOURCE_DIR ==
+# CMAKE_CURRENT_BINARY_DIR") will still be true if cmake . is called from
+# the main dir, so we disallow it ourselves too. In addition, if there are
+# remaining CMakeFiles and CMakeCache files in the directory, cmake .. from
+# a subdirectory will still try to build from the main directory, so we
+# instruct users to delete these files when they get the error.
+if(CMAKE_CURRENT_SOURCE_DIR STREQUAL CMAKE_CURRENT_BINARY_DIR)
+    message(FATAL_ERROR "In source builds are not allowed by LLVM, please create a build/ directory and build from there. You may have to delete the CMakeCache.txt file and CMakeFiles directory that are next to the CMakeLists.txt.")
+endif()
+
+set_property(GLOBAL PROPERTY USE_FOLDERS TRUE)
 
 ###########################################################
 # Directories
 ###########################################################
-function(set_if_not_defined name value)
-    if(NOT DEFINED ${name})
-        set(${name} ${value} PARENT_SCOPE)
+
+set(SWIFTSHADER_DIR ${CMAKE_CURRENT_SOURCE_DIR})
+set(SOURCE_DIR ${SWIFTSHADER_DIR}/src)
+set(THIRD_PARTY_DIR ${SWIFTSHADER_DIR}/third_party)
+set(TESTS_DIR ${SWIFTSHADER_DIR}/tests)
+
+###########################################################
+# Initialize submodules
+###########################################################
+
+function(InitSubmodule target submodule_dir)
+    if (NOT TARGET ${target})
+        if(NOT EXISTS ${submodule_dir}/.git)
+            message(WARNING "
+        Target ${target} from submodule ${submodule_dir} missing.
+        Running 'git submodule update --init' to download it:
+            ")
+
+            execute_process(COMMAND git -C ${SWIFTSHADER_DIR} submodule update --init ${submodule_dir})
+        endif()
     endif()
 endfunction()
 
-set(MARL_SRC_DIR ${CMAKE_CURRENT_SOURCE_DIR}/src)
-set(MARL_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/include)
-set_if_not_defined(MARL_THIRD_PARTY_DIR ${CMAKE_CURRENT_SOURCE_DIR}/third_party)
-set_if_not_defined(MARL_GOOGLETEST_DIR ${MARL_THIRD_PARTY_DIR}/googletest)
-set_if_not_defined(MARL_BENCHMARK_DIR ${MARL_THIRD_PARTY_DIR}/benchmark)
-
-###########################################################
-# Submodules
-###########################################################
-if(MARL_BUILD_TESTS)
-    if(NOT EXISTS ${MARL_GOOGLETEST_DIR}/.git)
-        message(WARNING "third_party/googletest submodule missing.")
-        message(WARNING "Run: `git submodule update --init` to build tests.")
-        set(MARL_BUILD_TESTS OFF)
-    endif()
-endif(MARL_BUILD_TESTS)
-
-if(MARL_BUILD_BENCHMARKS)
-    if(NOT EXISTS ${MARL_BENCHMARK_DIR}/.git)
-        message(WARNING "third_party/benchmark submodule missing.")
-        message(WARNING "Run: `git submodule update --init` to build benchmarks.")
-        set(MARL_BUILD_BENCHMARKS OFF)
-    endif()
-endif(MARL_BUILD_BENCHMARKS)
-
-if(MARL_BUILD_BENCHMARKS)
-    set(BENCHMARK_ENABLE_TESTING FALSE CACHE BOOL FALSE FORCE)
-    add_subdirectory(${MARL_BENCHMARK_DIR})
-endif(MARL_BUILD_BENCHMARKS)
-
-###########################################################
-# Compiler feature tests
-###########################################################
-# Check that the Clang Thread Safety Analysis' try_acquire_capability behaves
-# correctly. This is broken on some earlier versions of clang.
-# See: https://bugs.llvm.org/show_bug.cgi?id=32954
-set(SAVE_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS})
-set(CMAKE_REQUIRED_FLAGS "-Wthread-safety -Werror")
-check_cxx_source_compiles(
-    "int main() {
-      struct __attribute__((capability(\"mutex\"))) Mutex {
-        void Unlock() __attribute__((release_capability)) {};
-        bool TryLock() __attribute__((try_acquire_capability(true))) { return true; };
-      };
-      Mutex m;
-      if (m.TryLock()) {
-        m.Unlock();  // Should not warn.
-      }
-      return 0;
-    }"
-    MARL_THREAD_SAFETY_ANALYSIS_SUPPORTED)
-set(CMAKE_REQUIRED_FLAGS ${SAVE_CMAKE_REQUIRED_FLAGS})
-
-# Check whether ucontext is supported.
-set(SAVE_CMAKE_REQUIRED_FLAGS ${CMAKE_REQUIRED_FLAGS})
-set(CMAKE_REQUIRED_FLAGS "-Werror")
-check_cxx_source_compiles(
-    "#include <ucontext.h>
-    int main() {
-      ucontext_t ctx;
-      getcontext(&ctx);
-      makecontext(&ctx, nullptr, 2, 1, 2);
-      swapcontext(&ctx, &ctx);
-      return 0;
-    }"
-    MARL_UCONTEXT_SUPPORTED)
-set(CMAKE_REQUIRED_FLAGS ${SAVE_CMAKE_REQUIRED_FLAGS})
-if (MARL_FIBERS_USE_UCONTEXT AND NOT MARL_UCONTEXT_SUPPORTED)
-    # Disable MARL_FIBERS_USE_UCONTEXT and warn if MARL_UCONTEXT_SUPPORTED is 0.
-    message(WARNING "MARL_FIBERS_USE_UCONTEXT is enabled, but ucontext is not supported by the target. Disabling")
-    set(MARL_FIBERS_USE_UCONTEXT 0)
+if (SWIFTSHADER_BUILD_TESTS OR SWIFTSHADER_BUILD_BENCHMARKS)
+    set(BUILD_VULKAN_WRAPPER TRUE)
 endif()
 
-if(MARL_IS_SUBPROJECT)
-    # Export supported flags as this may be useful to parent projects
-    set(MARL_THREAD_SAFETY_ANALYSIS_SUPPORTED PARENT_SCOPE ${MARL_THREAD_SAFETY_ANALYSIS_SUPPORTED})
-    set(MARL_UCONTEXT_SUPPORTED               PARENT_SCOPE ${MARL_UCONTEXT_SUPPORTED})
+if (BUILD_VULKAN_WRAPPER)
+    InitSubmodule(glslang ${THIRD_PARTY_DIR}/glslang)
+endif()
+
+if (SWIFTSHADER_BUILD_TESTS)
+    InitSubmodule(gtest ${THIRD_PARTY_DIR}/googletest)
+endif()
+
+if(SWIFTSHADER_BUILD_BENCHMARKS)
+    InitSubmodule(benchmark::benchmark ${THIRD_PARTY_DIR}/benchmark)
+endif()
+
+if(REACTOR_EMIT_DEBUG_INFO)
+    InitSubmodule(libbacktrace ${THIRD_PARTY_DIR}/libbacktrace/src)
+endif()
+
+if(SWIFTSHADER_BUILD_PVR)
+    InitSubmodule(PVRCore ${THIRD_PARTY_DIR}/PowerVR_Examples)
+endif()
+
+if(SWIFTSHADER_BUILD_CPPDAP)
+    InitSubmodule(json ${THIRD_PARTY_DIR}/json)
+    InitSubmodule(cppdap ${THIRD_PARTY_DIR}/cppdap)
+endif()
+
+if(${REACTOR_BACKEND} STREQUAL "LLVM-Submodule")
+    InitSubmodule(llvm-submodule ${THIRD_PARTY_DIR}/llvm-project)
 endif()
 
 ###########################################################
-# File lists
+# Convenience macros
 ###########################################################
-set(MARL_LIST
-    ${MARL_SRC_DIR}/debug.cpp
-    ${MARL_SRC_DIR}/memory.cpp
-    ${MARL_SRC_DIR}/scheduler.cpp
-    ${MARL_SRC_DIR}/thread.cpp
-    ${MARL_SRC_DIR}/trace.cpp
-)
-if(NOT MSVC)
-    list(APPEND MARL_LIST
-        ${MARL_SRC_DIR}/osfiber_aarch64.c
-        ${MARL_SRC_DIR}/osfiber_arm.c
-        ${MARL_SRC_DIR}/osfiber_asm_aarch64.S
-        ${MARL_SRC_DIR}/osfiber_asm_arm.S
-        ${MARL_SRC_DIR}/osfiber_asm_loongarch64.S
-        ${MARL_SRC_DIR}/osfiber_asm_mips64.S
-        ${MARL_SRC_DIR}/osfiber_asm_ppc64.S
-        ${MARL_SRC_DIR}/osfiber_asm_rv64.S
-        ${MARL_SRC_DIR}/osfiber_asm_x64.S
-        ${MARL_SRC_DIR}/osfiber_asm_x86.S
-        ${MARL_SRC_DIR}/osfiber_loongarch64.c
-        ${MARL_SRC_DIR}/osfiber_mips64.c
-        ${MARL_SRC_DIR}/osfiber_ppc64.c
-        ${MARL_SRC_DIR}/osfiber_rv64.c
-        ${MARL_SRC_DIR}/osfiber_x64.c
-        ${MARL_SRC_DIR}/osfiber_x86.c
-    )
-    # CMAKE_OSX_ARCHITECTURES settings aren't propagated to assembly files when
-    # building for Apple platforms (https://gitlab.kitware.com/cmake/cmake/-/issues/20771),
-    # we treat assembly files as C files to work around this bug.
-    set_source_files_properties(
-        ${MARL_SRC_DIR}/osfiber_asm_aarch64.S
-        ${MARL_SRC_DIR}/osfiber_asm_arm.S
-        ${MARL_SRC_DIR}/osfiber_asm_loongarch64.S
-        ${MARL_SRC_DIR}/osfiber_asm_mips64.S
-        ${MARL_SRC_DIR}/osfiber_asm_ppc64.S
-        ${MARL_SRC_DIR}/osfiber_asm_x64.S
-        ${MARL_SRC_DIR}/osfiber_asm_x86.S
-        PROPERTIES LANGUAGE C
-    )
-endif(NOT MSVC)
 
-###########################################################
-# OS libraries
-###########################################################
-find_package(Threads REQUIRED)
-
-###########################################################
-# Functions
-###########################################################
-function(marl_set_target_options target)
-    if(MARL_THREAD_SAFETY_ANALYSIS_SUPPORTED)
-        target_compile_options(${target} PRIVATE "-Wthread-safety")
-    endif()
-
-    # Enable all warnings
-    if(MSVC)
-        target_compile_options(${target} PRIVATE "-W4")
-    else()
-        target_compile_options(${target} PRIVATE "-Wall")
-    endif()
-
-    # Disable specific, pedantic warnings
-    if(MSVC)
-        target_compile_options(${target} PRIVATE
-            "-D_CRT_SECURE_NO_WARNINGS"
-            "/wd4127" # conditional expression is constant
-            "/wd4324" # structure was padded due to alignment specifier
-        )
-    endif()
-
-    # Treat all warnings as errors
-    if(MARL_WARNINGS_AS_ERRORS)
-        if(MSVC)
-            target_compile_options(${target} PRIVATE "/WX")
+# Recursively calls source_group on the files of the directory
+# so that Visual Studio has the files in a folder tree
+macro(group_all_sources directory)
+    file(GLOB files RELATIVE ${SWIFTSHADER_DIR}/${directory} ${SWIFTSHADER_DIR}/${directory}/*)
+    foreach(file ${files})
+        if(IS_DIRECTORY ${SWIFTSHADER_DIR}/${directory}/${file})
+            group_all_sources(${directory}/${file})
         else()
-            target_compile_options(${target} PRIVATE "-Werror")
+            string(REPLACE "/" "\\" groupname ${directory})
+            source_group(${groupname} FILES ${SWIFTSHADER_DIR}/${directory}/${file})
         endif()
-    endif(MARL_WARNINGS_AS_ERRORS)
+    endforeach()
+endmacro()
 
-    if(MARL_USE_PTHREAD_THREAD_LOCAL)
-        target_compile_definitions(${target} PRIVATE "MARL_USE_PTHREAD_THREAD_LOCAL=1")
-        target_link_libraries(${target} PUBLIC pthread)
-    endif()
-
-    if(MARL_ASAN)
-        target_compile_options(${target} PUBLIC "-fsanitize=address")
-        target_link_libraries(${target} PUBLIC "-fsanitize=address")
-    elseif(MARL_MSAN)
-        target_compile_options(${target} PUBLIC "-fsanitize=memory")
-        target_link_libraries(${target} PUBLIC "-fsanitize=memory")
-    elseif(MARL_TSAN)
-        target_compile_options(${target} PUBLIC "-fsanitize=thread")
-        target_link_libraries(${target} PUBLIC "-fsanitize=thread")
-    endif()
-
-    if(MARL_FIBERS_USE_UCONTEXT)
-        target_compile_definitions(${target} PRIVATE "MARL_FIBERS_USE_UCONTEXT=1")
-    endif()
-
-    if(MARL_DEBUG_ENABLED)
-        target_compile_definitions(${target} PRIVATE "MARL_DEBUG_ENABLED=1")
-    endif()
-
-    if(CMAKE_SYSTEM_PROCESSOR MATCHES "^rv.*")
-        target_link_libraries(${target} INTERFACE atomic) #explicitly use -latomic for RISC-V linking
-    endif()
-
-    target_include_directories(${target} PUBLIC $<BUILD_INTERFACE:${MARL_INCLUDE_DIR}>)
-endfunction(marl_set_target_options)
-
-###########################################################
-# Targets
-###########################################################
-
-# marl
-if(MARL_BUILD_SHARED OR BUILD_SHARED_LIBS)
-    add_library(marl SHARED ${MARL_LIST})
+# Takes target library and a directory where the export map is
+# and add the linker options so that only the API symbols are
+# exported.
+macro(set_shared_library_export_map TARGET DIR)
     if(MSVC)
-        target_compile_definitions(marl
-            PRIVATE "MARL_BUILDING_DLL=1"
-            PUBLIC  "MARL_DLL=1"
+        set_property(TARGET ${TARGET} APPEND_STRING PROPERTY LINK_FLAGS " /DEF:\"${DIR}/${TARGET}.def\"")
+    elseif(APPLE)
+        # The exported symbols list only exports the API functions and
+        # hides all the others.
+        set_property(TARGET ${TARGET} APPEND_STRING PROPERTY LINK_FLAGS "-exported_symbols_list ${DIR}/${TARGET}.exports")
+        set_property(TARGET ${TARGET} APPEND_STRING PROPERTY LINK_DEPENDS "${DIR}/${TARGET}.exports;")
+        # Don't allow undefined symbols, unless it's a Sanitizer build.
+        if(NOT SWIFTSHADER_MSAN AND NOT SWIFTSHADER_ASAN AND NOT SWIFTSHADER_TSAN AND NOT SWIFTSHADER_UBSAN)
+            set_property(TARGET ${TARGET} APPEND_STRING PROPERTY LINK_FLAGS " -Wl,-undefined,error")
+        endif()
+    elseif(LINUX OR FUCHSIA)
+        # NOTE: The Fuchsia linker script is needed to export the vk_icdInitializeConnectToServiceCallback
+        # entry point (a private implementation detail between the Fuchsia Vulkan loader and the ICD).
+        if ((FUCHSIA) AND ("${TARGET}" STREQUAL "vk_swiftshader"))
+          set(LINKER_VERSION_SCRIPT "fuchsia_vk_swiftshader.lds")
+        else()
+          set(LINKER_VERSION_SCRIPT "${TARGET}.lds")
+        endif()
+
+        # The version script only exports the API functions and
+        # hides all the others.
+        set_property(TARGET ${TARGET} APPEND_STRING PROPERTY LINK_FLAGS " -Wl,--version-script=${DIR}/${LINKER_VERSION_SCRIPT}")
+        set_property(TARGET ${TARGET} APPEND_STRING PROPERTY LINK_DEPENDS "${DIR}/${LINKER_VERSION_SCRIPT};")
+
+        # -Bsymbolic binds symbol references to their global definitions within
+        # a shared object, thereby preventing symbol preemption.
+        set_property(TARGET ${TARGET} APPEND_STRING PROPERTY LINK_FLAGS "  -Wl,-Bsymbolic")
+
+        if(ARCH STREQUAL "mipsel" OR ARCH STREQUAL "mips64el")
+          # MIPS supports sysv hash-style only.
+          set_property(TARGET ${TARGET} APPEND_STRING PROPERTY LINK_FLAGS " -Wl,--hash-style=sysv")
+        elseif(LINUX)
+          # Both hash styles are needed, because we want both gold and
+          # GNU ld to be able to read our libraries.
+          set_property(TARGET ${TARGET} APPEND_STRING PROPERTY LINK_FLAGS " -Wl,--hash-style=both")
+        endif()
+
+        if(NOT ${SWIFTSHADER_EMIT_COVERAGE})
+            # --gc-sections is used in combination with each function being
+            # in its own section, to reduce the binary size.
+            set_property(TARGET ${TARGET} APPEND_STRING PROPERTY LINK_FLAGS " -Wl,--gc-sections")
+        endif()
+
+        # Don't allow undefined symbols, unless it's a Sanitizer build.
+        if(NOT SWIFTSHADER_MSAN AND NOT SWIFTSHADER_ASAN AND NOT SWIFTSHADER_TSAN AND NOT SWIFTSHADER_UBSAN)
+            set_property(TARGET ${TARGET} APPEND_STRING PROPERTY LINK_FLAGS " -Wl,--no-undefined")
+        endif()
+    endif()
+endmacro()
+
+if(SWIFTSHADER_USE_GROUP_SOURCES)
+    group_all_sources(src)
+endif()
+
+###########################################################
+# Compile flags
+###########################################################
+
+# Flags for project code (non 3rd party)
+set(SWIFTSHADER_COMPILE_OPTIONS "")
+set(SWIFTSHADER_LINK_FLAGS "")
+set(SWIFTSHADER_LIBS "")
+
+macro(set_cpp_flag FLAG)
+    if(${ARGC} GREATER 1)
+        set(CMAKE_CXX_FLAGS_${ARGV1} "${CMAKE_CXX_FLAGS_${ARGV1}} ${FLAG}")
+    else()
+        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} ${FLAG}")
+    endif()
+endmacro()
+
+macro(set_linker_flag FLAG)
+    if(${ARGC} GREATER 1)
+        set(CMAKE_EXE_LINKER_FLAGS_${ARGV1} "${CMAKE_EXE_LINKER_FLAGS_${ARGV1}} ${FLAG}")
+        set(CMAKE_SHARED_LINKER_FLAGS_${ARGV1} "${CMAKE_SHARED_LINKER_FLAGS_${ARGV1}} ${FLAG}")
+    else()
+        set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} ${FLAG}")
+        set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} ${FLAG}")
+    endif()
+endmacro()
+
+if(MSVC)
+    set_cpp_flag("/MP")
+    add_definitions(-D_CRT_SECURE_NO_WARNINGS)
+    add_definitions(-D_SCL_SECURE_NO_WARNINGS)
+    add_definitions(-D_SBCS)  # Single Byte Character Set (ASCII)
+    add_definitions(-D_ENABLE_EXTENDED_ALIGNED_STORAGE)  # Disable MSVC warnings about std::aligned_storage being broken before VS 2017 15.8
+
+    set_linker_flag("/DEBUG:FASTLINK" DEBUG)
+    set_linker_flag("/DEBUG:FASTLINK" RELWITHDEBINFO)
+
+    # Disable specific warnings
+    # TODO: Not all of these should be disabled, but for now, we want a warning-free msvc build. Remove these one by one
+    #       and fix the actual warnings in code.
+    list(APPEND SWIFTSHADER_COMPILE_OPTIONS
+        "/wd4005" # 'identifier' : macro redefinition
+        "/wd4018" # 'expression' : signed/unsigned mismatch
+        "/wd4065" # switch statement contains 'default' but no 'case' labels
+        "/wd4141" # 'modifier' : used more than once
+        "/wd4244" # 'conversion' conversion from 'type1' to 'type2', possible loss of data
+        "/wd4267" # 'var' : conversion from 'size_t' to 'type', possible loss of data
+        "/wd4291" # 'void X new(size_t,unsigned int,unsigned int)': no matching operator delete found; memory will not be freed if initialization throws an exception
+        "/wd4309" # 'conversion' : truncation of constant value
+        "/wd4624" # 'derived class' : destructor was implicitly defined as deleted because a base class destructor is inaccessible or deleted
+        "/wd4800" # 'type' : forcing value to bool 'true' or 'false' (performance warning)
+        "/wd4838" # conversion from 'type_1' to 'type_2' requires a narrowing conversion
+        "/wd5030" # attribute 'attribute' is not recognized
+        "/wd5038" # data member 'member1' will be initialized after data member 'member2' data member 'member' will be initialized after base class 'base_class'
+        "/wd4146" # unary minus operator applied to unsigned type, result still unsigned
+    )
+
+    # Treat specific warnings as errors
+    list(APPEND SWIFTSHADER_COMPILE_OPTIONS
+        "/we4018" # 'expression' : signed/unsigned mismatch
+        "/we4062" # enumerator 'identifier' in switch of enum 'enumeration' is not handled
+        "/we4471" # 'enumeration': a forward declaration of an unscoped enumeration must have an underlying type (int assumed)
+        "/we4838" # conversion from 'type_1' to 'type_2' requires a narrowing conversion
+        "/we5038" # data member 'member1' will be initialized after data member 'member2' data member 'member' will be initialized after base class 'base_class'
+        "/we4101" # 'identifier' : unreferenced local variable
+    )
+else()
+    # Explicitly enable these warnings.
+    list(APPEND SWIFTSHADER_COMPILE_OPTIONS
+        "-Wall"
+        "-Wreorder"
+        "-Wsign-compare"
+        "-Wmissing-braces"
+    )
+
+    if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
+        if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 9)
+            list(APPEND SWIFTSHADER_COMPILE_OPTIONS
+                "-Wdeprecated-copy"  # implicit copy constructor for 'X' is deprecated because of user-declared copy assignment operator.
+            )
+        endif()
+    elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+        list(APPEND SWIFTSHADER_COMPILE_OPTIONS
+            "-Wextra"
+            "-Wunreachable-code-loop-increment"
+            "-Wunused-lambda-capture"
+            "-Wstring-conversion"
+            "-Wextra-semi"
+            "-Wignored-qualifiers"
+            "-Wdeprecated-copy"  # implicit copy constructor for 'X' is deprecated because of user-declared copy assignment operator.
+            # TODO(b/208256248): Avoid exit-time destructor.
+            #"-Wexit-time-destructors"  # declaration requires an exit-time destructor
         )
     endif()
-else()
-    add_library(marl ${MARL_LIST})
+
+    if (SWIFTSHADER_EMIT_COVERAGE)
+        if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
+            list(APPEND SWIFTSHADER_COMPILE_OPTIONS "--coverage")
+            list(APPEND SWIFTSHADER_LIBS "gcov")
+        elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+            list(APPEND SWIFTSHADER_COMPILE_OPTIONS "-fprofile-instr-generate" "-fcoverage-mapping")
+            list(APPEND SWIFTSHADER_LINK_FLAGS "-fprofile-instr-generate" "-fcoverage-mapping")
+        else()
+            message(FATAL_ERROR "Coverage generation not supported for the ${CMAKE_CXX_COMPILER_ID} toolchain")
+        endif()
+    endif()
+
+    # Disable pedantic warnings
+    if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
+        list(APPEND SWIFTSHADER_COMPILE_OPTIONS
+            "-Wno-ignored-attributes"   # ignoring attributes on template argument 'X'
+            "-Wno-attributes"           # 'X' attribute ignored
+            "-Wno-strict-aliasing"      # dereferencing type-punned pointer will break strict-aliasing rules
+            "-Wno-comment"              # multi-line comment
+        )
+        if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 9)
+            list(APPEND SWIFTSHADER_COMPILE_OPTIONS
+                "-Wno-init-list-lifetime"  # assignment from temporary initializer_list does not extend the lifetime of the underlying array
+            )
+        endif()
+    elseif(CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+        list(APPEND SWIFTSHADER_COMPILE_OPTIONS
+            "-Wno-unneeded-internal-declaration"  # function 'X' is not needed and will not be emitted
+            "-Wno-unused-private-field"           # private field 'offset' is not used - TODO: Consider enabling this once Vulkan is further implemented.
+            "-Wno-comment"                        # multi-line comment
+            "-Wno-extra-semi"                     # extra ';' after member function definition
+            "-Wno-unused-parameter"               # unused parameter 'X'
+
+            # Silence errors caused by unknown warnings when building with older
+            # versions of Clang. This requires manually checking that the warnings
+            # added above are spelled correctly and work as intended!
+            "-Wno-unknown-warning-option"
+        )
+    endif()
+
+    if(ARCH STREQUAL "x86")
+        set_cpp_flag("-m32")
+        set_cpp_flag("-msse2")
+        set_cpp_flag("-mfpmath=sse")
+        set_cpp_flag("-march=pentium4")
+        set_cpp_flag("-mtune=generic")
+    endif()
+    if(ARCH STREQUAL "x86_64")
+        set_cpp_flag("-m64")
+        set_cpp_flag("-fPIC")
+        set_cpp_flag("-march=x86-64")
+        set_cpp_flag("-mtune=generic")
+    endif()
+    if(ARCH STREQUAL "mipsel")
+        set_cpp_flag("-EL")
+        set_cpp_flag("-march=mips32r2")
+        set_cpp_flag("-fPIC")
+        set_cpp_flag("-mhard-float")
+        set_cpp_flag("-mfp32")
+        set_cpp_flag("-mxgot")
+    endif()
+    if(ARCH STREQUAL "mips64el")
+        set_cpp_flag("-EL")
+        set_cpp_flag("-march=mips64r2")
+        set_cpp_flag("-mabi=64")
+        set_cpp_flag("-fPIC")
+        set_cpp_flag("-mxgot")
+    endif()
+
+    if(SWIFTSHADER_LESS_DEBUG_INFO)
+        # Use -g1 to be able to get stack traces
+        set_cpp_flag("-g -g1" DEBUG)
+        set_cpp_flag("-g -g1" RELWITHDEBINFO)
+    else()
+        # Use -g3 to have even more debug info
+        set_cpp_flag("-g -g3" DEBUG)
+        set_cpp_flag("-g -g3" RELWITHDEBINFO)
+    endif()
+
+    if(NOT CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+        # Treated as an unused argument with clang
+        set_cpp_flag("-s" RELEASE)
+    endif()
+
+    # For distribution it is more important to be slim than super optimized
+    set_cpp_flag("-Os" RELEASE)
+    set_cpp_flag("-Os" RELWITHDEBINFO)
+
+    set_cpp_flag("-DNDEBUG" RELEASE)
+    set_cpp_flag("-DNDEBUG" RELWITHDEBINFO)
+
+    # Put each variable and function in its own section so that when linking
+    # with -gc-sections unused functions and variables are removed.
+    set_cpp_flag("-ffunction-sections" RELEASE)
+    set_cpp_flag("-fdata-sections" RELEASE)
+    set_cpp_flag("-fomit-frame-pointer" RELEASE)
+
+    if(SWIFTSHADER_MSAN)
+        if(NOT CMAKE_CXX_COMPILER_ID MATCHES "Clang")
+            message(FATAL_ERROR " \n"
+                    " MemorySanitizer usage requires compiling with Clang.")
+        endif()
+
+        if(NOT DEFINED ENV{SWIFTSHADER_MSAN_INSTRUMENTED_LIBCXX_PATH})
+            message(FATAL_ERROR " \n"
+                    " MemorySanitizer usage requires an instrumented build of libc++.\n"
+                    " Set the SWIFTSHADER_MSAN_INSTRUMENTED_LIBCXX_PATH environment variable to the\n"
+                    " build output path. See\n"
+                    " https://github.com/google/sanitizers/wiki/MemorySanitizerLibcxxHowTo#instrumented-libc\n"
+                    " for details on how to build an MSan instrumented libc++.")
+        endif()
+
+        set_cpp_flag("-fsanitize=memory")
+        set_linker_flag("-fsanitize=memory")
+        set_cpp_flag("-stdlib=libc++")
+        set_linker_flag("-L$ENV{SWIFTSHADER_MSAN_INSTRUMENTED_LIBCXX_PATH}/lib")
+        set_cpp_flag("-I$ENV{SWIFTSHADER_MSAN_INSTRUMENTED_LIBCXX_PATH}/include")
+        set_cpp_flag("-I$ENV{SWIFTSHADER_MSAN_INSTRUMENTED_LIBCXX_PATH}/include/c++/v1")
+        set_linker_flag("-Wl,-rpath,$ENV{SWIFTSHADER_MSAN_INSTRUMENTED_LIBCXX_PATH}/lib")
+    elseif(SWIFTSHADER_ASAN)
+        set_cpp_flag("-fsanitize=address")
+        set_linker_flag("-fsanitize=address")
+    elseif(SWIFTSHADER_TSAN)
+        set_cpp_flag("-fsanitize=thread")
+        set_linker_flag("-fsanitize=thread")
+    elseif(SWIFTSHADER_UBSAN)
+        set_cpp_flag("-fsanitize=undefined")
+        set_linker_flag("-fsanitize=undefined")
+    endif()
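
As a rough sketch of how the MemorySanitizer branch above is meant to be driven (the libc++ path below is hypothetical and must point at your own MSan-instrumented libc++ build), a configure step could look like:

```
export SWIFTSHADER_MSAN_INSTRUMENTED_LIBCXX_PATH=/path/to/msan-instrumented-libcxx
cmake -DSWIFTSHADER_MSAN=ON ..
cmake --build . --parallel
```

The other sanitizers (SWIFTSHADER_ASAN, SWIFTSHADER_TSAN, SWIFTSHADER_UBSAN) follow the same pattern but do not require an instrumented libc++.
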
 endif()
 
-if(NOT MSVC)
-    # Public API symbols are made visible with the MARL_EXPORT annotation.
-    target_compile_options(marl PRIVATE "-fvisibility=hidden")
+if(SWIFTSHADER_DCHECK_ALWAYS_ON)
+    list(APPEND SWIFTSHADER_COMPILE_OPTIONS "-DDCHECK_ALWAYS_ON")
 endif()
 
-set_target_properties(marl PROPERTIES
-    POSITION_INDEPENDENT_CODE 1
-    VERSION ${MARL_VERSION}
-    SOVERSION "${MARL_VERSION_MAJOR}"
+if(SWIFTSHADER_WARNINGS_AS_ERRORS)
+    if(MSVC)
+        set(WARNINGS_AS_ERRORS "/WX")  # Treat all warnings as errors
+    else()
+        set(WARNINGS_AS_ERRORS "-Werror")  # Treat all warnings as errors
+    endif()
+endif()
+
+# Enable Reactor Print() functionality in Debug/RelWithDebInfo builds or when explicitly enabled.
+if(CMAKE_BUILD_TYPE MATCHES "Deb")
+    set(REACTOR_ENABLE_PRINT TRUE)
+endif()
+
+if(REACTOR_EMIT_PRINT_LOCATION)
+    # This feature depends on REACTOR_EMIT_DEBUG_INFO and REACTOR_ENABLE_PRINT
+    set(REACTOR_EMIT_DEBUG_INFO TRUE)
+    set(REACTOR_ENABLE_PRINT TRUE)
+    list(APPEND SWIFTSHADER_COMPILE_OPTIONS "-DENABLE_RR_EMIT_PRINT_LOCATION")
+endif()
+
+if(REACTOR_EMIT_ASM_FILE)
+    list(APPEND SWIFTSHADER_COMPILE_OPTIONS "-DENABLE_RR_EMIT_ASM_FILE")
+endif()
+
+if(REACTOR_EMIT_DEBUG_INFO)
+    message(WARNING "REACTOR_EMIT_DEBUG_INFO is enabled. This will likely affect performance.")
+    list(APPEND SWIFTSHADER_COMPILE_OPTIONS "-DENABLE_RR_DEBUG_INFO")
+endif()
+
+if(REACTOR_ENABLE_PRINT)
+    list(APPEND SWIFTSHADER_COMPILE_OPTIONS "-DENABLE_RR_PRINT")
+endif()
+
+if(REACTOR_VERIFY_LLVM_IR)
+    list(APPEND SWIFTSHADER_COMPILE_OPTIONS "-DENABLE_RR_LLVM_IR_VERIFICATION")
+endif()
+
+if(REACTOR_DEFAULT_OPT_LEVEL)
+    list(APPEND SWIFTSHADER_COMPILE_OPTIONS "-DREACTOR_DEFAULT_OPT_LEVEL=${REACTOR_DEFAULT_OPT_LEVEL}")
+endif()
+
+if(DEFINED SWIFTSHADER_LOGGING_LEVEL)
+    list(APPEND SWIFTSHADER_COMPILE_OPTIONS "-DSWIFTSHADER_LOGGING_LEVEL=${SWIFTSHADER_LOGGING_LEVEL}")
+endif()
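
To illustrate how these toggles are typically passed in (a sketch only; nothing beyond the TRUE/FALSE switches shown above is implied by this file):

```
cmake -DREACTOR_EMIT_DEBUG_INFO=TRUE -DREACTOR_ENABLE_PRINT=TRUE ..
```

Note that, as warned above, REACTOR_EMIT_DEBUG_INFO is expected to affect performance, and it also pulls in the libbacktrace and boost subdirectories added below.
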
+
+if(WIN32)
+    add_definitions(-DWINVER=0x501 -DNOMINMAX -DSTRICT)
+    set(CMAKE_FIND_LIBRARY_PREFIXES ${CMAKE_FIND_LIBRARY_PREFIXES} "" "lib")
+endif()
+
+set(USE_EXCEPTIONS
+    ${REACTOR_EMIT_DEBUG_INFO} # boost::stacktrace uses exceptions
 )
+if(NOT MSVC)
+    if (${USE_EXCEPTIONS})
+        list(APPEND SWIFTSHADER_COMPILE_OPTIONS "-fexceptions")
+    else()
+        list(APPEND SWIFTSHADER_COMPILE_OPTIONS "-fno-exceptions")
+    endif()
+endif()
+unset(USE_EXCEPTIONS)
 
-marl_set_target_options(marl)
+###########################################################
+# libbacktrace and boost
+###########################################################
+if(REACTOR_EMIT_DEBUG_INFO)
+    add_subdirectory(${THIRD_PARTY_DIR}/libbacktrace EXCLUDE_FROM_ALL)
+    add_subdirectory(${THIRD_PARTY_DIR}/boost EXCLUDE_FROM_ALL)
+endif()
 
-target_link_libraries(marl PUBLIC Threads::Threads)
+###########################################################
+# LLVM
+###########################################################
+add_subdirectory(${THIRD_PARTY_DIR}/llvm-${SWIFTSHADER_LLVM_VERSION} EXCLUDE_FROM_ALL)
+set_target_properties(llvm PROPERTIES FOLDER "third_party")
 
-# install
-if(MARL_INSTALL)
-    include(CMakePackageConfigHelpers)
-    include(GNUInstallDirs)
+###########################################################
+# LLVM-Submodule
+###########################################################
+if(${REACTOR_BACKEND} STREQUAL "LLVM-Submodule")
+    set(LLVM_INCLUDE_TESTS FALSE)
+    set(LLVM_ENABLE_RTTI TRUE)
+    add_subdirectory(${THIRD_PARTY_DIR}/llvm-project/llvm EXCLUDE_FROM_ALL)
+    if(ARCH STREQUAL "aarch64")
+        llvm_map_components_to_libnames(llvm_libs orcjit aarch64asmparser aarch64codegen)
+    elseif(ARCH STREQUAL "arm")
+        llvm_map_components_to_libnames(llvm_libs orcjit armasmparser armcodegen)
+    elseif(ARCH MATCHES "^mips.*")
+        llvm_map_components_to_libnames(llvm_libs orcjit mipsasmparser mipscodegen)
+    elseif(ARCH STREQUAL "ppc64le")
+        llvm_map_components_to_libnames(llvm_libs orcjit powerpcasmparser powerpccodegen)
+    elseif(ARCH MATCHES "^x86.*")
+        llvm_map_components_to_libnames(llvm_libs orcjit x86asmparser x86codegen)
+    endif()
+    set_target_properties(${llvm_libs} PROPERTIES FOLDER "third_party")
+endif()
 
-    configure_package_config_file(
-        ${CMAKE_CURRENT_SOURCE_DIR}/cmake/marl-config.cmake.in
-        ${CMAKE_CURRENT_BINARY_DIR}/marl-config.cmake
-        INSTALL_DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/marl
+###########################################################
+# Subzero
+###########################################################
+add_subdirectory(${THIRD_PARTY_DIR}/llvm-subzero EXCLUDE_FROM_ALL)
+add_subdirectory(${THIRD_PARTY_DIR}/subzero EXCLUDE_FROM_ALL)
+set_target_properties(llvm-subzero PROPERTIES FOLDER "third_party")
+set_target_properties(subzero PROPERTIES FOLDER "third_party")
+
+###########################################################
+# marl
+###########################################################
+set(MARL_THIRD_PARTY_DIR ${THIRD_PARTY_DIR})
+add_subdirectory(${THIRD_PARTY_DIR}/marl)
+set_target_properties(marl PROPERTIES FOLDER "third_party")
+
+if(MARL_THREAD_SAFETY_ANALYSIS_SUPPORTED)
+    list(APPEND SWIFTSHADER_COMPILE_OPTIONS "-Wthread-safety")
+endif()
+
+###########################################################
+# cppdap
+###########################################################
+if(SWIFTSHADER_BUILD_CPPDAP)
+    set(CPPDAP_THIRD_PARTY_DIR ${THIRD_PARTY_DIR})
+    add_subdirectory(${THIRD_PARTY_DIR}/cppdap)
+endif()
+
+###########################################################
+# astc-encoder
+###########################################################
+if(SWIFTSHADER_ENABLE_ASTC)
+    add_subdirectory(${THIRD_PARTY_DIR}/astc-encoder)
+    set_target_properties(astc-encoder PROPERTIES FOLDER "third_party")
+endif()
+
+###########################################################
+# gtest and gmock
+###########################################################
+if(SWIFTSHADER_BUILD_TESTS)
+    # For Win32, force gtest to match our CRT (shared)
+    set(gtest_force_shared_crt TRUE CACHE BOOL "" FORCE)
+    set(INSTALL_GTEST FALSE CACHE BOOL "" FORCE)
+    add_subdirectory(${THIRD_PARTY_DIR}/googletest EXCLUDE_FROM_ALL)
+    # gtest's CMake configuration finds Python and prefers Python 2 if it is present.
+    # Undo this so that SPIRV-Tools can later find Python 3.
+    unset(PYTHON_EXECUTABLE CACHE)
+    set_target_properties(gmock PROPERTIES FOLDER "third_party")
+    set_target_properties(gmock_main PROPERTIES FOLDER "third_party")
+    set_target_properties(gtest PROPERTIES FOLDER "third_party")
+    set_target_properties(gtest_main PROPERTIES FOLDER "third_party")
+endif()
+
+###########################################################
+# File Lists
+###########################################################
+
+###########################################################
+# Append OS specific files to lists
+###########################################################
+
+if(WIN32)
+    set(OS_LIBS odbc32 odbccp32 WS2_32 dxguid)
+elseif(LINUX)
+    set(OS_LIBS dl pthread)
+    if(SWIFTSHADER_BUILD_WSI_WAYLAND)
+        include_directories("${SWIFTSHADER_DIR}/include/Wayland")
+    endif()
+    if(SWIFTSHADER_BUILD_WSI_DIRECTFB)
+        list(APPEND OS_LIBS "${DIRECTFB}")
+        include_directories(${DIRECTFB_INCLUDE_DIR}/directfb)
+    endif()
+    if(SWIFTSHADER_BUILD_WSI_D2D)
+        list(APPEND OS_LIBS "${D2D}")
+        include_directories(${D2D_INCLUDE_DIR}/libdrm)
+    endif()
+elseif(FUCHSIA)
+    set(OS_LIBS zircon)
+elseif(APPLE)
+    find_library(COCOA_FRAMEWORK Cocoa)
+    find_library(QUARTZ_FRAMEWORK Quartz)
+    find_library(CORE_FOUNDATION_FRAMEWORK CoreFoundation)
+    find_library(IOSURFACE_FRAMEWORK IOSurface)
+    find_library(METAL_FRAMEWORK Metal)
+    set(OS_LIBS "${COCOA_FRAMEWORK}" "${QUARTZ_FRAMEWORK}" "${CORE_FOUNDATION_FRAMEWORK}" "${IOSURFACE_FRAMEWORK}" "${METAL_FRAMEWORK}")
+endif()
+
+###########################################################
+# SwiftShader Targets
+###########################################################
+
+add_subdirectory(src/Reactor) # Add ReactorSubzero and ReactorLLVM targets
+
+if(${REACTOR_BACKEND} STREQUAL "LLVM")
+    add_library(Reactor ALIAS ReactorLLVM)
+elseif(${REACTOR_BACKEND} STREQUAL "LLVM-Submodule")
+    add_library(Reactor ALIAS ReactorLLVMSubmodule)
+elseif(${REACTOR_BACKEND} STREQUAL "Subzero")
+    add_library(Reactor ALIAS ReactorSubzero)
+else()
+    message(FATAL_ERROR "REACTOR_BACKEND must be 'LLVM', 'LLVM-Submodule' or 'Subzero'")
+endif()
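
A minimal sketch of selecting the Reactor backend at configure time, using the three values accepted by the check above:

```
cmake -DREACTOR_BACKEND=Subzero ..   # or LLVM, or LLVM-Submodule
```

The CMakeSettings.json file added below sets the same variable for the Visual Studio x64-Debug configuration.
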
+
+if (NOT TARGET SPIRV-Tools)
+    # This variable is also used by SPIRV-Tools to locate SPIRV-Headers
+    set(SPIRV-Headers_SOURCE_DIR "${THIRD_PARTY_DIR}/SPIRV-Headers")
+    set(SPIRV_SKIP_TESTS TRUE CACHE BOOL "" FORCE)
+    set(SPIRV_SKIP_EXECUTABLES TRUE CACHE BOOL "" FORCE)
+    add_subdirectory(${THIRD_PARTY_DIR}/SPIRV-Tools) # Add SPIRV-Tools target
+endif()
+
+# Add a vk_base interface library for shared vulkan build options.
+# TODO: Create src/Base and make this a lib target, and move stuff from
+# src/Vulkan into it that is needed by vk_pipeline, vk_device, and vk_wsi.
+add_library(vk_base INTERFACE)
+
+if(SWIFTSHADER_ENABLE_VULKAN_DEBUGGER)
+    target_compile_definitions(vk_base INTERFACE "ENABLE_VK_DEBUGGER")
+endif()
+
+if(WIN32)
+    target_compile_definitions(vk_base INTERFACE "VK_USE_PLATFORM_WIN32_KHR")
+elseif(LINUX)
+    if(SWIFTSHADER_BUILD_WSI_XCB)
+        target_compile_definitions(vk_base INTERFACE "VK_USE_PLATFORM_XCB_KHR")
+    endif()
+    if(SWIFTSHADER_BUILD_WSI_WAYLAND)
+        target_compile_definitions(vk_base INTERFACE "VK_USE_PLATFORM_WAYLAND_KHR")
+    endif()
+    if(SWIFTSHADER_BUILD_WSI_DIRECTFB)
+        if(DIRECTFB AND DIRECTFB_INCLUDE_DIR)
+            target_compile_definitions(vk_base INTERFACE "VK_USE_PLATFORM_DIRECTFB_EXT")
+        endif()
+    endif(SWIFTSHADER_BUILD_WSI_DIRECTFB)
+    if(SWIFTSHADER_BUILD_WSI_D2D)
+        if(D2D)
+            target_compile_definitions(vk_base INTERFACE "VK_USE_PLATFORM_DISPLAY_KHR")
+        endif()
+    endif(SWIFTSHADER_BUILD_WSI_D2D)
+elseif(APPLE)
+    target_compile_definitions(vk_base INTERFACE "VK_USE_PLATFORM_MACOS_MVK")
+    target_compile_definitions(vk_base INTERFACE "VK_USE_PLATFORM_METAL_EXT")
+elseif(FUCHSIA)
+    target_compile_definitions(vk_base INTERFACE "VK_USE_PLATFORM_FUCHSIA")
+else()
+    message(FATAL_ERROR "Platform does not support Vulkan yet")
+endif()
+
+add_subdirectory(src/System) # Add vk_system target
+add_subdirectory(src/Pipeline) # Add vk_pipeline target
+add_subdirectory(src/WSI) # Add vk_wsi target
+add_subdirectory(src/Device) # Add vk_device target
+add_subdirectory(src/Vulkan) # Add vk_swiftshader target
+
+if(CMAKE_CXX_COMPILER_ID MATCHES "Clang" AND # turbo-cov is only useful for clang coverage info
+    SWIFTSHADER_EMIT_COVERAGE)
+    add_subdirectory(${TESTS_DIR}/regres/cov/turbo-cov)
+endif()
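
A hedged example of a coverage configuration (the compiler names are illustrative; per the comment above, turbo-cov is only built when compiling with Clang):

```
CC=clang CXX=clang++ cmake -DSWIFTSHADER_EMIT_COVERAGE=TRUE ..
cmake --build . --parallel
```
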
+
+###########################################################
+# Sample programs and tests
+###########################################################
+
+# TODO(b/161976310): Add support for building PowerVR on MacOS
+if(APPLE AND SWIFTSHADER_BUILD_PVR)
+    message(WARNING "Building PowerVR examples for SwiftShader is not yet supported on Apple platforms.")
+    set(SWIFTSHADER_BUILD_PVR FALSE)
+endif()
+
+if(SWIFTSHADER_BUILD_PVR)
+    if(UNIX AND NOT APPLE)
+        set(PVR_WINDOW_SYSTEM XCB)
+
+        # Set the RPATH of the next defined build targets to $ORIGIN,
+        # allowing them to load shared libraries from the execution directory.
+        set(CMAKE_BUILD_RPATH "$ORIGIN")
+    endif()
+
+    set(PVR_BUILD_EXAMPLES TRUE CACHE BOOL "Build the PowerVR SDK Examples" FORCE)
+    set(PVR_BUILD_VULKAN_EXAMPLES TRUE CACHE BOOL "Build the Vulkan PowerVR SDK Examples" FORCE)
+    add_subdirectory(${THIRD_PARTY_DIR}/PowerVR_Examples)
+
+    # Samples known to work well
+    set(PVR_VULKAN_TARGET_GOOD
+        VulkanBumpmap
+        VulkanExampleUI
+        VulkanGaussianBlur
+        VulkanGlass
+        VulkanGnomeHorde
+        VulkanHelloAPI
+        VulkanImageBasedLighting
+        VulkanIntroducingPVRUtils
+        VulkanMultiSampling
+        VulkanNavigation2D
+        VulkanParticleSystem
+        VulkanSkinning
     )
 
-    install(DIRECTORY ${MARL_INCLUDE_DIR}/marl
-        DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
-        USE_SOURCE_PERMISSIONS
+    set(PVR_VULKAN_TARGET_OTHER
+        VulkanDeferredShading
+        VulkanDeferredShadingPFX
+        VulkanGameOfLife
+        VulkanIBLMapsGenerator
+        VulkanIMGTextureFilterCubic
+        VulkanIntroducingPVRShell
+        VulkanIntroducingPVRVk
+        VulkanIntroducingUIRenderer
+        VulkanMultithreading
+        VulkanNavigation3D
+        VulkanPostProcessing
+        VulkanPVRScopeExample
+        VulkanPVRScopeRemote
     )
 
-    install(TARGETS marl
-        EXPORT marl-targets
-        ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
-        LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
-        RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
-        INCLUDES DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}
+    set(PVR_TARGET_OTHER
+        glslang
+        glslangValidator
+        glslang-default-resource-limits
+        OSDependent
+        pugixml
+        PVRAssets
+        PVRCamera
+        PVRCore
+        PVRPfx
+        PVRShell
+        PVRUtilsVk
+        PVRVk
+        SPIRV
+        spirv-remap
+        SPVRemapper
+        uninstall
     )
 
-    install(EXPORT marl-targets
-        FILE marl-targets.cmake
-        NAMESPACE marl::
-        DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/marl
+    set(PVR_VULKAN_TARGET
+        ${PVR_VULKAN_TARGET_GOOD}
+        ${PVR_VULKAN_TARGET_OTHER}
     )
 
-    install(FILES ${CMAKE_CURRENT_BINARY_DIR}/marl-config.cmake
-        DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/marl
-    )
-endif(MARL_INSTALL)
+    foreach(pvr_target ${PVR_VULKAN_TARGET})
+        add_dependencies(${pvr_target} vk_swiftshader)
+    endforeach()
 
-# tests
-if(MARL_BUILD_TESTS)
-    set(MARL_TEST_LIST
-        ${MARL_SRC_DIR}/blockingcall_test.cpp
-        ${MARL_SRC_DIR}/conditionvariable_test.cpp
-        ${MARL_SRC_DIR}/containers_test.cpp
-        ${MARL_SRC_DIR}/dag_test.cpp
-        ${MARL_SRC_DIR}/defer_test.cpp
-        ${MARL_SRC_DIR}/event_test.cpp
-        ${MARL_SRC_DIR}/marl_test.cpp
-        ${MARL_SRC_DIR}/marl_test.h
-        ${MARL_SRC_DIR}/memory_test.cpp
-        ${MARL_SRC_DIR}/osfiber_test.cpp
-        ${MARL_SRC_DIR}/parallelize_test.cpp
-        ${MARL_SRC_DIR}/pool_test.cpp
-        ${MARL_SRC_DIR}/scheduler_test.cpp
-        ${MARL_SRC_DIR}/thread_test.cpp
-        ${MARL_SRC_DIR}/ticket_test.cpp
-        ${MARL_SRC_DIR}/waitgroup_test.cpp
-        ${MARL_GOOGLETEST_DIR}/googletest/src/gtest-all.cc
-        ${MARL_GOOGLETEST_DIR}/googlemock/src/gmock-all.cc
-    )
+    foreach(pvr_target ${PVR_VULKAN_TARGET_GOOD})
+        set_target_properties(${pvr_target} PROPERTIES FOLDER Samples)
+    endforeach()
 
-    set(MARL_TEST_INCLUDE_DIR
-        ${MARL_GOOGLETEST_DIR}/googletest/include/
-        ${MARL_GOOGLETEST_DIR}/googlemock/include/
-        ${MARL_GOOGLETEST_DIR}/googletest/
-        ${MARL_GOOGLETEST_DIR}/googlemock/
-    )
+    foreach(pvr_target ${PVR_TARGET_OTHER} ${PVR_VULKAN_TARGET_OTHER})
+        set_target_properties(${pvr_target} PROPERTIES FOLDER Samples/PowerVR-Build)
+    endforeach()
+endif()
 
-    add_executable(marl-unittests ${MARL_TEST_LIST})
+if(BUILD_VULKAN_WRAPPER)
+    if (NOT TARGET glslang)
+        add_subdirectory(${THIRD_PARTY_DIR}/glslang)
+    endif()
+    add_subdirectory(${TESTS_DIR}/VulkanWrapper) # Add VulkanWrapper target
+endif()
 
-    set_target_properties(marl-unittests PROPERTIES
-        INCLUDE_DIRECTORIES "${MARL_TEST_INCLUDE_DIR}"
-        FOLDER "Tests"
-    )
+if(SWIFTSHADER_BUILD_TESTS)
+    add_subdirectory(${TESTS_DIR}/ReactorUnitTests) # Add ReactorUnitTests target
+    add_subdirectory(${TESTS_DIR}/MathUnitTests) # Add math-unittests target
+    add_subdirectory(${TESTS_DIR}/SystemUnitTests) # Add system-unittests target
+endif()
 
-    marl_set_target_options(marl-unittests)
+if(SWIFTSHADER_BUILD_BENCHMARKS)
+    if (NOT TARGET benchmark::benchmark)
+        set(BENCHMARK_ENABLE_TESTING FALSE CACHE BOOL "" FORCE)
+        add_subdirectory(${THIRD_PARTY_DIR}/benchmark)
+        set_target_properties(benchmark PROPERTIES FOLDER "third_party")
+        set_target_properties(benchmark_main PROPERTIES FOLDER "third_party")
+    endif()
 
-    target_link_libraries(marl-unittests PRIVATE marl)
-endif(MARL_BUILD_TESTS)
+    add_subdirectory(${TESTS_DIR}/PipelineBenchmarks) # Add PipelineBenchmarks target
+    add_subdirectory(${TESTS_DIR}/ReactorBenchmarks) # Add ReactorBenchmarks target
+    add_subdirectory(${TESTS_DIR}/SystemBenchmarks) # Add system-benchmarks target
+    add_subdirectory(${TESTS_DIR}/VulkanBenchmarks) # Add VulkanBenchmarks target
+endif()
 
-# benchmarks
-if(MARL_BUILD_BENCHMARKS)
-    set(MARL_BENCHMARK_LIST
-        ${MARL_SRC_DIR}/blockingcall_bench.cpp
-        ${MARL_SRC_DIR}/defer_bench.cpp
-        ${MARL_SRC_DIR}/event_bench.cpp
-        ${MARL_SRC_DIR}/marl_bench.cpp
-        ${MARL_SRC_DIR}/non_marl_bench.cpp
-        ${MARL_SRC_DIR}/scheduler_bench.cpp
-        ${MARL_SRC_DIR}/ticket_bench.cpp
-        ${MARL_SRC_DIR}/waitgroup_bench.cpp
-    )
-
-    add_executable(marl-benchmarks ${MARL_BENCHMARK_LIST})
-    set_target_properties(${target} PROPERTIES FOLDER "Benchmarks")
-
-    marl_set_target_options(marl-benchmarks)
-
-    target_compile_definitions(marl-benchmarks PRIVATE
-        "MARL_FULL_BENCHMARK=${MARL_FULL_BENCHMARK}"
-    )
-
-    target_link_libraries(marl-benchmarks PRIVATE benchmark::benchmark marl)
-endif(MARL_BUILD_BENCHMARKS)
-
-# examples
-if(MARL_BUILD_EXAMPLES)
-    function(build_example target)
-        add_executable(${target} "${CMAKE_CURRENT_SOURCE_DIR}/examples/${target}.cpp")
-        set_target_properties(${target} PROPERTIES FOLDER "Examples")
-        marl_set_target_options(${target})
-        target_link_libraries(${target} PRIVATE marl)
-    endfunction(build_example)
-
-    build_example(fractal)
-    build_example(hello_task)
-    build_example(primes)
-    build_example(tasks_in_tasks)
-endif(MARL_BUILD_EXAMPLES)
+if(SWIFTSHADER_BUILD_TESTS)
+    add_subdirectory(${TESTS_DIR}/VulkanUnitTests) # Add VulkanUnitTests target
+endif()
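
For completeness, a sketch of enabling the optional test and benchmark targets registered above at configure time:

```
cmake -DSWIFTSHADER_BUILD_TESTS=TRUE -DSWIFTSHADER_BUILD_BENCHMARKS=TRUE ..
cmake --build . --parallel
```
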
diff --git a/CMakeSettings.json b/CMakeSettings.json
new file mode 100644
index 0000000..23c622f
--- /dev/null
+++ b/CMakeSettings.json
@@ -0,0 +1,55 @@
+{
+  "configurations": [
+    {
+      "name": "x64-Debug",
+      "generator": "Ninja",
+      "configurationType": "Debug",
+      "inheritEnvironments": [ "msvc_x64_x64" ],
+      "buildRoot": "${projectDir}\\out\\build\\${name}",
+      "installRoot": "${projectDir}\\out\\install\\${name}",
+      "cmakeCommandArgs": "",
+      "buildCommandArgs": "-v",
+      "ctestCommandArgs": "",
+      "variables": [
+        {
+          "name": "REACTOR_BACKEND",
+          "value": "Subzero",
+          "type": "STRING"
+        }
+      ]
+    },
+    {
+      "name": "x86-Debug",
+      "generator": "Ninja",
+      "configurationType": "Debug",
+      "buildRoot": "${projectDir}\\out\\build\\${name}",
+      "installRoot": "${projectDir}\\out\\install\\${name}",
+      "cmakeCommandArgs": "",
+      "buildCommandArgs": "-v",
+      "ctestCommandArgs": "",
+      "inheritEnvironments": [ "msvc_x86" ]
+    },
+    {
+      "name": "x86-Release",
+      "generator": "Ninja",
+      "configurationType": "RelWithDebInfo",
+      "buildRoot": "${projectDir}\\out\\build\\${name}",
+      "installRoot": "${projectDir}\\out\\install\\${name}",
+      "cmakeCommandArgs": "",
+      "buildCommandArgs": "-v",
+      "ctestCommandArgs": "",
+      "inheritEnvironments": [ "msvc_x86" ]
+    },
+    {
+      "name": "x64-Release",
+      "generator": "Ninja",
+      "configurationType": "RelWithDebInfo",
+      "buildRoot": "${projectDir}\\out\\build\\${name}",
+      "installRoot": "${projectDir}\\out\\install\\${name}",
+      "cmakeCommandArgs": "",
+      "buildCommandArgs": "-v",
+      "ctestCommandArgs": "",
+      "inheritEnvironments": [ "msvc_x64_x64" ]
+    }
+  ]
+}
\ No newline at end of file
diff --git a/CONTRIBUTING.txt b/CONTRIBUTING.txt
new file mode 100644
index 0000000..2cedc8e
--- /dev/null
+++ b/CONTRIBUTING.txt
@@ -0,0 +1,28 @@
+Want to contribute? Great! First, read this page (including the small print at the end).

+

+### Before you contribute

+Before we can use your code, you must sign the

+[Google Individual Contributor License Agreement]

+(https://cla.developers.google.com/about/google-individual)

+(CLA), which you can do online. The CLA is necessary mainly because you own the

+copyright to your changes, even after your contribution becomes part of our

+codebase, so we need your permission to use and distribute your code. We also

+need to be sure of various other things—for instance that you'll tell us if you

+know that your code infringes on other people's patents. You don't have to sign

+the CLA until after you've submitted your code for review and a member has

+approved it, but you must do it before we can put your code into our codebase.

+Before you start working on a larger contribution, you should get in touch with

+us first through the issue tracker with your idea so that we can help out and

+possibly guide you. Coordinating up front makes it much easier to avoid

+frustration later on.

+

+### Code reviews

+All submissions, including submissions by project members, require review.

+

+Information on how to submit changes for review is provided in README.md.

+

+### The small print

+Contributions made by corporations are covered by a different agreement than

+the one above, the

+[Software Grant and Corporate Contributor License Agreement]

+(https://cla.developers.google.com/about/google-corporate).
\ No newline at end of file
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
new file mode 100644
index 0000000..5e930c8
--- /dev/null
+++ b/CONTRIBUTORS.txt
@@ -0,0 +1,42 @@
+# People who have agreed to one of the CLAs and can contribute patches.

+# The AUTHORS file lists the copyright holders; this file

+# lists people.  For example, Google employees are listed here

+# but not in AUTHORS, because Google holds the copyright.

+#

+# https://developers.google.com/open-source/cla/individual

+# https://developers.google.com/open-source/cla/corporate

+#

+# Names should be added to this file as:

+#     Name <email address>

+

+Google Inc.

+ Nicolas Capens <capn@google.com>

+ Alexis Hétu <sugoi@google.com>

+ Shannon Woods <shannonwoods@google.com>

+ Corentin Wallez <cwallez@google.com>

+ Greg Hartman <ghartman@google.com>

+ Ping-Hao Wu <pinghao@google.com>

+ Maxime Grégoire <mgregoire@google.com>

+ Veranika Liaukevich <veranika@google.com>

+ John Bauman <jbauman@google.com>

+ Keun Soo Yim <yim@google.com>

+ John Sheu <sheu@google.com>

+ Philippe Hamel <hamelphi@google.com>

+ Daniel Toyama <kenjitoyama@google.com>

+ Meng-Lin Wu <marleymoo@google.com>

+ Krzysztof Kosiński <krzysio@google.com>

+ Chris Forbes <chrisforbes@google.com>

+ Ben Clayton <bclayton@google.com>

+ Hernan Liatis <hliatis@google.com>

+ Logan (Tzu-hsiang) Chien <loganchien@google.com>

+ Stephen White <senorblanco@google.com>

+ Raymond Chiu <chiur@google.com>

+ Shahbaz Youssefi <syoussefi@google.com>

+

+TransGaming Inc.

+ Nicolas Capens

+ Gavriel State

+ Jim MacArthur

+ Daniel Koch

+ Luther Johnson

+ Rob Stepinski

diff --git a/DIR_METADATA b/DIR_METADATA
new file mode 100644
index 0000000..72bfcd7
--- /dev/null
+++ b/DIR_METADATA
@@ -0,0 +1,3 @@
+monorail {
+  component: "Internals>GPU>SwiftShader"
+}
diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100644
index 0000000..75b5248
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,202 @@
+

+                                 Apache License

+                           Version 2.0, January 2004

+                        http://www.apache.org/licenses/

+

+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

+

+   1. Definitions.

+

+      "License" shall mean the terms and conditions for use, reproduction,

+      and distribution as defined by Sections 1 through 9 of this document.

+

+      "Licensor" shall mean the copyright owner or entity authorized by

+      the copyright owner that is granting the License.

+

+      "Legal Entity" shall mean the union of the acting entity and all

+      other entities that control, are controlled by, or are under common

+      control with that entity. For the purposes of this definition,

+      "control" means (i) the power, direct or indirect, to cause the

+      direction or management of such entity, whether by contract or

+      otherwise, or (ii) ownership of fifty percent (50%) or more of the

+      outstanding shares, or (iii) beneficial ownership of such entity.

+

+      "You" (or "Your") shall mean an individual or Legal Entity

+      exercising permissions granted by this License.

+

+      "Source" form shall mean the preferred form for making modifications,

+      including but not limited to software source code, documentation

+      source, and configuration files.

+

+      "Object" form shall mean any form resulting from mechanical

+      transformation or translation of a Source form, including but

+      not limited to compiled object code, generated documentation,

+      and conversions to other media types.

+

+      "Work" shall mean the work of authorship, whether in Source or

+      Object form, made available under the License, as indicated by a

+      copyright notice that is included in or attached to the work

+      (an example is provided in the Appendix below).

+

+      "Derivative Works" shall mean any work, whether in Source or Object

+      form, that is based on (or derived from) the Work and for which the

+      editorial revisions, annotations, elaborations, or other modifications

+      represent, as a whole, an original work of authorship. For the purposes

+      of this License, Derivative Works shall not include works that remain

+      separable from, or merely link (or bind by name) to the interfaces of,

+      the Work and Derivative Works thereof.

+

+      "Contribution" shall mean any work of authorship, including

+      the original version of the Work and any modifications or additions

+      to that Work or Derivative Works thereof, that is intentionally

+      submitted to Licensor for inclusion in the Work by the copyright owner

+      or by an individual or Legal Entity authorized to submit on behalf of

+      the copyright owner. For the purposes of this definition, "submitted"

+      means any form of electronic, verbal, or written communication sent

+      to the Licensor or its representatives, including but not limited to

+      communication on electronic mailing lists, source code control systems,

+      and issue tracking systems that are managed by, or on behalf of, the

+      Licensor for the purpose of discussing and improving the Work, but

+      excluding communication that is conspicuously marked or otherwise

+      designated in writing by the copyright owner as "Not a Contribution."

+

+      "Contributor" shall mean Licensor and any individual or Legal Entity

+      on behalf of whom a Contribution has been received by Licensor and

+      subsequently incorporated within the Work.

+

+   2. Grant of Copyright License. Subject to the terms and conditions of

+      this License, each Contributor hereby grants to You a perpetual,

+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable

+      copyright license to reproduce, prepare Derivative Works of,

+      publicly display, publicly perform, sublicense, and distribute the

+      Work and such Derivative Works in Source or Object form.

+

+   3. Grant of Patent License. Subject to the terms and conditions of

+      this License, each Contributor hereby grants to You a perpetual,

+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable

+      (except as stated in this section) patent license to make, have made,

+      use, offer to sell, sell, import, and otherwise transfer the Work,

+      where such license applies only to those patent claims licensable

+      by such Contributor that are necessarily infringed by their

+      Contribution(s) alone or by combination of their Contribution(s)

+      with the Work to which such Contribution(s) was submitted. If You

+      institute patent litigation against any entity (including a

+      cross-claim or counterclaim in a lawsuit) alleging that the Work

+      or a Contribution incorporated within the Work constitutes direct

+      or contributory patent infringement, then any patent licenses

+      granted to You under this License for that Work shall terminate

+      as of the date such litigation is filed.

+

+   4. Redistribution. You may reproduce and distribute copies of the

+      Work or Derivative Works thereof in any medium, with or without

+      modifications, and in Source or Object form, provided that You

+      meet the following conditions:

+

+      (a) You must give any other recipients of the Work or

+          Derivative Works a copy of this License; and

+

+      (b) You must cause any modified files to carry prominent notices

+          stating that You changed the files; and

+

+      (c) You must retain, in the Source form of any Derivative Works

+          that You distribute, all copyright, patent, trademark, and

+          attribution notices from the Source form of the Work,

+          excluding those notices that do not pertain to any part of

+          the Derivative Works; and

+

+      (d) If the Work includes a "NOTICE" text file as part of its

+          distribution, then any Derivative Works that You distribute must

+          include a readable copy of the attribution notices contained

+          within such NOTICE file, excluding those notices that do not

+          pertain to any part of the Derivative Works, in at least one

+          of the following places: within a NOTICE text file distributed

+          as part of the Derivative Works; within the Source form or

+          documentation, if provided along with the Derivative Works; or,

+          within a display generated by the Derivative Works, if and

+          wherever such third-party notices normally appear. The contents

+          of the NOTICE file are for informational purposes only and

+          do not modify the License. You may add Your own attribution

+          notices within Derivative Works that You distribute, alongside

+          or as an addendum to the NOTICE text from the Work, provided

+          that such additional attribution notices cannot be construed

+          as modifying the License.

+

+      You may add Your own copyright statement to Your modifications and

+      may provide additional or different license terms and conditions

+      for use, reproduction, or distribution of Your modifications, or

+      for any such Derivative Works as a whole, provided Your use,

+      reproduction, and distribution of the Work otherwise complies with

+      the conditions stated in this License.

+

+   5. Submission of Contributions. Unless You explicitly state otherwise,

+      any Contribution intentionally submitted for inclusion in the Work

+      by You to the Licensor shall be under the terms and conditions of

+      this License, without any additional terms or conditions.

+      Notwithstanding the above, nothing herein shall supersede or modify

+      the terms of any separate license agreement you may have executed

+      with Licensor regarding such Contributions.

+

+   6. Trademarks. This License does not grant permission to use the trade

+      names, trademarks, service marks, or product names of the Licensor,

+      except as required for reasonable and customary use in describing the

+      origin of the Work and reproducing the content of the NOTICE file.

+

+   7. Disclaimer of Warranty. Unless required by applicable law or

+      agreed to in writing, Licensor provides the Work (and each

+      Contributor provides its Contributions) on an "AS IS" BASIS,

+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or

+      implied, including, without limitation, any warranties or conditions

+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A

+      PARTICULAR PURPOSE. You are solely responsible for determining the

+      appropriateness of using or redistributing the Work and assume any

+      risks associated with Your exercise of permissions under this License.

+

+   8. Limitation of Liability. In no event and under no legal theory,

+      whether in tort (including negligence), contract, or otherwise,

+      unless required by applicable law (such as deliberate and grossly

+      negligent acts) or agreed to in writing, shall any Contributor be

+      liable to You for damages, including any direct, indirect, special,

+      incidental, or consequential damages of any character arising as a

+      result of this License or out of the use or inability to use the

+      Work (including but not limited to damages for loss of goodwill,

+      work stoppage, computer failure or malfunction, or any and all

+      other commercial damages or losses), even if such Contributor

+      has been advised of the possibility of such damages.

+

+   9. Accepting Warranty or Additional Liability. While redistributing

+      the Work or Derivative Works thereof, You may choose to offer,

+      and charge a fee for, acceptance of support, warranty, indemnity,

+      or other liability obligations and/or rights consistent with this

+      License. However, in accepting such obligations, You may act only

+      on Your own behalf and on Your sole responsibility, not on behalf

+      of any other Contributor, and only if You agree to indemnify,

+      defend, and hold each Contributor harmless for any liability

+      incurred by, or claims asserted against, such Contributor by reason

+      of your accepting any such warranty or additional liability.

+

+   END OF TERMS AND CONDITIONS

+

+   APPENDIX: How to apply the Apache License to your work.

+

+      To apply the Apache License to your work, attach the following

+      boilerplate notice, with the fields enclosed by brackets "[]"

+      replaced with your own identifying information. (Don't include

+      the brackets!)  The text should be enclosed in the appropriate

+      comment syntax for the file format. We also recommend that a

+      file or class name and description of purpose be included on the

+      same "printed page" as the copyright notice for easier

+      identification within third-party archives.

+

+   Copyright [yyyy] [name of copyright owner]

+

+   Licensed under the Apache License, Version 2.0 (the "License");

+   you may not use this file except in compliance with the License.

+   You may obtain a copy of the License at

+

+       http://www.apache.org/licenses/LICENSE-2.0

+

+   Unless required by applicable law or agreed to in writing, software

+   distributed under the License is distributed on an "AS IS" BASIS,

+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.

+   See the License for the specific language governing permissions and

+   limitations under the License.

diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..60f0d1f
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1,21 @@
+# This is the list of people with commit rights actively working on SwiftShader
+#
+# This list is used by Chromium and Android to make sure that one of the owners
+# in this list has approved a SwiftShader related change before landing it.
+#
+# Note that the upstream source-of-truth git repository at
+# swiftshader.googlesource.com/SwiftShader has ownership and access control
+# separate from those controlled by this OWNERS file.
+
+jonahr@google.com
+syoussefi@google.com
+geofflang@google.com
+
+sugoi@google.com         #{LAST_RESORT_SUGGESTION}
+shannonwoods@google.com  #{LAST_RESORT_SUGGESTION}
+chrisforbes@google.com   #{LAST_RESORT_SUGGESTION}
+cwallez@google.com       #{LAST_RESORT_SUGGESTION}
+amaiorano@google.com     #{LAST_RESORT_SUGGESTION}
+natsu@google.com         #{LAST_RESORT_SUGGESTION}
+schuffelen@google.com    #{LAST_RESORT_SUGGESTION}
+bclayton@google.com      #{LAST_RESORT_SUGGESTION}
\ No newline at end of file
diff --git a/README.md b/README.md
index 540be90..de5e1ce 100644
--- a/README.md
+++ b/README.md
@@ -1,214 +1,127 @@
-# Marl
-
-Marl is a hybrid thread / fiber task scheduler written in C++ 11.
-
-## About
-
-Marl is a C++ 11 library that provides a fluent interface for running tasks across a number of threads.
-
-Marl uses a combination of fibers and threads to allow efficient execution of tasks that can block, while keeping a fixed number of hardware threads.
-
-Marl supports Windows, macOS, Linux, FreeBSD, Fuchsia, Android and iOS (arm, aarch64, loongarch64, mips64, ppc64, rv64, x86 and x64).
-
-Marl has no dependencies on other libraries (with an exception on googletest for building the optional unit tests).
-
-Example:
-
-```cpp
-#include "marl/defer.h"
-#include "marl/event.h"
-#include "marl/scheduler.h"
-#include "marl/waitgroup.h"
-
-#include <cstdio>
-
-int main() {
-  // Create a marl scheduler using all the logical processors available to the process.
-  // Bind this scheduler to the main thread so we can call marl::schedule()
-  marl::Scheduler scheduler(marl::Scheduler::Config::allCores());
-  scheduler.bind();
-  defer(scheduler.unbind());  // Automatically unbind before returning.
-
-  constexpr int numTasks = 10;
-
-  // Create an event that is manually reset.
-  marl::Event sayHello(marl::Event::Mode::Manual);
-
-  // Create a WaitGroup with an initial count of numTasks.
-  marl::WaitGroup saidHello(numTasks);
-
-  // Schedule some tasks to run asynchronously.
-  for (int i = 0; i < numTasks; i++) {
-    // Each task will run on one of the 4 worker threads.
-    marl::schedule([=] {  // All marl primitives are capture-by-value.
-      // Decrement the WaitGroup counter when the task has finished.
-      defer(saidHello.done());
-
-      printf("Task %d waiting to say hello...\n", i);
-
-      // Blocking in a task?
-      // The scheduler will find something else for this thread to do.
-      sayHello.wait();
-
-      printf("Hello from task %d!\n", i);
-    });
-  }
-
-  sayHello.signal();  // Unblock all the tasks.
-
-  saidHello.wait();  // Wait for all tasks to complete.
-
-  printf("All tasks said hello.\n");
-
-  // All tasks are guaranteed to complete before the scheduler is destructed.
-}
-```
-
-## Benchmarks
-
-Graphs of several microbenchmarks can be found [here](https://google.github.io/marl/benchmarks).
-
-## Building
-
-Marl contains many unit tests and examples that can be built using CMake.
-
-Unit tests require fetching the `googletest` external project, which can be done by typing the following in your terminal:
-
-```bash
-cd <path-to-marl>
-git submodule update --init
-```
-
-### Linux and macOS
-
-To build the unit tests and examples, type the following in your terminal:
-
-```bash
-cd <path-to-marl>
-mkdir build
-cd build
-cmake .. -DMARL_BUILD_EXAMPLES=1 -DMARL_BUILD_TESTS=1
-make
-```
-
-The resulting binaries will be found in `<path-to-marl>/build`
-
-### Installing Marl (vcpkg)
-
-Alternatively, you can build and install Marl using [vcpkg](https://github.com/Microsoft/vcpkg/) dependency manager:
-
-```bash or powershell
-git clone https://github.com/Microsoft/vcpkg.git
-cd vcpkg
-./bootstrap-vcpkg.sh
-./vcpkg integrate install
-./vcpkg install marl
-```
-
-The Marl port in vcpkg is kept up to date by Microsoft team members and community contributors. If the version is out of date, please [create an issue or pull request](https://github.com/Microsoft/vcpkg) on the vcpkg repository.
-
-### Windows
-
-Marl can be built using [Visual Studio 2019's CMake integration](https://docs.microsoft.com/en-us/cpp/build/cmake-projects-in-visual-studio?view=vs-2019).
-
-### Using Marl in your CMake project
-
-You can build and link Marl using `add_subdirectory()` in your project's `CMakeLists.txt` file:
-
-```cmake
-set(MARL_DIR <path-to-marl>) # example <path-to-marl>: "${CMAKE_CURRENT_SOURCE_DIR}/third_party/marl"
-add_subdirectory(${MARL_DIR})
-```
-
-This will define the `marl` library target, which you can pass to `target_link_libraries()`:
-
-```cmake
-target_link_libraries(<target> marl) # replace <target> with the name of your project's target
-```
-
-You may also wish to specify your own paths to the third party libraries used by `marl`.
-You can do this by setting any of the following variables before the call to `add_subdirectory()`:
-
-```cmake
-set(MARL_THIRD_PARTY_DIR <third-party-root-directory>) # defaults to ${MARL_DIR}/third_party
-set(MARL_GOOGLETEST_DIR  <path-to-googletest>)         # defaults to ${MARL_THIRD_PARTY_DIR}/googletest
-add_subdirectory(${MARL_DIR})
-```
-
-### Usage Recommendations
-
-#### Capture marl synchronization primitives by value
-
-All marl synchronization primitives aside from `marl::ConditionVariable` should be lambda-captured by **value**:
-
-```c++
-marl::Event event;
-marl::schedule([=]{ // [=] Good, [&] Bad.
-  event.signal();
-})
-```
-
-Internally, these primitives hold a shared pointer to the primitive state. By capturing by value we avoid common issues where the primitive may be destructed before the last reference is used.
-
-#### Create one instance of `marl::Scheduler`, use it for the lifetime of the process
-
-The `marl::Scheduler` constructor can be expensive as it may spawn a number of hardware threads. \
-Destructing the `marl::Scheduler` requires waiting on all tasks to complete.
-
-Multiple `marl::Scheduler`s may fight each other for hardware thread utilization.
-
-For these reasons, it is recommended to create a single `marl::Scheduler` for the lifetime of your process.
-
-For example:
-
-```c++
-int main() {
-  marl::Scheduler scheduler(marl::Scheduler::Config::allCores());
-  scheduler.bind();
-  defer(scheduler.unbind());
-
-  return do_program_stuff();
-}
-```
-
-#### Bind the scheduler to externally created threads
-
-In order to call `marl::schedule()` the scheduler must be bound to the calling thread. Failure to bind the scheduler to the thread before calling `marl::schedule()` will result in undefined behavior.
-
-`marl::Scheduler` may be simultaneously bound to any number of threads, and the scheduler can be retrieved from a bound thread with `marl::Scheduler::get()`.
-
-A typical way to pass the scheduler from one thread to another would be:
-
-```c++
-std::thread spawn_new_thread() {
-  // Grab the scheduler from the currently running thread.
-  marl::Scheduler* scheduler = marl::Scheduler::get();
-
-  // Spawn the new thread.
-  return std::thread([=] {
-    // Bind the scheduler to the new thread.
-    scheduler->bind();
-    defer(scheduler->unbind());
-
-    // You can now safely call `marl::schedule()`
-    run_thread_logic();
-  });
-}
-
-```
-
-Always remember to unbind the scheduler before terminating the thread. Forgetting to unbind will result in the `marl::Scheduler` destructor blocking indefinitely.
-
-#### Don't use externally blocking calls in marl tasks
-
-The `marl::Scheduler` internally holds a number of worker threads which will execute the scheduled tasks. If a marl task becomes blocked on a marl synchronization primitive, marl can yield from the blocked task and continue execution of other scheduled tasks.
-
-Calling a non-marl blocking function on a marl worker thread will prevent that worker thread from being able to switch to execute other tasks until the blocking function has returned. Examples of these non-marl blocking functions include: [`std::mutex::lock()`](https://en.cppreference.com/w/cpp/thread/mutex/lock), [`std::condition_variable::wait()`](https://en.cppreference.com/w/cpp/thread/condition_variable/wait), [`accept()`](http://man7.org/linux/man-pages/man2/accept.2.html).
-
-Short blocking calls are acceptable, such as a mutex lock to access a data structure. However be careful that you do not use a marl blocking call with a `std::mutex` lock held - the marl task may yield with the lock held, and block other tasks from re-locking the mutex. This sort of situation may end up with a deadlock.
-
-If you need to make a blocking call from a marl worker thread, you may wish to use [`marl::blocking_call()`](https://github.com/google/marl/blob/main/include/marl/blockingcall.h), which will spawn a new thread for performing the call, allowing the marl worker to continue processing other scheduled tasks.
-
----
-
-Note: This is not an officially supported Google product
+# SwiftShader

+

+[![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)

+

+Introduction

+------------

+

+SwiftShader[^1] is a high-performance CPU-based implementation[^2] of the Vulkan[^3] 1.3 graphics API. Its goal is to provide hardware independence for advanced 3D graphics.

+

+> NOTE: The [ANGLE](http://angleproject.org/) project can be used to achieve a layered implementation[^4] of OpenGL ES 3.1 (aka. "SwANGLE").

+

+Building

+--------

+

+SwiftShader libraries can be built for Windows, Linux, and macOS.\

+Android and Chrome (OS) build environments are also supported.

+

+* **CMake**

+\

+  [Install CMake](https://cmake.org/download/) for Linux, macOS, or Windows and use either [the GUI](https://cmake.org/runningcmake/) or run the following terminal commands:

+  ```

+  cd build

+  cmake ..

+  cmake --build . --parallel

+

+  ./vk-unittests

+  ```

+  Tip: Set the [CMAKE_BUILD_PARALLEL_LEVEL](https://cmake.org/cmake/help/latest/envvar/CMAKE_BUILD_PARALLEL_LEVEL.html) environment variable to control the level of parallelism.

+

+

+* **Visual Studio**

+\

+  To build the Vulkan ICD library, use [Visual Studio 2019](https://visualstudio.microsoft.com/vs/community/) to open the project folder and wait for it to run CMake. Open the [CMake Targets View](https://docs.microsoft.com/en-us/cpp/build/cmake-projects-in-visual-studio?view=vs-2019#ide-integration) in the Solution Explorer and select the vk_swiftshader project to [build](https://docs.microsoft.com/en-us/cpp/build/cmake-projects-in-visual-studio?view=vs-2019#building-cmake-projects) it.

+

+

+Usage

+-----

+

+The SwiftShader libraries act as drop-in replacements for graphics drivers.

+

+On Windows, most applications can be made to use SwiftShader's DLLs by placing them in the same folder as the executable. On Linux, the `LD_LIBRARY_PATH` environment variable or `-rpath` linker option can be used to direct applications to search for shared libraries in the indicated directory first.

+

+In general, Vulkan applications look for a shared library named `vulkan-1.dll` on Windows (`vulkan-1.so` on Linux). This 'loader' library then redirects API calls to the actual Installable Client Driver (ICD). SwiftShader's ICD is named `libvk_swiftshader.dll`, but it can be renamed to `vulkan-1.dll` to be loaded directly by the application. Alternatively, you can set the `VK_ICD_FILENAMES` environment variable to the path of the `vk_swiftshader_icd.json` file that is generated under the build directory (e.g. `.\SwiftShader\build\Windows\vk_swiftshader_icd.json`). To learn more about how Vulkan loading works, read the [official documentation here](https://github.com/KhronosGroup/Vulkan-Loader/blob/master/loader/LoaderAndLayerInterface.md).
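
For example (a sketch; the JSON path depends on your build directory, and `./my_vulkan_app` is a hypothetical application), on Linux this could look like:

```
export VK_ICD_FILENAMES=/path/to/SwiftShader/build/vk_swiftshader_icd.json
./my_vulkan_app
```
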

+

+Contributing

+------------

+

+See [CONTRIBUTING.txt](CONTRIBUTING.txt) for important contributing requirements.

+

+The canonical repository for SwiftShader is hosted at:

+https://swiftshader.googlesource.com/SwiftShader.

+

+All changes must be reviewed and approved in the [Gerrit](https://www.gerritcodereview.com/) review tool at:

+https://swiftshader-review.googlesource.com. You must sign in to this site with a Google Account before changes can be uploaded.

+

+Next, authenticate your account here:

+https://swiftshader.googlesource.com/new-password (use the same e-mail address as the one configured as the [Git commit author](https://git-scm.com/book/en/v2/Getting-Started-First-Time-Git-Setup#_your_identity)).

+

+All changes require a [Change-ID](https://gerrit-review.googlesource.com/Documentation/user-changeid.html) tag in the commit message. A [commit hook](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) may be used to add this tag automatically, and can be found at:

+https://gerrit-review.googlesource.com/tools/hooks/commit-msg. You can execute `git clone https://swiftshader.googlesource.com/SwiftShader` and manually place the commit hook in `SwiftShader/.git/hooks/`, or to clone the repository and install the commit hook in one go:

+

+    git clone https://swiftshader.googlesource.com/SwiftShader && (cd SwiftShader && git submodule update --init --recursive third_party/git-hooks && ./third_party/git-hooks/install_hooks.sh)

+

+On Windows, this command line requires using the [Git Bash Shell](https://www.atlassian.com/git/tutorials/git-bash).

+

+Changes are uploaded to Gerrit by executing:

+

+    git push origin HEAD:refs/for/master

+

+When ready, [add](https://gerrit-review.googlesource.com/Documentation/intro-user.html#adding-reviewers) a project [owner](OWNERS) as a reviewer on your change.

+

+Some tests will automatically be run against the change. Notably, [presubmit.sh](tests/presubmit.sh) verifies the change has been formatted using [clang-format 11.0.1](tests/kokoro/gcp_ubuntu/check_style.sh). Most IDEs come with clang-format support, but may require upgrading/downgrading to the [clang-format version 11.0.0](https://github.com/llvm/llvm-project/releases/tag/llvmorg-11.0.0) *release* version (notably Chromium's buildtools has a clang-format binary which can be an in-between revision which produces different formatting results).

+

+Testing

+-------

+

+SwiftShader's Vulkan implementation can be tested using the [dEQP](https://github.com/KhronosGroup/VK-GL-CTS) test suite.

+

+See [docs/dEQP.md](docs/dEQP.md) for details.

+

+Third-Party Dependencies

+------------------------

+

+The [third_party](third_party/) directory contains projects which originated outside of SwiftShader:

+

+[subzero](third_party/subzero/) contains a fork of the [Subzero](https://chromium.googlesource.com/native_client/pnacl-subzero/) project. It originates from Google Chrome's (Portable) [Native Client](https://developer.chrome.com/native-client) project. The fork was made using [git-subtree](https://github.com/git/git/blob/master/contrib/subtree/git-subtree.txt) to include all of Subzero's history.

+

+[llvm-subzero](third_party/llvm-subzero/) contains a minimized set of LLVM dependencies of the Subzero project.

+

+[PowerVR_SDK](third_party/PowerVR_SDK/) contains a subset of the [PowerVR Graphics Native SDK](https://github.com/powervr-graphics/Native_SDK) for running several sample applications.

+

+[googletest](third_party/googletest/) contains the [Google Test](https://github.com/google/googletest) project, as a Git submodule. It is used for running unit tests for Chromium, and Reactor unit tests. Run `git submodule update --init` to obtain/update the code. Any contributions should be made upstream.

+

+Documentation

+-------------

+

+See [docs/Index.md](docs/Index.md).

+

+Contact

+-------

+

+Public mailing list: [swiftshader@googlegroups.com](https://groups.google.com/forum/#!forum/swiftshader)

+

+General bug tracker:  https://g.co/swiftshaderbugs \

+Chrome specific bugs: https://bugs.chromium.org/p/swiftshader

+

+License

+-------

+

+The SwiftShader project is licensed under the Apache License Version 2.0. You can find a copy of it in [LICENSE.txt](LICENSE.txt).

+

+Files in the third_party folder are subject to their respective license.

+

+Authors and Contributors

+------------------------

+

+The legal authors for copyright purposes are listed in [AUTHORS.txt](AUTHORS.txt).

+

+[CONTRIBUTORS.txt](CONTRIBUTORS.txt) contains a list of names of individuals who have contributed to SwiftShader. If you're not on the list, but you've signed the [Google CLA](https://cla.developers.google.com/clas) and have contributed more than a formatting change, feel free to request to be added.

+

+Notes and Disclaimers

+---------------------

+

+[^1]: This is not an official Google product.  

+[^2]: Vulkan 1.3 conformance: https://www.khronos.org/conformance/adopters/conformant-products#submission_717  

+[^3]: Trademarks are the property of their respective owners.  

+[^4]: OpenGL ES 3.1 conformance: https://www.khronos.org/conformance/adopters/conformant-products/opengles#submission_906  
\ No newline at end of file
diff --git a/build/android.toolchain.cmake b/build/android.toolchain.cmake
new file mode 100644
index 0000000..d9806e3
--- /dev/null
+++ b/build/android.toolchain.cmake
@@ -0,0 +1,13 @@
+set(CMAKE_SYSTEM_NAME Android)
+if(NOT "$ENV{ANDROID_HOME}" STREQUAL "")
+  set(CMAKE_ANDROID_NDK $ENV{ANDROID_HOME}/ndk-bundle)
+else()
+  set(CMAKE_ANDROID_NDK $ENV{HOME}/Android/Sdk/ndk-bundle)
+endif()
+set(CMAKE_ANDROID_NDK_TOOLCHAIN_VERSION clang)
+set(CMAKE_ANDROID_ARCH_ABI arm64-v8a)
+set(CMAKE_ANDROID_STL_TYPE c++_shared)
+if(APPLE)
+  set(CMAKE_RANLIB "${CMAKE_ANDROID_NDK}/toolchains/aarch64-linux-android-4.9/prebuilt/darwin-x86_64/aarch64-linux-android/bin/ranlib")
+  set(CMAKE_AR "${CMAKE_ANDROID_NDK}/toolchains/aarch64-linux-android-4.9/prebuilt/darwin-x86_64/aarch64-linux-android/bin/ar")
+endif()
diff --git a/build/strip_cmakelists.sh b/build/strip_cmakelists.sh
new file mode 100755
index 0000000..161fd3d
--- /dev/null
+++ b/build/strip_cmakelists.sh
@@ -0,0 +1,2 @@
+cd "$(dirname "$0")"
+go run strip_unneeded.go --file=../CMakeLists.txt --test="cmake -DREACTOR_BACKEND=LLVM -DREACTOR_EMIT_DEBUG_INFO=1 .. && cmake --build ."
\ No newline at end of file
diff --git a/build/strip_unneeded.go b/build/strip_unneeded.go
new file mode 100644
index 0000000..a4a1bf5
--- /dev/null
+++ b/build/strip_unneeded.go
@@ -0,0 +1,123 @@
+// Copyright 2019 The SwiftShader Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// strip_unneeded is a tool that attempts to remove unnecessary lines from a
+// file by running a test script after each marked line is removed.
+//
+// strip_unneeded will scan the file specified by the --file argument for lines
+// that contain the substring specified by the --marker argument. One-by-one
+// those marked lines will be removed from the file, after which the test script
+// specified by --test will be run. If the test passes (the process completes
+// with a 0 return code), then the line will remain removed, otherwise it is
+// restored. This will repeat for every line in the file containing the marker,
+// until all lines are tested.
+package main
+
+import (
+	"flag"
+	"fmt"
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"strings"
+)
+
+var (
+	file   = flag.String("file", "", "file to modify")
+	marker = flag.String("marker", "CHECK_NEEDED", "line token")
+	test   = flag.String("test", "", "test script to run with each change")
+)
+
+func main() {
+	if err := run(); err != nil {
+		fmt.Println(err.Error())
+		os.Exit(1)
+	}
+}
+
+func run() error {
+	flag.Parse()
+	if *file == "" {
+		return fmt.Errorf("Missing --file argument")
+	}
+	if *marker == "" {
+		return fmt.Errorf("Missing --marker argument")
+	}
+	if *test == "" {
+		return fmt.Errorf("Missing --test argument")
+	}
+
+	// make sure the test passes with no modifications
+	if err := runTest(); err != nil {
+		return fmt.Errorf("Test fails with no modifications.\n%v", err)
+	}
+
+	// load the test file
+	body, err := ioutil.ReadFile(*file)
+	if err != nil {
+		return fmt.Errorf("Couldn't load file '%v'", *file)
+	}
+
+	// gather all the lines
+	allLines := strings.Split(string(body), "\n")
+
+	// find all the lines with the marker
+	markerLines := make([]int, 0, len(allLines))
+	for i, l := range allLines {
+		if strings.Contains(l, *marker) {
+			markerLines = append(markerLines, i)
+		}
+	}
+
+	omit := map[int]bool{}
+
+	save := func() error {
+		f, err := os.Create(*file)
+		if err != nil {
+			return err
+		}
+		defer f.Close()
+		for i, l := range allLines {
+			if !omit[i] {
+				f.WriteString(l)
+				f.WriteString("\n")
+			}
+		}
+		return nil
+	}
+
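+	// Remove each marked line in turn and re-run the test; the removal is kept
+	// only if the test still passes, otherwise the line is restored on the next save.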
+	for i, l := range markerLines {
+		omit[l] = true
+		if err := save(); err != nil {
+			return err
+		}
+		if err := runTest(); err != nil {
+			omit[l] = false
+			fmt.Printf("%d/%d: Test fails when removing line %v: %v\n", i, len(markerLines), l, allLines[l])
+		} else {
+			fmt.Printf("%d/%d: Test passes when removing line %v: %v\n", i, len(markerLines), l, allLines[l])
+		}
+	}
+
+	return save()
+}
+
+func runTest() error {
+	cmd := exec.Command("sh", "-c", *test)
+	out, err := cmd.CombinedOutput()
+	if err != nil {
+		return fmt.Errorf("Test failed with error: %v\n%v", err, string(out))
+	}
+	return nil
+}
diff --git a/build_overrides/build.gni b/build_overrides/build.gni
new file mode 100644
index 0000000..e2af644
--- /dev/null
+++ b/build_overrides/build.gni
@@ -0,0 +1,15 @@
+# Copyright 2019 The SwiftShader Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+build_with_chromium = false
diff --git a/build_overrides/spirv_tools.gni b/build_overrides/spirv_tools.gni
new file mode 100644
index 0000000..7364b5c
--- /dev/null
+++ b/build_overrides/spirv_tools.gni
@@ -0,0 +1,20 @@
+# Copyright 2019 The SwiftShader Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# We are building inside SwiftShader
+spirv_tools_standalone = false
+
+# Paths to SPIRV-Tools dependencies in SwiftShader
+spirv_tools_googletest_dir = "//third_party/googletest"
+spirv_tools_spirv_headers_dir = "//third_party/SPIRV-Headers"
diff --git a/build_overrides/swiftshader.gni b/build_overrides/swiftshader.gni
new file mode 100644
index 0000000..e2eda47
--- /dev/null
+++ b/build_overrides/swiftshader.gni
@@ -0,0 +1,19 @@
+# Copyright 2019 The SwiftShader Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# We are building SwiftShader standalone
+swiftshader_standalone = true
+
+# Path to SwiftShader
+swiftshader_dir = "//"
diff --git a/build_overrides/wayland.gni b/build_overrides/wayland.gni
new file mode 100644
index 0000000..e50b0fd
--- /dev/null
+++ b/build_overrides/wayland.gni
@@ -0,0 +1,18 @@
+# Copyright 2022 The SwiftShader Authors. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ozone_platform_wayland = true
+
+# SwiftShader has no wayland third-party dir
+wayland_gn_dir = ""
diff --git a/codereview.settings b/codereview.settings
new file mode 100644
index 0000000..4ff2001
--- /dev/null
+++ b/codereview.settings
@@ -0,0 +1,4 @@
+# This file is used by git cl to get repository specific information.
+PROJECT: swiftshader
+GERRIT_HOST: True
+GERRIT_SQUASH_UPLOADS: False
diff --git a/docs/ArchitectureLayers.png b/docs/ArchitectureLayers.png
new file mode 100644
index 0000000..8049804
--- /dev/null
+++ b/docs/ArchitectureLayers.png
Binary files differ
diff --git a/docs/Exp-Log-Optimization.pdf b/docs/Exp-Log-Optimization.pdf
new file mode 100644
index 0000000..a2424d0
--- /dev/null
+++ b/docs/Exp-Log-Optimization.pdf
Binary files differ
diff --git a/docs/Index.md b/docs/Index.md
new file mode 100644
index 0000000..2ec49f5
--- /dev/null
+++ b/docs/Index.md
@@ -0,0 +1,65 @@
+> :warning: **Out of date**

+

+SwiftShader Documentation

+=========================

+

+SwiftShader provides high-performance graphics rendering on the CPU. It eliminates the dependency on graphics hardware capabilities.

+

+Architecture

+------------

+

+SwiftShader provides shared libraries (DLLs) which implement standardized graphics APIs. Applications already using these APIs thus don't require any changes to use SwiftShader. It can run entirely in user space, or as a driver (for Android), and output to either a frame buffer, a window, or an offscreen buffer.

+

+To achieve exceptional performance, SwiftShader is built around two major optimizations that affect its architecture: dynamic code generation, and parallel processing. Generating code at run-time makes it possible to eliminate code branches and optimize register usage, specializing the processing routines for exactly the operations required by each draw call. Parallel processing means both utilizing the CPU's multiple cores and processing multiple elements across the width of the SIMD vector units.

+

+Structurally there are four major layers:

+

+![API, Renderer, Reactor, JIT](/docs/ArchitectureLayers.png "Architecture Layers")

+

+The API layer is an implementation of a graphics API, such as OpenGL (ES) or Direct3D, on top of the Renderer interface. It is responsible for managing API-level resources and rendering state, as well as compiling high-level shaders to bytecode form. 

+

+The Renderer layer generates specialized processing routines for draw calls and coordinates the execution of rendering tasks. It defines the data structures used and how the processing is performed.

+

+Reactor is an embedded language for C++ to dynamically generate code in a WYSIWYG fashion. It makes it possible to specialize the processing routines for the state and shaders used by each draw call. Its syntax closely resembles C and shading languages, to make the code generation easily readable.

+

+The JIT layer is a run-time compiler, such as [LLVM](http://llvm.org/)'s JIT, or [Subzero](Subzero.md). Reactor records its operations in an in-memory intermediate form which can be materialized by the JIT into a function which can be called directly.

+

+Design

+------

+

+### Reactor

+

+To generate code for an expression such as `float y = 1 - x;` directly with LLVM, we'd need code like `Value *valueY = BinaryOperator::CreateSub(ConstantInt::get(Type::getInt32Ty(Context), 1), valueX, "y", basicBlock);`. This is very verbose and becomes hard to read for longer expressions. Using C++ operator overloading, [Reactor](../src/Reactor/) simplifies this to `Float y = 1 - x;`. Note that Reactor types have the same names as C types, but starting with a capital letter. Likewise `If()`, `Else`, and `For(,,)` implement their C counterparts.
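+
+As a sketch of how this reads in practice (using the `Function<>` plumbing detailed in [Reactor.md](Reactor.md); the routine below is illustrative, not taken from the SwiftShader sources):
+
+```C++
+Function<Float(Float)> function;
+{
+    Float x = function.Arg<0>();
+    Float y = 1 - x;
+
+    // Clamp negative results to zero.
+    If(y < 0.0f)
+    {
+        y = 0.0f;
+    }
+
+    // Scale the result by 2^3.
+    For(Int i = 0, i < 3, i++)
+    {
+        y = y * 2.0f;
+    }
+
+    Return(y);
+}
+```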

+

+While making Reactor's syntax so similar to the C++ in which it is written might cause some confusion at first, it provides a powerful abstraction for code specialization. For example to produce the code for an addition or a subtraction, one could write `x = addOrSub ? x + y : x - y;`. Note that only one operation ends up in the generated code.

+

+We refer to the functions generated by Reactor code as [Routine](../src/Reactor/Routine.hpp)s.

+

+More details on Reactor can be found in [Reactor.md](Reactor.md).

+

+### Renderer

+

+The [Renderer](../src/Renderer/) layer is implemented in three main parts: the [VertexProcessor](../src/Renderer/VertexProcessor.cpp), [SetupProcessor](../src/Renderer/SetupProcessor.cpp), and [PixelProcessor](../src/Renderer/PixelProcessor.cpp). Each "processor" produces a corresponding Reactor routine, and manages the relevant graphics state. They also keep a cache of already generated routines, so that when a combination of states is encountered again it will reuse the routine that performs the desired processing.

+

+The [VertexRoutine](../src/Shader/VertexRoutine.cpp) produces a function for processing a batch of vertices. The fixed-function T&L pipeline is implemented by [VertexPipeline](../src/Shader/VertexPipeline.cpp), while programmable vertex processing with a shader is implemented by [VertexProgram](../src/Shader/VertexProgram.cpp). Note that the vertex routine also performs vertex attribute reading, vertex caching, viewport transform, and clip flag calculation all in the same function.

+

+The [SetupRoutine](../src/Shader/SetupRoutine.cpp) performs primitive setup. This constitutes back-face culling, computing gradients, and rasterization.

+

+The [PixelRoutine](../src/Shader/PixelRoutine.cpp) takes a batch of primitives and performs per-pixel operations. The fixed-function texture stages and legacy integer shaders are implemented by [PixelPipeline](../src/Shader/PixelPipeline.cpp), while programmable pixel processing with a shader is implemented by [PixelProgram](../src/Shader/PixelProgram.cpp). All other per-pixel operations such as the depth test, alpha test, stenciling, and alpha blending are also performed in the pixel routine. Together with the traversal of the pixels in [QuadRasterizer](../src/Renderer/QuadRasterizer.cpp), it forms one function.

+

+The PixelProgram and VertexProgram share some common functionality in [ShaderCore](../src/Shader/ShaderCore.cpp). Likewise, texture sampling is implemented by [SamplerCore](../src/Shader/SamplerCore.cpp).

+

+Aside from creating and managing the processing routines with the help of the Processor classes, the Renderer also subdivides and schedules rendering tasks onto multiple threads.

+

+### OpenGL

+

+The OpenGL (ES) and EGL APIs are implemented in [src/OpenGL/](../src/OpenGL/).

+

+The GLSL compiler is implemented in [src/OpenGL/compiler/](../src/OpenGL/compiler/). It uses [Flex](http://flex.sourceforge.net/) and [Bison](https://www.gnu.org/software/bison/) to tokenize and parse GLSL shader source. It produces an [abstract syntax tree](https://en.wikipedia.org/wiki/Abstract_syntax_tree) (AST), which is then traversed to output assembly-level instructions in [OutputASM.cpp](../src/OpenGL/compiler/OutputASM.cpp).

+

+The [EGL](https://www.khronos.org/registry/egl/specs/eglspec.1.4.20110406.pdf) API is implemented in [src/OpenGL/libEGL/](../src/OpenGL/libEGL/). Its entry functions are listed in [libEGL.def](../src/OpenGL/libEGL/libEGL.def) (for Windows) and [libEGL.lds](../src/OpenGL/libEGL/libEGL.lds) (for Linux), and defined in [main.cpp](../src/OpenGL/libEGL/main.cpp) and implemented in [libEGL.cpp](../src/OpenGL/libEGL/libEGL.cpp). The [Display](../src/OpenGL/libEGL/Display.h), [Surface](../src/OpenGL/libEGL/Surface.h), and [Config](../src/OpenGL/libEGL/Config.h) classes are respective implementations of the abstract EGLDisplay, EGLSurface, and EGLConfig types.

+

+[OpenGL ES 2.0](https://www.khronos.org/registry/gles/specs/2.0/es_full_spec_2.0.25.pdf) is implemented in [src/OpenGL/libGLESv2/](../src/OpenGL/libGLESv2/). Note that while [OpenGL ES 3.0](https://www.khronos.org/registry/gles/specs/3.0/es_spec_3.0.0.pdf) functions are implemented in [libGLESv3.cpp](../src/OpenGL/libGLESv2/libGLESv3.cpp), they are compiled into the libGLESv2 library, as is standard among most implementations (some platforms have libGLESv3 symbolically linked to libGLESv2). We'll focus on OpenGL ES 2.0 in this documentation.

+

+When the application calls an OpenGL function, it lands in the C entry functions at [main.cpp](../src/OpenGL/libGLESv2/main.cpp). It then gets dispatched to [libGLESv2.cpp](../src/OpenGL/libGLESv2/libGLESv2.cpp) functions in the es2 namespace. These functions obtain the thread's OpenGL context, and perform validation of the call's parameters. Most functions then call a corresponding [Context](../src/OpenGL/libGLESv2/Context.h) method to perform the call's main operations (changing state or queuing a draw task).

+

diff --git a/docs/LLVM.md b/docs/LLVM.md
new file mode 100644
index 0000000..ce504c2
--- /dev/null
+++ b/docs/LLVM.md
@@ -0,0 +1,114 @@
+LLVM Dependency
+===============
+
+Overview
+--------
+
+SwiftShader's [Reactor](Reactor.md) library uses LLVM
+as one of its JIT-compiler backends. This page contains notes about building and
+upgrading LLVM.
+
+Directory structure
+-------------------
+
+The current version of LLVM we use is 10, and can be found in
+`third_party/llvm-10.0`.
+
+In this folder you will find the following directories:
+
+*   configs : Contains per-platform headers that LLVM sources include to
+    configure the build. These are generated by running `scripts/update.py`
+    (more on that below).
+*   llvm : Contains a subset of the LLVM source code needed to build the JIT
+    support required by SwiftShader.
+*   scripts : Contains `update.py`, which is used to update the files in the
+    `configs` folder. More on that below.
+
+Updating the current version of LLVM to latest
+----------------------------------------------
+
+Updating to the latest version of LLVM can be tricky to do manually, especially
+because the [llvm-project repo](https://github.com/llvm/llvm-project) includes
+much more than just LLVM (e.g. it includes all the Clang source). Furthermore,
+we may have local changes to our copy of LLVM that must be maintained, or at
+least considered across updates.
+
+To ease this pain, run the script `third_party/update-llvm-10.sh` on Linux. This
+script works by updating a separate branch of SwiftShader, `llvm10-clean`, on
+which the latest snapshot of LLVM is fetched and committed, and then this branch
+is merged back into `master`. During the merge, if there are conflicts to
+resolve because of local changes we've made, these can be resolved in the usual
+manner, and the merge can be resumed.
+
+The script is configured to fetch from the branch in `LLVM_REPO_BRANCH`, and
+will automatically grab the latest commit on that branch.
+
+Although not always necessary, if there were new configuration variables added
+or modified, you may need to run `update.py` as described below. Otherwise, if
+all goes well, the update to LLVM can be committed and pushed.
+
+Updating LLVM configuration files
+---------------------------------
+
+The script `third_party/llvm-10.0/scripts/update.py` is used to update the
+config files in `third_party/llvm-10.0/configs`.
+
+Before running this script, you must make sure to update two variables in it
+(and commit this change):
+
+```
+# LLVM_BRANCH must match the value of the same variable in third_party/update-llvm-10.sh
+LLVM_BRANCH = "release/10.x"
+
+# LLVM_COMMIT must be set to the commit hash that we last updated to when running third_party/update-llvm-10.sh.
+# Run 'git show -s origin/llvm10-clean' and look for 'llvm-10-update: <hash>' to retrieve it.
+LLVM_COMMIT = "d32170dbd5b0d54436537b6b75beaf44324e0c28"
+```
+
+The script takes a platform as an argument, plus extra CMake args. For example, to
+update the Linux configs, run:
+
+```
+python3 update.py linux -j 200
+```
+
+This script does the following:
+
+*   Clones the LLVM repo and checks out `LLVM_COMMIT` from `LLVM_BRANCH`.
+*   Builds LLVM specifically for the target architectures specified in
+    the `LLVM_TRIPLES` dictionary.
+*   Copies the specified platform config files to
+    `third_party/llvm-10.0/configs`, applying certain transformations to the
+    files, such as undefining macros listed in `LLVM_UNDEF_MACROS` (see the
+    `copy_platform_file` function).
+
+Note that certain configuration options depend on the host OS, so you will need to
+run the script on the right host OS. See the `LLVM_PLATFORM_TO_HOST_SYSTEM`
+dictionary for the mapping, which looks like this at the time of this writing:
+
+```
+# Mapping of target platform to the host it must be built on
+LLVM_PLATFORM_TO_HOST_SYSTEM = {
+    'android': 'Linux',
+    'darwin': 'Darwin',
+    'linux': 'Linux',
+    'windows': 'Windows',
+    'fuchsia': 'Linux'
+}
+```
+
+Generally, use Windows to build for Windows, Darwin to build for Darwin (macOS), and
+Linux for everything else. Also note that for Android and Fuchsia, the config is
+closest to that of Linux, but you will likely have to manually tweak the configs
+(in particular, `configs/<platform>/include/llvm/Config/config.h`).
+
+Supported platforms, architectures, and build systems
+-----------------------------------------------------
+
+SwiftShader is used by many products on many architectures:
+
+*   OS: Windows, Linux, MacOS, Android, Fuchsia
+*   Architecture: x64, x86, ARM, ARM64, MIPS, MIPS64
+*   Build systems: CMake, GN, Soong, Blaze
+
+Upgrading/updating LLVM usually entails making sure it builds for all of these.
\ No newline at end of file
diff --git a/docs/Reactor.md b/docs/Reactor.md
new file mode 100644
index 0000000..5666851
--- /dev/null
+++ b/docs/Reactor.md
@@ -0,0 +1,311 @@
+Reactor Documentation

+=====================

+

+Reactor is an embedded language for C++ to facilitate dynamic code generation and specialization.

+

+Introduction

+------------

+

+To generate the code for an expression such as

+```C++

+float y = 1 - x;

+```

+using the LLVM compiler framework, one needs to execute

+```C++

+Value *valueY = BinaryOperator::CreateSub(ConstantInt::get(Type::getInt32Ty(Context), 1), valueX, "y", basicBlock);

+```

+

+For large expressions this quickly becomes hard to read, and tedious to write and modify.

+

+With Reactor, it becomes as simple as writing

+```C++

+Float y = 1 - x;

+```

+Note the capital letter for the type. This is not the code to perform the calculation. It's the code that when executed will record the calculation to be performed.

+

+This is possible through the use of C++ operator overloading. Reactor also supports control flow constructs and pointer arithmetic with C-like syntax.

+

+Motivation

+----------

+

+Just-in-time (JIT) compiled code has the potential to be faster than statically compiled code, through [run-time specialization](http://en.wikipedia.org/wiki/Run-time_algorithm_specialisation). However, this is rarely achieved in practice.

+

+Specialization in general is the use of a more optimal routine that is specific for a certain set of conditions. For example when sorting two numbers it is faster to swap them if they are not yet in order, than to call a generic quicksort function. Specialization can be done statically, by explicitly writing each variant or by using metaprogramming to generate multiple variants at static compile time, or dynamically by examining the parameters at run-time and generating a specialized path.

+

+Because specialization can be done statically, sometimes aided by metaprogramming, the ability of a JIT-compiler to do it at run-time is often disregarded. Specialized benchmarks show no advantage of JIT code over static code. However, having a specialized benchmark does not take into account that a typical real-world application deals with many unpredictable conditions. Systems can have one core or several dozen cores, and many different ISA extensions. This alone can make it impractical to write fully specialized routines manually, and with the help of metaprogramming it results in code bloat. Worse yet, any non-trivial application has a layered architecture in which lower layers (e.g. framework APIs) know very little or nothing about the usage by higher layers. Various parameters also depend on user input. Run-time specialization can have access to the full context in which each routine executes, and although the optimization contribution of specialization for a single parameter is small, the combined speedup can be huge. As an extreme example, interpreters can execute any kind of program in any language, but by specializing for a specific program you get a compiled version of that program. But you don't need a full-blown language to observe a huge difference between interpretation and specialization through compilation. Most applications process some form of list of commands in an interpreted fashion, and even the series of calls into a framework API can be compiled into a more efficient whole at run-time.

+

+While the benefit of run-time specialization should now be apparent, JIT-compiled languages lack many of the practical advantages of static compilation. JIT-compilers are very constrained in how much time they can spend on compiling the bytecode into machine code. This limits their ability to even reach parity with static compilation, let alone attempt to exceed it by performing run-time specialization. Also, even if the compilation time was not as constrained, they can't specialize at every opportunity because it would result in an explosive growth of the amount of generated code. There's a need to be very selective in only specializing the hotspots for often recurring conditions, and to manage a cache of the different variants. Even just selecting the size of the set of variables that form the entire condition to specialize for can get immensely complicated.

+

+Clearly we need a manageable way to benefit from run-time specialization where it would help significantly, while still resorting to static compilation for anything else. A crucial observation is that the developer has expectations about the application's behavior, which is valuable information which can be exploited to choose between static or JIT-compilation. One way to do that is to use an API which JIT-compiles the commands provided by the application developer. An example of this is an advanced DBMS which compiles the query into an optimized sequence of routines, each specialized to the data types involved, the sizes of the CPU caches, etc. Another example is a modern graphics API, which takes shaders (a routine executed per pixel or other element) and a set of parameters which affect their execution, and compiles them into GPU-specific code. However, these examples have a very hard divide between what goes on inside the API and outside. You can't exchange data between the statically compiled outside world and the JIT-compiled routines, unless through the API, and they have very different execution models. In other words they are highly domain specific and not generic ways to exploit run-time specialization in arbitrary code.

+

+This is becoming especially problematic for GPUs, as they are now just as programmable as CPUs but you can still only command them through an API. Attempts to disguise this by using a single language, such as C++AMP and SYCL, still have difficulties expressing how data is exchanged, don't actually provide control over the specialization, they have hidden overhead, and they have unpredictable performance characteristics across devices. Meanwhile CPUs gain ever more cores and wider SIMD vector units, but statically compiled languages don't readily exploit this and can't deal with the many code paths required to extract optimal performance. A different language and framework is required.

+

+Concepts and Syntax

+-------------------

+

+### Routine and Function<>

+

+Reactor allows you to create new functions at run-time. Their generation happens in C++, and after materializing them they can be called during the execution of the same C++ program. We call these dynamically generated functions "routines", to discern them from statically compiled functions and methods. Reactor's `Routine` class encapsulates a routine. Deleting a Routine object also frees the memory used to store the routine.

+

+To declare the function signature of a routine, use the `Function<>` template. The template argument is the signature of a function, using Reactor variable types. Here's a complete definition of a routine taking no arguments and returning an integer:

+

+```C++

+Function<Int(Void)> function;

+{

+    Return(1);

+}

+```

+

+The braces are superfluous. They just make the syntax look more like regular C++, and they offer a new scope for Reactor variables.

+

+The Routine is obtained and materialized by "calling" the `Function<>` object to give it a name:

+

+```C++

+auto routine = function("one");

+```

+

+Finally, we can obtain the function pointer to the entry point of the routine, and call it:

+

+```C++

+int (*callable)() = (int(*)())routine->getEntry();

+

+int result = callable();

+assert(result == 1);

+```

+

+Note that `Function<>` objects are relatively heavyweight, since they have the entire JIT-compiler behind them, while `Routine` objects are lightweight and merely provide storage and lifetime management of generated routines. So we typically allow the `Function<>` object to be destroyed (by going out of scope), while the `Routine` object is retained until we no longer need to call the routine. Hence the distinction between them and the need for a couple of lines of boilerplate code.

+

+### Arguments and Expressions

+

+Routines can take various arguments. The following example illustrates the syntax for accessing the arguments of a routine which takes two integer arguments and returns their sum:

+

+```C++

+Function<Int(Int, Int)> function;

+{

+    Int x = function.Arg<0>();

+    Int y = function.Arg<1>();

+

+    Int sum = x + y;

+

+    Return(sum);

+}

+```

+

+Reactor supports various types which correspond to C++ types:

+

+| Class name    | C++ equivalent |

+| ------------- |----------------|

+| Int           | int32_t        |

+| UInt          | uint32_t       |

+| Short         | int16_t        |

+| UShort        | uint16_t       |

+| Byte          | uint8_t        |

+| SByte         | int8_t         |

+| Long          | int64_t        |

+| ULong         | uint64_t       |

+| Float         | float          |

+

+Note that bytes are unsigned unless prefixed with S, while larger integers are signed unless prefixed with U.

+

+These scalar types support all of the C++ arithmetic operations.

+

+Reactor also supports several vector types. For example `Float4` is a vector of four floats. They support a select number of C++ operators, and several "intrinsic" functions, such as `Max()` to compute the element-wise maximum, and others which return a bit mask. Check [Reactor.hpp](../src/Reactor/Reactor.hpp) for all the types, operators and intrinsics.
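+
+As an illustrative sketch (not taken from the SwiftShader sources), the following routine loads two `Float4` vectors through byte pointers and writes their element-wise maximum back through the first pointer:
+
+```C++
+Function<Int(Pointer<Byte>, Pointer<Byte>)> function;
+{
+    Pointer<Byte> a = function.Arg<0>();
+    Pointer<Byte> b = function.Arg<1>();
+
+    // Load two four-wide float vectors and compute their element-wise maximum.
+    Float4 x = *Pointer<Float4>(a);
+    Float4 y = *Pointer<Float4>(b);
+
+    // Store the result back through the first pointer.
+    *Pointer<Float4>(a) = Max(x, y);
+
+    Return(0);
+}
+```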

+

+### Casting and Reinterpreting

+

+Types can be cast using the constructor-style syntax:

+

+```C++

+Function<Int(Float)> function;

+{

+    Float x = function.Arg<0>();

+

+    Int cast = Int(x);

+

+    Return(cast);

+}

+```

+

+You can reinterpret-cast a variable using `As<>`:

+

+```C++

+Function<Int(Float)> function;

+{

+    Float x = function.Arg<0>();

+

+    Int reinterpret = As<Int>(x);

+

+    Return(reinterpret);

+}

+```

+

+Note that this is a bitwise cast. Unlike C++'s `reinterpret_cast<>`, it does not allow casting between different sized types. Think of it as storing the value in memory and then loading from that same address into the casted type.

+

+An important exception is that 16-, 8-, and 4-byte vectors can be cast to other vectors of one of these sizes. Casting to a longer vector leaves the upper contents undefined.

+

+### Pointers

+

+Pointers also use a template class:

+

+```C++

+Function<Int(Pointer<Int>)> function;

+{

+    Pointer<Int> x = function.Arg<0>();

+

+    Int dereference = *x;

+

+    Return(dereference);

+}

+```

+

+Pointer arithmetic is only supported on `Pointer<Byte>`, and can be used to access structure fields:

+

+```C++

+struct S

+{

+    int x;

+    int y;

+};

+

+Function<Int(Pointer<Byte>)> function;

+{

+    Pointer<Byte> s = function.Arg<0>();

+

+    Int y = *Pointer<Int>(s + offsetof(S, y));

+

+    Return(y);

+}

+```

+

+Reactor also defines an `OFFSET()` macro, which is a generalization of the `offsetof()` macro defined in `<cstddef>`. It allows e.g. getting the offset of array elements, even when indexed dynamically.
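+
+A sketch of the difference (assuming a struct `S` with an array member; not from the SwiftShader sources):
+
+```C++
+struct S
+{
+    int x;
+    int data[16];
+};
+
+// An ordinary C++ variable: its value only needs to be known while the
+// routine is being generated, not at C++ compile time.
+int index = 5;
+
+Function<Int(Pointer<Byte>)> function;
+{
+    Pointer<Byte> s = function.Arg<0>();
+
+    // offsetof() requires a constant member designator, whereas OFFSET()
+    // also accepts an index computed at generation time.
+    Int element = *Pointer<Int>(s + OFFSET(S, data[index]));
+
+    Return(element);
+}
+```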

+

+### Conditionals

+

+To generate for example the [unit step](https://en.wikipedia.org/wiki/Heaviside_step_function) function:

+

+```C++

+Function<Float(Float)> function;

+{

+    Float x = function.Arg<0>();

+

+    If(x > 0.0f)

+    {

+        Return(1.0f);

+    }

+    Else If(x < 0.0f)

+    {

+        Return(0.0f);

+    }

+    Else

+    {

+        Return(0.5f);

+    }

+}

+```

+

+There's also an `IfThenElse()` intrinsic function which corresponds to the C++ `?:` operator.
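+
+For example (a sketch; see [Reactor.hpp](../src/Reactor/Reactor.hpp) for the exact overloads):
+
+```C++
+Function<Int(Int)> function;
+{
+    Int x = function.Arg<0>();
+    Int positive = 1;
+    Int negative = -1;
+
+    // Selects one of the two values, like the C++ expression: x > 0 ? 1 : -1
+    Int sign = IfThenElse(x > 0, positive, negative);
+
+    Return(sign);
+}
+```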

+

+### Loops

+

+Loops also have a syntax similar to C++:

+

+```C++

+Function<Int(Pointer<Int>, Int)> function;

+{

+    Pointer<Int> p = function.Arg<0>();

+    Int n = function.Arg<1>();

+    Int total = 0;

+

+    For(Int i = 0, i < n, i++)

+    {

+        total += p[i];

+    }

+

+    Return(total);

+}

+```

+

+Note the use of commas instead of semicolons to separate the loop expressions.

+

+`While(expr) {}` also works as expected, but there is no `Do {} While(expr)` equivalent, because a trailing `While(expr)` could not be distinguished from the start of a new `While` loop. Instead, there's a `Do {} Until(expr)` where you can use the inverse expression to exit the loop.

+

+Specialization

+--------------

+

+The above examples don't illustrate anything that can't be written as regular C++ function. The real power of Reactor is to generate routines that are specialized for a certain set of conditions, or "state".

+

+```C++

+Function<Int(Pointer<Int>, Int)> function;

+{

+    Pointer<Int> p = function.Arg<0>();

+    Int n = function.Arg<1>();

+    Int total = 0;

+

+    For(Int i = 0, i < n, i++)

+    {

+        if(state.operation == ADD)

+        {

+            total += p[i];

+        }

+        else if(state.operation == SUBTRACT)

+        {

+            total -= p[i];

+        }

+        else if(state.operation == AND)

+        {

+            total &= p[i];

+        }

+        else if(...)

+        {

+            ...

+        }

+    }

+

+    Return(total);

+}

+```

+

+Note that this example uses regular C++ `if` and `else` constructs. They only determine which code ends up in the generated routine, and don't end up in the generated code themselves. Thus the routine contains a loop with just one arithmetic or logical operation, making it more efficient than if this was written in regular C++.

+

+Of course one could write an equivalent efficient function in regular C++ like this:

+

+```C++

+int function(int *p, int n)

+{

+    int total = 0;

+

+    if(state.operation == ADD)

+    {

+        for(int i = 0; i < n; i++)

+        {

+            total += p[i];

+        }

+    }

+    else if(state.operation == SUBTRACT)

+    {

+        for(int i = 0; i < n; i++)

+        {

+            total -= p[i];

+        }

+    }

+    else if(state.operation == AND)

+    {

+        for(int i = 0; i < n; i++)

+        {

+            total &= p[i];

+        }

+    }

+    else if(...)

+    {

+        ...

+    }

+

+    return total;

+}

+```

+

+But now there's a lot of repeated code. It could be made more manageable using macros or templates, but that doesn't help reduce the binary size of the statically compiled code. That's fine when there are only a handful of state conditions to specialize for, but when you have multiple state variables with many possible values each, the total number of combinations can be prohibitive.

+

+This is especially the case when implementing APIs which offer a broad set of features but developers are likely to only use a select set. The quintessential example is graphics processing, where there are long pipelines of optional operations and both fixed-function and programmable stages. Applications configure the state of these stages between each draw call.

+

+With Reactor, we can write the code for such pipelines in a syntax that is as easy to read as a naive unoptimized implementation, while at the same time specializing the code for exactly the operations required by the pipeline configuration.

diff --git a/docs/ReactorDebugInfo.md b/docs/ReactorDebugInfo.md
new file mode 100644
index 0000000..3b612d4
--- /dev/null
+++ b/docs/ReactorDebugInfo.md
@@ -0,0 +1,237 @@
+# Reactor Debug Info Generation
+
+## Introduction
+
+Reactor produces Just In Time compiled dynamic executable code and can be used to JIT high performance functions specialized for runtime
+configurations, or even to build a compiler.
+
+In order to debug executable code at a higher level than disassembly, source code files are required.
+
+Reactor has two potential sources of source code:
+
+1. The C++ source code of the program that calls into Reactor.
+2. External source files read by the program and passed to Reactor.
+
+While case (2) is preferable for implementing a compiler, this is currently not
+implemented.
+
+Reactor implements case (1) and this can be used by GDB to single line step and
+inspect variables.
+
+## Supported Platforms
+
+Currently:
+
+* Debug info generation is only supported on Linux with the LLVM 7
+backend.
+* GDB is the only supported debugger.
+* The program must be compiled with debug info itself.
+
+## Enabling
+
+Debug info generation is enabled with the `REACTOR_EMIT_DEBUG_INFO` CMake flag (defaults
+to disabled).
+
+## Implementation details
+
+### Source Location
+
+All Reactor functions begin with a call to `RR_DEBUG_INFO_UPDATE_LOC()`, which calls into `rr::DebugInfo::EmitLocation()`.
+
+`rr::DebugInfo::EmitLocation()` calls `rr::DebugInfo::getCallerBacktrace()`,
+which in turn uses [`libbacktrace`](https://github.com/ianlancetaylor/libbacktrace)
+to unwind the stack and find the file, function and line of the caller.
+
+This information is passed to `llvm::IRBuilder<>::SetCurrentDebugLocation`
+to emit source line information for the next LLVM instructions to be built.
+
+### Variables
+
+There are 3 aspects to generating variable debug information:
+
+#### 1. Variable names
+
+Constructing a Reactor `LValue`:
+
+```C++
+rr::Int a = 1;
+```
+
+Will emit an LLVM `alloca` instruction to allocate the storage of the variable,
+and emit another to initialize it to the constant `1`. While fluent, none of the
+Reactor calls see the name of the C++ local variable "`a`", and the LLVM `alloca`
+value gets a meaningless numerical value.
+
+There are two potential ways that Reactor can obtain the variable name:
+
+1. Use the running executable's own debug information to examine the local
+   declaration and extract the local variable's name.
+2. Use the backtrace information to parse the name from the source file.
+
+While (1) is arguably a cleaner and more robust solution, (2) is
+easier to implement and can work for the majority of use cases.
+
+(2) is the solution currently implemented.
+
+`rr::DebugInfo::getOrParseFileTokens()` scans a source file line by line, and
+uses a regular expression to look for patterns of `<type> <name>`. Matching is not
+precise, but is adequate to find locals constructed with and without assignment.
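+
+As a purely hypothetical illustration of the approach (the real pattern lives in the
+Reactor sources and differs in detail), a scanner in this spirit could look like:
+
+```C++
+#include <regex>
+#include <string>
+
+// Hypothetical sketch: extract the variable name from declarations such as
+// "rr::Int a = 1;" or "Float4 x;". The pattern actually used by
+// getOrParseFileTokens() is more involved.
+std::string declaredName(const std::string &line)
+{
+    static const std::regex pattern(R"((?:rr::)?(?:Int|UInt|Float|Float4|Pointer<\w+>)\s+(\w+))");
+
+    std::smatch match;
+    if(std::regex_search(line, match, pattern))
+    {
+        return match[1].str();  // the variable name
+    }
+
+    return "";
+}
+```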
+
+#### 2. Variable binding
+
+Given that we can find a variable name for a given source line, we need a way of
+binding the LLVM values to the name.
+
+Given our trivial example:
+
+```C++
+rr::Int a = 1;
+```
+
+The `rr::Int` constructor calls `RR_DEBUG_INFO_EMIT_VAR()` passing the storage
+value as single argument. `RR_DEBUG_INFO_EMIT_VAR()` performs the backtrace
+to find the source file and line and uses the token information produced by
+`rr::DebugInfo::getOrParseFileTokens()` to identify the variable name.
+
+However, things get a bit more complicated when there are multiple variables
+being constructed on the same line.
+
+Take for example:
+
+```C++
+rr::Int a = rr::Int(1) + rr::Int(2);
+```
+
+Here we have 3 calls to the `rr::Int` constructor, each calling down
+to `RR_DEBUG_INFO_EMIT_VAR()`.
+
+To disambiguate which of these should be bound to the variable name "`a`",
+`rr::DebugInfo::EmitVariable()` buffers the binding into
+`scope.pending` and the last binding for a given line is used by
+`DebugInfo::emitPending()`. For variable construction and assignment, C++
+guarantees that the LHS is the last value to be constructed.
+
+This solution is not perfect.
+
+Multi-line expressions, multiple assignments on a single line, and macro obfuscation
+can all break variable bindings - however, the majority of typical cases work.
+
+#### 3. Variable scope
+
+`rr::DebugInfo` maintains a stack of `llvm::DIScope`s and `llvm::DILocation`s
+that mirrors the current backtrace for the function being called.
+
+A synthetic call stack is produced by chaining `llvm::DILocation`s with
+`InlinedAt`s.
+
+For example, at the declaration of `i`:
+
+```C++
+void B()
+{
+    rr::Int i; // <- here
+}
+
+void A()
+{
+    B();
+}
+
+int main(int argc, const char* argv[])
+{
+    A();
+}
+```
+
+The `DIScope` hierarchy would be:
+
+```C++
+                              DIFile: "foo.cpp"
+rr::DebugInfo::diScope[0].di: ↳ DISubprogram: "main"
+rr::DebugInfo::diScope[1].di: ↳ DISubprogram: "A"
+rr::DebugInfo::diScope[2].di: ↳ DISubprogram: "B"
+```
+
+The `DILocation` hierarchy would be:
+
+```C++
+rr::DebugInfo::diRootLocation:      DILocation(DISubprogram: "ReactorFunction")
+rr::DebugInfo::diScope[0].location: ↳ DILocation(DISubprogram: "main")
+rr::DebugInfo::diScope[1].location:   ↳ DILocation(DISubprogram: "A")
+rr::DebugInfo::diScope[2].location:     ↳ DILocation(DISubprogram: "B")
+```
+
+Where '↳' represents an `InlinedAt`.
+
+
+`rr::DebugInfo::diScope` is updated by `rr::DebugInfo::syncScope()`.
+
+`llvm::DIScope`s typically do not nest - there is usually a separate
+`llvm::DISubprogram` for each function in the callstack. All local variables
+within a function will typically share the same scope, regardless of whether
+they are declared within a sub-block.
+
+Loops and jumps within a function add complexity. Consider:
+
+```C++
+void B()
+{
+    rr::Int i = 0;
+}
+
+void A()
+{
+    for (int i = 0; i < 3; i++)
+    {
+        rr::Int x = 0;
+    }
+    B();
+}
+
+int main(int argc, const char* argv[])
+{
+    A();
+}
+```
+
+In this particular example Reactor will not be aware of the `for` loop, and will
+attempt to create three variables called "`x`" in the same function scope for `A()`.
+Duplicate symbols in the same `llvm::DIScope` result in undefined behavior.
+
+To solve this, `rr::DebugInfo::syncScope()` observes when a function jumps
+backwards, and forks the current `llvm::DILexicalBlock` for the function. This
+results in a number of `llvm::DILexicalBlock` chains, each declaring variables
+that shadow the previous block.
+
+At the declaration of `i`, the `DIScope` hierarchy would be:
+
+```C++
+                              DIFile: "foo.cpp"
+rr::DebugInfo::diScope[0].di: ↳ DISubprogram: "main"
+                              ↳ DISubprogram: "A"
+                              | ↳ DILexicalBlock: "A".1
+rr::DebugInfo::diScope[1].di: |   ↳ DILexicalBlock: "A".2
+rr::DebugInfo::diScope[2].di: ↳ DISubprogram: "B"
+```
+
+The `DILocation` hierarchy would be:
+
+```C++
+rr::DebugInfo::diRootLocation:      DILocation(DISubprogram: "ReactorFunction")
+rr::DebugInfo::diScope[0].location: ↳ DILocation(DISubprogram: "main")
+rr::DebugInfo::diScope[1].location:   ↳ DILocation(DILexicalBlock: "A".2)
+rr::DebugInfo::diScope[2].location:     ↳ DILocation(DISubprogram: "B")
+```
+
+### Debugger integration
+
+Once the debug information has been generated, it needs to be handed to the
+debugger.
+
+Reactor uses [`llvm::JITEventListener::createGDBRegistrationListener()`](http://llvm.org/doxygen/classllvm_1_1JITEventListener.html#a004abbb5a0d48ac376dfbe3e3c97c306)
+to inform GDB of the JIT'd program and its debugging information.
+More information [can be found here](https://llvm.org/docs/DebuggingJITedCode.html).
+
+LLDB should be able to support this same mechanism, but at the time of writing
+this does not appear to work.
+
diff --git a/docs/Regres.md b/docs/Regres.md
new file mode 100644
index 0000000..c752a72
--- /dev/null
+++ b/docs/Regres.md
@@ -0,0 +1,442 @@
+# Regres - SwiftShader automated testing
+
+## Introduction
+
+Regres is a collection of tools to perform [dEQP](https://github.com/KhronosGroup/VK-GL-CTS)
+presubmit and continuous integration testing and code coverage evaluation for
+SwiftShader.
+
+Regres provides:
+
+* [Presubmit testing](#presubmit-testing) - An automatic Vulkan
+  dEQP test run for each Gerrit patchset put up for review.
+* [Continuous integration testing](#daily-run-continuous-integration-testing) -
+  A Vulkan dEQP test run performed against the `master` branch each night. \
+  This nightly run also produces code coverage information which can be viewed at
+  [swiftshader-regres.github.io/swiftshader-coverage](https://swiftshader-regres.github.io/swiftshader-coverage/).
+* [Local dEQP test runner](#local-deqp-test-runner) - Provides a local tool for
+  efficiently running a number of dEQP tests based on wildcard or regex name
+  matching.
+
+The Regres source root directory is at [`<swiftshader>/tests/regres/`](https://cs.opensource.google/swiftshader/SwiftShader/+/master:tests/regres/).
+
+## Presubmit testing
+
+Regres monitors changes that have been [put up for review with Gerrit](https://swiftshader-review.googlesource.com/q/status:open).
+
+Once a new [qualifying](#qualifying) patchset has been found, Regres will
+check out, build and test the change against the parent changelist. \
+Any differences in results are reported as a review comment on the change
+[[example]](https://swiftshader-review.googlesource.com/c/SwiftShader/+/46369/5#message-4f09ea3e6d01ed94ae26183c8b6c547c90492c12).
+
+### Qualifying
+
+As Regres may be running externally authored code on Google hardware,
+Regres will only test a change if it is authored by or reviewed by a Googler.
+
+Only the most recent patchset of a change will be tested. If a new patchset is
+pushed while the previous one is being tested, then testing will continue to
+completion and the results for the previous patchset will be posted, and the
+new patchset will be queued for testing.
+
+### Prioritization
+
+At the time of writing a Regres presubmit run takes a little over 20 minutes to
+complete, and there is a single Regres machine servicing all changes.
+To keep Regres responsive, changes are prioritized based on their 'readiness to
+land', which is determined by the change's `Kokoro-Presubmit`, `Code-Review` and
+`Presubmit-Ready` Gerrit labels.
+
+### Test Filtering
+
+By default, Regres will run all the test lists declared in the
+[`<swiftshader>/tests/regres/ci-tests.json`](https://cs.opensource.google/swiftshader/SwiftShader/+/master:tests/regres/ci-tests.json) file.\
+As new functionality is being implemented, the test lists in `ci-tests.json` may
+reference known-passing test lists updated by the [daily run](#daily-run-continuous-integration-testing),
+so that failing tests for incomplete functionality are skipped, but tests that
+pass for new functionality *are tested* to ensure they do not regress.
+
+Additional test names found in the files referenced by
+[`<swiftshader>/tests/regres/full-tests.json`](https://cs.opensource.google/swiftshader/SwiftShader/+/master:tests/regres/full-tests.json)
+can be explicitly included in the change's presubmit run
+by including a line in the change description with the signature:
+
+```text
+Test: <dEQP-test-pattern>
+```
+
+`<dEQP-test-pattern>` can be a single dEQP test name, or you can use wildcards
+[as documented here](https://golang.org/pkg/path/filepath/#Match).
+
+You can repeat `Test:` as many times as you like. `Tests:` is also accepted.
+
+[For example](https://swiftshader-review.googlesource.com/c/SwiftShader/+/26574):
+
+```text
+Add support for OpLogicalEqual, OpLogicalNotEqual
+
+Test: dEQP-VK.glsl.operator.bool_compare.*
+Test: dEQP-VK.glsl.operator.binary_operator.equal.*
+Test: dEQP-VK.glsl.operator.binary_operator.not_equal.*
+Bug: b/126870789
+Change-Id: I9d33444d67792274d8027b7d1632235533cfc079
+```
+
+## Daily-run continuous integration testing
+
+Once a day, Regres will also run the set of tests listed in [`<swiftshader>/tests/regres/full-tests.json`](https://cs.opensource.google/swiftshader/SwiftShader/+/master:tests/regres/full-tests.json),
+and post the test result lists as a Gerrit changelist
+[[example]](https://swiftshader-review.googlesource.com/c/SwiftShader/+/46448).
+
+The daily run also performs code coverage instrumentation per dEQP test,
+automatically uploading the results of all the dEQP tests to the viewer at
+[swiftshader-regres.github.io/swiftshader-coverage](https://swiftshader-regres.github.io/swiftshader-coverage/).
+
+## Local dEQP test runner
+
+Regres also provides a multi-threaded, [process sandboxed](#process-sandboxing),
+local dEQP test runner with a wild-card / regex based test name matcher.
+
+The local test runner can be run with:
+
+[`<swiftshader>/tests/regres/run_testlist.sh`](https://cs.opensource.google/swiftshader/SwiftShader/+/master:tests/regres/run_testlist.sh) `--deqp-vk=<path to deqp-vk> [--filter=<test name filter>]`
+
+`<test name filter>` can be a single dEQP test name, or you can use wildcards
+[as documented here](https://golang.org/pkg/path/filepath/#Match).
+Alternatively, start with a `/` to use a regex filter.
+
+Other useful flags:
+
+```text
+  -limit int
+        only run a maximum of this number of tests
+  -no-results
+        disable generation of results.json file
+  -output string
+        path to an output JSON results file (default "results.json")
+  -shuffle
+        shuffle tests
+  -test-list string
+        path to a test list file (default "vk-master-PASS.txt")
+```
+
+Run [`<swiftshader>/tests/regres/run_testlist.sh`](https://cs.opensource.google/swiftshader/SwiftShader/+/master:tests/regres/run_testlist.sh) with `--help` to see all available flags.
+
+## Process sandboxing
+
+Regres will run each dEQP test in a separate process to prevent state
+leakage between tests.
+
+Tests are run concurrently, and crashing processes will not take down the test
+runner.
+
+Some dEQP tests are known to perform excessive memory allocations (i.e. keep
+allocating until no more can be claimed from the OS). \
+In order to prevent a single test starving other test processes of memory, each
+process is restricted to a fraction of the system's memory using [linux resource limits](https://man7.org/linux/man-pages/man2/getrlimit.2.html).
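+
+The sketch below illustrates the mechanism only (Regres itself is written in Go, and
+the fraction it allows is decided by the runner; the 1 GiB figure here is arbitrary):
+
+```C++
+#include <sys/resource.h>
+
+#include <cstdio>
+
+// Cap the calling process's address space, as a test child process would do
+// before exec'ing the dEQP binary.
+bool limitAddressSpace()
+{
+    rlimit limit = {};
+    limit.rlim_cur = 1ull << 30;  // soft limit: 1 GiB
+    limit.rlim_max = 1ull << 30;  // hard limit: 1 GiB
+
+    if(setrlimit(RLIMIT_AS, &limit) != 0)
+    {
+        perror("setrlimit");
+        return false;
+    }
+
+    return true;
+}
+```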
+
+Tests may also deadlock, so each test process has a time limit before they are
+automatically killed.
+
+## Implementation details
+
+### Presubmit & daily run process
+
+Regres runs until stopped, and will:
+
+* Download a known compatible version of Clang to a cache directory. This will
+  be used for all compilation stages below.
+* Periodically poll Gerrit for recently opened changes
+* Periodically query Gerrit for details about each tracked change, determining
+  [whether it should be tested](#qualifying), and determine its current
+  [priority](#prioritization).
+* A qualifying change with the highest priority will be picked, and the
+  following is performed for the change:
+  1. The change is `git fetch`ed into a temporary directory.
+  2. If not already cached, the dEQP version described in the
+     change's [`<swiftshader>/tests/regres/deqp.json`](https://cs.opensource.google/swiftshader/SwiftShader/+/master:tests/regres/deqp.json) file is downloaded and built into a cached directory.
+  3. The source for the change is built into a temporary build directory.
+  4. The built dEQP binaries are used to test the change. The full test results
+     are stored in a cached directory.
+  5. If the parent change's test results aren't already cached, then steps 3 and
+     4 are repeated for the parent change.
+  6. The results of the two changes are diffed, and the results of the diff are
+     posted to the change as a Gerrit review comment.
+* The above is repeated until it is time to perform a daily run, upon which:
+  1. The `HEAD` change of `master` is fetched into a temporary directory.
+  2. If not already cached, the dEQP version described in the
+     change's [`<swiftshader>/tests/regres/deqp.json`](https://cs.opensource.google/swiftshader/SwiftShader/+/master:tests/regres/deqp.json) file is downloaded and built into a cached directory.
+  3. The `HEAD` change is built into a temporary directory, optionally with code
+     coverage instrumenting.
+  4. The built dEQP binaries are used to test the change. The full test results
+     are stored in a cached directory, and each test is binned by status and
+     written to the [`<swiftshader>/tests/regres/testlists`](https://cs.opensource.google/swiftshader/SwiftShader/+/master:tests/regres/testlists) directory.
+  5. A new Gerrit change is created containing the updated test lists and put up
+     for review, along with a summary of test result changes [[example]](https://swiftshader-review.googlesource.com/c/SwiftShader/+/46448).
+     If there's an existing daily test change up for review then this is reused
+     instead of creating another.
+  6. If the build included code coverage instrumentation, then the coverage
+     results are collated from all test runs, processed and compressed, and
+     uploaded to [github.com/swiftshader-regres/swiftshader-coverage](https://github.com/swiftshader-regres/swiftshader-coverage)
+     which is immediately reflected at [swiftshader-regres.github.io/swiftshader-coverage](https://swiftshader-regres.github.io/swiftshader-coverage).
+     This process is [described in more detail below](#code-coverage).
+  7. Stages 3 - 5 are repeated for both the LLVM and Subzero backends.
+
+### Caching
+
+The cache directory is heavily used to avoid duplicated work. For example, it
+is common for patchsets to be repeatedly pushed with the same parent change, so
+the test results of the parent can be calculated once and stored. A tested
+patchset that is merged into master would also be cached when used as a parent
+of another change.
+
+The cache needs to consider more than just the change identifier as the
+cache-key for storing and retrieving data. Both the test lists and version of
+dEQP used are dictated by the change being tested, and so both are used as part of
+the cache key.
+
+### Vulkan Loader usage
+
+Applications make use of the Vulkan API by loading the [Vulkan Loader](https://github.com/KhronosGroup/Vulkan-Loader)
+library (`libvulkan.so.1` on Linux), which enumerates available Vulkan
+implementations (typically GPUs and their drivers) before an actual 'instance'
+is created to communicate with a specific Installable Client Driver (ICD).
+
+However, SwiftShader can be built as libvulkan.so.1 itself, which implements the
+same API entry functions as the Vulkan Loader. By default, Regres will make dEQP
+load this SwiftShader library instead of the system's Vulkan Loader. This ensures
+test results are independent of the system's Vulkan setup.
+
+To override this, one can set LD_LIBRARY_PATH to point to the location of a
+Loader's libvulkan.so.1.
+
+### Code coverage
+
+The [daily run](#daily-run-continuous-integration-testing) produces code
+coverage information that can be examined for each individual dEQP test at
+[swiftshader-regres.github.io/swiftshader-coverage](https://swiftshader-regres.github.io/swiftshader-coverage/).
+
+The process for generating this information is complex, and is described in
+detail below:
+
+#### Per-test generation
+
+Code coverage instrumentation is generated with
+[clang's `--coverage`](https://clang.llvm.org/docs/SourceBasedCodeCoverage.html)
+functionality. This compiler option is enabled by using SwiftShader's
+`SWIFTSHADER_EMIT_COVERAGE` CMake flag.
+
+Each dEQP test process is run with a unique `LLVM_PROFILE_FILE` environment
+variable value which dictates where the process writes its raw coverage profile
+file. Each process gets a different path so that we can emit coverage from
+multiple, concurrent dEQP test processes.
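+
+A minimal sketch of this (the helper name and path layout are hypothetical,
+not the actual Regres code):
+
+```go
+// Launch a dEQP process with its own LLVM_PROFILE_FILE so that concurrent
+// processes write separate .profraw files.
+package coverage
+
+import (
+    "fmt"
+    "os"
+    "os/exec"
+)
+
+func runWithCoverage(deqpBinary, testCase string, workerID int) error {
+    cmd := exec.Command(deqpBinary, "--deqp-case="+testCase)
+    cmd.Env = append(os.Environ(),
+        fmt.Sprintf("LLVM_PROFILE_FILE=cov/worker-%d.profraw", workerID))
+    return cmd.Run()
+}
+```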
+
+#### Parsing
+
+[Clang provides two tools](https://clang.llvm.org/docs/SourceBasedCodeCoverage.html#creating-coverage-reports) for processing coverage data:
+
+* `llvm-profdata` indexes the raw `.profraw` coverage profile file and emits a
+  `.profdata` file.
+* `llvm-cov` further processes the `.profdata` file into something human
+  readable or machine parsable.
+
+`llvm-cov` provides many options, including emitting a pretty HTML file, but is
+remarkably slow at producing easily machine-parsable data. Fortunately the core
+of `llvm-cov` is [a few hundred lines of code](https://github.com/llvm/llvm-project/tree/master/llvm/tools/llvm-cov), as it relies on LLVM libraries to do the heavy lifting. Regres
+replaces `llvm-cov` with ["`turbo-cov`"](https://cs.opensource.google/swiftshader/SwiftShader/+/master:tests/regres/cov/turbo-cov/), which efficiently converts a `.profdata` file into a simple binary stream
+that can be consumed by Regres.
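+
+For the indexing step, a sketch of how a raw profile can be converted with
+`llvm-profdata` (how Regres then invokes `turbo-cov` and parses its binary
+stream is not shown here):
+
+```go
+// Index a raw coverage profile:
+// llvm-profdata merge -sparse in.profraw -o out.profdata
+package coverage
+
+import "os/exec"
+
+func indexProfile(profraw, profdata string) error {
+    return exec.Command("llvm-profdata", "merge", "-sparse", profraw, "-o", profdata).Run()
+}
+```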
+
+#### Processing
+
+At the time of writing there are over 560,000 individual dEQP tests, and around
+176,000 lines of C++ code in [`<swiftshader>/src`](https://cs.opensource.google/swiftshader/SwiftShader/+/master:src/).
+If you used 1 bit for each source line, per-line source coverage for all dEQP
+tests would require over 11GiB of storage. That's just for one snapshot.
+
+The processing and compression schemes described below reduce this to around
+10 MiB (a ~1100x reduction in size) and support sub-line coverage scopes.
+
+##### Spans
+
+Code coverage information is described in spans.
+
+A span is described as an interval of source locations, where a location is a
+line-column pair:
+
+```go
+type Location struct {
+    Line, Column int
+}
+
+type Span struct {
+    Start, End Location
+}
+```
+
+##### Test tree construction
+
+Each dEQP test is uniquely identified by a fully qualified name.
+Each test belongs to a group, and that group may be nested within any number of
+parent groups. The groups are described in the test name, using dots (`.`) to
+delimit the groups and leaf test name.
+
+For example, the fully qualified test name:
+
+`dEQP-VK.fragment_shader_interlock.basic.discard.ssbo.sample_unordered.4xaa.sample_shading.16x16`
+
+Can be broken down into the following groups and test name:
+
+```text
+dEQP-VK                       <-- root group name
+╰ fragment_shader_interlock
+  ╰ basic.discard
+    ╰ ssbo
+      ╰ sample_unordered
+        ╰ 4xaa
+          ╰ sample_shading
+            ╰ 16x16           <-- leaf test name
+```
+
+Breaking down fully qualified test names into groups provides a natural way to
+structure coverage data, as tests of the same group are likely to have similar
+coverage spans.
+
+So, for each source file in the codebase, we create a tree with test groups as
+non-leaf nodes, and tests as leaf nodes.
+
+For example, given the following test list:
+
+```text
+a.b.d.h
+a.b.d.i.n
+a.b.d.i.o
+a.b.e.j
+a.b.e.k.p
+a.b.e.k.q
+a.c.f
+a.c.g.l.r
+a.c.g.m
+```
+
+We would construct the following tree:
+
+```text
+               a
+        ╭──────┴──────╮
+        b             c
+    ╭───┴───╮     ╭───┴───╮
+    d       e     f       g
+  ╭─┴─╮   ╭─┴─╮         ╭─┴─╮
+  h   i   j   k         l   m
+     ╭┴╮     ╭┴╮        │
+     n o     p q        r
+
+```
+
+Each leaf node in this tree (`h`, `n`, `o`, `j`, `p`, `q`, `f`, `r`, `m`)
+represents a test, and the non-leaf nodes (`a`, `b`, `c`, `d`, `e`, `g`, `i`, `k`,
+`l`) are groups.
+
+To begin, we create a test tree structure, and associate the full list of test
+coverage spans with every leaf node (test) in this tree.
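+
+A sketch of this construction, using the `Span` type above and the standard
+`strings` package (the `node` type and helper are illustrative, not the actual
+Regres implementation):
+
+```go
+// insert adds a fully qualified, dot-delimited test name to the tree and
+// associates the test's coverage spans with the resulting leaf node.
+type node struct {
+    children map[string]*node
+    spans    []Span
+}
+
+func (root *node) insert(testName string, spans []Span) {
+    n := root
+    for _, part := range strings.Split(testName, ".") {
+        child, ok := n.children[part]
+        if !ok {
+            child = &node{children: map[string]*node{}}
+            n.children[part] = child
+        }
+        n = child
+    }
+    n.spans = spans // leaf node: the test's full list of coverage spans
+}
+```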
+
+This data structure hasn't given us any compression benefits yet, but we can
+now do a few tricks to dramatically reduce the number of spans needed to
+describe the graph:
+
+##### Optimization 1: Common span promotion
+
+The first compression scheme is to promote spans up the tree when they are
+common to all children. This reduces the number of spans that need to be
+encoded in the final file.
+
+For example, if the test group `a` has 4 children that all share the same span
+`X`:
+
+```text
+          a
+    ╭───┬─┴─┬───╮
+    b   c   d   e
+ [X,Y] [X] [X] [X,Z]
+```
+
+Then span `X` can be promoted up to `a`:
+
+```text
+         [X]
+          a
+    ╭───┬─┴─┬───╮
+    b   c   d   e
+   [Y] []   [] [Z]
+```
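+
+Continuing the sketch above, a promotion pass for this first optimization
+might look like this (illustrative only, not the actual Regres code):
+
+```go
+// promoteCommon moves spans held by every child up into the parent node.
+func promoteCommon(parent *node) {
+    if len(parent.children) == 0 {
+        return
+    }
+    counts := map[Span]int{}
+    for _, c := range parent.children {
+        for _, s := range c.spans {
+            counts[s]++
+        }
+    }
+    for s, n := range counts {
+        if n == len(parent.children) {
+            parent.spans = append(parent.spans, s) // promote to the parent...
+            for _, c := range parent.children {
+                c.spans = removeSpan(c.spans, s) // ...and drop it from every child
+            }
+        }
+    }
+}
+
+func removeSpan(spans []Span, s Span) []Span {
+    var out []Span
+    for _, x := range spans {
+        if x != s {
+            out = append(out, x)
+        }
+    }
+    return out
+}
+```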
+
+##### Optimization 2: Span XOR promotion
+
+This idea can be extended further, by not requiring all the children to share
+the same span before promotion. If **most** child nodes share the same span, we
+can still promote the span, but this time we **remove** the span from the
+children **if they had it**, and **add** the span to children **if they didn't
+have it**.
+
+For example, if the test group `a` has 4 children with 3 that share the span
+`X`:
+
+```text
+          a
+    ╭───┬─┴─┬───╮
+    b   c   d   e
+ [X,Y] [X]  [] [X,Z]
+```
+
+Then span `X` can be promoted up to `a` by flipping the presence of `X` on the
+child nodes:
+
+```text
+         [X]
+          a
+    ╭───┬─┴─┬───╮
+    b   c   d   e
+   [Y] []  [X] [Z]
+```
+
+This process repeats up the tree.
+
+With this optimization applied, we now need to traverse the tree from root to
+leaf in order to know whether a given span is in use for the leaf node (test):
+
+* If the span is encountered an **odd** number of times during traversal, then
+  the span is **covered**.
+* If the span is encountered an **even** number of times during traversal, then
+  the span is **not covered**.
+
+See [`tests/regres/cov/coverage_test.go`](https://cs.opensource.google/swiftshader/SwiftShader/+/master:tests/regres/cov/coverage_test.go) for more examples of this optimization.
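+
+Continuing the sketch above, resolving whether a span is covered for a given
+test then becomes a walk from the root to the test's leaf node (illustrative
+only, not the actual Regres code):
+
+```go
+// isCovered reports whether a span applies to the test whose root-to-leaf
+// path is given: seen an odd number of times => covered, even => not covered.
+func isCovered(path []*node, span Span) bool {
+    count := 0
+    for _, n := range path {
+        for _, s := range n.spans {
+            if s == span {
+                count++
+            }
+        }
+    }
+    return count%2 == 1
+}
+```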
+
+##### Optimization 3: Common span grouping
+
+With real-world data, we encounter groups of spans that are commonly found
+together. To further reduce the coverage data, the whole graph is scanned for
+common span patterns, and these groups are indexed by each tree node.
+The XOR'ing of spans described above is performed as if the spans were not
+grouped.
+
+##### Optimization 4: Lookup tables
+
+All spans, span-groups and strings are stored in de-duplicated tables, and are
+indexed wherever possible.
+
+The final serialization is performed by [`tests/regres/cov/serialization.go`](https://cs.opensource.google/swiftshader/SwiftShader/+/master:tests/regres/cov/serialization.go).
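+
+A sketch of the de-duplication idea (the table layout here is hypothetical and
+is not the serialization format used by Regres):
+
+```go
+// spanTable interns spans: identical spans are stored once and referenced
+// elsewhere by their index.
+type spanTable struct {
+    spans   []Span
+    indices map[Span]int
+}
+
+func (t *spanTable) index(s Span) int {
+    if i, ok := t.indices[s]; ok {
+        return i
+    }
+    if t.indices == nil {
+        t.indices = map[Span]int{}
+    }
+    i := len(t.spans)
+    t.spans = append(t.spans, s)
+    t.indices[s] = i
+    return i
+}
+```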
+
+##### Optimization 5: zlib compression
+
+The coverage data is encoded into JSON for parsing by the web page.
+
+Before writing the JSON file, the text data is zlib compressed.
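+
+A minimal sketch of this packaging step (the actual JSON layout consumed by
+the web page is not shown here):
+
+```go
+// writeCompressedJSON JSON-encodes the coverage data and zlib-compresses it
+// while writing it to the output file.
+package coverage
+
+import (
+    "compress/zlib"
+    "encoding/json"
+    "os"
+)
+
+func writeCompressedJSON(path string, data interface{}) error {
+    f, err := os.Create(path)
+    if err != nil {
+        return err
+    }
+    defer f.Close()
+
+    zw := zlib.NewWriter(f)
+    defer zw.Close()
+
+    return json.NewEncoder(zw).Encode(data)
+}
+```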
+
+#### Presentation
+
+The zlib-compressed JSON coverage data is decompressed using
+[`pako`](https://github.com/nodeca/pako), and consumed by some
+[vanilla JavaScript](https://github.com/swiftshader-regres/swiftshader-coverage/blob/gh-pages/index.html).
+
+[`codemirror`](https://codemirror.net/) is used to perform coverage span and C++
+syntax highlighting.
diff --git a/docs/RuntimeConfiguration.md b/docs/RuntimeConfiguration.md
new file mode 100644
index 0000000..5083a8a
--- /dev/null
+++ b/docs/RuntimeConfiguration.md
@@ -0,0 +1,34 @@
+Runtime Configuration
+=========================
+
+SwiftShader provides a simple configuration mechanism based on a configuration file to control a variety of runtime options without needing to re-compile from source.
+
+Configuration file
+------------
+
+SwiftShader looks for a file named `SwiftShader.ini` (case-sensitive) in the working directory. At startup, SwiftShader reads this file, if it exists, and sets the options specified in it.
+
+The configuration file syntax is a series of key-value pairs, divided into sections. The following example shows three key-value pairs in two sections (`ThreadCount` and `AffinityMask` in the `[Processor]` section, and `EnableSpirvProfiling` in the `[Profiler]` section):
+```
+[Processor]
+ThreadCount=4
+AffinityMask=0xf
+
+# Comment
+[Profiler]
+EnableSpirvProfiling=true
+```
+
+The syntax rules are as follows:
+* Sections are defined via a name in brackets, e.g. `[Processor]`.
+* Key-value pairs are in the format `Key=Value`.
+* Keys are always strings, while values can be strings, booleans or integers depending on the semantics of the option:
+  * For integer options, both decimal and hexadecimal values are supported.
+  * For boolean options, both decimal (`1` and `0`) and alphabetical (`true` and `false`) values are supported.
+* Comments are supported through the use of the `#` character at the beginning of a line.
+
+Options
+------------
+
+Refer to the [SwiftConfig.hpp](../src/System/SwiftConfig.hpp) header for an up-to-date overview of available options.
+
diff --git a/docs/SamplingRoutines.md b/docs/SamplingRoutines.md
new file mode 100644
index 0000000..9c4716a
--- /dev/null
+++ b/docs/SamplingRoutines.md
@@ -0,0 +1,36 @@
+Sampling Routines

+=================

+

+Introduction

+------------

+

+Like other modern real-time graphics APIs, Vulkan has support for [sampler objects](https://www.khronos.org/registry/vulkan/specs/1.2/html/vkspec.html#samplers) which provide the sampling state to be used by image reading and sampling instructions in the shaders. [Sampler descriptors](https://www.khronos.org/registry/vulkan/specs/1.2/html/vkspec.html#descriptorsets-sampler) contain or reference this state. The sampler descriptor is [combined](https://www.khronos.org/registry/spir-v/specs/unified1/SPIRV.html#OpSampledImage) with an image descriptor, and this combination may only be known at shader execution time.

+

+This poses a challenge to SwiftShader's use of [dynamic code generation](Reactor.md), where we wish to specialize the sampling code for both the image's properties (most notably the format) and the sampler state. Historically, sampler state was either part of the texture objects or both the texture and sampler object were bound to a texture unit, ahead of shader execution.

+

+JIT Trampolines

+---------------

+

+The solution is to defer code generation for the sampling instructions until shader execution. For each image sampling operation we generate a call to a C++ function which will provide the specialized routine based on the image and sampler descriptor used at run-time. Then we call the returned routine.

+

+Note that this differs from typical JIT-compilers' use of trampoline functions in that we generate code specific to the combination of state, and adapt it to changes in state dynamically.

+

+3-Level Caching

+---------------

+

+We cache the generated sampling routines, using the descriptors as well as the type of sampling instruction, as the key. This is done at three levels, described in reverse order for easier understanding:

+

+L3: At the third and last level, we use a generic least-recently-used (LRU) cache, just like the caches of the pipeline stages' routines. It is protected by a mutex, which may experience high contention due to all shader worker threads needing the sampling routines.

+

+L2: To mitigate that, there's a second-level cache which contains a 'snapshot' of the last-level cache, which can be queried concurrently without locking. The snapshot is updated at pipeline barriers. While much faster than the last-level cache's critical section, the hash table lookup is still a lot of work per sampling instruction.

+

+L1: Often the descriptors being used don't change between executions of the sampling instruction, which is where the first-level, or '[inline](https://en.wikipedia.org/wiki/Inline_caching)', cache comes in. It is a single-entry cache implemented at the compiled sampling instruction level. Before calling out to the C++ function to retrieve the routine, we check whether the sampler and image descriptors have changed since the last execution of the instruction. Note that this cache doesn't use the instruction type as part of the lookup key, since each sampling instruction instance gets its own inline cache.
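+
+The idea behind the inline cache, sketched in Go for illustration only (SwiftShader's actual implementation is C++ emitted by Reactor as part of the compiled shader, and the names below are not real SwiftShader identifiers):
+
+```go
+// inlineCache is a single-entry cache attached to one sampling instruction.
+type inlineCache struct {
+    samplerID uint32 // identifier of the last sampler descriptor used
+    imageID   uint32 // identifier of the last image view descriptor used
+    routine   func() // sampling routine specialized for that combination
+}
+
+// get returns the cached routine if the descriptors are unchanged, otherwise
+// it falls back to the slower L2/L3 caches and remembers the result.
+func (c *inlineCache) get(samplerID, imageID uint32, slowPath func(uint32, uint32) func()) func() {
+    if c.routine == nil || samplerID != c.samplerID || imageID != c.imageID {
+        c.samplerID, c.imageID = samplerID, imageID
+        c.routine = slowPath(samplerID, imageID)
+    }
+    return c.routine
+}
+```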

+

+Descriptor Identifiers

+----------------------

+

+To make it fast to test whether the descriptor state remained the same, descriptors have unique 32-bit identifiers. Note that the sampler object state and image view state relevant to sampling routine specialization may not be unique among sampler and image view objects. For image views we're able to compress the state into the 32-bit identifier itself to avoid unnecessary recompiles.

+

+For sampler state, which is considerably larger than 32 bits, we keep a map from it to the unique identifiers. We keep count of how many sampler objects share each identifier, so we know when we can remove the entry.

+

+Together, these two 32-bit identifiers form the entire key of the first-level sampling routine cache.
\ No newline at end of file
diff --git a/docs/Sin-Cos-Optimization.pdf b/docs/Sin-Cos-Optimization.pdf
new file mode 100644
index 0000000..56150b8
--- /dev/null
+++ b/docs/Sin-Cos-Optimization.pdf
Binary files differ
diff --git a/docs/Subzero.md b/docs/Subzero.md
new file mode 100644
index 0000000..c66c15a
--- /dev/null
+++ b/docs/Subzero.md
@@ -0,0 +1,19 @@
+Subzero Documentation

+=====================

+

+Subzero is a JIT compiler used as a back-end for [Reactor](Reactor.md). It originates from Chrome's [Portable Native Client](https://developer.chrome.com/native-client) project. Its authoritative repository is at [https://chromium.googlesource.com/native_client/pnacl-subzero/](https://chromium.googlesource.com/native_client/pnacl-subzero/).

+

+Subzero for SwiftShader

+-----------------------

+

+SwiftShader contains a fork of the Subzero source code (at the time of writing they are in sync). It is an alternative JIT compiler back-end, with LLVM still being the default for CMake builds. To build SwiftShader with Subzero instead of LLVM, specify `-DREACTOR_BACKEND=Subzero` in your CMake command (or change `LLVM` to `Subzero` in the CMake GUI). For Chrome builds that use the BUILD.gn files, Subzero is the default as it produces significantly smaller binaries than with LLVM.

+

+Subzero Development

+-------------------

+

+Development on Subzero itself requires setting up the NaCl environment on a Linux system to be able to run its unit tests:

+

+* Install Chrome's [depot_tools](http://dev.chromium.org/developers/how-tos/install-depot-tools).

+* Run `mkdir nacl && cd nacl && fetch nacl` ([ref](http://www.chromium.org/nativeclient/how-tos/how-to-use-git-svn-with-native-client)).

+* Run `native_client/toolchain_build/toolchain_build_pnacl.py --verbose --sync --clobber --install toolchain/linux_x86/pnacl_newlib_raw` ([ref](https://sites.google.com/a/chromium.org/dev/nativeclient/pnacl/developing-pnacl#TOC-TL-DR-for-checking-out-PNaCl-sources-building-and-testing)).

+* Run all unit tests with `make -f Makefile.standalone check` ([ref](https://chromium.googlesource.com/native_client/pnacl-subzero/+/master/docs/README.rst)).

diff --git a/docs/TimelineSemaphores.md b/docs/TimelineSemaphores.md
new file mode 100644
index 0000000..c36bc85
--- /dev/null
+++ b/docs/TimelineSemaphores.md
@@ -0,0 +1,44 @@
+# Vulkan Timeline Semaphores
+
+[Vulkan Timeline
+Semaphores](https://www.khronos.org/blog/vulkan-timeline-semaphores) are a
+synchronization primitive accessible both from the device and the host. A
+timeline semaphore represents a monotonically increasing 64-bit unsigned
+value. Whereas binary Vulkan semaphores are waited on just to become signaled,
+timeline semaphores are waited on to reach a specific value. Once a timeline
+semaphore reaches a certain value, it is considered signaled for every value
+less than or equal to that
+value. [`vkWaitSemaphores`](https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkWaitSemaphores.html)
+is used to wait for semaphores on the host. It can operate in one of two modes:
+"wait for all" and "wait for any".
+
+In SwiftShader, Vulkan Timeline Semaphores are implemented as an unsigned 64-bit
+integer protected by a mutex with changes signaled by a condition
+variable. Waiting for all timeline semaphores in a set is implemented by simply
+waiting for each of the semaphores in turn. Waiting for any semaphore in a set
+is a bit more complex.
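+
+An illustrative sketch of this basic scheme in Go (SwiftShader's actual
+implementation is C++; the types below are not the real ones):
+
+```go
+package tsem
+
+import "sync"
+
+// TimelineSemaphore is a monotonically increasing 64-bit value guarded by a
+// mutex, with waiters woken through a condition variable.
+type TimelineSemaphore struct {
+    mu    sync.Mutex
+    cond  *sync.Cond
+    value uint64
+}
+
+func New(initial uint64) *TimelineSemaphore {
+    s := &TimelineSemaphore{value: initial}
+    s.cond = sync.NewCond(&s.mu)
+    return s
+}
+
+// Signal raises the semaphore to 'value'; the value only ever increases.
+func (s *TimelineSemaphore) Signal(value uint64) {
+    s.mu.Lock()
+    defer s.mu.Unlock()
+    if value > s.value {
+        s.value = value
+        s.cond.Broadcast()
+    }
+}
+
+// Wait blocks until the semaphore reaches at least 'value'.
+func (s *TimelineSemaphore) Wait(value uint64) {
+    s.mu.Lock()
+    defer s.mu.Unlock()
+    for s.value < value {
+        s.cond.Wait()
+    }
+}
+```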
+
+## Wait for any semaphore
+
+A "wait for any" of a set of semaphores is represented by a
+`TimelineSemaphore::WaitForAny` object. Additionally, `TimelineSemaphore`
+contains an internal list of all `WaitForAny` objects that wait for it, as well
+as for which values they wait. When signaled, the timeline semaphore looks
+through this list and, in turn, signals any `WaitForAny` objects that are
+waiting for a value less than or equal to the timeline semaphore's new value.
+
+A `WaitForAny` object is created from a `VkSemaphoreWaitInfo`. During
+construction, it checks the value of each timeline semaphore provided against
+the value for which it is waiting. If it has not yet been reached, the wait
+object registers itself with the timeline semaphore. If it _has_ been reached,
+the wait object is immediately signaled and no further timeline semaphores are
+checked.
+
+Once a `WaitForAny` object is signaled, it remains signaled. There is no way to
+change what semaphores or values to wait for after construction. Any subsequent
+calls to `wait()` will return `VK_SUCCESS` immediately.
+
+When a `WaitForAny` object is destroyed, it unregisters itself from every
+`TimelineSemaphore` it was waiting for. It is expected that the number of
+concurrent waits is small, and that the wait objects are short-lived, so there
+should not be a build-up of wait objects in any timeline semaphore.
diff --git a/docs/VulkanShaderDebugging.md b/docs/VulkanShaderDebugging.md
new file mode 100644
index 0000000..c47a56a
--- /dev/null
+++ b/docs/VulkanShaderDebugging.md
@@ -0,0 +1,51 @@
+# Vulkan Shader Debugging
+
+SwiftShader implements a Vulkan shader debugger that uses the [Debug Adapter Protocol](https://microsoft.github.io/debug-adapter-protocol).
+
+This debugger is still actively being developed. Please see the [Known Issues](#Known-Issues).
+
+# Enabling
+
+To enable the debugger functionality, SwiftShader needs to be built using the CMake `SWIFTSHADER_ENABLE_VULKAN_DEBUGGER` flag (`-DSWIFTSHADER_ENABLE_VULKAN_DEBUGGER=1`).
+
+Once SwiftShader is built with the debugger functionality, there are two environment flags that control the runtime behavior:
+
+* `VK_DEBUGGER_PORT` - set to an unused port number that will be used to create the DAP localhost socket. If this environment variable is not set, then the debugger functionality will not be enabled.
+* `VK_WAIT_FOR_DEBUGGER` - if defined, the debugger will block on `vkCreateDevice()` until a debugger connection is established, before allowing `vkCreateDevice()` to return. This allows breakpoints to be set before execution continues.
+
+# Connecting using Visual Studio Code
+
+Once you have built SwiftShader with the debugger functionality enabled, and the `VK_DEBUGGER_PORT` environment variable set, you can connect to the debugger using the following Visual Studio Code `"debugServer"` [Launch Configuration](https://code.visualstudio.com/docs/editor/debugging#_launch-configurations):
+
+```json
+    {
+        "name": "Vulkan Shader Debugger",
+        "type": "node",
+        "request": "launch",
+        "debugServer": 19020,
+    }
+```
+
+Note that the `"type": "node"` field is unused, but is required.
+
+[TODO](https://issuetracker.google.com/issues/148373102): Create a Visual Studio Code extension that provides a pre-built SwiftShader driver and debugger type.
+
+# Shader entry breakpoints
+
+You can use the following function breakpoint names to set a breakpoint on the entry to all shaders of the corresponding shader type:
+* `"VertexShader"`
+* `"FragmentShader"`
+* `"ComputeShader"`
+
+# High-level Shader debugging
+
+The debugger will, by default, automatically disassemble the SPIR-V shader code and provide this as the source for the shader program.
+
+However, if the shader program contains [`OpenCL.DebugInfo.100`](https://www.khronos.org/registry/spir-v/specs/unified1/OpenCL.DebugInfo.100.mobile.html) debug info instructions, then the debugger will allow you to debug the high-level shader source (please see [Known Issues](#Known-Issues)).
+
+
+# Known Issues
+
+* Currently enabling the debugger dramatically affects performance for all shader invocations. We may want to just-in-time recompile shaders that are actively being debugged to keep the invocations of non-debugged shaders performant. [Tracker bug](https://issuetracker.google.com/issues/148372410)
+* Support for [`OpenCL.DebugInfo.100`](https://www.khronos.org/registry/spir-v/specs/unified1/OpenCL.DebugInfo.100.mobile.html) is still in early, but active development. Many features are still incomplete.
+* Shader subgroup invocations are currently presented as a single thread, with each invocation presented as `Lane N` groups in the watch window(s). This approach is still being evaluated, and may be reworked.
\ No newline at end of file
diff --git a/docs/dEQP.md b/docs/dEQP.md
new file mode 100644
index 0000000..537f3c9
--- /dev/null
+++ b/docs/dEQP.md
@@ -0,0 +1,246 @@
+dEQP

+====

+

+These steps are specifically for testing SwiftShader's Vulkan implementation using dEQP on Windows (steps for Linux follow the Windows instructions).

+

+Prerequisites

+-------------

+

+1. Install the latest [Python 3](https://www.python.org/downloads/)

+2. Install [Visual Studio](https://visualstudio.microsoft.com/vs/community/)

+3. Install [CMake](https://cmake.org/download/)

+4. Install [Go](https://golang.org/doc/install)

+5. Install [MinGW-W64](http://mingw-w64.org/doku.php/download)

+  * Select 'x86_64' as Architecture during setup

+6. Install [Git](https://git-scm.com/download/win)

+7. Set environment variables: Config Panel -> System and Security -> System -> Advanced system settings -> Environment Variables

+  * Add `<path to python>` to your PATH environment variable

+  * Add `<path to MinGW-W64>\bin` to your PATH environment variable

+

+8. (Optional) Install [TortoiseGit](https://tortoisegit.org/)

+

+Getting the Code

+----------------

+

+12. Get dEQP (either in 'cmd' or by using TortoiseGit):

+

+    `git clone https://github.com/KhronosGroup/VK-GL-CTS`

+

+    You may wish to check out a stable vulkan-cts-* branch.

+

+13. Get dEQP's dependencies. In your dEQP root directory, open 'cmd' and run:

+

+    `python3 external\fetch_sources.py`

+

+14. Get Cherry (either in 'cmd' or by using TortoiseGit):

+

+    `git clone https://android.googlesource.com/platform/external/cherry`

+

+15. Set environment variable (see point 9):

+

+    Add new variable GOPATH='`<path to cherry>`'

+

+Building the code

+-----------------

+

+16. Build dEQP's Visual Studio files using the CMake GUI, or, in the dEQP root dir, run:

+    ```

+    mkdir build

+    cd build

+    cmake ..

+    ```

+    Note: don't call 'cmake .' directly in the root directory. It will make things fail later on. If you do, simply erase the files created by CMake and follow the steps above.

+

+17. Build dEQP:

+

+    Open `<path to dEQP>\build\dEQP-Core-default.sln` in Visual Studio and Build Solution

+

+    Note: Choose a 'Debug' build.

+

+18. Generate test cases:

+    ```

+    mkdir <path to cherry>\data

+    cd <path to dEQP>

+    python3 scripts\build_caselists.py <path to cherry>\data

+    ```

+

+    Note: you need to run `python3 scripts\build_caselists.py <path to cherry>\data` every time you update dEQP.

+

+Preparing the server

+--------------------

+

+19. Edit `<path to cherry>\cherry\data.go`

+* Search for `../candy-build/deqp-wgl` and replace that by `<path to deqp>/build`

+* Just above, add an option to CommandLine: `--deqp-gl-context-type=egl`

+* Remove `--deqp-watchdog=enable` to avoid timeouts during debugging.

+

+  Note: If you chose a Release build at step 17, modify the BinaryPath from 'Debug' to 'Release'.

+

+Testing Vulkan

+--------------

+

+20. Assuming you already built SwiftShader, copy and rename this file:

+

+    `<path to SwiftShader>\build\Release_x64\vk_swiftshader.dll` or\

+    `<path to SwiftShader>\build\Debug_x64\vk_swiftshader.dll`

+

+    To:

+

+    `<path to dEQP>\build\external\vulkancts\modules\vulkan\Debug\vulkan-1.dll`

+

+    This will cause dEQP to load SwiftShader's Vulkan implementation directly, without going through a system-provided [loader](https://github.com/KhronosGroup/Vulkan-Loader/blob/master/loader/LoaderAndLayerInterface.md#the-loader) library or any layers.

+

+     This step can also be automated by setting the `SWIFTSHADER_VULKAN_API_LIBRARY_INSTALL_PATH` environment variable to a path where we'd like the drop-in API library to be installed. For example `<path to dEQP>/build/external/vulkancts/modules/vulkan/Debug/`.

+

+    To use SwiftShader as an [Installable Client Driver](https://github.com/KhronosGroup/Vulkan-Loader/blob/master/loader/LoaderAndLayerInterface.md#installable-client-drivers) (ICD) instead:

+    * Edit environment variables:

+      * Define VK_ICD_FILENAMES to `<path to SwiftShader>\src\Vulkan\vk_swiftshader_icd.json`

+    * If the location of `vk_swiftshader.dll` you're using is different than the one specified in `src\Vulkan\vk_swiftshader_icd.json`, modify it to point to the `vk_swiftshader.dll` file you want to use.

+

+Running the tests

+-----------------

+

+21. Start the test server. Go to `<path to cherry>` and run:

+

+    `go run server.go`

+

+22. Open your favorite browser and navigate to `localhost:8080`

+

+    Get Started -> Choose Device 'localhost' -> Select Tests 'dEQP-VK' -> Execute tests!

+

+Mustpass sets

+-------------

+

+dEQP contains more tests than are expected to pass by a conformant implementation (e.g. some tests are considered too strict, or assume certain undefined behavior). The [android/cts/master/vk-master.txt](https://android.googlesource.com/platform/external/deqp/+/master/android/cts/master/vk-master.txt) text file can be loaded in Cherry's 'Test sets' tab to run only the latest tests expected to pass by certified Android devices.

+

+Linux

+-----

+

+The Linux process is similar to Windows. However, it doesn't use Release or Debug variants, paths use forward slashes, and it uses shared object files instead of DLLs.

+

+1. Install the latest [Python 3](https://www.python.org/downloads/)

+2. Install GCC and Make. In a terminal, run:

+

+    `sudo apt-get install gcc make`

+

+3. Install [CMake](https://cmake.org/download/)

+4. Install [Go](https://golang.org/doc/install)

+5. Install Git. In a terminal, run:

+

+    `sudo apt-get install git`

+

+6. Download the [Vulkan SDK](https://vulkan.lunarg.com/) and unpack it into a location you like.

+

+Getting the Code

+----------------

+

+7. Get SwiftShader. In a terminal, go to the location where you want to keep SwiftShader, and run:

+

+    ```

+    git clone https://swiftshader.googlesource.com/SwiftShader && (cd SwiftShader && curl -Lo `git rev-parse --git-dir`/hooks/commit-msg https://gerrit-review.googlesource.com/tools/hooks/commit-msg ; chmod +x `git rev-parse --git-dir`/hooks/commit-msg)

+    ```

+

+    This will also install the commit hooks you need for committing to SwiftShader.

+

+8. Get dEQP:

+

+   `git clone https://github.com/KhronosGroup/VK-GL-CTS`

+

+9. Get dEQP's dependencies. In your dEQP root directory, run:

+

+    `python3 external/fetch_sources.py`

+

+10. Get Cherry, similar to step 8:

+

+    `git clone https://android.googlesource.com/platform/external/cherry`

+

+11. Set environment variable. Open ~/.bashrc in your preferred editor and add the following line:

+

+    GOPATH='`<path to cherry>`'

+

+Building the code

+-----------------

+

+12. Build SwiftShader. In the SwiftShader root dir, run:

+    ```

+    cd build

+    cmake ..

+    make --jobs=$(nproc)

+    ```

+

+13. Set your environment variables. In the terminal in which you'll be building dEQP, run the following commands:

+

+    ```

+    export LD_LIBRARY_PATH="<Vulkan SDK location>/x86_64/lib:$LD_LIBRARY_PATH"

+    export LD_LIBRARY_PATH="<Swiftshader location>/build:$LD_LIBRARY_PATH"

+    ```

+

+14. Build dEQP. In the dEQP root dir, run:

+    ```

+    mkdir build

+    cd build

+    cmake ..

+    make --jobs=$(nproc)

+    ```

+

+    Also: don't call 'cmake .' directly in the root directory. It will make things fail later on. If you do, simply erase the files created by CMake and follow the steps above.

+

+15. Generate test cases:

+    ```

+    mkdir <path to cherry>/data

+    cd <path to dEQP>

+    python3 scripts/build_caselists.py <path to cherry>/data

+    ```

+

+    Note: you need to run `python3 scripts/build_caselists.py <path to cherry>/data` every time you update dEQP.

+

+Preparing the server

+--------------------

+

+16. Edit `<path to cherry>/cherry/data.go`

+* Search for ".exe" and remove all instances.

+* Search for `../candy-build/deqp-wgl/execserver/Release` and replace that by `<path to deqp>/build/execserver/execserver`

+* Just above, add an option to CommandLine: `--deqp-gl-context-type=egl`

+* Just below, remove 'Debug/' from the BinaryPath.

+* Just one more line below, replace `../candy-build/deqp-wgl/` with `<path to deqp>/build/modules/${TestPackageDir}`.

+* Remove `--deqp-watchdog=enable` to avoid timeouts during debugging.

+

+Testing Vulkan

+--------------

+

+17. Use SwiftShader as an [Installable Client Driver](https://github.com/KhronosGroup/Vulkan-Loader/blob/master/loader/LoaderAndLayerInterface.md#installable-client-drivers) (ICD). Add the following line to your `~/.bashrc`:

+

+      `export VK_ICD_FILENAMES="<path to SwiftShader>/build/Linux/vk_swiftshader_icd.json"`

+

+    Then run `source ~/.bashrc` in the terminal(s) you'll be running tests from.

+

+

+Running the tests

+-----------------

+

+18. Start the test server. Go to `<path to cherry>` and run:

+

+    `go run server.go`

+

+19. Open your favorite browser and navigate to `localhost:8080`

+

+    Get Started -> Choose Device 'localhost' -> Select Tests 'dEQP-VK' -> Execute tests!

+

+20. To make sure that you're running SwiftShader's drivers, select only the dEQP-VK->info->device test. In the next window, click on these tests in the left pane. If you see SwiftShader in the deviceName field, then you've set your suite up properly.

+

+21. If you want to run Vulkan tests in the command line, go to the build directory in dEQP root. Then run the following command:

+

+    `external/vulkancts/modules/vulkan/deqp-vk`

+

+    You can also run individual tests with:

+

+    `external/vulkancts/modules/vulkan/deqp-vk --deqp-case=<test name>`

+

+    You can find a list of the test names in `<SwiftShader root>/tests/regres/testlists/vk-master.txt`. However, deqp-vk will stop at the first failure, so it's recommended that you use Cherry for your testing needs unless you know what you're doing.

+

+22. To check that you're running SwiftShader in cherry, start the server 

+

+Mustpass sets

+-------------

+

+dEQP contains more tests than are expected to pass by a conformant implementation (e.g. some tests are considered too strict, or assume certain undefined behavior). The [android/cts/master/vk-master.txt](https://android.googlesource.com/platform/external/deqp/+/master/android/cts/master/vk-master.txt) text file can be loaded in Cherry's 'Test sets' tab to run only the latest tests expected to pass by certified Android devices.

diff --git a/include/Android/android/api-level.h b/include/Android/android/api-level.h
new file mode 100644
index 0000000..2d2f096
--- /dev/null
+++ b/include/Android/android/api-level.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright (C) 2008 The Android Open Source Project
+ * All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ *  * Redistributions of source code must retain the above copyright
+ *    notice, this list of conditions and the following disclaimer.
+ *  * Redistributions in binary form must reproduce the above copyright
+ *    notice, this list of conditions and the following disclaimer in
+ *    the documentation and/or other materials provided with the
+ *    distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+ * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+ * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
+ * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
+ * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+ * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+ * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+ * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
+ * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+ * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#ifndef ANDROID_API_LEVEL_H
+#define ANDROID_API_LEVEL_H
+
+/*
+ * Magic version number for a current development build, which has
+ * not yet turned into an official release.
+ */
+#define __ANDROID_API__ 10000
+
+#endif /* ANDROID_API_LEVEL_H */
diff --git a/include/Android/android/sync.h b/include/Android/android/sync.h
new file mode 100644
index 0000000..1ae728a
--- /dev/null
+++ b/include/Android/android/sync.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+extern "C" {
+int sync_wait(int fd, int timeout);
+};
diff --git a/include/Android/cutils/native_handle.h b/include/Android/cutils/native_handle.h
new file mode 100644
index 0000000..b92a663
--- /dev/null
+++ b/include/Android/cutils/native_handle.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+struct native_handle_t;
+
+typedef const struct native_handle_t* buffer_handle_t;
diff --git a/include/Android/hardware/gralloc.h b/include/Android/hardware/gralloc.h
new file mode 100644
index 0000000..013e86a
--- /dev/null
+++ b/include/Android/hardware/gralloc.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cutils/native_handle.h>
+
+#include <hardware/hardware.h>
+
+struct android_ycbcr;
+
+enum {
+    GRALLOC_USAGE_SW_READ_OFTEN = 0x00000003U,
+    GRALLOC_USAGE_SW_WRITE_OFTEN = 0x00000030U,
+    GRALLOC_USAGE_HW_TEXTURE = 0x00000100U,
+    GRALLOC_USAGE_HW_RENDER = 0x00000200U,
+};
+
+struct gralloc_module_t {
+    hw_module_t common;
+    int (*registerBuffer)(gralloc_module_t const*, buffer_handle_t);
+    int (*unregisterBuffer)(gralloc_module_t const*, buffer_handle_t);
+    int (*lock)(gralloc_module_t const*, buffer_handle_t, int, int, int, int, int, void**);
+    int (*unlock)(gralloc_module_t const*, buffer_handle_t);
+    int (*perform)(gralloc_module_t const*, int, ...);
+    int (*lock_ycbcr)(gralloc_module_t const*, buffer_handle_t, int, int, int, int, int,
+                      android_ycbcr*);
+    int (*lockAsync)(gralloc_module_t const*, buffer_handle_t, int, int, int, int, int, void**, int);
+    int (*unlockAsync)(gralloc_module_t const*, buffer_handle_t, int*);
+    int (*lockAsync_ycbcr)(gralloc_module_t const*, buffer_handle_t, int, int, int, int, int,
+                           android_ycbcr*, int);
+    int32_t (*getTransportSize)(gralloc_module_t const*, buffer_handle_t, uint32_t, uint32_t);
+    int32_t (*validateBufferSize)(gralloc_module_t const*, buffer_handle_t, uint32_t, uint32_t, int32_t, int, uint32_t);
+
+    void* reserved_proc[1];
+};
diff --git a/include/Android/hardware/gralloc1.h b/include/Android/hardware/gralloc1.h
new file mode 100644
index 0000000..b02decf
--- /dev/null
+++ b/include/Android/hardware/gralloc1.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <hardware/hardware.h>
+
+#include <cutils/native_handle.h>
+
+#define GRALLOC_MODULE_API_VERSION_1_0 HARDWARE_MAKE_API_VERSION(1, 0)
+
+#define GRALLOC_HARDWARE_MODULE_ID "gralloc"
+
+enum {
+    GRALLOC1_ERROR_NONE = 0,
+    GRALLOC1_ERROR_BAD_HANDLE = 2,
+    GRALLOC1_ERROR_BAD_VALUE = 3,
+    GRALLOC1_ERROR_UNDEFINED = 6,
+};
+
+enum {
+    GRALLOC1_FUNCTION_LOCK = 18,
+    GRALLOC1_FUNCTION_UNLOCK = 20,
+};
+
+enum {
+    GRALLOC1_CONSUMER_USAGE_CPU_READ = 1ULL << 1,
+    GRALLOC1_CONSUMER_USAGE_CPU_READ_OFTEN = 1ULL << 2 | GRALLOC1_CONSUMER_USAGE_CPU_READ,
+    GRALLOC1_CONSUMER_USAGE_CPU_WRITE = 1ULL << 5,
+    GRALLOC1_CONSUMER_USAGE_CPU_WRITE_OFTEN = 1ULL << 6 | GRALLOC1_CONSUMER_USAGE_CPU_WRITE,
+    GRALLOC1_CONSUMER_USAGE_GPU_TEXTURE = 1ULL << 8,
+};
+
+enum {
+    GRALLOC1_PRODUCER_USAGE_CPU_READ = 1ULL << 1,
+    GRALLOC1_PRODUCER_USAGE_CPU_READ_OFTEN = 1ULL << 2 | GRALLOC1_PRODUCER_USAGE_CPU_READ,
+    GRALLOC1_PRODUCER_USAGE_CPU_WRITE = 1ULL << 5,
+    GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN = 1ULL << 6 | GRALLOC1_PRODUCER_USAGE_CPU_WRITE,
+    GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET = 1ULL << 9,
+};
+
+typedef void (*gralloc1_function_pointer_t)();
+
+struct gralloc1_rect_t {
+    int32_t left;
+    int32_t top;
+    int32_t width;
+    int32_t height;
+};
+
+struct gralloc1_device_t {
+    hw_device_t common;
+    void (*getCapabilities)(gralloc1_device_t*, uint32_t*, int32_t*);
+    gralloc1_function_pointer_t (*getFunction)(gralloc1_device_t*, int32_t);
+};
+
+typedef int32_t (*GRALLOC1_PFN_LOCK)(gralloc1_device_t*, buffer_handle_t, uint64_t, uint64_t,
+                                     const gralloc1_rect_t*, void**, int32_t);
+typedef int32_t (*GRALLOC1_PFN_UNLOCK)(gralloc1_device_t*, buffer_handle_t, int32_t*);
+
+static inline int gralloc1_open(const hw_module_t* module, gralloc1_device_t** device) {
+    return module->methods->open(module, GRALLOC_HARDWARE_MODULE_ID,
+                                 reinterpret_cast<hw_device_t**>(device));
+}
diff --git a/include/Android/hardware/hardware.h b/include/Android/hardware/hardware.h
new file mode 100644
index 0000000..21d6dc4
--- /dev/null
+++ b/include/Android/hardware/hardware.h
@@ -0,0 +1,66 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cstdint>
+
+#define MAKE_TAG_CONSTANT(A, B, C, D) (((A) << 24) | ((B) << 16) | ((C) << 8) | (D))
+
+#define HARDWARE_MODULE_TAG MAKE_TAG_CONSTANT('H', 'W', 'M', 'T')
+#define HARDWARE_DEVICE_TAG MAKE_TAG_CONSTANT('H', 'W', 'D', 'T')
+
+#define HARDWARE_MAKE_API_VERSION(maj, min) ((((maj)&0xff) << 8) | ((min)&0xff))
+
+#define HARDWARE_HAL_API_VERSION HARDWARE_MAKE_API_VERSION(1, 0)
+
+struct hw_module_methods_t;
+
+struct hw_module_t {
+    uint32_t tag;
+    uint16_t module_api_version;
+    uint16_t hal_api_version;
+    const char* id;
+    const char* name;
+    const char* author;
+    hw_module_methods_t* methods;
+    void* dso;
+#ifdef __LP64__
+    uint64_t reserved[32 - 7];
+#else
+    uint32_t reserved[32 - 7];
+#endif
+};
+
+struct hw_device_t {
+    uint32_t tag;
+    uint32_t version;
+    struct hw_module_t* module;
+#ifdef __LP64__
+    uint64_t reserved[12];
+#else
+    uint32_t reserved[12];
+#endif
+    int (*close)(hw_device_t* device);
+};
+
+struct hw_module_methods_t {
+    int (*open)(const hw_module_t*, const char*, hw_device_t**);
+};
+
+extern "C" {
+int hw_get_module(const char* id, const hw_module_t** module);
+};
diff --git a/include/Android/nativebase/nativebase.h b/include/Android/nativebase/nativebase.h
new file mode 100644
index 0000000..c2e84d7
--- /dev/null
+++ b/include/Android/nativebase/nativebase.h
@@ -0,0 +1,64 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <cutils/native_handle.h>
+
+#include <cstdint>
+#include <cstring>
+
+// clang-format off
+#define ANDROID_NATIVE_MAKE_CONSTANT(a, b, c, d) \
+    ((static_cast<unsigned int>(a) << 24) | \
+     (static_cast<unsigned int>(b) << 16) | \
+     (static_cast<unsigned int>(c) <<  8) | \
+     (static_cast<unsigned int>(d) <<  0))
+// clang-format on
+
+struct android_native_base_t {
+    int magic;
+    int version;
+    void* reserved[4];
+    void (*incRef)(android_native_base_t*);
+    void (*decRef)(android_native_base_t*);
+};
+
+#define ANDROID_NATIVE_BUFFER_MAGIC ANDROID_NATIVE_MAKE_CONSTANT('_', 'b', 'f', 'r')
+
+struct ANativeWindowBuffer {
+    ANativeWindowBuffer() {
+        common.magic = ANDROID_NATIVE_BUFFER_MAGIC;
+        common.version = sizeof(ANativeWindowBuffer);
+        memset(common.reserved, 0, sizeof(common.reserved));
+    }
+
+    android_native_base_t common;
+
+    int width;
+    int height;
+    int stride;
+    int format;
+    int usage_deprecated;
+    uintptr_t layerCount;
+
+    void* reserved[1];
+
+    const native_handle_t* handle;
+    uint64_t usage;
+
+    void* reserved_proc[8 - (sizeof(uint64_t) / sizeof(void*))];
+};
diff --git a/include/Android/sync/sync.h b/include/Android/sync/sync.h
new file mode 100644
index 0000000..1ae728a
--- /dev/null
+++ b/include/Android/sync/sync.h
@@ -0,0 +1,21 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+extern "C" {
+int sync_wait(int fd, int timeout);
+};
diff --git a/include/Android/system/graphics.h b/include/Android/system/graphics.h
new file mode 100644
index 0000000..563287a
--- /dev/null
+++ b/include/Android/system/graphics.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright (C) 2018 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+enum {
+    HAL_PIXEL_FORMAT_RGBA_8888 = 1,
+    HAL_PIXEL_FORMAT_RGBX_8888 = 2,
+    HAL_PIXEL_FORMAT_RGB_888 = 3,
+    HAL_PIXEL_FORMAT_RGB_565 = 4,
+    HAL_PIXEL_FORMAT_BGRA_8888 = 5,
+    HAL_PIXEL_FORMAT_RGBA_FP16 = 22,
+    HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED = 34,
+    HAL_PIXEL_FORMAT_YV12 = 842094169,
+};
diff --git a/include/Android/vndk/window.h b/include/Android/vndk/window.h
new file mode 100644
index 0000000..8b5965f
--- /dev/null
+++ b/include/Android/vndk/window.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#pragma once
+
+#include <nativebase/nativebase.h>
+
+struct ANativeWindow;
+typedef struct ANativeWindow ANativeWindow;
+
+void ANativeWindow_acquire(ANativeWindow* window);
+void ANativeWindow_release(ANativeWindow* window);
+int32_t ANativeWindow_getWidth(ANativeWindow* window);
+int32_t ANativeWindow_getHeight(ANativeWindow* window);
+int ANativeWindow_dequeueBuffer(ANativeWindow* window, ANativeWindowBuffer** buffer, int* fenceFd);
+int ANativeWindow_queueBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer, int fenceFd);
+int ANativeWindow_cancelBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer, int fenceFd);
+int ANativeWindow_setUsage(ANativeWindow* window, uint64_t usage);
diff --git a/include/Wayland/wayland-client-core.h b/include/Wayland/wayland-client-core.h
new file mode 100644
index 0000000..ce91a6f
--- /dev/null
+++ b/include/Wayland/wayland-client-core.h
@@ -0,0 +1,292 @@
+/*
+ * Copyright © 2008 Kristian Høgsberg
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files (the
+ * "Software"), to deal in the Software without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Software, and to
+ * permit persons to whom the Software is furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice (including the
+ * next paragraph) shall be included in all copies or substantial
+ * portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT.  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+ * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef WAYLAND_CLIENT_CORE_H
+#define WAYLAND_CLIENT_CORE_H
+
+#include <stdint.h>
+#include "wayland-util.h"
+#include "wayland-version.h"
+
+#ifdef  __cplusplus
+extern "C" {
+#endif
+
+/** \class wl_proxy
+ *
+ * \brief Represents a protocol object on the client side.
+ *
+ * A wl_proxy acts as a client side proxy to an object existing in the
+ * compositor. The proxy is responsible for converting requests made by the
+ * clients with \ref wl_proxy_marshal() into Wayland's wire format. Events
+ * coming from the compositor are also handled by the proxy, which will in
+ * turn call the handler set with \ref wl_proxy_add_listener().
+ *
+ * \note With the exception of function \ref wl_proxy_set_queue(), functions
+ * accessing a wl_proxy are not normally used by client code. Clients
+ * should normally use the higher level interface generated by the scanner to
+ * interact with compositor objects.
+ *
+ */
+struct wl_proxy;
+
+/** \class wl_display
+ *
+ * \brief Represents a connection to the compositor and acts as a proxy to
+ * the wl_display singleton object.
+ *
+ * A wl_display object represents a client connection to a Wayland
+ * compositor. It is created with either \ref wl_display_connect() or
+ * \ref wl_display_connect_to_fd(). A connection is terminated using
+ * \ref wl_display_disconnect().
+ *
+ * A wl_display is also used as the \ref wl_proxy for the wl_display
+ * singleton object on the compositor side.
+ *
+ * A wl_display object handles all the data sent from and to the
+ * compositor. When a \ref wl_proxy marshals a request, it will write its wire
+ * representation to the display's write buffer. The data is sent to the
+ * compositor when the client calls \ref wl_display_flush().
+ *
+ * Incoming data is handled in two steps: queueing and dispatching. In the
+ * queue step, the data coming from the display fd is interpreted and
+ * added to a queue. On the dispatch step, the handler for the incoming
+ * event set by the client on the corresponding \ref wl_proxy is called.
+ *
+ * A wl_display has at least one event queue, called the <em>default
+ * queue</em>. Clients can create additional event queues with \ref
+ * wl_display_create_queue() and assign \ref wl_proxy's to it. Events
+ * occurring in a particular proxy are always queued in its assigned queue.
+ * A client can ensure that a certain assumption, such as holding a lock
+ * or running from a given thread, is true when a proxy event handler is
+ * called by assigning that proxy to an event queue and making sure that
+ * this queue is only dispatched when the assumption holds.
+ *
+ * The default queue is dispatched by calling \ref wl_display_dispatch().
+ * This will dispatch any events queued on the default queue and attempt
+ * to read from the display fd if it's empty. Events read are then queued
+ * on the appropriate queues according to the proxy assignment.
+ *
+ * A user created queue is dispatched with \ref wl_display_dispatch_queue().
+ * This function behaves exactly the same as wl_display_dispatch()
+ * but it dispatches given queue instead of the default queue.
+ *
+ * A real world example of event queue usage is Mesa's implementation of
+ * eglSwapBuffers() for the Wayland platform. This function might need
+ * to block until a frame callback is received, but dispatching the default
+ * queue could cause an event handler on the client to start drawing
+ * again. This problem is solved using another event queue, so that only
+ * the events handled by the EGL code are dispatched during the block.
+ *
+ * This creates a problem where a thread dispatches a non-default
+ * queue, reading all the data from the display fd. If the application
+ * would call \em poll(2) after that it would block, even though there
+ * might be events queued on the default queue. Those events should be
+ * dispatched with \ref wl_display_dispatch_pending() or \ref
+ * wl_display_dispatch_queue_pending() before flushing and blocking.
+ */
+struct wl_display;
+
+/** \class wl_event_queue
+ *
+ * \brief A queue for \ref wl_proxy object events.
+ *
+ * Event queues allows the events on a display to be handled in a thread-safe
+ * manner. See \ref wl_display for details.
+ *
+ */
+struct wl_event_queue;
+
+/** Destroy proxy after marshalling
+ * @ingroup wl_proxy
+ */
+#define WL_MARSHAL_FLAG_DESTROY (1 << 0)
+
+void
+wl_event_queue_destroy(struct wl_event_queue *queue);
+
+struct wl_proxy *
+wl_proxy_marshal_flags(struct wl_proxy *proxy, uint32_t opcode,
+		       const struct wl_interface *interface,
+		       uint32_t version,
+		       uint32_t flags, ...);
+
+struct wl_proxy *
+wl_proxy_marshal_array_flags(struct wl_proxy *proxy, uint32_t opcode,
+			     const struct wl_interface *interface,
+			     uint32_t version,
+			     uint32_t flags,
+			     union wl_argument *args);
+
+void
+wl_proxy_marshal(struct wl_proxy *p, uint32_t opcode, ...);
+
+void
+wl_proxy_marshal_array(struct wl_proxy *p, uint32_t opcode,
+		       union wl_argument *args);
+
+struct wl_proxy *
+wl_proxy_create(struct wl_proxy *factory,
+		const struct wl_interface *interface);
+
+void *
+wl_proxy_create_wrapper(void *proxy);
+
+void
+wl_proxy_wrapper_destroy(void *proxy_wrapper);
+
+struct wl_proxy *
+wl_proxy_marshal_constructor(struct wl_proxy *proxy,
+			     uint32_t opcode,
+			     const struct wl_interface *interface,
+			     ...);
+
+struct wl_proxy *
+wl_proxy_marshal_constructor_versioned(struct wl_proxy *proxy,
+				       uint32_t opcode,
+				       const struct wl_interface *interface,
+				       uint32_t version,
+				       ...);
+
+struct wl_proxy *
+wl_proxy_marshal_array_constructor(struct wl_proxy *proxy,
+				   uint32_t opcode, union wl_argument *args,
+				   const struct wl_interface *interface);
+
+struct wl_proxy *
+wl_proxy_marshal_array_constructor_versioned(struct wl_proxy *proxy,
+					     uint32_t opcode,
+					     union wl_argument *args,
+					     const struct wl_interface *interface,
+					     uint32_t version);
+
+void
+wl_proxy_destroy(struct wl_proxy *proxy);
+
+int
+wl_proxy_add_listener(struct wl_proxy *proxy,
+		      void (**implementation)(void), void *data);
+
+const void *
+wl_proxy_get_listener(struct wl_proxy *proxy);
+
+int
+wl_proxy_add_dispatcher(struct wl_proxy *proxy,
+			wl_dispatcher_func_t dispatcher_func,
+			const void * dispatcher_data, void *data);
+
+void
+wl_proxy_set_user_data(struct wl_proxy *proxy, void *user_data);
+
+void *
+wl_proxy_get_user_data(struct wl_proxy *proxy);
+
+uint32_t
+wl_proxy_get_version(struct wl_proxy *proxy);
+
+uint32_t
+wl_proxy_get_id(struct wl_proxy *proxy);
+
+void
+wl_proxy_set_tag(struct wl_proxy *proxy,
+		 const char * const *tag);
+
+const char * const *
+wl_proxy_get_tag(struct wl_proxy *proxy);
+
+const char *
+wl_proxy_get_class(struct wl_proxy *proxy);
+
+void
+wl_proxy_set_queue(struct wl_proxy *proxy, struct wl_event_queue *queue);
+
+struct wl_display *
+wl_display_connect(const char *name);
+
+struct wl_display *
+wl_display_connect_to_fd(int fd);
+
+void
+wl_display_disconnect(struct wl_display *display);
+
+int
+wl_display_get_fd(struct wl_display *display);
+
+int
+wl_display_dispatch(struct wl_display *display);
+
+int
+wl_display_dispatch_queue(struct wl_display *display,
+			  struct wl_event_queue *queue);
+
+int
+wl_display_dispatch_queue_pending(struct wl_display *display,
+				  struct wl_event_queue *queue);
+
+int
+wl_display_dispatch_pending(struct wl_display *display);
+
+int
+wl_display_get_error(struct wl_display *display);
+
+uint32_t
+wl_display_get_protocol_error(struct wl_display *display,
+			      const struct wl_interface **interface,
+			      uint32_t *id);
+
+int
+wl_display_flush(struct wl_display *display);
+
+int
+wl_display_roundtrip_queue(struct wl_display *display,
+			   struct wl_event_queue *queue);
+
+int
+wl_display_roundtrip(struct wl_display *display);
+
+struct wl_event_queue *
+wl_display_create_queue(struct wl_display *display);
+
+int
+wl_display_prepare_read_queue(struct wl_display *display,
+			      struct wl_event_queue *queue);
+
+int
+wl_display_prepare_read(struct wl_display *display);
+
+void
+wl_display_cancel_read(struct wl_display *display);
+
+int
+wl_display_read_events(struct wl_display *display);
+
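+/*
+ * Illustrative sketch (not part of the generated API): the usual
+ * thread-safe read pattern built from the declarations above. It
+ * assumes a connected `display`, <poll.h>, and that the caller reads
+ * from the default queue; error handling is omitted for brevity.
+ *
+ *   while (wl_display_prepare_read(display) != 0)
+ *           wl_display_dispatch_pending(display);
+ *   wl_display_flush(display);
+ *
+ *   struct pollfd pfd = { .fd = wl_display_get_fd(display), .events = POLLIN };
+ *   if (poll(&pfd, 1, -1) > 0)
+ *           wl_display_read_events(display);
+ *   else
+ *           wl_display_cancel_read(display);
+ *
+ *   wl_display_dispatch_pending(display);
+ */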
+void
+wl_log_set_handler_client(wl_log_func_t handler);
+
+#ifdef  __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/Wayland/wayland-client-protocol.h b/include/Wayland/wayland-client-protocol.h
new file mode 100644
index 0000000..1f3481c
--- /dev/null
+++ b/include/Wayland/wayland-client-protocol.h
@@ -0,0 +1,6106 @@
+/* Generated by wayland-scanner 1.21.0 */
+
+#ifndef WAYLAND_CLIENT_PROTOCOL_H
+#define WAYLAND_CLIENT_PROTOCOL_H
+
+#include <stdint.h>
+#include <stddef.h>
+#include "wayland-client.h"
+
+#ifdef  __cplusplus
+extern "C" {
+#endif
+
+/**
+ * @page page_wayland The wayland protocol
+ * @section page_ifaces_wayland Interfaces
+ * - @subpage page_iface_wl_display - core global object
+ * - @subpage page_iface_wl_registry - global registry object
+ * - @subpage page_iface_wl_callback - callback object
+ * - @subpage page_iface_wl_compositor - the compositor singleton
+ * - @subpage page_iface_wl_shm_pool - a shared memory pool
+ * - @subpage page_iface_wl_shm - shared memory support
+ * - @subpage page_iface_wl_buffer - content for a wl_surface
+ * - @subpage page_iface_wl_data_offer - offer to transfer data
+ * - @subpage page_iface_wl_data_source - offer to transfer data
+ * - @subpage page_iface_wl_data_device - data transfer device
+ * - @subpage page_iface_wl_data_device_manager - data transfer interface
+ * - @subpage page_iface_wl_shell - create desktop-style surfaces
+ * - @subpage page_iface_wl_shell_surface - desktop-style metadata interface
+ * - @subpage page_iface_wl_surface - an onscreen surface
+ * - @subpage page_iface_wl_seat - group of input devices
+ * - @subpage page_iface_wl_pointer - pointer input device
+ * - @subpage page_iface_wl_keyboard - keyboard input device
+ * - @subpage page_iface_wl_touch - touchscreen input device
+ * - @subpage page_iface_wl_output - compositor output region
+ * - @subpage page_iface_wl_region - region interface
+ * - @subpage page_iface_wl_subcompositor - sub-surface compositing
+ * - @subpage page_iface_wl_subsurface - sub-surface interface to a wl_surface
+ * @section page_copyright_wayland Copyright
+ * <pre>
+ *
+ * Copyright © 2008-2011 Kristian Høgsberg
+ * Copyright © 2010-2011 Intel Corporation
+ * Copyright © 2012-2013 Collabora, Ltd.
+ *
+ * Permission is hereby granted, free of charge, to any person
+ * obtaining a copy of this software and associated documentation files
+ * (the "Software"), to deal in the Software without restriction,
+ * including without limitation the rights to use, copy, modify, merge,
+ * publish, distribute, sublicense, and/or sell copies of the Software,
+ * and to permit persons to whom the Software is furnished to do so,
+ * subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice (including the
+ * next paragraph) shall be included in all copies or substantial
+ * portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT.  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+ * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ * </pre>
+ */
+struct wl_buffer;
+struct wl_callback;
+struct wl_compositor;
+struct wl_data_device;
+struct wl_data_device_manager;
+struct wl_data_offer;
+struct wl_data_source;
+struct wl_display;
+struct wl_keyboard;
+struct wl_output;
+struct wl_pointer;
+struct wl_region;
+struct wl_registry;
+struct wl_seat;
+struct wl_shell;
+struct wl_shell_surface;
+struct wl_shm;
+struct wl_shm_pool;
+struct wl_subcompositor;
+struct wl_subsurface;
+struct wl_surface;
+struct wl_touch;
+
+#ifndef WL_DISPLAY_INTERFACE
+#define WL_DISPLAY_INTERFACE
+/**
+ * @page page_iface_wl_display wl_display
+ * @section page_iface_wl_display_desc Description
+ *
+ * The core global object.  This is a special singleton object.  It
+ * is used for internal Wayland protocol features.
+ * @section page_iface_wl_display_api API
+ * See @ref iface_wl_display.
+ */
+/**
+ * @defgroup iface_wl_display The wl_display interface
+ *
+ * The core global object.  This is a special singleton object.  It
+ * is used for internal Wayland protocol features.
+ */
+extern const struct wl_interface wl_display_interface;
+#endif
+#ifndef WL_REGISTRY_INTERFACE
+#define WL_REGISTRY_INTERFACE
+/**
+ * @page page_iface_wl_registry wl_registry
+ * @section page_iface_wl_registry_desc Description
+ *
+ * The singleton global registry object.  The server has a number of
+ * global objects that are available to all clients.  These objects
+ * typically represent an actual object in the server (for example,
+ * an input device) or they are singleton objects that provide
+ * extension functionality.
+ *
+ * When a client creates a registry object, the registry object
+ * will emit a global event for each global currently in the
+ * registry.  Globals come and go as a result of device or
+ * monitor hotplugs, reconfiguration or other events, and the
+ * registry will send out global and global_remove events to
+ * keep the client up to date with the changes.  To mark the end
+ * of the initial burst of events, the client can use the
+ * wl_display.sync request immediately after calling
+ * wl_display.get_registry.
+ *
+ * A client can bind to a global object by using the bind
+ * request.  This creates a client-side handle that lets the object
+ * emit events to the client and lets the client invoke requests on
+ * the object.
+ * @section page_iface_wl_registry_api API
+ * See @ref iface_wl_registry.
+ */
+/**
+ * @defgroup iface_wl_registry The wl_registry interface
+ *
+ * The singleton global registry object.  The server has a number of
+ * global objects that are available to all clients.  These objects
+ * typically represent an actual object in the server (for example,
+ * an input device) or they are singleton objects that provide
+ * extension functionality.
+ *
+ * When a client creates a registry object, the registry object
+ * will emit a global event for each global currently in the
+ * registry.  Globals come and go as a result of device or
+ * monitor hotplugs, reconfiguration or other events, and the
+ * registry will send out global and global_remove events to
+ * keep the client up to date with the changes.  To mark the end
+ * of the initial burst of events, the client can use the
+ * wl_display.sync request immediately after calling
+ * wl_display.get_registry.
+ *
+ * A client can bind to a global object by using the bind
+ * request.  This creates a client-side handle that lets the object
+ * emit events to the client and lets the client invoke requests on
+ * the object.
+ */
+extern const struct wl_interface wl_registry_interface;
+#endif
+#ifndef WL_CALLBACK_INTERFACE
+#define WL_CALLBACK_INTERFACE
+/**
+ * @page page_iface_wl_callback wl_callback
+ * @section page_iface_wl_callback_desc Description
+ *
+ * Clients can handle the 'done' event to get notified when
+ * the related request is done.
+ * @section page_iface_wl_callback_api API
+ * See @ref iface_wl_callback.
+ */
+/**
+ * @defgroup iface_wl_callback The wl_callback interface
+ *
+ * Clients can handle the 'done' event to get notified when
+ * the related request is done.
+ */
+extern const struct wl_interface wl_callback_interface;
+#endif
+#ifndef WL_COMPOSITOR_INTERFACE
+#define WL_COMPOSITOR_INTERFACE
+/**
+ * @page page_iface_wl_compositor wl_compositor
+ * @section page_iface_wl_compositor_desc Description
+ *
+ * A compositor.  This object is a singleton global.  The
+ * compositor is in charge of combining the contents of multiple
+ * surfaces into one displayable output.
+ * @section page_iface_wl_compositor_api API
+ * See @ref iface_wl_compositor.
+ */
+/**
+ * @defgroup iface_wl_compositor The wl_compositor interface
+ *
+ * A compositor.  This object is a singleton global.  The
+ * compositor is in charge of combining the contents of multiple
+ * surfaces into one displayable output.
+ */
+extern const struct wl_interface wl_compositor_interface;
+#endif
+#ifndef WL_SHM_POOL_INTERFACE
+#define WL_SHM_POOL_INTERFACE
+/**
+ * @page page_iface_wl_shm_pool wl_shm_pool
+ * @section page_iface_wl_shm_pool_desc Description
+ *
+ * The wl_shm_pool object encapsulates a piece of memory shared
+ * between the compositor and client.  Through the wl_shm_pool
+ * object, the client can allocate shared memory wl_buffer objects.
+ * All objects created through the same pool share the same
+ * underlying mapped memory. Reusing the mapped memory avoids the
+ * setup/teardown overhead and is useful when interactively resizing
+ * a surface or for many small buffers.
+ * @section page_iface_wl_shm_pool_api API
+ * See @ref iface_wl_shm_pool.
+ */
+/**
+ * @defgroup iface_wl_shm_pool The wl_shm_pool interface
+ *
+ * The wl_shm_pool object encapsulates a piece of memory shared
+ * between the compositor and client.  Through the wl_shm_pool
+ * object, the client can allocate shared memory wl_buffer objects.
+ * All objects created through the same pool share the same
+ * underlying mapped memory. Reusing the mapped memory avoids the
+ * setup/teardown overhead and is useful when interactively resizing
+ * a surface or for many small buffers.
+ */
+extern const struct wl_interface wl_shm_pool_interface;
+#endif
+#ifndef WL_SHM_INTERFACE
+#define WL_SHM_INTERFACE
+/**
+ * @page page_iface_wl_shm wl_shm
+ * @section page_iface_wl_shm_desc Description
+ *
+ * A singleton global object that provides support for shared
+ * memory.
+ *
+ * Clients can create wl_shm_pool objects using the create_pool
+ * request.
+ *
+ * On binding the wl_shm object one or more format events
+ * are emitted to inform clients about the valid pixel formats
+ * that can be used for buffers.
+ * @section page_iface_wl_shm_api API
+ * See @ref iface_wl_shm.
+ */
+/**
+ * @defgroup iface_wl_shm The wl_shm interface
+ *
+ * A singleton global object that provides support for shared
+ * memory.
+ *
+ * Clients can create wl_shm_pool objects using the create_pool
+ * request.
+ *
+ * On binding the wl_shm object one or more format events
+ * are emitted to inform clients about the valid pixel formats
+ * that can be used for buffers.
+ */
+extern const struct wl_interface wl_shm_interface;
+#endif
+#ifndef WL_BUFFER_INTERFACE
+#define WL_BUFFER_INTERFACE
+/**
+ * @page page_iface_wl_buffer wl_buffer
+ * @section page_iface_wl_buffer_desc Description
+ *
+ * A buffer provides the content for a wl_surface. Buffers are
+ * created through factory interfaces such as wl_shm, wp_linux_buffer_params
+ * (from the linux-dmabuf protocol extension) or similar. It has a width and
+ * a height and can be attached to a wl_surface, but the mechanism by which a
+ * client provides and updates the contents is defined by the buffer factory
+ * interface.
+ *
+ * If the buffer uses a format that has an alpha channel, the alpha channel
+ * is assumed to be premultiplied in the color channels unless otherwise
+ * specified.
+ * @section page_iface_wl_buffer_api API
+ * See @ref iface_wl_buffer.
+ */
+/**
+ * @defgroup iface_wl_buffer The wl_buffer interface
+ *
+ * A buffer provides the content for a wl_surface. Buffers are
+ * created through factory interfaces such as wl_shm, wp_linux_buffer_params
+ * (from the linux-dmabuf protocol extension) or similar. It has a width and
+ * a height and can be attached to a wl_surface, but the mechanism by which a
+ * client provides and updates the contents is defined by the buffer factory
+ * interface.
+ *
+ * If the buffer uses a format that has an alpha channel, the alpha channel
+ * is assumed to be premultiplied in the color channels unless otherwise
+ * specified.
+ */
+extern const struct wl_interface wl_buffer_interface;
+#endif
+#ifndef WL_DATA_OFFER_INTERFACE
+#define WL_DATA_OFFER_INTERFACE
+/**
+ * @page page_iface_wl_data_offer wl_data_offer
+ * @section page_iface_wl_data_offer_desc Description
+ *
+ * A wl_data_offer represents a piece of data offered for transfer
+ * by another client (the source client).  It is used by the
+ * copy-and-paste and drag-and-drop mechanisms.  The offer
+ * describes the different mime types that the data can be
+ * converted to and provides the mechanism for transferring the
+ * data directly from the source client.
+ * @section page_iface_wl_data_offer_api API
+ * See @ref iface_wl_data_offer.
+ */
+/**
+ * @defgroup iface_wl_data_offer The wl_data_offer interface
+ *
+ * A wl_data_offer represents a piece of data offered for transfer
+ * by another client (the source client).  It is used by the
+ * copy-and-paste and drag-and-drop mechanisms.  The offer
+ * describes the different mime types that the data can be
+ * converted to and provides the mechanism for transferring the
+ * data directly from the source client.
+ */
+extern const struct wl_interface wl_data_offer_interface;
+#endif
+#ifndef WL_DATA_SOURCE_INTERFACE
+#define WL_DATA_SOURCE_INTERFACE
+/**
+ * @page page_iface_wl_data_source wl_data_source
+ * @section page_iface_wl_data_source_desc Description
+ *
+ * The wl_data_source object is the source side of a wl_data_offer.
+ * It is created by the source client in a data transfer and
+ * provides a way to describe the offered data and a way to respond
+ * to requests to transfer the data.
+ * @section page_iface_wl_data_source_api API
+ * See @ref iface_wl_data_source.
+ */
+/**
+ * @defgroup iface_wl_data_source The wl_data_source interface
+ *
+ * The wl_data_source object is the source side of a wl_data_offer.
+ * It is created by the source client in a data transfer and
+ * provides a way to describe the offered data and a way to respond
+ * to requests to transfer the data.
+ */
+extern const struct wl_interface wl_data_source_interface;
+#endif
+#ifndef WL_DATA_DEVICE_INTERFACE
+#define WL_DATA_DEVICE_INTERFACE
+/**
+ * @page page_iface_wl_data_device wl_data_device
+ * @section page_iface_wl_data_device_desc Description
+ *
+ * There is one wl_data_device per seat which can be obtained
+ * from the global wl_data_device_manager singleton.
+ *
+ * A wl_data_device provides access to inter-client data transfer
+ * mechanisms such as copy-and-paste and drag-and-drop.
+ * @section page_iface_wl_data_device_api API
+ * See @ref iface_wl_data_device.
+ */
+/**
+ * @defgroup iface_wl_data_device The wl_data_device interface
+ *
+ * There is one wl_data_device per seat which can be obtained
+ * from the global wl_data_device_manager singleton.
+ *
+ * A wl_data_device provides access to inter-client data transfer
+ * mechanisms such as copy-and-paste and drag-and-drop.
+ */
+extern const struct wl_interface wl_data_device_interface;
+#endif
+#ifndef WL_DATA_DEVICE_MANAGER_INTERFACE
+#define WL_DATA_DEVICE_MANAGER_INTERFACE
+/**
+ * @page page_iface_wl_data_device_manager wl_data_device_manager
+ * @section page_iface_wl_data_device_manager_desc Description
+ *
+ * The wl_data_device_manager is a singleton global object that
+ * provides access to inter-client data transfer mechanisms such as
+ * copy-and-paste and drag-and-drop.  These mechanisms are tied to
+ * a wl_seat and this interface lets a client get a wl_data_device
+ * corresponding to a wl_seat.
+ *
+ * Depending on the version bound, the objects created from the bound
+ * wl_data_device_manager object will have different requirements for
+ * functioning properly. See wl_data_source.set_actions,
+ * wl_data_offer.accept and wl_data_offer.finish for details.
+ * @section page_iface_wl_data_device_manager_api API
+ * See @ref iface_wl_data_device_manager.
+ */
+/**
+ * @defgroup iface_wl_data_device_manager The wl_data_device_manager interface
+ *
+ * The wl_data_device_manager is a singleton global object that
+ * provides access to inter-client data transfer mechanisms such as
+ * copy-and-paste and drag-and-drop.  These mechanisms are tied to
+ * a wl_seat and this interface lets a client get a wl_data_device
+ * corresponding to a wl_seat.
+ *
+ * Depending on the version bound, the objects created from the bound
+ * wl_data_device_manager object will have different requirements for
+ * functioning properly. See wl_data_source.set_actions,
+ * wl_data_offer.accept and wl_data_offer.finish for details.
+ */
+extern const struct wl_interface wl_data_device_manager_interface;
+#endif
+#ifndef WL_SHELL_INTERFACE
+#define WL_SHELL_INTERFACE
+/**
+ * @page page_iface_wl_shell wl_shell
+ * @section page_iface_wl_shell_desc Description
+ *
+ * This interface is implemented by servers that provide
+ * desktop-style user interfaces.
+ *
+ * It allows clients to associate a wl_shell_surface with
+ * a basic surface.
+ *
+ * Note! This protocol is deprecated and not intended for production use.
+ * For desktop-style user interfaces, use xdg_shell. Compositors and clients
+ * should not implement this interface.
+ * @section page_iface_wl_shell_api API
+ * See @ref iface_wl_shell.
+ */
+/**
+ * @defgroup iface_wl_shell The wl_shell interface
+ *
+ * This interface is implemented by servers that provide
+ * desktop-style user interfaces.
+ *
+ * It allows clients to associate a wl_shell_surface with
+ * a basic surface.
+ *
+ * Note! This protocol is deprecated and not intended for production use.
+ * For desktop-style user interfaces, use xdg_shell. Compositors and clients
+ * should not implement this interface.
+ */
+extern const struct wl_interface wl_shell_interface;
+#endif
+#ifndef WL_SHELL_SURFACE_INTERFACE
+#define WL_SHELL_SURFACE_INTERFACE
+/**
+ * @page page_iface_wl_shell_surface wl_shell_surface
+ * @section page_iface_wl_shell_surface_desc Description
+ *
+ * An interface that may be implemented by a wl_surface, for
+ * implementations that provide a desktop-style user interface.
+ *
+ * It provides requests to treat surfaces like toplevel, fullscreen
+ * or popup windows, move, resize or maximize them, associate
+ * metadata like title and class, etc.
+ *
+ * On the server side the object is automatically destroyed when
+ * the related wl_surface is destroyed. On the client side,
+ * wl_shell_surface_destroy() must be called before destroying
+ * the wl_surface object.
+ * @section page_iface_wl_shell_surface_api API
+ * See @ref iface_wl_shell_surface.
+ */
+/**
+ * @defgroup iface_wl_shell_surface The wl_shell_surface interface
+ *
+ * An interface that may be implemented by a wl_surface, for
+ * implementations that provide a desktop-style user interface.
+ *
+ * It provides requests to treat surfaces like toplevel, fullscreen
+ * or popup windows, move, resize or maximize them, associate
+ * metadata like title and class, etc.
+ *
+ * On the server side the object is automatically destroyed when
+ * the related wl_surface is destroyed. On the client side,
+ * wl_shell_surface_destroy() must be called before destroying
+ * the wl_surface object.
+ */
+extern const struct wl_interface wl_shell_surface_interface;
+#endif
+#ifndef WL_SURFACE_INTERFACE
+#define WL_SURFACE_INTERFACE
+/**
+ * @page page_iface_wl_surface wl_surface
+ * @section page_iface_wl_surface_desc Description
+ *
+ * A surface is a rectangular area that may be displayed on zero
+ * or more outputs, and shown any number of times at the compositor's
+ * discretion. They can present wl_buffers, receive user input, and
+ * define a local coordinate system.
+ *
+ * The size of a surface (and relative positions on it) is described
+ * in surface-local coordinates, which may differ from the buffer
+ * coordinates of the pixel content, in case a buffer_transform
+ * or a buffer_scale is used.
+ *
+ * A surface without a "role" is fairly useless: a compositor does
+ * not know where, when or how to present it. The role is the
+ * purpose of a wl_surface. Examples of roles are a cursor for a
+ * pointer (as set by wl_pointer.set_cursor), a drag icon
+ * (wl_data_device.start_drag), a sub-surface
+ * (wl_subcompositor.get_subsurface), and a window as defined by a
+ * shell protocol (e.g. wl_shell.get_shell_surface).
+ *
+ * A surface can have only one role at a time. Initially a
+ * wl_surface does not have a role. Once a wl_surface is given a
+ * role, it is set permanently for the whole lifetime of the
+ * wl_surface object. Giving the current role again is allowed,
+ * unless explicitly forbidden by the relevant interface
+ * specification.
+ *
+ * Surface roles are given by requests in other interfaces such as
+ * wl_pointer.set_cursor. The request should explicitly mention
+ * that this request gives a role to a wl_surface. Often, this
+ * request also creates a new protocol object that represents the
+ * role and adds additional functionality to wl_surface. When a
+ * client wants to destroy a wl_surface, they must destroy this 'role
+ * object' before the wl_surface.
+ *
+ * Destroying the role object does not remove the role from the
+ * wl_surface, but it may stop the wl_surface from "playing the role".
+ * For instance, if a wl_subsurface object is destroyed, the wl_surface
+ * it was created for will be unmapped and forget its position and
+ * z-order. It is allowed to create a wl_subsurface for the same
+ * wl_surface again, but it is not allowed to use the wl_surface as
+ * a cursor (cursor is a different role than sub-surface, and role
+ * switching is not allowed).
+ * @section page_iface_wl_surface_api API
+ * See @ref iface_wl_surface.
+ */
+/**
+ * @defgroup iface_wl_surface The wl_surface interface
+ *
+ * A surface is a rectangular area that may be displayed on zero
+ * or more outputs, and shown any number of times at the compositor's
+ * discretion. They can present wl_buffers, receive user input, and
+ * define a local coordinate system.
+ *
+ * The size of a surface (and relative positions on it) is described
+ * in surface-local coordinates, which may differ from the buffer
+ * coordinates of the pixel content, in case a buffer_transform
+ * or a buffer_scale is used.
+ *
+ * A surface without a "role" is fairly useless: a compositor does
+ * not know where, when or how to present it. The role is the
+ * purpose of a wl_surface. Examples of roles are a cursor for a
+ * pointer (as set by wl_pointer.set_cursor), a drag icon
+ * (wl_data_device.start_drag), a sub-surface
+ * (wl_subcompositor.get_subsurface), and a window as defined by a
+ * shell protocol (e.g. wl_shell.get_shell_surface).
+ *
+ * A surface can have only one role at a time. Initially a
+ * wl_surface does not have a role. Once a wl_surface is given a
+ * role, it is set permanently for the whole lifetime of the
+ * wl_surface object. Giving the current role again is allowed,
+ * unless explicitly forbidden by the relevant interface
+ * specification.
+ *
+ * Surface roles are given by requests in other interfaces such as
+ * wl_pointer.set_cursor. The request should explicitly mention
+ * that this request gives a role to a wl_surface. Often, this
+ * request also creates a new protocol object that represents the
+ * role and adds additional functionality to wl_surface. When a
+ * client wants to destroy a wl_surface, they must destroy this 'role
+ * object' before the wl_surface.
+ *
+ * Destroying the role object does not remove the role from the
+ * wl_surface, but it may stop the wl_surface from "playing the role".
+ * For instance, if a wl_subsurface object is destroyed, the wl_surface
+ * it was created for will be unmapped and forget its position and
+ * z-order. It is allowed to create a wl_subsurface for the same
+ * wl_surface again, but it is not allowed to use the wl_surface as
+ * a cursor (cursor is a different role than sub-surface, and role
+ * switching is not allowed).
+ */
+extern const struct wl_interface wl_surface_interface;
+#endif
+#ifndef WL_SEAT_INTERFACE
+#define WL_SEAT_INTERFACE
+/**
+ * @page page_iface_wl_seat wl_seat
+ * @section page_iface_wl_seat_desc Description
+ *
+ * A seat is a group of keyboards, pointer and touch devices. This
+ * object is published as a global during start up, or when such a
+ * device is hot plugged.  A seat typically has a pointer and
+ * maintains a keyboard focus and a pointer focus.
+ * @section page_iface_wl_seat_api API
+ * See @ref iface_wl_seat.
+ */
+/**
+ * @defgroup iface_wl_seat The wl_seat interface
+ *
+ * A seat is a group of keyboards, pointer and touch devices. This
+ * object is published as a global during start up, or when such a
+ * device is hot plugged.  A seat typically has a pointer and
+ * maintains a keyboard focus and a pointer focus.
+ */
+extern const struct wl_interface wl_seat_interface;
+#endif
+#ifndef WL_POINTER_INTERFACE
+#define WL_POINTER_INTERFACE
+/**
+ * @page page_iface_wl_pointer wl_pointer
+ * @section page_iface_wl_pointer_desc Description
+ *
+ * The wl_pointer interface represents one or more input devices,
+ * such as mice, which control the pointer location and pointer_focus
+ * of a seat.
+ *
+ * The wl_pointer interface generates motion, enter and leave
+ * events for the surfaces that the pointer is located over,
+ * and button and axis events for button presses, button releases
+ * and scrolling.
+ * @section page_iface_wl_pointer_api API
+ * See @ref iface_wl_pointer.
+ */
+/**
+ * @defgroup iface_wl_pointer The wl_pointer interface
+ *
+ * The wl_pointer interface represents one or more input devices,
+ * such as mice, which control the pointer location and pointer_focus
+ * of a seat.
+ *
+ * The wl_pointer interface generates motion, enter and leave
+ * events for the surfaces that the pointer is located over,
+ * and button and axis events for button presses, button releases
+ * and scrolling.
+ */
+extern const struct wl_interface wl_pointer_interface;
+#endif
+#ifndef WL_KEYBOARD_INTERFACE
+#define WL_KEYBOARD_INTERFACE
+/**
+ * @page page_iface_wl_keyboard wl_keyboard
+ * @section page_iface_wl_keyboard_desc Description
+ *
+ * The wl_keyboard interface represents one or more keyboards
+ * associated with a seat.
+ * @section page_iface_wl_keyboard_api API
+ * See @ref iface_wl_keyboard.
+ */
+/**
+ * @defgroup iface_wl_keyboard The wl_keyboard interface
+ *
+ * The wl_keyboard interface represents one or more keyboards
+ * associated with a seat.
+ */
+extern const struct wl_interface wl_keyboard_interface;
+#endif
+#ifndef WL_TOUCH_INTERFACE
+#define WL_TOUCH_INTERFACE
+/**
+ * @page page_iface_wl_touch wl_touch
+ * @section page_iface_wl_touch_desc Description
+ *
+ * The wl_touch interface represents a touchscreen
+ * associated with a seat.
+ *
+ * Touch interactions can consist of one or more contacts.
+ * For each contact, a series of events is generated, starting
+ * with a down event, followed by zero or more motion events,
+ * and ending with an up event. Events relating to the same
+ * contact point can be identified by the ID of the sequence.
+ * @section page_iface_wl_touch_api API
+ * See @ref iface_wl_touch.
+ */
+/**
+ * @defgroup iface_wl_touch The wl_touch interface
+ *
+ * The wl_touch interface represents a touchscreen
+ * associated with a seat.
+ *
+ * Touch interactions can consist of one or more contacts.
+ * For each contact, a series of events is generated, starting
+ * with a down event, followed by zero or more motion events,
+ * and ending with an up event. Events relating to the same
+ * contact point can be identified by the ID of the sequence.
+ */
+extern const struct wl_interface wl_touch_interface;
+#endif
+#ifndef WL_OUTPUT_INTERFACE
+#define WL_OUTPUT_INTERFACE
+/**
+ * @page page_iface_wl_output wl_output
+ * @section page_iface_wl_output_desc Description
+ *
+ * An output describes part of the compositor geometry.  The
+ * compositor works in the 'compositor coordinate system' and an
+ * output corresponds to a rectangular area in that space that is
+ * actually visible.  This typically corresponds to a monitor that
+ * displays part of the compositor space.  This object is published
+ * as global during start up, or when a monitor is hotplugged.
+ * @section page_iface_wl_output_api API
+ * See @ref iface_wl_output.
+ */
+/**
+ * @defgroup iface_wl_output The wl_output interface
+ *
+ * An output describes part of the compositor geometry.  The
+ * compositor works in the 'compositor coordinate system' and an
+ * output corresponds to a rectangular area in that space that is
+ * actually visible.  This typically corresponds to a monitor that
+ * displays part of the compositor space.  This object is published
+ * as global during start up, or when a monitor is hotplugged.
+ */
+extern const struct wl_interface wl_output_interface;
+#endif
+#ifndef WL_REGION_INTERFACE
+#define WL_REGION_INTERFACE
+/**
+ * @page page_iface_wl_region wl_region
+ * @section page_iface_wl_region_desc Description
+ *
+ * A region object describes an area.
+ *
+ * Region objects are used to describe the opaque and input
+ * regions of a surface.
+ * @section page_iface_wl_region_api API
+ * See @ref iface_wl_region.
+ */
+/**
+ * @defgroup iface_wl_region The wl_region interface
+ *
+ * A region object describes an area.
+ *
+ * Region objects are used to describe the opaque and input
+ * regions of a surface.
+ */
+extern const struct wl_interface wl_region_interface;
+#endif
+#ifndef WL_SUBCOMPOSITOR_INTERFACE
+#define WL_SUBCOMPOSITOR_INTERFACE
+/**
+ * @page page_iface_wl_subcompositor wl_subcompositor
+ * @section page_iface_wl_subcompositor_desc Description
+ *
+ * The global interface exposing sub-surface compositing capabilities.
+ * A wl_surface that has sub-surfaces associated is called the
+ * parent surface. Sub-surfaces can be arbitrarily nested and create
+ * a tree of sub-surfaces.
+ *
+ * The root surface in a tree of sub-surfaces is the main
+ * surface. The main surface cannot be a sub-surface, because
+ * sub-surfaces must always have a parent.
+ *
+ * A main surface with its sub-surfaces forms a (compound) window.
+ * For window management purposes, this set of wl_surface objects is
+ * to be considered as a single window, and it should also behave as
+ * such.
+ *
+ * The aim of sub-surfaces is to offload some of the compositing work
+ * within a window from clients to the compositor. A prime example is
+ * a video player with decorations and video in separate wl_surface
+ * objects. This should allow the compositor to pass YUV video buffer
+ * processing to dedicated overlay hardware when possible.
+ * @section page_iface_wl_subcompositor_api API
+ * See @ref iface_wl_subcompositor.
+ */
+/**
+ * @defgroup iface_wl_subcompositor The wl_subcompositor interface
+ *
+ * The global interface exposing sub-surface compositing capabilities.
+ * A wl_surface that has sub-surfaces associated is called the
+ * parent surface. Sub-surfaces can be arbitrarily nested and create
+ * a tree of sub-surfaces.
+ *
+ * The root surface in a tree of sub-surfaces is the main
+ * surface. The main surface cannot be a sub-surface, because
+ * sub-surfaces must always have a parent.
+ *
+ * A main surface with its sub-surfaces forms a (compound) window.
+ * For window management purposes, this set of wl_surface objects is
+ * to be considered as a single window, and it should also behave as
+ * such.
+ *
+ * The aim of sub-surfaces is to offload some of the compositing work
+ * within a window from clients to the compositor. A prime example is
+ * a video player with decorations and video in separate wl_surface
+ * objects. This should allow the compositor to pass YUV video buffer
+ * processing to dedicated overlay hardware when possible.
+ */
+extern const struct wl_interface wl_subcompositor_interface;
+#endif
+#ifndef WL_SUBSURFACE_INTERFACE
+#define WL_SUBSURFACE_INTERFACE
+/**
+ * @page page_iface_wl_subsurface wl_subsurface
+ * @section page_iface_wl_subsurface_desc Description
+ *
+ * An additional interface to a wl_surface object, which has been
+ * made a sub-surface. A sub-surface has one parent surface. A
+ * sub-surface's size and position are not limited to that of the parent.
+ * Particularly, a sub-surface is not automatically clipped to its
+ * parent's area.
+ *
+ * A sub-surface becomes mapped, when a non-NULL wl_buffer is applied
+ * and the parent surface is mapped. The order of which one happens
+ * first is irrelevant. A sub-surface is hidden if the parent becomes
+ * hidden, or if a NULL wl_buffer is applied. These rules apply
+ * recursively through the tree of surfaces.
+ *
+ * The behaviour of a wl_surface.commit request on a sub-surface
+ * depends on the sub-surface's mode. The possible modes are
+ * synchronized and desynchronized, see methods
+ * wl_subsurface.set_sync and wl_subsurface.set_desync. Synchronized
+ * mode caches the wl_surface state to be applied when the parent's
+ * state gets applied, and desynchronized mode applies the pending
+ * wl_surface state directly. A sub-surface is initially in the
+ * synchronized mode.
+ *
+ * Sub-surfaces also have another kind of state, which is managed by
+ * wl_subsurface requests, as opposed to wl_surface requests. This
+ * state includes the sub-surface position relative to the parent
+ * surface (wl_subsurface.set_position), and the stacking order of
+ * the parent and its sub-surfaces (wl_subsurface.place_above and
+ * .place_below). This state is applied when the parent surface's
+ * wl_surface state is applied, regardless of the sub-surface's mode.
+ * As the exception, set_sync and set_desync are effective immediately.
+ *
+ * The main surface can be thought to be always in desynchronized mode,
+ * since it does not have a parent in the sub-surfaces sense.
+ *
+ * Even if a sub-surface is in desynchronized mode, it will behave as
+ * in synchronized mode, if its parent surface behaves as in
+ * synchronized mode. This rule is applied recursively throughout the
+ * tree of surfaces. This means, that one can set a sub-surface into
+ * synchronized mode, and then assume that all its child and grand-child
+ * sub-surfaces are synchronized, too, without explicitly setting them.
+ *
+ * If the wl_surface associated with the wl_subsurface is destroyed, the
+ * wl_subsurface object becomes inert. Note that destroying either object
+ * takes effect immediately. If you need to synchronize the removal
+ * of a sub-surface to the parent surface update, unmap the sub-surface
+ * first by attaching a NULL wl_buffer, update parent, and then destroy
+ * the sub-surface.
+ *
+ * If the parent wl_surface object is destroyed, the sub-surface is
+ * unmapped.
+ * @section page_iface_wl_subsurface_api API
+ * See @ref iface_wl_subsurface.
+ */
+/**
+ * @defgroup iface_wl_subsurface The wl_subsurface interface
+ *
+ * An additional interface to a wl_surface object, which has been
+ * made a sub-surface. A sub-surface has one parent surface. A
+ * sub-surface's size and position are not limited to that of the parent.
+ * Particularly, a sub-surface is not automatically clipped to its
+ * parent's area.
+ *
+ * A sub-surface becomes mapped, when a non-NULL wl_buffer is applied
+ * and the parent surface is mapped. The order of which one happens
+ * first is irrelevant. A sub-surface is hidden if the parent becomes
+ * hidden, or if a NULL wl_buffer is applied. These rules apply
+ * recursively through the tree of surfaces.
+ *
+ * The behaviour of a wl_surface.commit request on a sub-surface
+ * depends on the sub-surface's mode. The possible modes are
+ * synchronized and desynchronized, see methods
+ * wl_subsurface.set_sync and wl_subsurface.set_desync. Synchronized
+ * mode caches the wl_surface state to be applied when the parent's
+ * state gets applied, and desynchronized mode applies the pending
+ * wl_surface state directly. A sub-surface is initially in the
+ * synchronized mode.
+ *
+ * Sub-surfaces also have another kind of state, which is managed by
+ * wl_subsurface requests, as opposed to wl_surface requests. This
+ * state includes the sub-surface position relative to the parent
+ * surface (wl_subsurface.set_position), and the stacking order of
+ * the parent and its sub-surfaces (wl_subsurface.place_above and
+ * .place_below). This state is applied when the parent surface's
+ * wl_surface state is applied, regardless of the sub-surface's mode.
+ * As the exception, set_sync and set_desync are effective immediately.
+ *
+ * The main surface can be thought to be always in desynchronized mode,
+ * since it does not have a parent in the sub-surfaces sense.
+ *
+ * Even if a sub-surface is in desynchronized mode, it will behave as
+ * in synchronized mode, if its parent surface behaves as in
+ * synchronized mode. This rule is applied recursively throughout the
+ * tree of surfaces. This means, that one can set a sub-surface into
+ * synchronized mode, and then assume that all its child and grand-child
+ * sub-surfaces are synchronized, too, without explicitly setting them.
+ *
+ * If the wl_surface associated with the wl_subsurface is destroyed, the
+ * wl_subsurface object becomes inert. Note that destroying either object
+ * takes effect immediately. If you need to synchronize the removal
+ * of a sub-surface to the parent surface update, unmap the sub-surface
+ * first by attaching a NULL wl_buffer, update parent, and then destroy
+ * the sub-surface.
+ *
+ * If the parent wl_surface object is destroyed, the sub-surface is
+ * unmapped.
+ */
+extern const struct wl_interface wl_subsurface_interface;
+#endif
+
+#ifndef WL_DISPLAY_ERROR_ENUM
+#define WL_DISPLAY_ERROR_ENUM
+/**
+ * @ingroup iface_wl_display
+ * global error values
+ *
+ * These errors are global and can be emitted in response to any
+ * server request.
+ */
+enum wl_display_error {
+	/**
+	 * server couldn't find object
+	 */
+	WL_DISPLAY_ERROR_INVALID_OBJECT = 0,
+	/**
+	 * method doesn't exist on the specified interface or malformed request
+	 */
+	WL_DISPLAY_ERROR_INVALID_METHOD = 1,
+	/**
+	 * server is out of memory
+	 */
+	WL_DISPLAY_ERROR_NO_MEMORY = 2,
+	/**
+	 * implementation error in compositor
+	 */
+	WL_DISPLAY_ERROR_IMPLEMENTATION = 3,
+};
+#endif /* WL_DISPLAY_ERROR_ENUM */
+
+/**
+ * @ingroup iface_wl_display
+ * @struct wl_display_listener
+ */
+struct wl_display_listener {
+	/**
+	 * fatal error event
+	 *
+	 * The error event is sent out when a fatal (non-recoverable)
+	 * error has occurred. The object_id argument is the object where
+	 * the error occurred, most often in response to a request to that
+	 * object. The code identifies the error and is defined by the
+	 * object interface. As such, each interface defines its own set of
+	 * error codes. The message is a brief description of the error,
+	 * for (debugging) convenience.
+	 * @param object_id object where the error occurred
+	 * @param code error code
+	 * @param message error description
+	 */
+	void (*error)(void *data,
+		      struct wl_display *wl_display,
+		      void *object_id,
+		      uint32_t code,
+		      const char *message);
+	/**
+	 * acknowledge object ID deletion
+	 *
+	 * This event is used internally by the object ID management
+	 * logic. When a client deletes an object that it had created, the
+	 * server will send this event to acknowledge that it has seen the
+	 * delete request. When the client receives this event, it will
+	 * know that it can safely reuse the object ID.
+	 * @param id deleted object ID
+	 */
+	void (*delete_id)(void *data,
+			  struct wl_display *wl_display,
+			  uint32_t id);
+};
+
+/**
+ * @ingroup iface_wl_display
+ */
+static inline int
+wl_display_add_listener(struct wl_display *wl_display,
+			const struct wl_display_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_display,
+				     (void (**)(void)) listener, data);
+}
+
+#define WL_DISPLAY_SYNC 0
+#define WL_DISPLAY_GET_REGISTRY 1
+
+/**
+ * @ingroup iface_wl_display
+ */
+#define WL_DISPLAY_ERROR_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_display
+ */
+#define WL_DISPLAY_DELETE_ID_SINCE_VERSION 1
+
+/**
+ * @ingroup iface_wl_display
+ */
+#define WL_DISPLAY_SYNC_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_display
+ */
+#define WL_DISPLAY_GET_REGISTRY_SINCE_VERSION 1
+
+/** @ingroup iface_wl_display */
+static inline void
+wl_display_set_user_data(struct wl_display *wl_display, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_display, user_data);
+}
+
+/** @ingroup iface_wl_display */
+static inline void *
+wl_display_get_user_data(struct wl_display *wl_display)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_display);
+}
+
+static inline uint32_t
+wl_display_get_version(struct wl_display *wl_display)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_display);
+}
+
+/**
+ * @ingroup iface_wl_display
+ *
+ * The sync request asks the server to emit the 'done' event
+ * on the returned wl_callback object.  Since requests are
+ * handled in-order and events are delivered in-order, this can
+ * be used as a barrier to ensure all previous requests and the
+ * resulting events have been handled.
+ *
+ * The object returned by this request will be destroyed by the
+ * compositor after the callback is fired and as such the client must not
+ * attempt to use it after that point.
+ *
+ * The callback_data passed in the callback is the event serial.
+ */
+static inline struct wl_callback *
+wl_display_sync(struct wl_display *wl_display)
+{
+	struct wl_proxy *callback;
+
+	callback = wl_proxy_marshal_flags((struct wl_proxy *) wl_display,
+			 WL_DISPLAY_SYNC, &wl_callback_interface, wl_proxy_get_version((struct wl_proxy *) wl_display), 0, NULL);
+
+	return (struct wl_callback *) callback;
+}
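+/*
+ * Illustrative sketch (not generated): using wl_display_sync() as a
+ * barrier, which is essentially what wl_display_roundtrip() does.
+ * wl_callback_add_listener() and wl_callback_destroy() are declared
+ * further down in this header; error handling is omitted.
+ *
+ *   static void sync_done(void *data, struct wl_callback *cb, uint32_t serial)
+ *   {
+ *           *(int *)data = 1;        // barrier reached
+ *           wl_callback_destroy(cb); // 'done' fires exactly once
+ *   }
+ *   static const struct wl_callback_listener sync_listener = { sync_done };
+ *
+ *   int done = 0;
+ *   struct wl_callback *cb = wl_display_sync(display);
+ *   wl_callback_add_listener(cb, &sync_listener, &done);
+ *   while (!done && wl_display_dispatch(display) != -1)
+ *           ;                        // dispatch until the serial comes back
+ */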
+
+/**
+ * @ingroup iface_wl_display
+ *
+ * This request creates a registry object that allows the client
+ * to list and bind the global objects available from the
+ * compositor.
+ *
+ * It should be noted that the server side resources consumed in
+ * response to a get_registry request can only be released when the
+ * client disconnects, not when the client side proxy is destroyed.
+ * Therefore, clients should invoke get_registry as infrequently as
+ * possible to avoid wasting memory.
+ */
+static inline struct wl_registry *
+wl_display_get_registry(struct wl_display *wl_display)
+{
+	struct wl_proxy *registry;
+
+	registry = wl_proxy_marshal_flags((struct wl_proxy *) wl_display,
+			 WL_DISPLAY_GET_REGISTRY, &wl_registry_interface, wl_proxy_get_version((struct wl_proxy *) wl_display), 0, NULL);
+
+	return (struct wl_registry *) registry;
+}
+
+/**
+ * @ingroup iface_wl_registry
+ * @struct wl_registry_listener
+ */
+struct wl_registry_listener {
+	/**
+	 * announce global object
+	 *
+	 * Notify the client of global objects.
+	 *
+	 * The event notifies the client that a global object with the
+	 * given name is now available, and it implements the given version
+	 * of the given interface.
+	 * @param name numeric name of the global object
+	 * @param interface interface implemented by the object
+	 * @param version interface version
+	 */
+	void (*global)(void *data,
+		       struct wl_registry *wl_registry,
+		       uint32_t name,
+		       const char *interface,
+		       uint32_t version);
+	/**
+	 * announce removal of global object
+	 *
+	 * Notify the client of removed global objects.
+	 *
+	 * This event notifies the client that the global identified by
+	 * name is no longer available. If the client bound to the global
+	 * using the bind request, the client should now destroy that
+	 * object.
+	 *
+	 * The object remains valid and requests to the object will be
+	 * ignored until the client destroys it, to avoid races between the
+	 * global going away and a client sending a request to it.
+	 * @param name numeric name of the global object
+	 */
+	void (*global_remove)(void *data,
+			      struct wl_registry *wl_registry,
+			      uint32_t name);
+};
+
+/**
+ * @ingroup iface_wl_registry
+ */
+static inline int
+wl_registry_add_listener(struct wl_registry *wl_registry,
+			 const struct wl_registry_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_registry,
+				     (void (**)(void)) listener, data);
+}
+
+#define WL_REGISTRY_BIND 0
+
+/**
+ * @ingroup iface_wl_registry
+ */
+#define WL_REGISTRY_GLOBAL_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_registry
+ */
+#define WL_REGISTRY_GLOBAL_REMOVE_SINCE_VERSION 1
+
+/**
+ * @ingroup iface_wl_registry
+ */
+#define WL_REGISTRY_BIND_SINCE_VERSION 1
+
+/** @ingroup iface_wl_registry */
+static inline void
+wl_registry_set_user_data(struct wl_registry *wl_registry, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_registry, user_data);
+}
+
+/** @ingroup iface_wl_registry */
+static inline void *
+wl_registry_get_user_data(struct wl_registry *wl_registry)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_registry);
+}
+
+static inline uint32_t
+wl_registry_get_version(struct wl_registry *wl_registry)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_registry);
+}
+
+/** @ingroup iface_wl_registry */
+static inline void
+wl_registry_destroy(struct wl_registry *wl_registry)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_registry);
+}
+
+/**
+ * @ingroup iface_wl_registry
+ *
+ * Binds a new, client-created object to the server using the
+ * specified name as the identifier.
+ */
+static inline void *
+wl_registry_bind(struct wl_registry *wl_registry, uint32_t name, const struct wl_interface *interface, uint32_t version)
+{
+	struct wl_proxy *id;
+
+	id = wl_proxy_marshal_flags((struct wl_proxy *) wl_registry,
+			 WL_REGISTRY_BIND, interface, version, 0, name, interface->name, version, NULL);
+
+	return (void *) id;
+}
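+/*
+ * Illustrative sketch (not generated): a typical global handler that
+ * binds wl_compositor when the registry announces it. Assumes
+ * <string.h> for strcmp(); the version passed to wl_registry_bind()
+ * (1 here) must not exceed the version advertised in the event.
+ *
+ *   static void handle_global(void *data, struct wl_registry *registry,
+ *                             uint32_t name, const char *interface, uint32_t version)
+ *   {
+ *           struct wl_compositor **out = data;
+ *           if (strcmp(interface, wl_compositor_interface.name) == 0)
+ *                   *out = wl_registry_bind(registry, name,
+ *                                           &wl_compositor_interface, 1);
+ *   }
+ *   static void handle_global_remove(void *data, struct wl_registry *registry,
+ *                                    uint32_t name) { }
+ *   static const struct wl_registry_listener registry_listener = {
+ *           handle_global, handle_global_remove
+ *   };
+ *
+ *   struct wl_compositor *compositor = NULL;
+ *   struct wl_registry *registry = wl_display_get_registry(display);
+ *   wl_registry_add_listener(registry, &registry_listener, &compositor);
+ *   wl_display_roundtrip(display);  // wait for the initial burst of globals
+ */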
+
+/**
+ * @ingroup iface_wl_callback
+ * @struct wl_callback_listener
+ */
+struct wl_callback_listener {
+	/**
+	 * done event
+	 *
+	 * Notify the client when the related request is done.
+	 * @param callback_data request-specific data for the callback
+	 */
+	void (*done)(void *data,
+		     struct wl_callback *wl_callback,
+		     uint32_t callback_data);
+};
+
+/**
+ * @ingroup iface_wl_callback
+ */
+static inline int
+wl_callback_add_listener(struct wl_callback *wl_callback,
+			 const struct wl_callback_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_callback,
+				     (void (**)(void)) listener, data);
+}
+
+/**
+ * @ingroup iface_wl_callback
+ */
+#define WL_CALLBACK_DONE_SINCE_VERSION 1
+
+
+/** @ingroup iface_wl_callback */
+static inline void
+wl_callback_set_user_data(struct wl_callback *wl_callback, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_callback, user_data);
+}
+
+/** @ingroup iface_wl_callback */
+static inline void *
+wl_callback_get_user_data(struct wl_callback *wl_callback)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_callback);
+}
+
+static inline uint32_t
+wl_callback_get_version(struct wl_callback *wl_callback)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_callback);
+}
+
+/** @ingroup iface_wl_callback */
+static inline void
+wl_callback_destroy(struct wl_callback *wl_callback)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_callback);
+}
+
+#define WL_COMPOSITOR_CREATE_SURFACE 0
+#define WL_COMPOSITOR_CREATE_REGION 1
+
+
+/**
+ * @ingroup iface_wl_compositor
+ */
+#define WL_COMPOSITOR_CREATE_SURFACE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_compositor
+ */
+#define WL_COMPOSITOR_CREATE_REGION_SINCE_VERSION 1
+
+/** @ingroup iface_wl_compositor */
+static inline void
+wl_compositor_set_user_data(struct wl_compositor *wl_compositor, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_compositor, user_data);
+}
+
+/** @ingroup iface_wl_compositor */
+static inline void *
+wl_compositor_get_user_data(struct wl_compositor *wl_compositor)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_compositor);
+}
+
+static inline uint32_t
+wl_compositor_get_version(struct wl_compositor *wl_compositor)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_compositor);
+}
+
+/** @ingroup iface_wl_compositor */
+static inline void
+wl_compositor_destroy(struct wl_compositor *wl_compositor)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_compositor);
+}
+
+/**
+ * @ingroup iface_wl_compositor
+ *
+ * Ask the compositor to create a new surface.
+ */
+static inline struct wl_surface *
+wl_compositor_create_surface(struct wl_compositor *wl_compositor)
+{
+	struct wl_proxy *id;
+
+	id = wl_proxy_marshal_flags((struct wl_proxy *) wl_compositor,
+			 WL_COMPOSITOR_CREATE_SURFACE, &wl_surface_interface, wl_proxy_get_version((struct wl_proxy *) wl_compositor), 0, NULL);
+
+	return (struct wl_surface *) id;
+}
+
+/**
+ * @ingroup iface_wl_compositor
+ *
+ * Ask the compositor to create a new region.
+ */
+static inline struct wl_region *
+wl_compositor_create_region(struct wl_compositor *wl_compositor)
+{
+	struct wl_proxy *id;
+
+	id = wl_proxy_marshal_flags((struct wl_proxy *) wl_compositor,
+			 WL_COMPOSITOR_CREATE_REGION, &wl_region_interface, wl_proxy_get_version((struct wl_proxy *) wl_compositor), 0, NULL);
+
+	return (struct wl_region *) id;
+}
+
+#define WL_SHM_POOL_CREATE_BUFFER 0
+#define WL_SHM_POOL_DESTROY 1
+#define WL_SHM_POOL_RESIZE 2
+
+
+/**
+ * @ingroup iface_wl_shm_pool
+ */
+#define WL_SHM_POOL_CREATE_BUFFER_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shm_pool
+ */
+#define WL_SHM_POOL_DESTROY_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shm_pool
+ */
+#define WL_SHM_POOL_RESIZE_SINCE_VERSION 1
+
+/** @ingroup iface_wl_shm_pool */
+static inline void
+wl_shm_pool_set_user_data(struct wl_shm_pool *wl_shm_pool, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_shm_pool, user_data);
+}
+
+/** @ingroup iface_wl_shm_pool */
+static inline void *
+wl_shm_pool_get_user_data(struct wl_shm_pool *wl_shm_pool)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_shm_pool);
+}
+
+static inline uint32_t
+wl_shm_pool_get_version(struct wl_shm_pool *wl_shm_pool)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_shm_pool);
+}
+
+/**
+ * @ingroup iface_wl_shm_pool
+ *
+ * Create a wl_buffer object from the pool.
+ *
+ * The buffer is created offset bytes into the pool and has
+ * width and height as specified.  The stride argument specifies
+ * the number of bytes from the beginning of one row to the beginning
+ * of the next.  The format is the pixel format of the buffer and
+ * must be one of those advertised through the wl_shm.format event.
+ *
+ * A buffer will keep a reference to the pool it was created from
+ * so it is valid to destroy the pool immediately after creating
+ * a buffer from it.
+ */
+static inline struct wl_buffer *
+wl_shm_pool_create_buffer(struct wl_shm_pool *wl_shm_pool, int32_t offset, int32_t width, int32_t height, int32_t stride, uint32_t format)
+{
+	struct wl_proxy *id;
+
+	id = wl_proxy_marshal_flags((struct wl_proxy *) wl_shm_pool,
+			 WL_SHM_POOL_CREATE_BUFFER, &wl_buffer_interface, wl_proxy_get_version((struct wl_proxy *) wl_shm_pool), 0, NULL, offset, width, height, stride, format);
+
+	return (struct wl_buffer *) id;
+}
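+/*
+ * Illustrative sketch (not generated): backing a wl_buffer with shared
+ * memory. Assumes a bound `shm` proxy; wl_shm_create_pool() is declared
+ * further down in this header, and memfd_create() is Linux-specific
+ * (<sys/mman.h>, <unistd.h>, _GNU_SOURCE). Any other mmap-able file
+ * descriptor works as well. Error handling is omitted.
+ *
+ *   const int width = 256, height = 256, stride = width * 4;
+ *   const int size = stride * height;
+ *
+ *   int fd = memfd_create("wl_shm", MFD_CLOEXEC);
+ *   ftruncate(fd, size);
+ *   void *pixels = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
+ *
+ *   struct wl_shm_pool *pool = wl_shm_create_pool(shm, fd, size);
+ *   struct wl_buffer *buffer = wl_shm_pool_create_buffer(pool, 0, width, height,
+ *                                                        stride, WL_SHM_FORMAT_XRGB8888);
+ *   wl_shm_pool_destroy(pool);  // the buffer keeps the pool's memory alive
+ *   close(fd);                  // the fd may be closed once the request is marshalled
+ */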
+
+/**
+ * @ingroup iface_wl_shm_pool
+ *
+ * Destroy the shared memory pool.
+ *
+ * The mmapped memory will be released when all
+ * buffers that have been created from this pool
+ * are gone.
+ */
+static inline void
+wl_shm_pool_destroy(struct wl_shm_pool *wl_shm_pool)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_shm_pool,
+			 WL_SHM_POOL_DESTROY, NULL, wl_proxy_get_version((struct wl_proxy *) wl_shm_pool), WL_MARSHAL_FLAG_DESTROY);
+}
+
+/**
+ * @ingroup iface_wl_shm_pool
+ *
+ * This request will cause the server to remap the backing memory
+ * for the pool from the file descriptor passed when the pool was
+ * created, but using the new size.  This request can only be
+ * used to make the pool bigger.
+ *
+ * This request only changes the amount of bytes that are mmapped
+ * by the server and does not touch the file corresponding to the
+ * file descriptor passed at creation time. It is the client's
+ * responsibility to ensure that the file is at least as big as
+ * the new pool size.
+ */
+static inline void
+wl_shm_pool_resize(struct wl_shm_pool *wl_shm_pool, int32_t size)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_shm_pool,
+			 WL_SHM_POOL_RESIZE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_shm_pool), 0, size);
+}
+
+#ifndef WL_SHM_ERROR_ENUM
+#define WL_SHM_ERROR_ENUM
+/**
+ * @ingroup iface_wl_shm
+ * wl_shm error values
+ *
+ * These errors can be emitted in response to wl_shm requests.
+ */
+enum wl_shm_error {
+	/**
+	 * buffer format is not known
+	 */
+	WL_SHM_ERROR_INVALID_FORMAT = 0,
+	/**
+	 * invalid size or stride during pool or buffer creation
+	 */
+	WL_SHM_ERROR_INVALID_STRIDE = 1,
+	/**
+	 * mmapping the file descriptor failed
+	 */
+	WL_SHM_ERROR_INVALID_FD = 2,
+};
+#endif /* WL_SHM_ERROR_ENUM */
+
+#ifndef WL_SHM_FORMAT_ENUM
+#define WL_SHM_FORMAT_ENUM
+/**
+ * @ingroup iface_wl_shm
+ * pixel formats
+ *
+ * This describes the memory layout of an individual pixel.
+ *
+ * All renderers should support argb8888 and xrgb8888 but any other
+ * formats are optional and may not be supported by the particular
+ * renderer in use.
+ *
+ * The drm format codes match the macros defined in drm_fourcc.h, except
+ * argb8888 and xrgb8888. The formats actually supported by the compositor
+ * will be reported by the format event.
+ *
+ * For all wl_shm formats and unless specified in another protocol
+ * extension, pre-multiplied alpha is used for pixel values.
+ */
+enum wl_shm_format {
+	/**
+	 * 32-bit ARGB format, [31:0] A:R:G:B 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_ARGB8888 = 0,
+	/**
+	 * 32-bit RGB format, [31:0] x:R:G:B 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_XRGB8888 = 1,
+	/**
+	 * 8-bit color index format, [7:0] C
+	 */
+	WL_SHM_FORMAT_C8 = 0x20203843,
+	/**
+	 * 8-bit RGB format, [7:0] R:G:B 3:3:2
+	 */
+	WL_SHM_FORMAT_RGB332 = 0x38424752,
+	/**
+	 * 8-bit BGR format, [7:0] B:G:R 2:3:3
+	 */
+	WL_SHM_FORMAT_BGR233 = 0x38524742,
+	/**
+	 * 16-bit xRGB format, [15:0] x:R:G:B 4:4:4:4 little endian
+	 */
+	WL_SHM_FORMAT_XRGB4444 = 0x32315258,
+	/**
+	 * 16-bit xBGR format, [15:0] x:B:G:R 4:4:4:4 little endian
+	 */
+	WL_SHM_FORMAT_XBGR4444 = 0x32314258,
+	/**
+	 * 16-bit RGBx format, [15:0] R:G:B:x 4:4:4:4 little endian
+	 */
+	WL_SHM_FORMAT_RGBX4444 = 0x32315852,
+	/**
+	 * 16-bit BGRx format, [15:0] B:G:R:x 4:4:4:4 little endian
+	 */
+	WL_SHM_FORMAT_BGRX4444 = 0x32315842,
+	/**
+	 * 16-bit ARGB format, [15:0] A:R:G:B 4:4:4:4 little endian
+	 */
+	WL_SHM_FORMAT_ARGB4444 = 0x32315241,
+	/**
+	 * 16-bit ABGR format, [15:0] A:B:G:R 4:4:4:4 little endian
+	 */
+	WL_SHM_FORMAT_ABGR4444 = 0x32314241,
+	/**
+	 * 16-bit RGBA format, [15:0] R:G:B:A 4:4:4:4 little endian
+	 */
+	WL_SHM_FORMAT_RGBA4444 = 0x32314152,
+	/**
+	 * 16-bit BGRA format, [15:0] B:G:R:A 4:4:4:4 little endian
+	 */
+	WL_SHM_FORMAT_BGRA4444 = 0x32314142,
+	/**
+	 * 16-bit xRGB format, [15:0] x:R:G:B 1:5:5:5 little endian
+	 */
+	WL_SHM_FORMAT_XRGB1555 = 0x35315258,
+	/**
+	 * 16-bit xBGR 1555 format, [15:0] x:B:G:R 1:5:5:5 little endian
+	 */
+	WL_SHM_FORMAT_XBGR1555 = 0x35314258,
+	/**
+	 * 16-bit RGBx 5551 format, [15:0] R:G:B:x 5:5:5:1 little endian
+	 */
+	WL_SHM_FORMAT_RGBX5551 = 0x35315852,
+	/**
+	 * 16-bit BGRx 5551 format, [15:0] B:G:R:x 5:5:5:1 little endian
+	 */
+	WL_SHM_FORMAT_BGRX5551 = 0x35315842,
+	/**
+	 * 16-bit ARGB 1555 format, [15:0] A:R:G:B 1:5:5:5 little endian
+	 */
+	WL_SHM_FORMAT_ARGB1555 = 0x35315241,
+	/**
+	 * 16-bit ABGR 1555 format, [15:0] A:B:G:R 1:5:5:5 little endian
+	 */
+	WL_SHM_FORMAT_ABGR1555 = 0x35314241,
+	/**
+	 * 16-bit RGBA 5551 format, [15:0] R:G:B:A 5:5:5:1 little endian
+	 */
+	WL_SHM_FORMAT_RGBA5551 = 0x35314152,
+	/**
+	 * 16-bit BGRA 5551 format, [15:0] B:G:R:A 5:5:5:1 little endian
+	 */
+	WL_SHM_FORMAT_BGRA5551 = 0x35314142,
+	/**
+	 * 16-bit RGB 565 format, [15:0] R:G:B 5:6:5 little endian
+	 */
+	WL_SHM_FORMAT_RGB565 = 0x36314752,
+	/**
+	 * 16-bit BGR 565 format, [15:0] B:G:R 5:6:5 little endian
+	 */
+	WL_SHM_FORMAT_BGR565 = 0x36314742,
+	/**
+	 * 24-bit RGB format, [23:0] R:G:B little endian
+	 */
+	WL_SHM_FORMAT_RGB888 = 0x34324752,
+	/**
+	 * 24-bit BGR format, [23:0] B:G:R little endian
+	 */
+	WL_SHM_FORMAT_BGR888 = 0x34324742,
+	/**
+	 * 32-bit xBGR format, [31:0] x:B:G:R 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_XBGR8888 = 0x34324258,
+	/**
+	 * 32-bit RGBx format, [31:0] R:G:B:x 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_RGBX8888 = 0x34325852,
+	/**
+	 * 32-bit BGRx format, [31:0] B:G:R:x 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_BGRX8888 = 0x34325842,
+	/**
+	 * 32-bit ABGR format, [31:0] A:B:G:R 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_ABGR8888 = 0x34324241,
+	/**
+	 * 32-bit RGBA format, [31:0] R:G:B:A 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_RGBA8888 = 0x34324152,
+	/**
+	 * 32-bit BGRA format, [31:0] B:G:R:A 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_BGRA8888 = 0x34324142,
+	/**
+	 * 32-bit xRGB format, [31:0] x:R:G:B 2:10:10:10 little endian
+	 */
+	WL_SHM_FORMAT_XRGB2101010 = 0x30335258,
+	/**
+	 * 32-bit xBGR format, [31:0] x:B:G:R 2:10:10:10 little endian
+	 */
+	WL_SHM_FORMAT_XBGR2101010 = 0x30334258,
+	/**
+	 * 32-bit RGBx format, [31:0] R:G:B:x 10:10:10:2 little endian
+	 */
+	WL_SHM_FORMAT_RGBX1010102 = 0x30335852,
+	/**
+	 * 32-bit BGRx format, [31:0] B:G:R:x 10:10:10:2 little endian
+	 */
+	WL_SHM_FORMAT_BGRX1010102 = 0x30335842,
+	/**
+	 * 32-bit ARGB format, [31:0] A:R:G:B 2:10:10:10 little endian
+	 */
+	WL_SHM_FORMAT_ARGB2101010 = 0x30335241,
+	/**
+	 * 32-bit ABGR format, [31:0] A:B:G:R 2:10:10:10 little endian
+	 */
+	WL_SHM_FORMAT_ABGR2101010 = 0x30334241,
+	/**
+	 * 32-bit RGBA format, [31:0] R:G:B:A 10:10:10:2 little endian
+	 */
+	WL_SHM_FORMAT_RGBA1010102 = 0x30334152,
+	/**
+	 * 32-bit BGRA format, [31:0] B:G:R:A 10:10:10:2 little endian
+	 */
+	WL_SHM_FORMAT_BGRA1010102 = 0x30334142,
+	/**
+	 * packed YCbCr format, [31:0] Cr0:Y1:Cb0:Y0 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_YUYV = 0x56595559,
+	/**
+	 * packed YCbCr format, [31:0] Cb0:Y1:Cr0:Y0 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_YVYU = 0x55595659,
+	/**
+	 * packed YCbCr format, [31:0] Y1:Cr0:Y0:Cb0 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_UYVY = 0x59565955,
+	/**
+	 * packed YCbCr format, [31:0] Y1:Cb0:Y0:Cr0 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_VYUY = 0x59555956,
+	/**
+	 * packed AYCbCr format, [31:0] A:Y:Cb:Cr 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_AYUV = 0x56555941,
+	/**
+	 * 2 plane YCbCr Cr:Cb format, 2x2 subsampled Cr:Cb plane
+	 */
+	WL_SHM_FORMAT_NV12 = 0x3231564e,
+	/**
+	 * 2 plane YCbCr Cb:Cr format, 2x2 subsampled Cb:Cr plane
+	 */
+	WL_SHM_FORMAT_NV21 = 0x3132564e,
+	/**
+	 * 2 plane YCbCr Cr:Cb format, 2x1 subsampled Cr:Cb plane
+	 */
+	WL_SHM_FORMAT_NV16 = 0x3631564e,
+	/**
+	 * 2 plane YCbCr Cb:Cr format, 2x1 subsampled Cb:Cr plane
+	 */
+	WL_SHM_FORMAT_NV61 = 0x3136564e,
+	/**
+	 * 3 plane YCbCr format, 4x4 subsampled Cb (1) and Cr (2) planes
+	 */
+	WL_SHM_FORMAT_YUV410 = 0x39565559,
+	/**
+	 * 3 plane YCbCr format, 4x4 subsampled Cr (1) and Cb (2) planes
+	 */
+	WL_SHM_FORMAT_YVU410 = 0x39555659,
+	/**
+	 * 3 plane YCbCr format, 4x1 subsampled Cb (1) and Cr (2) planes
+	 */
+	WL_SHM_FORMAT_YUV411 = 0x31315559,
+	/**
+	 * 3 plane YCbCr format, 4x1 subsampled Cr (1) and Cb (2) planes
+	 */
+	WL_SHM_FORMAT_YVU411 = 0x31315659,
+	/**
+	 * 3 plane YCbCr format, 2x2 subsampled Cb (1) and Cr (2) planes
+	 */
+	WL_SHM_FORMAT_YUV420 = 0x32315559,
+	/**
+	 * 3 plane YCbCr format, 2x2 subsampled Cr (1) and Cb (2) planes
+	 */
+	WL_SHM_FORMAT_YVU420 = 0x32315659,
+	/**
+	 * 3 plane YCbCr format, 2x1 subsampled Cb (1) and Cr (2) planes
+	 */
+	WL_SHM_FORMAT_YUV422 = 0x36315559,
+	/**
+	 * 3 plane YCbCr format, 2x1 subsampled Cr (1) and Cb (2) planes
+	 */
+	WL_SHM_FORMAT_YVU422 = 0x36315659,
+	/**
+	 * 3 plane YCbCr format, non-subsampled Cb (1) and Cr (2) planes
+	 */
+	WL_SHM_FORMAT_YUV444 = 0x34325559,
+	/**
+	 * 3 plane YCbCr format, non-subsampled Cr (1) and Cb (2) planes
+	 */
+	WL_SHM_FORMAT_YVU444 = 0x34325659,
+	/**
+	 * [7:0] R
+	 */
+	WL_SHM_FORMAT_R8 = 0x20203852,
+	/**
+	 * [15:0] R little endian
+	 */
+	WL_SHM_FORMAT_R16 = 0x20363152,
+	/**
+	 * [15:0] R:G 8:8 little endian
+	 */
+	WL_SHM_FORMAT_RG88 = 0x38384752,
+	/**
+	 * [15:0] G:R 8:8 little endian
+	 */
+	WL_SHM_FORMAT_GR88 = 0x38385247,
+	/**
+	 * [31:0] R:G 16:16 little endian
+	 */
+	WL_SHM_FORMAT_RG1616 = 0x32334752,
+	/**
+	 * [31:0] G:R 16:16 little endian
+	 */
+	WL_SHM_FORMAT_GR1616 = 0x32335247,
+	/**
+	 * [63:0] x:R:G:B 16:16:16:16 little endian
+	 */
+	WL_SHM_FORMAT_XRGB16161616F = 0x48345258,
+	/**
+	 * [63:0] x:B:G:R 16:16:16:16 little endian
+	 */
+	WL_SHM_FORMAT_XBGR16161616F = 0x48344258,
+	/**
+	 * [63:0] A:R:G:B 16:16:16:16 little endian
+	 */
+	WL_SHM_FORMAT_ARGB16161616F = 0x48345241,
+	/**
+	 * [63:0] A:B:G:R 16:16:16:16 little endian
+	 */
+	WL_SHM_FORMAT_ABGR16161616F = 0x48344241,
+	/**
+	 * [31:0] X:Y:Cb:Cr 8:8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_XYUV8888 = 0x56555958,
+	/**
+	 * [23:0] Cr:Cb:Y 8:8:8 little endian
+	 */
+	WL_SHM_FORMAT_VUY888 = 0x34325556,
+	/**
+	 * Y followed by U then V, 10:10:10. Non-linear modifier only
+	 */
+	WL_SHM_FORMAT_VUY101010 = 0x30335556,
+	/**
+	 * [63:0] Cr0:0:Y1:0:Cb0:0:Y0:0 10:6:10:6:10:6:10:6 little endian per 2 Y pixels
+	 */
+	WL_SHM_FORMAT_Y210 = 0x30313259,
+	/**
+	 * [63:0] Cr0:0:Y1:0:Cb0:0:Y0:0 12:4:12:4:12:4:12:4 little endian per 2 Y pixels
+	 */
+	WL_SHM_FORMAT_Y212 = 0x32313259,
+	/**
+	 * [63:0] Cr0:Y1:Cb0:Y0 16:16:16:16 little endian per 2 Y pixels
+	 */
+	WL_SHM_FORMAT_Y216 = 0x36313259,
+	/**
+	 * [31:0] A:Cr:Y:Cb 2:10:10:10 little endian
+	 */
+	WL_SHM_FORMAT_Y410 = 0x30313459,
+	/**
+	 * [63:0] A:0:Cr:0:Y:0:Cb:0 12:4:12:4:12:4:12:4 little endian
+	 */
+	WL_SHM_FORMAT_Y412 = 0x32313459,
+	/**
+	 * [63:0] A:Cr:Y:Cb 16:16:16:16 little endian
+	 */
+	WL_SHM_FORMAT_Y416 = 0x36313459,
+	/**
+	 * [31:0] X:Cr:Y:Cb 2:10:10:10 little endian
+	 */
+	WL_SHM_FORMAT_XVYU2101010 = 0x30335658,
+	/**
+	 * [63:0] X:0:Cr:0:Y:0:Cb:0 12:4:12:4:12:4:12:4 little endian
+	 */
+	WL_SHM_FORMAT_XVYU12_16161616 = 0x36335658,
+	/**
+	 * [63:0] X:Cr:Y:Cb 16:16:16:16 little endian
+	 */
+	WL_SHM_FORMAT_XVYU16161616 = 0x38345658,
+	/**
+	 * [63:0]   A3:A2:Y3:0:Cr0:0:Y2:0:A1:A0:Y1:0:Cb0:0:Y0:0  1:1:8:2:8:2:8:2:1:1:8:2:8:2:8:2 little endian
+	 */
+	WL_SHM_FORMAT_Y0L0 = 0x304c3059,
+	/**
+	 * [63:0]   X3:X2:Y3:0:Cr0:0:Y2:0:X1:X0:Y1:0:Cb0:0:Y0:0  1:1:8:2:8:2:8:2:1:1:8:2:8:2:8:2 little endian
+	 */
+	WL_SHM_FORMAT_X0L0 = 0x304c3058,
+	/**
+	 * [63:0]   A3:A2:Y3:Cr0:Y2:A1:A0:Y1:Cb0:Y0  1:1:10:10:10:1:1:10:10:10 little endian
+	 */
+	WL_SHM_FORMAT_Y0L2 = 0x324c3059,
+	/**
+	 * [63:0]   X3:X2:Y3:Cr0:Y2:X1:X0:Y1:Cb0:Y0  1:1:10:10:10:1:1:10:10:10 little endian
+	 */
+	WL_SHM_FORMAT_X0L2 = 0x324c3058,
+	WL_SHM_FORMAT_YUV420_8BIT = 0x38305559,
+	WL_SHM_FORMAT_YUV420_10BIT = 0x30315559,
+	WL_SHM_FORMAT_XRGB8888_A8 = 0x38415258,
+	WL_SHM_FORMAT_XBGR8888_A8 = 0x38414258,
+	WL_SHM_FORMAT_RGBX8888_A8 = 0x38415852,
+	WL_SHM_FORMAT_BGRX8888_A8 = 0x38415842,
+	WL_SHM_FORMAT_RGB888_A8 = 0x38413852,
+	WL_SHM_FORMAT_BGR888_A8 = 0x38413842,
+	WL_SHM_FORMAT_RGB565_A8 = 0x38413552,
+	WL_SHM_FORMAT_BGR565_A8 = 0x38413542,
+	/**
+	 * non-subsampled Cr:Cb plane
+	 */
+	WL_SHM_FORMAT_NV24 = 0x3432564e,
+	/**
+	 * non-subsampled Cb:Cr plane
+	 */
+	WL_SHM_FORMAT_NV42 = 0x3234564e,
+	/**
+	 * 2x1 subsampled Cr:Cb plane, 10 bit per channel
+	 */
+	WL_SHM_FORMAT_P210 = 0x30313250,
+	/**
+	 * 2x2 subsampled Cr:Cb plane 10 bits per channel
+	 */
+	WL_SHM_FORMAT_P010 = 0x30313050,
+	/**
+	 * 2x2 subsampled Cr:Cb plane 12 bits per channel
+	 */
+	WL_SHM_FORMAT_P012 = 0x32313050,
+	/**
+	 * 2x2 subsampled Cr:Cb plane 16 bits per channel
+	 */
+	WL_SHM_FORMAT_P016 = 0x36313050,
+	/**
+	 * [63:0] A:x:B:x:G:x:R:x 10:6:10:6:10:6:10:6 little endian
+	 */
+	WL_SHM_FORMAT_AXBXGXRX106106106106 = 0x30314241,
+	/**
+	 * 2x2 subsampled Cr:Cb plane
+	 */
+	WL_SHM_FORMAT_NV15 = 0x3531564e,
+	WL_SHM_FORMAT_Q410 = 0x30313451,
+	WL_SHM_FORMAT_Q401 = 0x31303451,
+	/**
+	 * [63:0] x:R:G:B 16:16:16:16 little endian
+	 */
+	WL_SHM_FORMAT_XRGB16161616 = 0x38345258,
+	/**
+	 * [63:0] x:B:G:R 16:16:16:16 little endian
+	 */
+	WL_SHM_FORMAT_XBGR16161616 = 0x38344258,
+	/**
+	 * [63:0] A:R:G:B 16:16:16:16 little endian
+	 */
+	WL_SHM_FORMAT_ARGB16161616 = 0x38345241,
+	/**
+	 * [63:0] A:B:G:R 16:16:16:16 little endian
+	 */
+	WL_SHM_FORMAT_ABGR16161616 = 0x38344241,
+};
+#endif /* WL_SHM_FORMAT_ENUM */
+
+/**
+ * @ingroup iface_wl_shm
+ * @struct wl_shm_listener
+ */
+struct wl_shm_listener {
+	/**
+	 * pixel format description
+	 *
+	 * Informs the client about a valid pixel format that can be used
+	 * for buffers. Known formats include argb8888 and xrgb8888.
+	 * @param format buffer pixel format
+	 */
+	void (*format)(void *data,
+		       struct wl_shm *wl_shm,
+		       uint32_t format);
+};
+
+/**
+ * @ingroup iface_wl_shm
+ */
+static inline int
+wl_shm_add_listener(struct wl_shm *wl_shm,
+		    const struct wl_shm_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_shm,
+				     (void (**)(void)) listener, data);
+}
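
The listener above is how a client learns which formats are usable. A minimal sketch (not part of the generated header; it assumes a wl_shm proxy named `shm` already bound from the registry) that simply records whether xrgb8888 was advertised:

    #include <stdbool.h>
    #include <wayland-client.h>

    static bool have_xrgb8888;

    /* Called once per pixel format advertised by the compositor. */
    static void handle_shm_format(void *data, struct wl_shm *shm, uint32_t format)
    {
        if (format == WL_SHM_FORMAT_XRGB8888)
            have_xrgb8888 = true;
    }

    static const struct wl_shm_listener shm_listener = {
        .format = handle_shm_format,
    };

    /* After binding wl_shm: wl_shm_add_listener(shm, &shm_listener, NULL);
     * then wl_display_roundtrip() to collect the format events. */
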
+
+#define WL_SHM_CREATE_POOL 0
+
+/**
+ * @ingroup iface_wl_shm
+ */
+#define WL_SHM_FORMAT_SINCE_VERSION 1
+
+/**
+ * @ingroup iface_wl_shm
+ */
+#define WL_SHM_CREATE_POOL_SINCE_VERSION 1
+
+/** @ingroup iface_wl_shm */
+static inline void
+wl_shm_set_user_data(struct wl_shm *wl_shm, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_shm, user_data);
+}
+
+/** @ingroup iface_wl_shm */
+static inline void *
+wl_shm_get_user_data(struct wl_shm *wl_shm)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_shm);
+}
+
+static inline uint32_t
+wl_shm_get_version(struct wl_shm *wl_shm)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_shm);
+}
+
+/** @ingroup iface_wl_shm */
+static inline void
+wl_shm_destroy(struct wl_shm *wl_shm)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_shm);
+}
+
+/**
+ * @ingroup iface_wl_shm
+ *
+ * Create a new wl_shm_pool object.
+ *
+ * The pool can be used to create shared memory based buffer
+ * objects.  The server will mmap size bytes of the passed file
+ * descriptor, to use as backing memory for the pool.
+ */
+static inline struct wl_shm_pool *
+wl_shm_create_pool(struct wl_shm *wl_shm, int32_t fd, int32_t size)
+{
+	struct wl_proxy *id;
+
+	id = wl_proxy_marshal_flags((struct wl_proxy *) wl_shm,
+			 WL_SHM_CREATE_POOL, &wl_shm_pool_interface, wl_proxy_get_version((struct wl_proxy *) wl_shm), 0, NULL, fd, size);
+
+	return (struct wl_shm_pool *) id;
+}
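
A hedged sketch of the usual pool/buffer setup built on this request plus the wl_shm_pool requests from earlier in this header. The helper name and the Linux-specific memfd_create() backing are assumptions, and error handling is trimmed:

    #define _GNU_SOURCE
    #include <sys/mman.h>
    #include <unistd.h>
    #include <wayland-client.h>

    /* Hypothetical helper: one 640x480 XRGB8888 buffer backed by shared memory. */
    static struct wl_buffer *create_shm_buffer(struct wl_shm *shm, void **pixels)
    {
        const int width = 640, height = 480, stride = width * 4;
        const int size = stride * height;

        int fd = memfd_create("wl_shm-backing", MFD_CLOEXEC);
        if (fd < 0 || ftruncate(fd, size) < 0)
            return NULL;

        *pixels = mmap(NULL, size, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);

        struct wl_shm_pool *pool = wl_shm_create_pool(shm, fd, size);
        struct wl_buffer *buffer = wl_shm_pool_create_buffer(pool, 0, width, height,
                                                             stride, WL_SHM_FORMAT_XRGB8888);
        wl_shm_pool_destroy(pool);  /* the buffer keeps the pool's storage alive */
        close(fd);                  /* the compositor holds its own reference    */
        return buffer;
    }
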
+
+/**
+ * @ingroup iface_wl_buffer
+ * @struct wl_buffer_listener
+ */
+struct wl_buffer_listener {
+	/**
+	 * compositor releases buffer
+	 *
+	 * Sent when this wl_buffer is no longer used by the compositor.
+	 * The client is now free to reuse or destroy this buffer and its
+	 * backing storage.
+	 *
+	 * If a client receives a release event before the frame callback
+	 * requested in the same wl_surface.commit that attaches this
+	 * wl_buffer to a surface, then the client is immediately free to
+	 * reuse the buffer and its backing storage, and does not need a
+	 * second buffer for the next surface content update. Typically
+	 * this is possible when the compositor maintains a copy of the
+	 * wl_surface contents, e.g. as a GL texture. This is an important
+	 * optimization for GL(ES) compositors with wl_shm clients.
+	 */
+	void (*release)(void *data,
+			struct wl_buffer *wl_buffer);
+};
+
+/**
+ * @ingroup iface_wl_buffer
+ */
+static inline int
+wl_buffer_add_listener(struct wl_buffer *wl_buffer,
+		       const struct wl_buffer_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_buffer,
+				     (void (**)(void)) listener, data);
+}
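
A small sketch of the release handling described above, assuming the client tracks a per-buffer `busy` flag of its own:

    #include <wayland-client.h>

    struct client_buffer {
        struct wl_buffer *wl_buffer;
        int busy;               /* set when attached/committed, cleared on release */
    };

    static void handle_buffer_release(void *data, struct wl_buffer *wl_buffer)
    {
        struct client_buffer *buf = data;
        buf->busy = 0;          /* safe to redraw into or destroy this buffer now */
    }

    static const struct wl_buffer_listener buffer_listener = {
        .release = handle_buffer_release,
    };

    /* Registered with: wl_buffer_add_listener(buf->wl_buffer, &buffer_listener, buf); */
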
+
+#define WL_BUFFER_DESTROY 0
+
+/**
+ * @ingroup iface_wl_buffer
+ */
+#define WL_BUFFER_RELEASE_SINCE_VERSION 1
+
+/**
+ * @ingroup iface_wl_buffer
+ */
+#define WL_BUFFER_DESTROY_SINCE_VERSION 1
+
+/** @ingroup iface_wl_buffer */
+static inline void
+wl_buffer_set_user_data(struct wl_buffer *wl_buffer, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_buffer, user_data);
+}
+
+/** @ingroup iface_wl_buffer */
+static inline void *
+wl_buffer_get_user_data(struct wl_buffer *wl_buffer)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_buffer);
+}
+
+static inline uint32_t
+wl_buffer_get_version(struct wl_buffer *wl_buffer)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_buffer);
+}
+
+/**
+ * @ingroup iface_wl_buffer
+ *
+ * Destroy a buffer. If and how you need to release the backing
+ * storage is defined by the buffer factory interface.
+ *
+ * For possible side-effects to a surface, see wl_surface.attach.
+ */
+static inline void
+wl_buffer_destroy(struct wl_buffer *wl_buffer)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_buffer,
+			 WL_BUFFER_DESTROY, NULL, wl_proxy_get_version((struct wl_proxy *) wl_buffer), WL_MARSHAL_FLAG_DESTROY);
+}
+
+#ifndef WL_DATA_OFFER_ERROR_ENUM
+#define WL_DATA_OFFER_ERROR_ENUM
+enum wl_data_offer_error {
+	/**
+	 * finish request was called untimely
+	 */
+	WL_DATA_OFFER_ERROR_INVALID_FINISH = 0,
+	/**
+	 * action mask contains invalid values
+	 */
+	WL_DATA_OFFER_ERROR_INVALID_ACTION_MASK = 1,
+	/**
+	 * action argument has an invalid value
+	 */
+	WL_DATA_OFFER_ERROR_INVALID_ACTION = 2,
+	/**
+	 * offer doesn't accept this request
+	 */
+	WL_DATA_OFFER_ERROR_INVALID_OFFER = 3,
+};
+#endif /* WL_DATA_OFFER_ERROR_ENUM */
+
+/**
+ * @ingroup iface_wl_data_offer
+ * @struct wl_data_offer_listener
+ */
+struct wl_data_offer_listener {
+	/**
+	 * advertise offered mime type
+	 *
+	 * Sent immediately after creating the wl_data_offer object. One
+	 * event per offered mime type.
+	 * @param mime_type offered mime type
+	 */
+	void (*offer)(void *data,
+		      struct wl_data_offer *wl_data_offer,
+		      const char *mime_type);
+	/**
+	 * notify the source-side available actions
+	 *
+	 * This event indicates the actions offered by the data source.
+	 * It will be sent right after wl_data_device.enter, or anytime the
+	 * source side changes its offered actions through
+	 * wl_data_source.set_actions.
+	 * @param source_actions actions offered by the data source
+	 * @since 3
+	 */
+	void (*source_actions)(void *data,
+			       struct wl_data_offer *wl_data_offer,
+			       uint32_t source_actions);
+	/**
+	 * notify the selected action
+	 *
+	 * This event indicates the action selected by the compositor
+	 * after matching the source/destination side actions. Only one
+	 * action (or none) will be offered here.
+	 *
+	 * This event can be emitted multiple times during the
+	 * drag-and-drop operation in response to destination side action
+	 * changes through wl_data_offer.set_actions.
+	 *
+	 * This event will no longer be emitted after wl_data_device.drop
+	 * happened on the drag-and-drop destination; the client must honor
+	 * the last action received, or the last preferred one set through
+	 * wl_data_offer.set_actions when handling an "ask" action.
+	 *
+	 * Compositors may also change the selected action on the fly,
+	 * mainly in response to keyboard modifier changes during the
+	 * drag-and-drop operation.
+	 *
+	 * The most recent action received is always the valid one. Prior
+	 * to receiving wl_data_device.drop, the chosen action may change
+	 * (e.g. due to keyboard modifiers being pressed). At the time of
+	 * receiving wl_data_device.drop the drag-and-drop destination must
+	 * honor the last action received.
+	 *
+	 * Action changes may still happen after wl_data_device.drop,
+	 * especially on "ask" actions, where the drag-and-drop destination
+	 * may choose another action afterwards. Action changes happening
+	 * at this stage are always the result of inter-client negotiation,
+	 * the compositor shall no longer be able to induce a different
+	 * action.
+	 *
+	 * Upon "ask" actions, it is expected that the drag-and-drop
+	 * destination may potentially choose a different action and/or
+	 * mime type, based on wl_data_offer.source_actions and finally
+	 * chosen by the user (e.g. popping up a menu with the available
+	 * options). The final wl_data_offer.set_actions and
+	 * wl_data_offer.accept requests must happen before the call to
+	 * wl_data_offer.finish.
+	 * @param dnd_action action selected by the compositor
+	 * @since 3
+	 */
+	void (*action)(void *data,
+		       struct wl_data_offer *wl_data_offer,
+		       uint32_t dnd_action);
+};
+
+/**
+ * @ingroup iface_wl_data_offer
+ */
+static inline int
+wl_data_offer_add_listener(struct wl_data_offer *wl_data_offer,
+			   const struct wl_data_offer_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_data_offer,
+				     (void (**)(void)) listener, data);
+}
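
A sketch of a destination-side listener for the three events above; the per-offer bookkeeping variables are illustrative assumptions, not protocol state:

    #include <string.h>
    #include <wayland-client.h>

    static int offer_has_text;              /* illustrative per-offer state */
    static uint32_t offer_source_actions;
    static uint32_t offer_chosen_action;

    static void offer_handle_offer(void *data, struct wl_data_offer *offer,
                                   const char *mime_type)
    {
        if (strcmp(mime_type, "text/plain;charset=utf-8") == 0)
            offer_has_text = 1;
    }

    static void offer_handle_source_actions(void *data, struct wl_data_offer *offer,
                                            uint32_t source_actions)
    {
        offer_source_actions = source_actions;
    }

    static void offer_handle_action(void *data, struct wl_data_offer *offer,
                                    uint32_t dnd_action)
    {
        offer_chosen_action = dnd_action;   /* the most recent action is the valid one */
    }

    static const struct wl_data_offer_listener offer_listener = {
        .offer = offer_handle_offer,
        .source_actions = offer_handle_source_actions,
        .action = offer_handle_action,
    };
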
+
+#define WL_DATA_OFFER_ACCEPT 0
+#define WL_DATA_OFFER_RECEIVE 1
+#define WL_DATA_OFFER_DESTROY 2
+#define WL_DATA_OFFER_FINISH 3
+#define WL_DATA_OFFER_SET_ACTIONS 4
+
+/**
+ * @ingroup iface_wl_data_offer
+ */
+#define WL_DATA_OFFER_OFFER_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_offer
+ */
+#define WL_DATA_OFFER_SOURCE_ACTIONS_SINCE_VERSION 3
+/**
+ * @ingroup iface_wl_data_offer
+ */
+#define WL_DATA_OFFER_ACTION_SINCE_VERSION 3
+
+/**
+ * @ingroup iface_wl_data_offer
+ */
+#define WL_DATA_OFFER_ACCEPT_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_offer
+ */
+#define WL_DATA_OFFER_RECEIVE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_offer
+ */
+#define WL_DATA_OFFER_DESTROY_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_offer
+ */
+#define WL_DATA_OFFER_FINISH_SINCE_VERSION 3
+/**
+ * @ingroup iface_wl_data_offer
+ */
+#define WL_DATA_OFFER_SET_ACTIONS_SINCE_VERSION 3
+
+/** @ingroup iface_wl_data_offer */
+static inline void
+wl_data_offer_set_user_data(struct wl_data_offer *wl_data_offer, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_data_offer, user_data);
+}
+
+/** @ingroup iface_wl_data_offer */
+static inline void *
+wl_data_offer_get_user_data(struct wl_data_offer *wl_data_offer)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_data_offer);
+}
+
+static inline uint32_t
+wl_data_offer_get_version(struct wl_data_offer *wl_data_offer)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_data_offer);
+}
+
+/**
+ * @ingroup iface_wl_data_offer
+ *
+ * Indicate that the client can accept the given mime type, or
+ * NULL for not accepted.
+ *
+ * For objects of version 2 or older, this request is used by the
+ * client to give feedback whether the client can receive the given
+ * mime type, or NULL if none is accepted; the feedback does not
+ * determine whether the drag-and-drop operation succeeds or not.
+ *
+ * For objects of version 3 or newer, this request determines the
+ * final result of the drag-and-drop operation. If the end result
+ * is that no mime types were accepted, the drag-and-drop operation
+ * will be cancelled and the corresponding drag source will receive
+ * wl_data_source.cancelled. Clients may still use this event in
+ * conjunction with wl_data_source.action for feedback.
+ */
+static inline void
+wl_data_offer_accept(struct wl_data_offer *wl_data_offer, uint32_t serial, const char *mime_type)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_data_offer,
+			 WL_DATA_OFFER_ACCEPT, NULL, wl_proxy_get_version((struct wl_proxy *) wl_data_offer), 0, serial, mime_type);
+}
+
+/**
+ * @ingroup iface_wl_data_offer
+ *
+ * To transfer the offered data, the client issues this request
+ * and indicates the mime type it wants to receive.  The transfer
+ * happens through the passed file descriptor (typically created
+ * with the pipe system call).  The source client writes the data
+ * in the mime type representation requested and then closes the
+ * file descriptor.
+ *
+ * The receiving client reads from the read end of the pipe until
+ * EOF and then closes its end, at which point the transfer is
+ * complete.
+ *
+ * This request may happen multiple times for different mime types,
+ * both before and after wl_data_device.drop. Drag-and-drop destination
+ * clients may preemptively fetch data or examine it more closely to
+ * determine acceptance.
+ */
+static inline void
+wl_data_offer_receive(struct wl_data_offer *wl_data_offer, const char *mime_type, int32_t fd)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_data_offer,
+			 WL_DATA_OFFER_RECEIVE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_data_offer), 0, mime_type, fd);
+}
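
A sketch of the pipe-based transfer described above, seen from the receiving side; the blocking read loop and the hard-coded mime type are assumptions for illustration only:

    #include <stdio.h>
    #include <unistd.h>
    #include <wayland-client.h>

    static void receive_offer_text(struct wl_display *display,
                                   struct wl_data_offer *offer)
    {
        int fds[2];
        if (pipe(fds) < 0)
            return;

        wl_data_offer_receive(offer, "text/plain;charset=utf-8", fds[1]);
        close(fds[1]);                 /* keep only the read end                   */
        wl_display_flush(display);     /* make sure the request reaches the server */

        char buf[4096];
        ssize_t n;
        while ((n = read(fds[0], buf, sizeof buf)) > 0)
            fwrite(buf, 1, (size_t)n, stdout);   /* ...until EOF ends the transfer */
        close(fds[0]);
    }
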
+
+/**
+ * @ingroup iface_wl_data_offer
+ *
+ * Destroy the data offer.
+ */
+static inline void
+wl_data_offer_destroy(struct wl_data_offer *wl_data_offer)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_data_offer,
+			 WL_DATA_OFFER_DESTROY, NULL, wl_proxy_get_version((struct wl_proxy *) wl_data_offer), WL_MARSHAL_FLAG_DESTROY);
+}
+
+/**
+ * @ingroup iface_wl_data_offer
+ *
+ * Notifies the compositor that the drag destination successfully
+ * finished the drag-and-drop operation.
+ *
+ * Upon receiving this request, the compositor will emit
+ * wl_data_source.dnd_finished on the drag source client.
+ *
+ * It is a client error to perform other requests than
+ * wl_data_offer.destroy after this one. It is also an error to perform
+ * this request after a NULL mime type has been set in
+ * wl_data_offer.accept or no action was received through
+ * wl_data_offer.action.
+ *
+	 * If the wl_data_offer.finish request is received for a non-drag-and-drop
+ * operation, the invalid_finish protocol error is raised.
+ */
+static inline void
+wl_data_offer_finish(struct wl_data_offer *wl_data_offer)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_data_offer,
+			 WL_DATA_OFFER_FINISH, NULL, wl_proxy_get_version((struct wl_proxy *) wl_data_offer), 0);
+}
+
+/**
+ * @ingroup iface_wl_data_offer
+ *
+ * Sets the actions that the destination side client supports for
+ * this operation. This request may trigger the emission of
+ * wl_data_source.action and wl_data_offer.action events if the compositor
+ * needs to change the selected action.
+ *
+ * This request can be called multiple times throughout the
+ * drag-and-drop operation, typically in response to wl_data_device.enter
+ * or wl_data_device.motion events.
+ *
+ * This request determines the final result of the drag-and-drop
+ * operation. If the end result is that no action is accepted,
+ * the drag source will receive wl_data_source.cancelled.
+ *
+ * The dnd_actions argument must contain only values expressed in the
+ * wl_data_device_manager.dnd_actions enum, and the preferred_action
+ * argument must only contain one of those values set, otherwise it
+ * will result in a protocol error.
+ *
+ * While managing an "ask" action, the destination drag-and-drop client
+ * may perform further wl_data_offer.receive requests, and is expected
+ * to perform one last wl_data_offer.set_actions request with a preferred
+ * action other than "ask" (and optionally wl_data_offer.accept) before
+ * requesting wl_data_offer.finish, in order to convey the action selected
+ * by the user. If the preferred action is not in the
+ * wl_data_offer.source_actions mask, an error will be raised.
+ *
+ * If the "ask" action is dismissed (e.g. user cancellation), the client
+ * is expected to perform wl_data_offer.destroy right away.
+ *
+ * This request can only be made on drag-and-drop offers, a protocol error
+ * will be raised otherwise.
+ */
+static inline void
+wl_data_offer_set_actions(struct wl_data_offer *wl_data_offer, uint32_t dnd_actions, uint32_t preferred_action)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_data_offer,
+			 WL_DATA_OFFER_SET_ACTIONS, NULL, wl_proxy_get_version((struct wl_proxy *) wl_data_offer), 0, dnd_actions, preferred_action);
+}
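
A compact sketch of a destination reacting to wl_data_device.enter/motion as described above, accepting text and advertising copy-or-move with copy preferred (`serial` is assumed to be the serial from the enter event):

    #include <wayland-client.h>

    static void negotiate_dnd(struct wl_data_offer *offer, uint32_t serial)
    {
        wl_data_offer_accept(offer, serial, "text/plain;charset=utf-8");
        wl_data_offer_set_actions(offer,
                                  WL_DATA_DEVICE_MANAGER_DND_ACTION_COPY |
                                  WL_DATA_DEVICE_MANAGER_DND_ACTION_MOVE,
                                  WL_DATA_DEVICE_MANAGER_DND_ACTION_COPY);
    }
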
+
+#ifndef WL_DATA_SOURCE_ERROR_ENUM
+#define WL_DATA_SOURCE_ERROR_ENUM
+enum wl_data_source_error {
+	/**
+	 * action mask contains invalid values
+	 */
+	WL_DATA_SOURCE_ERROR_INVALID_ACTION_MASK = 0,
+	/**
+	 * source doesn't accept this request
+	 */
+	WL_DATA_SOURCE_ERROR_INVALID_SOURCE = 1,
+};
+#endif /* WL_DATA_SOURCE_ERROR_ENUM */
+
+/**
+ * @ingroup iface_wl_data_source
+ * @struct wl_data_source_listener
+ */
+struct wl_data_source_listener {
+	/**
+	 * a target accepts an offered mime type
+	 *
+	 * Sent when a target accepts pointer_focus or motion events. If
+	 * a target does not accept any of the offered types, type is NULL.
+	 *
+	 * Used for feedback during drag-and-drop.
+	 * @param mime_type mime type accepted by the target
+	 */
+	void (*target)(void *data,
+		       struct wl_data_source *wl_data_source,
+		       const char *mime_type);
+	/**
+	 * send the data
+	 *
+	 * Request for data from the client. Send the data as the
+	 * specified mime type over the passed file descriptor, then close
+	 * it.
+	 * @param mime_type mime type for the data
+	 * @param fd file descriptor for the data
+	 */
+	void (*send)(void *data,
+		     struct wl_data_source *wl_data_source,
+		     const char *mime_type,
+		     int32_t fd);
+	/**
+	 * selection was cancelled
+	 *
+	 * This data source is no longer valid. There are several reasons
+	 * why this could happen:
+	 *
+	 * - The data source has been replaced by another data source.
+	 * - The drag-and-drop operation was performed, but the drop
+	 *   destination did not accept any of the mime types offered
+	 *   through wl_data_source.target.
+	 * - The drag-and-drop operation was performed, but the drop
+	 *   destination did not select any of the actions present in the
+	 *   mask offered through wl_data_source.action.
+	 * - The drag-and-drop operation was performed but didn't happen
+	 *   over a surface.
+	 * - The compositor cancelled the drag-and-drop operation
+	 *   (e.g. compositor dependent timeouts to avoid stale
+	 *   drag-and-drop transfers).
+	 *
+	 * The client should clean up and destroy this data source.
+	 *
+	 * For objects of version 2 or older, wl_data_source.cancelled will
+	 * only be emitted if the data source was replaced by another data
+	 * source.
+	 */
+	void (*cancelled)(void *data,
+			  struct wl_data_source *wl_data_source);
+	/**
+	 * the drag-and-drop operation physically finished
+	 *
+	 * The user performed the drop action. This event does not
+	 * indicate acceptance; wl_data_source.cancelled may still be
+	 * emitted afterwards if the drop destination does not accept any
+	 * mime type.
+	 *
+	 * However, this event might not be received if the
+	 * compositor cancelled the drag-and-drop operation before this
+	 * event could happen.
+	 *
+	 * Note that the data_source may still be used in the future and
+	 * should not be destroyed here.
+	 * @since 3
+	 */
+	void (*dnd_drop_performed)(void *data,
+				   struct wl_data_source *wl_data_source);
+	/**
+	 * the drag-and-drop operation concluded
+	 *
+	 * The drop destination finished interoperating with this data
+	 * source, so the client is now free to destroy this data source
+	 * and free all associated data.
+	 *
+	 * If the action used to perform the operation was "move", the
+	 * source can now delete the transferred data.
+	 * @since 3
+	 */
+	void (*dnd_finished)(void *data,
+			     struct wl_data_source *wl_data_source);
+	/**
+	 * notify the selected action
+	 *
+	 * This event indicates the action selected by the compositor
+	 * after matching the source/destination side actions. Only one
+	 * action (or none) will be offered here.
+	 *
+	 * This event can be emitted multiple times during the
+	 * drag-and-drop operation, mainly in response to destination side
+	 * changes through wl_data_offer.set_actions, and as the data
+	 * device enters/leaves surfaces.
+	 *
+	 * It is only possible to receive this event after
+	 * wl_data_source.dnd_drop_performed if the drag-and-drop operation
+	 * ended in an "ask" action, in which case the final
+	 * wl_data_source.action event will happen immediately before
+	 * wl_data_source.dnd_finished.
+	 *
+	 * Compositors may also change the selected action on the fly,
+	 * mainly in response to keyboard modifier changes during the
+	 * drag-and-drop operation.
+	 *
+	 * The most recent action received is always the valid one. The
+	 * chosen action may change alongside negotiation (e.g. an "ask"
+	 * action can turn into a "move" operation), so the effects of the
+	 * final action must always be applied in
+	 * wl_data_source.dnd_finished.
+	 *
+	 * Clients can trigger cursor surface changes from this point, so
+	 * they reflect the current action.
+	 * @param dnd_action action selected by the compositor
+	 * @since 3
+	 */
+	void (*action)(void *data,
+		       struct wl_data_source *wl_data_source,
+		       uint32_t dnd_action);
+};
+
+/**
+ * @ingroup iface_wl_data_source
+ */
+static inline int
+wl_data_source_add_listener(struct wl_data_source *wl_data_source,
+			    const struct wl_data_source_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_data_source,
+				     (void (**)(void)) listener, data);
+}
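
A sketch of the source side of a transfer covering the send and cancelled events documented above; the static payload is a stand-in for real application data:

    #include <string.h>
    #include <unistd.h>
    #include <wayland-client.h>

    static const char clipboard_text[] = "hello from the source client";

    static void source_handle_send(void *data, struct wl_data_source *source,
                                   const char *mime_type, int32_t fd)
    {
        if (strcmp(mime_type, "text/plain;charset=utf-8") == 0)
            write(fd, clipboard_text, strlen(clipboard_text));
        close(fd);                      /* always close the fd when done  */
    }

    static void source_handle_cancelled(void *data, struct wl_data_source *source)
    {
        wl_data_source_destroy(source); /* the source is no longer valid  */
    }

    static const struct wl_data_source_listener source_listener = {
        .send = source_handle_send,
        .cancelled = source_handle_cancelled,
        /* a drag-and-drop source should also fill in .target,
         * .dnd_drop_performed, .dnd_finished and .action */
    };
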
+
+#define WL_DATA_SOURCE_OFFER 0
+#define WL_DATA_SOURCE_DESTROY 1
+#define WL_DATA_SOURCE_SET_ACTIONS 2
+
+/**
+ * @ingroup iface_wl_data_source
+ */
+#define WL_DATA_SOURCE_TARGET_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_source
+ */
+#define WL_DATA_SOURCE_SEND_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_source
+ */
+#define WL_DATA_SOURCE_CANCELLED_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_source
+ */
+#define WL_DATA_SOURCE_DND_DROP_PERFORMED_SINCE_VERSION 3
+/**
+ * @ingroup iface_wl_data_source
+ */
+#define WL_DATA_SOURCE_DND_FINISHED_SINCE_VERSION 3
+/**
+ * @ingroup iface_wl_data_source
+ */
+#define WL_DATA_SOURCE_ACTION_SINCE_VERSION 3
+
+/**
+ * @ingroup iface_wl_data_source
+ */
+#define WL_DATA_SOURCE_OFFER_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_source
+ */
+#define WL_DATA_SOURCE_DESTROY_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_source
+ */
+#define WL_DATA_SOURCE_SET_ACTIONS_SINCE_VERSION 3
+
+/** @ingroup iface_wl_data_source */
+static inline void
+wl_data_source_set_user_data(struct wl_data_source *wl_data_source, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_data_source, user_data);
+}
+
+/** @ingroup iface_wl_data_source */
+static inline void *
+wl_data_source_get_user_data(struct wl_data_source *wl_data_source)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_data_source);
+}
+
+static inline uint32_t
+wl_data_source_get_version(struct wl_data_source *wl_data_source)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_data_source);
+}
+
+/**
+ * @ingroup iface_wl_data_source
+ *
+ * This request adds a mime type to the set of mime types
+ * advertised to targets.  Can be called several times to offer
+ * multiple types.
+ */
+static inline void
+wl_data_source_offer(struct wl_data_source *wl_data_source, const char *mime_type)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_data_source,
+			 WL_DATA_SOURCE_OFFER, NULL, wl_proxy_get_version((struct wl_proxy *) wl_data_source), 0, mime_type);
+}
+
+/**
+ * @ingroup iface_wl_data_source
+ *
+ * Destroy the data source.
+ */
+static inline void
+wl_data_source_destroy(struct wl_data_source *wl_data_source)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_data_source,
+			 WL_DATA_SOURCE_DESTROY, NULL, wl_proxy_get_version((struct wl_proxy *) wl_data_source), WL_MARSHAL_FLAG_DESTROY);
+}
+
+/**
+ * @ingroup iface_wl_data_source
+ *
+ * Sets the actions that the source side client supports for this
+ * operation. This request may trigger wl_data_source.action and
+ * wl_data_offer.action events if the compositor needs to change the
+ * selected action.
+ *
+ * The dnd_actions argument must contain only values expressed in the
+ * wl_data_device_manager.dnd_actions enum, otherwise it will result
+ * in a protocol error.
+ *
+ * This request must be made once only, and can only be made on sources
+ * used in drag-and-drop, so it must be performed before
+ * wl_data_device.start_drag. Attempting to use the source other than
+ * for drag-and-drop will raise a protocol error.
+ */
+static inline void
+wl_data_source_set_actions(struct wl_data_source *wl_data_source, uint32_t dnd_actions)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_data_source,
+			 WL_DATA_SOURCE_SET_ACTIONS, NULL, wl_proxy_get_version((struct wl_proxy *) wl_data_source), 0, dnd_actions);
+}
+
+#ifndef WL_DATA_DEVICE_ERROR_ENUM
+#define WL_DATA_DEVICE_ERROR_ENUM
+enum wl_data_device_error {
+	/**
+	 * given wl_surface has another role
+	 */
+	WL_DATA_DEVICE_ERROR_ROLE = 0,
+};
+#endif /* WL_DATA_DEVICE_ERROR_ENUM */
+
+/**
+ * @ingroup iface_wl_data_device
+ * @struct wl_data_device_listener
+ */
+struct wl_data_device_listener {
+	/**
+	 * introduce a new wl_data_offer
+	 *
+	 * The data_offer event introduces a new wl_data_offer object,
+	 * which will subsequently be used in either the data_device.enter
+	 * event (for drag-and-drop) or the data_device.selection event
+	 * (for selections). Immediately following the
+	 * data_device.data_offer event, the new data_offer object will
+	 * send out data_offer.offer events to describe the mime types it
+	 * offers.
+	 * @param id the new data_offer object
+	 */
+	void (*data_offer)(void *data,
+			   struct wl_data_device *wl_data_device,
+			   struct wl_data_offer *id);
+	/**
+	 * initiate drag-and-drop session
+	 *
+	 * This event is sent when an active drag-and-drop pointer enters
+	 * a surface owned by the client. The position of the pointer at
+	 * enter time is provided by the x and y arguments, in
+	 * surface-local coordinates.
+	 * @param serial serial number of the enter event
+	 * @param surface client surface entered
+	 * @param x surface-local x coordinate
+	 * @param y surface-local y coordinate
+	 * @param id source data_offer object
+	 */
+	void (*enter)(void *data,
+		      struct wl_data_device *wl_data_device,
+		      uint32_t serial,
+		      struct wl_surface *surface,
+		      wl_fixed_t x,
+		      wl_fixed_t y,
+		      struct wl_data_offer *id);
+	/**
+	 * end drag-and-drop session
+	 *
+	 * This event is sent when the drag-and-drop pointer leaves the
+	 * surface and the session ends. The client must destroy the
+	 * wl_data_offer introduced at enter time at this point.
+	 */
+	void (*leave)(void *data,
+		      struct wl_data_device *wl_data_device);
+	/**
+	 * drag-and-drop session motion
+	 *
+	 * This event is sent when the drag-and-drop pointer moves within
+	 * the currently focused surface. The new position of the pointer
+	 * is provided by the x and y arguments, in surface-local
+	 * coordinates.
+	 * @param time timestamp with millisecond granularity
+	 * @param x surface-local x coordinate
+	 * @param y surface-local y coordinate
+	 */
+	void (*motion)(void *data,
+		       struct wl_data_device *wl_data_device,
+		       uint32_t time,
+		       wl_fixed_t x,
+		       wl_fixed_t y);
+	/**
+	 * end drag-and-drop session successfully
+	 *
+	 * The event is sent when a drag-and-drop operation is ended
+	 * because the implicit grab is removed.
+	 *
+	 * The drag-and-drop destination is expected to honor the last
+	 * action received through wl_data_offer.action. If the resulting
+	 * action is "copy" or "move", the destination can still perform
+	 * wl_data_offer.receive requests, and is expected to end all
+	 * transfers with a wl_data_offer.finish request.
+	 *
+	 * If the resulting action is "ask", the action will not be
+	 * considered final. The drag-and-drop destination is expected to
+	 * perform one last wl_data_offer.set_actions request, or
+	 * wl_data_offer.destroy in order to cancel the operation.
+	 */
+	void (*drop)(void *data,
+		     struct wl_data_device *wl_data_device);
+	/**
+	 * advertise new selection
+	 *
+	 * The selection event is sent out to notify the client of a new
+	 * wl_data_offer for the selection for this device. The
+	 * data_device.data_offer and the data_offer.offer events are sent
+	 * out immediately before this event to introduce the data offer
+	 * object. The selection event is sent to a client immediately
+	 * before receiving keyboard focus and when a new selection is set
+	 * while the client has keyboard focus. The data_offer is valid
+	 * until a new data_offer or NULL is received or until the client
+	 * loses keyboard focus. Switching surface with keyboard focus
+	 * within the same client doesn't mean a new selection will be
+	 * sent. The client must destroy the previous selection data_offer,
+	 * if any, upon receiving this event.
+	 * @param id selection data_offer object
+	 */
+	void (*selection)(void *data,
+			  struct wl_data_device *wl_data_device,
+			  struct wl_data_offer *id);
+};
+
+/**
+ * @ingroup iface_wl_data_device
+ */
+static inline int
+wl_data_device_add_listener(struct wl_data_device *wl_data_device,
+			    const struct wl_data_device_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_data_device,
+				     (void (**)(void)) listener, data);
+}
+
+#define WL_DATA_DEVICE_START_DRAG 0
+#define WL_DATA_DEVICE_SET_SELECTION 1
+#define WL_DATA_DEVICE_RELEASE 2
+
+/**
+ * @ingroup iface_wl_data_device
+ */
+#define WL_DATA_DEVICE_DATA_OFFER_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_device
+ */
+#define WL_DATA_DEVICE_ENTER_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_device
+ */
+#define WL_DATA_DEVICE_LEAVE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_device
+ */
+#define WL_DATA_DEVICE_MOTION_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_device
+ */
+#define WL_DATA_DEVICE_DROP_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_device
+ */
+#define WL_DATA_DEVICE_SELECTION_SINCE_VERSION 1
+
+/**
+ * @ingroup iface_wl_data_device
+ */
+#define WL_DATA_DEVICE_START_DRAG_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_device
+ */
+#define WL_DATA_DEVICE_SET_SELECTION_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_device
+ */
+#define WL_DATA_DEVICE_RELEASE_SINCE_VERSION 2
+
+/** @ingroup iface_wl_data_device */
+static inline void
+wl_data_device_set_user_data(struct wl_data_device *wl_data_device, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_data_device, user_data);
+}
+
+/** @ingroup iface_wl_data_device */
+static inline void *
+wl_data_device_get_user_data(struct wl_data_device *wl_data_device)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_data_device);
+}
+
+static inline uint32_t
+wl_data_device_get_version(struct wl_data_device *wl_data_device)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_data_device);
+}
+
+/** @ingroup iface_wl_data_device */
+static inline void
+wl_data_device_destroy(struct wl_data_device *wl_data_device)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_data_device);
+}
+
+/**
+ * @ingroup iface_wl_data_device
+ *
+ * This request asks the compositor to start a drag-and-drop
+ * operation on behalf of the client.
+ *
+ * The source argument is the data source that provides the data
+ * for the eventual data transfer. If source is NULL, enter, leave
+ * and motion events are sent only to the client that initiated the
+ * drag and the client is expected to handle the data passing
+ * internally. If source is destroyed, the drag-and-drop session will be
+ * cancelled.
+ *
+ * The origin surface is the surface where the drag originates and
+ * the client must have an active implicit grab that matches the
+ * serial.
+ *
+ * The icon surface is an optional (can be NULL) surface that
+ * provides an icon to be moved around with the cursor.  Initially,
+ * the top-left corner of the icon surface is placed at the cursor
+ * hotspot, but subsequent wl_surface.attach request can move the
+ * relative position. Attach requests must be confirmed with
+ * wl_surface.commit as usual. The icon surface is given the role of
+ * a drag-and-drop icon. If the icon surface already has another role,
+ * it raises a protocol error.
+ *
+ * The current and pending input regions of the icon wl_surface are
+ * cleared, and wl_surface.set_input_region is ignored until the
+ * wl_surface is no longer used as the icon surface. When the use
+ * as an icon ends, the current and pending input regions become
+ * undefined, and the wl_surface is unmapped.
+ */
+static inline void
+wl_data_device_start_drag(struct wl_data_device *wl_data_device, struct wl_data_source *source, struct wl_surface *origin, struct wl_surface *icon, uint32_t serial)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_data_device,
+			 WL_DATA_DEVICE_START_DRAG, NULL, wl_proxy_get_version((struct wl_proxy *) wl_data_device), 0, source, origin, icon, serial);
+}
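
A sketch of starting a drag as described above. `manager`, `device`, `origin` and `pointer_serial` are assumed to come from earlier setup, and `source_listener` is the listener sketched after wl_data_source_add_listener:

    #include <wayland-client.h>

    static void begin_drag(struct wl_data_device_manager *manager,
                           struct wl_data_device *device,
                           struct wl_surface *origin, uint32_t pointer_serial)
    {
        struct wl_data_source *source =
            wl_data_device_manager_create_data_source(manager);

        wl_data_source_add_listener(source, &source_listener, NULL);
        wl_data_source_offer(source, "text/plain;charset=utf-8");
        wl_data_source_set_actions(source,
                                   WL_DATA_DEVICE_MANAGER_DND_ACTION_COPY |
                                   WL_DATA_DEVICE_MANAGER_DND_ACTION_MOVE);

        /* No icon surface in this sketch (the icon argument may be NULL). */
        wl_data_device_start_drag(device, source, origin, NULL, pointer_serial);
    }
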
+
+/**
+ * @ingroup iface_wl_data_device
+ *
+ * This request asks the compositor to set the selection
+ * to the data from the source on behalf of the client.
+ *
+ * To unset the selection, set the source to NULL.
+ */
+static inline void
+wl_data_device_set_selection(struct wl_data_device *wl_data_device, struct wl_data_source *source, uint32_t serial)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_data_device,
+			 WL_DATA_DEVICE_SET_SELECTION, NULL, wl_proxy_get_version((struct wl_proxy *) wl_data_device), 0, source, serial);
+}
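
And the matching clipboard sketch: offer a mime type, then set the selection with the serial of the input event that triggered the copy (same assumed names as above):

    #include <wayland-client.h>

    static void copy_to_clipboard(struct wl_data_device_manager *manager,
                                  struct wl_data_device *device, uint32_t serial)
    {
        struct wl_data_source *source =
            wl_data_device_manager_create_data_source(manager);

        wl_data_source_add_listener(source, &source_listener, NULL);
        wl_data_source_offer(source, "text/plain;charset=utf-8");
        wl_data_device_set_selection(device, source, serial);
    }
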
+
+/**
+ * @ingroup iface_wl_data_device
+ *
+ * This request destroys the data device.
+ */
+static inline void
+wl_data_device_release(struct wl_data_device *wl_data_device)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_data_device,
+			 WL_DATA_DEVICE_RELEASE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_data_device), WL_MARSHAL_FLAG_DESTROY);
+}
+
+#ifndef WL_DATA_DEVICE_MANAGER_DND_ACTION_ENUM
+#define WL_DATA_DEVICE_MANAGER_DND_ACTION_ENUM
+/**
+ * @ingroup iface_wl_data_device_manager
+ * drag and drop actions
+ *
+ * This is a bitmask of the available/preferred actions in a
+ * drag-and-drop operation.
+ *
+ * In the compositor, the selected action is a result of matching the
+ * actions offered by the source and destination sides.  "action" events
+ * with a "none" action will be sent to both source and destination if
+ * there is no match. All further checks will effectively happen on
+ * (source actions ∩ destination actions).
+ *
+ * In addition, compositors may also pick different actions in
+ * reaction to key modifiers being pressed. One common design that
+ * is used in major toolkits (and the behavior recommended for
+ * compositors) is:
+ *
+ * - If no modifiers are pressed, the first match (in bit order)
+ * will be used.
+ * - Pressing Shift selects "move", if enabled in the mask.
+ * - Pressing Control selects "copy", if enabled in the mask.
+ *
+ * Behavior beyond that is considered implementation-dependent.
+ * Compositors may for example bind other modifiers (like Alt/Meta)
+ * or drags initiated with other buttons than BTN_LEFT to specific
+ * actions (e.g. "ask").
+ */
+enum wl_data_device_manager_dnd_action {
+	/**
+	 * no action
+	 */
+	WL_DATA_DEVICE_MANAGER_DND_ACTION_NONE = 0,
+	/**
+	 * copy action
+	 */
+	WL_DATA_DEVICE_MANAGER_DND_ACTION_COPY = 1,
+	/**
+	 * move action
+	 */
+	WL_DATA_DEVICE_MANAGER_DND_ACTION_MOVE = 2,
+	/**
+	 * ask action
+	 */
+	WL_DATA_DEVICE_MANAGER_DND_ACTION_ASK = 4,
+};
+#endif /* WL_DATA_DEVICE_MANAGER_DND_ACTION_ENUM */
+
+#define WL_DATA_DEVICE_MANAGER_CREATE_DATA_SOURCE 0
+#define WL_DATA_DEVICE_MANAGER_GET_DATA_DEVICE 1
+
+
+/**
+ * @ingroup iface_wl_data_device_manager
+ */
+#define WL_DATA_DEVICE_MANAGER_CREATE_DATA_SOURCE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_data_device_manager
+ */
+#define WL_DATA_DEVICE_MANAGER_GET_DATA_DEVICE_SINCE_VERSION 1
+
+/** @ingroup iface_wl_data_device_manager */
+static inline void
+wl_data_device_manager_set_user_data(struct wl_data_device_manager *wl_data_device_manager, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_data_device_manager, user_data);
+}
+
+/** @ingroup iface_wl_data_device_manager */
+static inline void *
+wl_data_device_manager_get_user_data(struct wl_data_device_manager *wl_data_device_manager)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_data_device_manager);
+}
+
+static inline uint32_t
+wl_data_device_manager_get_version(struct wl_data_device_manager *wl_data_device_manager)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_data_device_manager);
+}
+
+/** @ingroup iface_wl_data_device_manager */
+static inline void
+wl_data_device_manager_destroy(struct wl_data_device_manager *wl_data_device_manager)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_data_device_manager);
+}
+
+/**
+ * @ingroup iface_wl_data_device_manager
+ *
+ * Create a new data source.
+ */
+static inline struct wl_data_source *
+wl_data_device_manager_create_data_source(struct wl_data_device_manager *wl_data_device_manager)
+{
+	struct wl_proxy *id;
+
+	id = wl_proxy_marshal_flags((struct wl_proxy *) wl_data_device_manager,
+			 WL_DATA_DEVICE_MANAGER_CREATE_DATA_SOURCE, &wl_data_source_interface, wl_proxy_get_version((struct wl_proxy *) wl_data_device_manager), 0, NULL);
+
+	return (struct wl_data_source *) id;
+}
+
+/**
+ * @ingroup iface_wl_data_device_manager
+ *
+ * Create a new data device for a given seat.
+ */
+static inline struct wl_data_device *
+wl_data_device_manager_get_data_device(struct wl_data_device_manager *wl_data_device_manager, struct wl_seat *seat)
+{
+	struct wl_proxy *id;
+
+	id = wl_proxy_marshal_flags((struct wl_proxy *) wl_data_device_manager,
+			 WL_DATA_DEVICE_MANAGER_GET_DATA_DEVICE, &wl_data_device_interface, wl_proxy_get_version((struct wl_proxy *) wl_data_device_manager), 0, NULL, seat);
+
+	return (struct wl_data_device *) id;
+}
+
+#ifndef WL_SHELL_ERROR_ENUM
+#define WL_SHELL_ERROR_ENUM
+enum wl_shell_error {
+	/**
+	 * given wl_surface has another role
+	 */
+	WL_SHELL_ERROR_ROLE = 0,
+};
+#endif /* WL_SHELL_ERROR_ENUM */
+
+#define WL_SHELL_GET_SHELL_SURFACE 0
+
+
+/**
+ * @ingroup iface_wl_shell
+ */
+#define WL_SHELL_GET_SHELL_SURFACE_SINCE_VERSION 1
+
+/** @ingroup iface_wl_shell */
+static inline void
+wl_shell_set_user_data(struct wl_shell *wl_shell, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_shell, user_data);
+}
+
+/** @ingroup iface_wl_shell */
+static inline void *
+wl_shell_get_user_data(struct wl_shell *wl_shell)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_shell);
+}
+
+static inline uint32_t
+wl_shell_get_version(struct wl_shell *wl_shell)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_shell);
+}
+
+/** @ingroup iface_wl_shell */
+static inline void
+wl_shell_destroy(struct wl_shell *wl_shell)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_shell);
+}
+
+/**
+ * @ingroup iface_wl_shell
+ *
+ * Create a shell surface for an existing surface. This gives
+ * the wl_surface the role of a shell surface. If the wl_surface
+ * already has another role, it raises a protocol error.
+ *
+ * Only one shell surface can be associated with a given surface.
+ */
+static inline struct wl_shell_surface *
+wl_shell_get_shell_surface(struct wl_shell *wl_shell, struct wl_surface *surface)
+{
+	struct wl_proxy *id;
+
+	id = wl_proxy_marshal_flags((struct wl_proxy *) wl_shell,
+			 WL_SHELL_GET_SHELL_SURFACE, &wl_shell_surface_interface, wl_proxy_get_version((struct wl_proxy *) wl_shell), 0, NULL, surface);
+
+	return (struct wl_shell_surface *) id;
+}
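
A sketch of the classic (pre-xdg_shell) toplevel setup this request enables; `compositor` and `shell` are assumed to be bound from the registry already:

    #include <wayland-client.h>

    static struct wl_shell_surface *make_toplevel(struct wl_compositor *compositor,
                                                  struct wl_shell *shell)
    {
        struct wl_surface *surface = wl_compositor_create_surface(compositor);
        struct wl_shell_surface *shell_surface =
            wl_shell_get_shell_surface(shell, surface);

        wl_shell_surface_set_title(shell_surface, "example");
        wl_shell_surface_set_toplevel(shell_surface);
        return shell_surface;
    }
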
+
+#ifndef WL_SHELL_SURFACE_RESIZE_ENUM
+#define WL_SHELL_SURFACE_RESIZE_ENUM
+/**
+ * @ingroup iface_wl_shell_surface
+ * edge values for resizing
+ *
+ * These values are used to indicate which edge of a surface
+ * is being dragged in a resize operation. The server may
+ * use this information to adapt its behavior, e.g. choose
+ * an appropriate cursor image.
+ */
+enum wl_shell_surface_resize {
+	/**
+	 * no edge
+	 */
+	WL_SHELL_SURFACE_RESIZE_NONE = 0,
+	/**
+	 * top edge
+	 */
+	WL_SHELL_SURFACE_RESIZE_TOP = 1,
+	/**
+	 * bottom edge
+	 */
+	WL_SHELL_SURFACE_RESIZE_BOTTOM = 2,
+	/**
+	 * left edge
+	 */
+	WL_SHELL_SURFACE_RESIZE_LEFT = 4,
+	/**
+	 * top and left edges
+	 */
+	WL_SHELL_SURFACE_RESIZE_TOP_LEFT = 5,
+	/**
+	 * bottom and left edges
+	 */
+	WL_SHELL_SURFACE_RESIZE_BOTTOM_LEFT = 6,
+	/**
+	 * right edge
+	 */
+	WL_SHELL_SURFACE_RESIZE_RIGHT = 8,
+	/**
+	 * top and right edges
+	 */
+	WL_SHELL_SURFACE_RESIZE_TOP_RIGHT = 9,
+	/**
+	 * bottom and right edges
+	 */
+	WL_SHELL_SURFACE_RESIZE_BOTTOM_RIGHT = 10,
+};
+#endif /* WL_SHELL_SURFACE_RESIZE_ENUM */
+
+#ifndef WL_SHELL_SURFACE_TRANSIENT_ENUM
+#define WL_SHELL_SURFACE_TRANSIENT_ENUM
+/**
+ * @ingroup iface_wl_shell_surface
+ * details of transient behaviour
+ *
+ * These flags specify details of the expected behaviour
+ * of transient surfaces. Used in the set_transient request.
+ */
+enum wl_shell_surface_transient {
+	/**
+	 * do not set keyboard focus
+	 */
+	WL_SHELL_SURFACE_TRANSIENT_INACTIVE = 0x1,
+};
+#endif /* WL_SHELL_SURFACE_TRANSIENT_ENUM */
+
+#ifndef WL_SHELL_SURFACE_FULLSCREEN_METHOD_ENUM
+#define WL_SHELL_SURFACE_FULLSCREEN_METHOD_ENUM
+/**
+ * @ingroup iface_wl_shell_surface
+ * different method to set the surface fullscreen
+ *
+ * Hints to indicate to the compositor how to deal with a conflict
+ * between the dimensions of the surface and the dimensions of the
+ * output. The compositor is free to ignore this parameter.
+ */
+enum wl_shell_surface_fullscreen_method {
+	/**
+	 * no preference, apply default policy
+	 */
+	WL_SHELL_SURFACE_FULLSCREEN_METHOD_DEFAULT = 0,
+	/**
+	 * scale, preserve the surface's aspect ratio and center on output
+	 */
+	WL_SHELL_SURFACE_FULLSCREEN_METHOD_SCALE = 1,
+	/**
+	 * switch output mode to the smallest mode that can fit the surface, add black borders to compensate size mismatch
+	 */
+	WL_SHELL_SURFACE_FULLSCREEN_METHOD_DRIVER = 2,
+	/**
+	 * no upscaling, center on output and add black borders to compensate size mismatch
+	 */
+	WL_SHELL_SURFACE_FULLSCREEN_METHOD_FILL = 3,
+};
+#endif /* WL_SHELL_SURFACE_FULLSCREEN_METHOD_ENUM */
+
+/**
+ * @ingroup iface_wl_shell_surface
+ * @struct wl_shell_surface_listener
+ */
+struct wl_shell_surface_listener {
+	/**
+	 * ping client
+	 *
+	 * Ping a client to check if it is receiving events and sending
+	 * requests. A client is expected to reply with a pong request.
+	 * @param serial serial number of the ping
+	 */
+	void (*ping)(void *data,
+		     struct wl_shell_surface *wl_shell_surface,
+		     uint32_t serial);
+	/**
+	 * suggest resize
+	 *
+	 * The configure event asks the client to resize its surface.
+	 *
+	 * The size is a hint, in the sense that the client is free to
+	 * ignore it if it doesn't resize, or pick a smaller size (to satisfy
+	 * aspect ratio or resize in steps of NxM pixels).
+	 *
+	 * The edges parameter provides a hint about how the surface was
+	 * resized. The client may use this information to decide how to
+	 * adjust its content to the new size (e.g. a scrolling area might
+	 * adjust its content position to leave the viewable content
+	 * unmoved).
+	 *
+	 * The client is free to dismiss all but the last configure event
+	 * it received.
+	 *
+	 * The width and height arguments specify the size of the window in
+	 * surface-local coordinates.
+	 * @param edges how the surface was resized
+	 * @param width new width of the surface
+	 * @param height new height of the surface
+	 */
+	void (*configure)(void *data,
+			  struct wl_shell_surface *wl_shell_surface,
+			  uint32_t edges,
+			  int32_t width,
+			  int32_t height);
+	/**
+	 * popup interaction is done
+	 *
+	 * The popup_done event is sent out when a popup grab is broken,
+	 * that is, when the user clicks a surface that doesn't belong to
+	 * the client owning the popup surface.
+	 */
+	void (*popup_done)(void *data,
+			   struct wl_shell_surface *wl_shell_surface);
+};
+
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+static inline int
+wl_shell_surface_add_listener(struct wl_shell_surface *wl_shell_surface,
+			      const struct wl_shell_surface_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_shell_surface,
+				     (void (**)(void)) listener, data);
+}
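
A sketch of the corresponding listener: pings must be answered with a pong to avoid being deemed unresponsive, and only the last configure event needs to be honored:

    #include <wayland-client.h>

    static void shell_surface_handle_ping(void *data,
                                          struct wl_shell_surface *shell_surface,
                                          uint32_t serial)
    {
        wl_shell_surface_pong(shell_surface, serial);
    }

    static void shell_surface_handle_configure(void *data,
                                               struct wl_shell_surface *shell_surface,
                                               uint32_t edges, int32_t width, int32_t height)
    {
        /* Resize (or ignore) as appropriate; only the last configure matters. */
    }

    static void shell_surface_handle_popup_done(void *data,
                                                struct wl_shell_surface *shell_surface)
    {
    }

    static const struct wl_shell_surface_listener shell_surface_listener = {
        .ping = shell_surface_handle_ping,
        .configure = shell_surface_handle_configure,
        .popup_done = shell_surface_handle_popup_done,
    };
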
+
+#define WL_SHELL_SURFACE_PONG 0
+#define WL_SHELL_SURFACE_MOVE 1
+#define WL_SHELL_SURFACE_RESIZE 2
+#define WL_SHELL_SURFACE_SET_TOPLEVEL 3
+#define WL_SHELL_SURFACE_SET_TRANSIENT 4
+#define WL_SHELL_SURFACE_SET_FULLSCREEN 5
+#define WL_SHELL_SURFACE_SET_POPUP 6
+#define WL_SHELL_SURFACE_SET_MAXIMIZED 7
+#define WL_SHELL_SURFACE_SET_TITLE 8
+#define WL_SHELL_SURFACE_SET_CLASS 9
+
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_PING_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_CONFIGURE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_POPUP_DONE_SINCE_VERSION 1
+
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_PONG_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_MOVE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_RESIZE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_SET_TOPLEVEL_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_SET_TRANSIENT_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_SET_FULLSCREEN_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_SET_POPUP_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_SET_MAXIMIZED_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_SET_TITLE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_shell_surface
+ */
+#define WL_SHELL_SURFACE_SET_CLASS_SINCE_VERSION 1
+
+/** @ingroup iface_wl_shell_surface */
+static inline void
+wl_shell_surface_set_user_data(struct wl_shell_surface *wl_shell_surface, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_shell_surface, user_data);
+}
+
+/** @ingroup iface_wl_shell_surface */
+static inline void *
+wl_shell_surface_get_user_data(struct wl_shell_surface *wl_shell_surface)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_shell_surface);
+}
+
+static inline uint32_t
+wl_shell_surface_get_version(struct wl_shell_surface *wl_shell_surface)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_shell_surface);
+}
+
+/** @ingroup iface_wl_shell_surface */
+static inline void
+wl_shell_surface_destroy(struct wl_shell_surface *wl_shell_surface)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_shell_surface);
+}
+
+/**
+ * @ingroup iface_wl_shell_surface
+ *
+ * A client must respond to a ping event with a pong request or
+ * the client may be deemed unresponsive.
+ */
+static inline void
+wl_shell_surface_pong(struct wl_shell_surface *wl_shell_surface, uint32_t serial)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_shell_surface,
+			 WL_SHELL_SURFACE_PONG, NULL, wl_proxy_get_version((struct wl_proxy *) wl_shell_surface), 0, serial);
+}
+
+/**
+ * @ingroup iface_wl_shell_surface
+ *
+ * Start a pointer-driven move of the surface.
+ *
+ * This request must be used in response to a button press event.
+ * The server may ignore move requests depending on the state of
+ * the surface (e.g. fullscreen or maximized).
+ */
+static inline void
+wl_shell_surface_move(struct wl_shell_surface *wl_shell_surface, struct wl_seat *seat, uint32_t serial)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_shell_surface,
+			 WL_SHELL_SURFACE_MOVE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_shell_surface), 0, seat, serial);
+}
+
+/**
+ * @ingroup iface_wl_shell_surface
+ *
+ * Start a pointer-driven resizing of the surface.
+ *
+ * This request must be used in response to a button press event.
+ * The server may ignore resize requests depending on the state of
+ * the surface (e.g. fullscreen or maximized).
+ */
+static inline void
+wl_shell_surface_resize(struct wl_shell_surface *wl_shell_surface, struct wl_seat *seat, uint32_t serial, uint32_t edges)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_shell_surface,
+			 WL_SHELL_SURFACE_RESIZE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_shell_surface), 0, seat, serial, edges);
+}
+
+/**
+ * @ingroup iface_wl_shell_surface
+ *
+ * Map the surface as a toplevel surface.
+ *
+ * A toplevel surface is not fullscreen, maximized or transient.
+ */
+static inline void
+wl_shell_surface_set_toplevel(struct wl_shell_surface *wl_shell_surface)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_shell_surface,
+			 WL_SHELL_SURFACE_SET_TOPLEVEL, NULL, wl_proxy_get_version((struct wl_proxy *) wl_shell_surface), 0);
+}
+
+/**
+ * @ingroup iface_wl_shell_surface
+ *
+ * Map the surface relative to an existing surface.
+ *
+ * The x and y arguments specify the location of the upper left
+ * corner of the surface relative to the upper left corner of the
+ * parent surface, in surface-local coordinates.
+ *
+ * The flags argument controls details of the transient behaviour.
+ */
+static inline void
+wl_shell_surface_set_transient(struct wl_shell_surface *wl_shell_surface, struct wl_surface *parent, int32_t x, int32_t y, uint32_t flags)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_shell_surface,
+			 WL_SHELL_SURFACE_SET_TRANSIENT, NULL, wl_proxy_get_version((struct wl_proxy *) wl_shell_surface), 0, parent, x, y, flags);
+}
+
+/**
+ * @ingroup iface_wl_shell_surface
+ *
+ * Map the surface as a fullscreen surface.
+ *
+ * If an output parameter is given then the surface will be made
+ * fullscreen on that output. If the client does not specify the
+ * output then the compositor will apply its policy - usually
+ * choosing the output on which the surface has the biggest surface
+ * area.
+ *
+ * The client may specify a method to resolve a size conflict
+ * between the output size and the surface size - this is provided
+ * through the method parameter.
+ *
+ * The framerate parameter is used only when the method is set
+ * to "driver", to indicate the preferred framerate. A value of 0
+ * indicates that the client does not care about framerate. The
+ * framerate is specified in mHz, that is, a framerate of 60000 is 60 Hz.
+ *
+ * A method of "scale" or "driver" implies a scaling operation of
+ * the surface, either via a direct scaling operation or a change of
+ * the output mode. This will override any kind of output scaling, so
+ * that mapping a surface with a buffer size equal to the mode can
+ * fill the screen independent of buffer_scale.
+ *
+ * A method of "fill" means we don't scale up the buffer, however
+ * any output scale is applied. This means that you may run into
+ * an edge case where the application maps a buffer with the same
+ * size of the output mode but buffer_scale 1 (thus making a
+ * surface larger than the output). In this case it is allowed to
+ * downscale the results to fit the screen.
+ *
+ * The compositor must reply to this request with a configure event
+ * with the dimensions for the output on which the surface will
+ * be made fullscreen.
+ */
+static inline void
+wl_shell_surface_set_fullscreen(struct wl_shell_surface *wl_shell_surface, uint32_t method, uint32_t framerate, struct wl_output *output)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_shell_surface,
+			 WL_SHELL_SURFACE_SET_FULLSCREEN, NULL, wl_proxy_get_version((struct wl_proxy *) wl_shell_surface), 0, method, framerate, output);
+}
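+
+/*
+ * Illustrative sketch, not part of the generated protocol: going fullscreen
+ * while letting the compositor pick the output (NULL) and the scaling policy,
+ * with 0 meaning "no framerate preference". Assumes the
+ * WL_SHELL_SURFACE_FULLSCREEN_METHOD_DEFAULT enumerator declared earlier in
+ * this header.
+ *
+ *   wl_shell_surface_set_fullscreen(shell_surface,
+ *                                   WL_SHELL_SURFACE_FULLSCREEN_METHOD_DEFAULT,
+ *                                   0, NULL);
+ */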
+
+/**
+ * @ingroup iface_wl_shell_surface
+ *
+ * Map the surface as a popup.
+ *
+ * A popup surface is a transient surface with an added pointer
+ * grab.
+ *
+ * An existing implicit grab will be changed to owner-events mode,
+ * and the popup grab will continue after the implicit grab ends
+ * (i.e. releasing the mouse button does not cause the popup to
+ * be unmapped).
+ *
+ * The popup grab continues until the window is destroyed or a
+ * mouse button is pressed in any other client's window. A click
+ * in any of the client's surfaces is reported as normal, however,
+ * clicks in other clients' surfaces will be discarded and trigger
+ * the callback.
+ *
+ * The x and y arguments specify the location of the upper left
+ * corner of the surface relative to the upper left corner of the
+ * parent surface, in surface-local coordinates.
+ */
+static inline void
+wl_shell_surface_set_popup(struct wl_shell_surface *wl_shell_surface, struct wl_seat *seat, uint32_t serial, struct wl_surface *parent, int32_t x, int32_t y, uint32_t flags)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_shell_surface,
+			 WL_SHELL_SURFACE_SET_POPUP, NULL, wl_proxy_get_version((struct wl_proxy *) wl_shell_surface), 0, seat, serial, parent, x, y, flags);
+}
+
+/**
+ * @ingroup iface_wl_shell_surface
+ *
+ * Map the surface as a maximized surface.
+ *
+ * If an output parameter is given then the surface will be
+ * maximized on that output. If the client does not specify the
+ * output then the compositor will apply its policy - usually
+ * choosing the output on which the surface has the biggest surface
+ * area.
+ *
+ * The compositor will reply with a configure event telling
+ * the expected new surface size. The operation is completed
+ * on the next buffer attach to this surface.
+ *
+ * A maximized surface typically fills the entire output it is
+ * bound to, except for desktop elements such as panels. This is
+ * the main difference between a maximized shell surface and a
+ * fullscreen shell surface.
+ *
+ * The details depend on the compositor implementation.
+ */
+static inline void
+wl_shell_surface_set_maximized(struct wl_shell_surface *wl_shell_surface, struct wl_output *output)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_shell_surface,
+			 WL_SHELL_SURFACE_SET_MAXIMIZED, NULL, wl_proxy_get_version((struct wl_proxy *) wl_shell_surface), 0, output);
+}
+
+/**
+ * @ingroup iface_wl_shell_surface
+ *
+ * Set a short title for the surface.
+ *
+ * This string may be used to identify the surface in a task bar,
+ * window list, or other user interface elements provided by the
+ * compositor.
+ *
+ * The string must be encoded in UTF-8.
+ */
+static inline void
+wl_shell_surface_set_title(struct wl_shell_surface *wl_shell_surface, const char *title)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_shell_surface,
+			 WL_SHELL_SURFACE_SET_TITLE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_shell_surface), 0, title);
+}
+
+/**
+ * @ingroup iface_wl_shell_surface
+ *
+ * Set a class for the surface.
+ *
+ * The surface class identifies the general class of applications
+ * to which the surface belongs. A common convention is to use the
+ * file name (or the full path if it is a non-standard location) of
+ * the application's .desktop file as the class.
+ */
+static inline void
+wl_shell_surface_set_class(struct wl_shell_surface *wl_shell_surface, const char *class_)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_shell_surface,
+			 WL_SHELL_SURFACE_SET_CLASS, NULL, wl_proxy_get_version((struct wl_proxy *) wl_shell_surface), 0, class_);
+}
+
+#ifndef WL_SURFACE_ERROR_ENUM
+#define WL_SURFACE_ERROR_ENUM
+/**
+ * @ingroup iface_wl_surface
+ * wl_surface error values
+ *
+ * These errors can be emitted in response to wl_surface requests.
+ */
+enum wl_surface_error {
+	/**
+	 * buffer scale value is invalid
+	 */
+	WL_SURFACE_ERROR_INVALID_SCALE = 0,
+	/**
+	 * buffer transform value is invalid
+	 */
+	WL_SURFACE_ERROR_INVALID_TRANSFORM = 1,
+	/**
+	 * buffer size is invalid
+	 */
+	WL_SURFACE_ERROR_INVALID_SIZE = 2,
+	/**
+	 * buffer offset is invalid
+	 */
+	WL_SURFACE_ERROR_INVALID_OFFSET = 3,
+};
+#endif /* WL_SURFACE_ERROR_ENUM */
+
+/**
+ * @ingroup iface_wl_surface
+ * @struct wl_surface_listener
+ */
+struct wl_surface_listener {
+	/**
+	 * surface enters an output
+	 *
+	 * This is emitted whenever a surface's creation, movement, or
+	 * resizing results in some part of it being within the scanout
+	 * region of an output.
+	 *
+	 * Note that a surface may be overlapping with zero or more
+	 * outputs.
+	 * @param output output entered by the surface
+	 */
+	void (*enter)(void *data,
+		      struct wl_surface *wl_surface,
+		      struct wl_output *output);
+	/**
+	 * surface leaves an output
+	 *
+	 * This is emitted whenever a surface's creation, movement, or
+	 * resizing results in it no longer having any part of it within
+	 * the scanout region of an output.
+	 *
+	 * Clients should not use the number of outputs the surface is on
+	 * for frame throttling purposes. The surface might be hidden even
+	 * if no leave event has been sent, and the compositor might expect
+	 * new surface content updates even if no enter event has been
+	 * sent. The frame event should be used instead.
+	 * @param output output left by the surface
+	 */
+	void (*leave)(void *data,
+		      struct wl_surface *wl_surface,
+		      struct wl_output *output);
+};
+
+/**
+ * @ingroup iface_wl_surface
+ */
+static inline int
+wl_surface_add_listener(struct wl_surface *wl_surface,
+			const struct wl_surface_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_surface,
+				     (void (**)(void)) listener, data);
+}
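+
+/*
+ * Illustrative sketch, not part of the generated protocol: counting the
+ * outputs a surface currently overlaps by listening for enter/leave. The
+ * int counter (overlap_count) passed as user data is a stand-in for real
+ * per-output bookkeeping owned by the caller.
+ *
+ *   static void handle_surface_enter(void *data, struct wl_surface *surface,
+ *                                    struct wl_output *output)
+ *   {
+ *       int *overlapped_outputs = data;
+ *       (*overlapped_outputs)++;
+ *   }
+ *
+ *   static void handle_surface_leave(void *data, struct wl_surface *surface,
+ *                                    struct wl_output *output)
+ *   {
+ *       int *overlapped_outputs = data;
+ *       (*overlapped_outputs)--;
+ *   }
+ *
+ *   static const struct wl_surface_listener surface_listener = {
+ *       .enter = handle_surface_enter,
+ *       .leave = handle_surface_leave,
+ *   };
+ *
+ *   wl_surface_add_listener(surface, &surface_listener, &overlap_count);
+ */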
+
+#define WL_SURFACE_DESTROY 0
+#define WL_SURFACE_ATTACH 1
+#define WL_SURFACE_DAMAGE 2
+#define WL_SURFACE_FRAME 3
+#define WL_SURFACE_SET_OPAQUE_REGION 4
+#define WL_SURFACE_SET_INPUT_REGION 5
+#define WL_SURFACE_COMMIT 6
+#define WL_SURFACE_SET_BUFFER_TRANSFORM 7
+#define WL_SURFACE_SET_BUFFER_SCALE 8
+#define WL_SURFACE_DAMAGE_BUFFER 9
+#define WL_SURFACE_OFFSET 10
+
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_ENTER_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_LEAVE_SINCE_VERSION 1
+
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_DESTROY_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_ATTACH_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_DAMAGE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_FRAME_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_SET_OPAQUE_REGION_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_SET_INPUT_REGION_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_COMMIT_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_SET_BUFFER_TRANSFORM_SINCE_VERSION 2
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_SET_BUFFER_SCALE_SINCE_VERSION 3
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_DAMAGE_BUFFER_SINCE_VERSION 4
+/**
+ * @ingroup iface_wl_surface
+ */
+#define WL_SURFACE_OFFSET_SINCE_VERSION 5
+
+/** @ingroup iface_wl_surface */
+static inline void
+wl_surface_set_user_data(struct wl_surface *wl_surface, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_surface, user_data);
+}
+
+/** @ingroup iface_wl_surface */
+static inline void *
+wl_surface_get_user_data(struct wl_surface *wl_surface)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_surface);
+}
+
+static inline uint32_t
+wl_surface_get_version(struct wl_surface *wl_surface)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_surface);
+}
+
+/**
+ * @ingroup iface_wl_surface
+ *
+ * Deletes the surface and invalidates its object ID.
+ */
+static inline void
+wl_surface_destroy(struct wl_surface *wl_surface)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_surface,
+			 WL_SURFACE_DESTROY, NULL, wl_proxy_get_version((struct wl_proxy *) wl_surface), WL_MARSHAL_FLAG_DESTROY);
+}
+
+/**
+ * @ingroup iface_wl_surface
+ *
+ * Set a buffer as the content of this surface.
+ *
+ * The new size of the surface is calculated based on the buffer
+ * size transformed by the inverse buffer_transform and the
+ * inverse buffer_scale. This means that at commit time the supplied
+ * buffer size must be an integer multiple of the buffer_scale. If
+ * that's not the case, an invalid_size error is sent.
+ *
+ * The x and y arguments specify the location of the new pending
+ * buffer's upper left corner, relative to the current buffer's upper
+ * left corner, in surface-local coordinates. In other words, the
+ * x and y, combined with the new surface size, define in which
+ * directions the surface's size changes. Setting anything other than 0
+ * as x and y arguments is discouraged, and should instead be replaced
+ * with using the separate wl_surface.offset request.
+ *
+ * When the bound wl_surface version is 5 or higher, passing any
+ * non-zero x or y is a protocol violation, and will result in an
+ * 'invalid_offset' error being raised. To achieve equivalent semantics,
+ * use wl_surface.offset.
+ *
+ * Surface contents are double-buffered state, see wl_surface.commit.
+ *
+ * The initial surface contents are void; there is no content.
+ * wl_surface.attach assigns the given wl_buffer as the pending
+ * wl_buffer. wl_surface.commit makes the pending wl_buffer the new
+ * surface contents, and the size of the surface becomes the size
+ * calculated from the wl_buffer, as described above. After commit,
+ * there is no pending buffer until the next attach.
+ *
+ * Committing a pending wl_buffer allows the compositor to read the
+ * pixels in the wl_buffer. The compositor may access the pixels at
+ * any time after the wl_surface.commit request. When the compositor
+ * will not access the pixels anymore, it will send the
+ * wl_buffer.release event. Only after receiving wl_buffer.release,
+ * the client may reuse the wl_buffer. A wl_buffer that has been
+ * attached and then replaced by another attach instead of committed
+ * will not receive a release event, and is not used by the
+ * compositor.
+ *
+ * If a pending wl_buffer has been committed to more than one wl_surface,
+ * the delivery of wl_buffer.release events becomes undefined. A well
+ * behaved client should not rely on wl_buffer.release events in this
+ * case. Alternatively, a client could create multiple wl_buffer objects
+ * from the same backing storage or use wp_linux_buffer_release.
+ *
+ * Destroying the wl_buffer after wl_buffer.release does not change
+ * the surface contents. Destroying the wl_buffer before wl_buffer.release
+ * is allowed as long as the underlying buffer storage isn't re-used (this
+ * can happen e.g. on client process termination). However, if the client
+ * destroys the wl_buffer before receiving the wl_buffer.release event and
+ * mutates the underlying buffer storage, the surface contents become
+ * undefined immediately.
+ *
+ * If wl_surface.attach is sent with a NULL wl_buffer, the
+ * following wl_surface.commit will remove the surface content.
+ */
+static inline void
+wl_surface_attach(struct wl_surface *wl_surface, struct wl_buffer *buffer, int32_t x, int32_t y)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_surface,
+			 WL_SURFACE_ATTACH, NULL, wl_proxy_get_version((struct wl_proxy *) wl_surface), 0, buffer, x, y);
+}
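+
+/*
+ * Illustrative sketch, not part of the generated protocol: the usual
+ * attach/damage/commit cycle for presenting a new buffer. The wl_buffer is
+ * assumed to come from e.g. a wl_shm pool created elsewhere, and should not
+ * be written to again until wl_buffer.release. wl_surface.damage_buffer
+ * needs version 4; on older surfaces use wl_surface.damage with
+ * surface-local coordinates instead.
+ *
+ *   wl_surface_attach(surface, buffer, 0, 0);
+ *   wl_surface_damage_buffer(surface, 0, 0, buffer_width, buffer_height);
+ *   wl_surface_commit(surface);
+ */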
+
+/**
+ * @ingroup iface_wl_surface
+ *
+ * This request is used to describe the regions where the pending
+ * buffer is different from the current surface contents, and where
+ * the surface therefore needs to be repainted. The compositor
+ * ignores the parts of the damage that fall outside of the surface.
+ *
+ * Damage is double-buffered state, see wl_surface.commit.
+ *
+ * The damage rectangle is specified in surface-local coordinates,
+ * where x and y specify the upper left corner of the damage rectangle.
+ *
+ * The initial value for pending damage is empty: no damage.
+ * wl_surface.damage adds pending damage: the new pending damage
+ * is the union of old pending damage and the given rectangle.
+ *
+ * wl_surface.commit assigns pending damage as the current damage,
+ * and clears pending damage. The server will clear the current
+ * damage as it repaints the surface.
+ *
+ * Note! New clients should not use this request. Instead damage can be
+ * posted with wl_surface.damage_buffer which uses buffer coordinates
+ * instead of surface coordinates.
+ */
+static inline void
+wl_surface_damage(struct wl_surface *wl_surface, int32_t x, int32_t y, int32_t width, int32_t height)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_surface,
+			 WL_SURFACE_DAMAGE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_surface), 0, x, y, width, height);
+}
+
+/**
+ * @ingroup iface_wl_surface
+ *
+ * Request a notification when it is a good time to start drawing a new
+ * frame, by creating a frame callback. This is useful for throttling
+ * redrawing operations, and driving animations.
+ *
+ * When a client is animating on a wl_surface, it can use the 'frame'
+ * request to get notified when it is a good time to draw and commit the
+ * next frame of animation. If the client commits an update earlier than
+ * that, it is likely that some updates will not make it to the display,
+ * and the client is wasting resources by drawing too often.
+ *
+ * The frame request will take effect on the next wl_surface.commit.
+ * The notification will only be posted for one frame unless
+ * requested again. For a wl_surface, the notifications are posted in
+ * the order the frame requests were committed.
+ *
+ * The server must send the notifications so that a client
+ * will not send excessive updates, while still allowing
+ * the highest possible update rate for clients that wait for the reply
+ * before drawing again. The server should give some time for the client
+ * to draw and commit after sending the frame callback events to let it
+ * hit the next output refresh.
+ *
+ * A server should avoid signaling the frame callbacks if the
+ * surface is not visible in any way, e.g. the surface is off-screen,
+ * or completely obscured by other opaque surfaces.
+ *
+ * The object returned by this request will be destroyed by the
+ * compositor after the callback is fired and as such the client must not
+ * attempt to use it after that point.
+ *
+ * The callback_data passed in the callback is the current time, in
+ * milliseconds, with an undefined base.
+ */
+static inline struct wl_callback *
+wl_surface_frame(struct wl_surface *wl_surface)
+{
+	struct wl_proxy *callback;
+
+	callback = wl_proxy_marshal_flags((struct wl_proxy *) wl_surface,
+			 WL_SURFACE_FRAME, &wl_callback_interface, wl_proxy_get_version((struct wl_proxy *) wl_surface), 0, NULL);
+
+	return (struct wl_callback *) callback;
+}
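+
+/*
+ * Illustrative sketch, not part of the generated protocol: frame-callback
+ * driven redraw throttling. Each callback fires only once, so the handler
+ * requests the next one before committing; draw_frame() is a hypothetical
+ * redraw hook.
+ *
+ *   static const struct wl_callback_listener frame_listener;
+ *
+ *   static void frame_done(void *data, struct wl_callback *callback,
+ *                          uint32_t time_ms)
+ *   {
+ *       struct wl_surface *surface = data;
+ *
+ *       wl_callback_destroy(callback);
+ *       callback = wl_surface_frame(surface);
+ *       wl_callback_add_listener(callback, &frame_listener, surface);
+ *       draw_frame(surface);
+ *       wl_surface_commit(surface);
+ *   }
+ *
+ *   static const struct wl_callback_listener frame_listener = {
+ *       .done = frame_done,
+ *   };
+ */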
+
+/**
+ * @ingroup iface_wl_surface
+ *
+ * This request sets the region of the surface that contains
+ * opaque content.
+ *
+ * The opaque region is an optimization hint for the compositor
+ * that lets it optimize the redrawing of content behind opaque
+ * regions.  Setting an opaque region is not required for correct
+ * behaviour, but marking transparent content as opaque will result
+ * in repaint artifacts.
+ *
+ * The opaque region is specified in surface-local coordinates.
+ *
+ * The compositor ignores the parts of the opaque region that fall
+ * outside of the surface.
+ *
+ * Opaque region is double-buffered state, see wl_surface.commit.
+ *
+ * wl_surface.set_opaque_region changes the pending opaque region.
+ * wl_surface.commit copies the pending region to the current region.
+ * Otherwise, the pending and current regions are never changed.
+ *
+ * The initial value for an opaque region is empty. Setting the pending
+ * opaque region has copy semantics, and the wl_region object can be
+ * destroyed immediately. A NULL wl_region causes the pending opaque
+ * region to be set to empty.
+ */
+static inline void
+wl_surface_set_opaque_region(struct wl_surface *wl_surface, struct wl_region *region)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_surface,
+			 WL_SURFACE_SET_OPAQUE_REGION, NULL, wl_proxy_get_version((struct wl_proxy *) wl_surface), 0, region);
+}
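+
+/*
+ * Illustrative sketch, not part of the generated protocol: because the
+ * request has copy semantics, the wl_region can be destroyed as soon as it
+ * has been handed to the surface. Assumes a bound wl_compositor proxy.
+ *
+ *   struct wl_region *region = wl_compositor_create_region(compositor);
+ *   wl_region_add(region, 0, 0, width, height);
+ *   wl_surface_set_opaque_region(surface, region);
+ *   wl_region_destroy(region);
+ *   wl_surface_commit(surface);
+ */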
+
+/**
+ * @ingroup iface_wl_surface
+ *
+ * This request sets the region of the surface that can receive
+ * pointer and touch events.
+ *
+ * Input events happening outside of this region will try the next
+ * surface in the server surface stack. The compositor ignores the
+ * parts of the input region that fall outside of the surface.
+ *
+ * The input region is specified in surface-local coordinates.
+ *
+ * Input region is double-buffered state, see wl_surface.commit.
+ *
+ * wl_surface.set_input_region changes the pending input region.
+ * wl_surface.commit copies the pending region to the current region.
+ * Otherwise the pending and current regions are never changed,
+ * except cursor and icon surfaces are special cases, see
+ * wl_pointer.set_cursor and wl_data_device.start_drag.
+ *
+ * The initial value for an input region is infinite. That means the
+ * whole surface will accept input. Setting the pending input region
+ * has copy semantics, and the wl_region object can be destroyed
+ * immediately. A NULL wl_region causes the input region to be set
+ * to infinite.
+ */
+static inline void
+wl_surface_set_input_region(struct wl_surface *wl_surface, struct wl_region *region)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_surface,
+			 WL_SURFACE_SET_INPUT_REGION, NULL, wl_proxy_get_version((struct wl_proxy *) wl_surface), 0, region);
+}
+
+/**
+ * @ingroup iface_wl_surface
+ *
+ * Surface state (input, opaque, and damage regions, attached buffers,
+ * etc.) is double-buffered. Protocol requests modify the pending state,
+ * as opposed to the current state in use by the compositor. A commit
+ * request atomically applies all pending state, replacing the current
+ * state. After commit, the new pending state is as documented for each
+ * related request.
+ *
+ * On commit, a pending wl_buffer is applied first, and all other state
+ * second. This means that all coordinates in double-buffered state are
+ * relative to the new wl_buffer coming into use, except for
+ * wl_surface.attach itself. If there is no pending wl_buffer, the
+ * coordinates are relative to the current surface contents.
+ *
+ * All requests that need a commit to become effective are documented
+ * to affect double-buffered state.
+ *
+ * Other interfaces may add further double-buffered surface state.
+ */
+static inline void
+wl_surface_commit(struct wl_surface *wl_surface)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_surface,
+			 WL_SURFACE_COMMIT, NULL, wl_proxy_get_version((struct wl_proxy *) wl_surface), 0);
+}
+
+/**
+ * @ingroup iface_wl_surface
+ *
+ * This request sets an optional transformation on how the compositor
+ * interprets the contents of the buffer attached to the surface. The
+ * accepted values for the transform parameter are the values for
+ * wl_output.transform.
+ *
+ * Buffer transform is double-buffered state, see wl_surface.commit.
+ *
+ * A newly created surface has its buffer transformation set to normal.
+ *
+ * wl_surface.set_buffer_transform changes the pending buffer
+ * transformation. wl_surface.commit copies the pending buffer
+ * transformation to the current one. Otherwise, the pending and current
+ * values are never changed.
+ *
+ * The purpose of this request is to allow clients to render content
+ * according to the output transform, thus permitting the compositor to
+ * use certain optimizations even if the display is rotated. Using
+ * hardware overlays and scanning out a client buffer for fullscreen
+ * surfaces are examples of such optimizations. Those optimizations are
+ * highly dependent on the compositor implementation, so the use of this
+ * request should be considered on a case-by-case basis.
+ *
+ * Note that if the transform value includes 90 or 270 degree rotation,
+ * the width of the buffer will become the surface height and the height
+ * of the buffer will become the surface width.
+ *
+ * If transform is not one of the values from the
+ * wl_output.transform enum the invalid_transform protocol error
+ * is raised.
+ */
+static inline void
+wl_surface_set_buffer_transform(struct wl_surface *wl_surface, int32_t transform)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_surface,
+			 WL_SURFACE_SET_BUFFER_TRANSFORM, NULL, wl_proxy_get_version((struct wl_proxy *) wl_surface), 0, transform);
+}
+
+/**
+ * @ingroup iface_wl_surface
+ *
+ * This request sets an optional scaling factor on how the compositor
+ * interprets the contents of the buffer attached to the window.
+ *
+ * Buffer scale is double-buffered state, see wl_surface.commit.
+ *
+ * A newly created surface has its buffer scale set to 1.
+ *
+ * wl_surface.set_buffer_scale changes the pending buffer scale.
+ * wl_surface.commit copies the pending buffer scale to the current one.
+ * Otherwise, the pending and current values are never changed.
+ *
+ * The purpose of this request is to allow clients to supply higher
+ * resolution buffer data for use on high resolution outputs. It is
+ * intended that you pick the same buffer scale as the scale of the
+ * output that the surface is displayed on. This means the compositor
+ * can avoid scaling when rendering the surface on that output.
+ *
+ * Note that if the scale is larger than 1, then you have to attach
+ * a buffer that is larger (by a factor of scale in each dimension)
+ * than the desired surface size.
+ *
+ * If scale is not positive the invalid_scale protocol error is
+ * raised.
+ */
+static inline void
+wl_surface_set_buffer_scale(struct wl_surface *wl_surface, int32_t scale)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_surface,
+			 WL_SURFACE_SET_BUFFER_SCALE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_surface), 0, scale);
+}
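+
+/*
+ * Illustrative sketch, not part of the generated protocol: presenting at
+ * scale 2 on a high-resolution output (requires wl_surface version 3 or
+ * later). The attached buffer is twice the intended surface size in each
+ * dimension, while damage here stays in surface-local coordinates.
+ *
+ *   wl_surface_set_buffer_scale(surface, 2);
+ *   wl_surface_attach(surface, buffer_2x, 0, 0);
+ *   wl_surface_damage(surface, 0, 0, surface_width, surface_height);
+ *   wl_surface_commit(surface);
+ */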
+
+/**
+ * @ingroup iface_wl_surface
+ *
+ * This request is used to describe the regions where the pending
+ * buffer is different from the current surface contents, and where
+ * the surface therefore needs to be repainted. The compositor
+ * ignores the parts of the damage that fall outside of the surface.
+ *
+ * Damage is double-buffered state, see wl_surface.commit.
+ *
+ * The damage rectangle is specified in buffer coordinates,
+ * where x and y specify the upper left corner of the damage rectangle.
+ *
+ * The initial value for pending damage is empty: no damage.
+ * wl_surface.damage_buffer adds pending damage: the new pending
+ * damage is the union of old pending damage and the given rectangle.
+ *
+ * wl_surface.commit assigns pending damage as the current damage,
+ * and clears pending damage. The server will clear the current
+ * damage as it repaints the surface.
+ *
+ * This request differs from wl_surface.damage in only one way - it
+ * takes damage in buffer coordinates instead of surface-local
+ * coordinates. While this generally is more intuitive than surface
+ * coordinates, it is especially desirable when using wp_viewport
+ * or when a drawing library (like EGL) is unaware of buffer scale
+ * and buffer transform.
+ *
+ * Note: Because buffer transformation changes and damage requests may
+ * be interleaved in the protocol stream, it is impossible to determine
+ * the actual mapping between surface and buffer damage until
+ * wl_surface.commit time. Therefore, compositors wishing to take both
+ * kinds of damage into account will have to accumulate damage from the
+ * two requests separately and only transform from one to the other
+ * after receiving the wl_surface.commit.
+ */
+static inline void
+wl_surface_damage_buffer(struct wl_surface *wl_surface, int32_t x, int32_t y, int32_t width, int32_t height)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_surface,
+			 WL_SURFACE_DAMAGE_BUFFER, NULL, wl_proxy_get_version((struct wl_proxy *) wl_surface), 0, x, y, width, height);
+}
+
+/**
+ * @ingroup iface_wl_surface
+ *
+ * The x and y arguments specify the location of the new pending
+ * buffer's upper left corner, relative to the current buffer's upper
+ * left corner, in surface-local coordinates. In other words, the
+ * x and y, combined with the new surface size, define in which
+ * directions the surface's size changes.
+ *
+ * Surface location offset is double-buffered state, see
+ * wl_surface.commit.
+ *
+ * This request is semantically equivalent to, and replaces, the x and y
+ * arguments in the wl_surface.attach request in wl_surface versions prior
+ * to 5. See wl_surface.attach for details.
+ */
+static inline void
+wl_surface_offset(struct wl_surface *wl_surface, int32_t x, int32_t y)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_surface,
+			 WL_SURFACE_OFFSET, NULL, wl_proxy_get_version((struct wl_proxy *) wl_surface), 0, x, y);
+}
+
+#ifndef WL_SEAT_CAPABILITY_ENUM
+#define WL_SEAT_CAPABILITY_ENUM
+/**
+ * @ingroup iface_wl_seat
+ * seat capability bitmask
+ *
+ * This is a bitmask of capabilities this seat has; if a member is
+ * set, then it is present on the seat.
+ */
+enum wl_seat_capability {
+	/**
+	 * the seat has pointer devices
+	 */
+	WL_SEAT_CAPABILITY_POINTER = 1,
+	/**
+	 * the seat has one or more keyboards
+	 */
+	WL_SEAT_CAPABILITY_KEYBOARD = 2,
+	/**
+	 * the seat has touch devices
+	 */
+	WL_SEAT_CAPABILITY_TOUCH = 4,
+};
+#endif /* WL_SEAT_CAPABILITY_ENUM */
+
+#ifndef WL_SEAT_ERROR_ENUM
+#define WL_SEAT_ERROR_ENUM
+/**
+ * @ingroup iface_wl_seat
+ * wl_seat error values
+ *
+ * These errors can be emitted in response to wl_seat requests.
+ */
+enum wl_seat_error {
+	/**
+	 * get_pointer, get_keyboard or get_touch called on seat without the matching capability
+	 */
+	WL_SEAT_ERROR_MISSING_CAPABILITY = 0,
+};
+#endif /* WL_SEAT_ERROR_ENUM */
+
+/**
+ * @ingroup iface_wl_seat
+ * @struct wl_seat_listener
+ */
+struct wl_seat_listener {
+	/**
+	 * seat capabilities changed
+	 *
+	 * This is emitted whenever a seat gains or loses the pointer,
+	 * keyboard or touch capabilities. The argument is a capability
+	 * enum containing the complete set of capabilities this seat has.
+	 *
+	 * When the pointer capability is added, a client may create a
+	 * wl_pointer object using the wl_seat.get_pointer request. This
+	 * object will receive pointer events until the capability is
+	 * removed in the future.
+	 *
+	 * When the pointer capability is removed, a client should destroy
+	 * the wl_pointer objects associated with the seat where the
+	 * capability was removed, using the wl_pointer.release request. No
+	 * further pointer events will be received on these objects.
+	 *
+	 * In some compositors, if a seat regains the pointer capability
+	 * and a client has a previously obtained wl_pointer object of
+	 * version 4 or less, that object may start sending pointer events
+	 * again. This behavior is considered a misinterpretation of the
+	 * intended behavior and must not be relied upon by the client.
+	 * wl_pointer objects of version 5 or later must not send events if
+	 * created before the most recent event notifying the client of an
+	 * added pointer capability.
+	 *
+	 * The above behavior also applies to wl_keyboard and wl_touch with
+	 * the keyboard and touch capabilities, respectively.
+	 * @param capabilities capabilities of the seat
+	 */
+	void (*capabilities)(void *data,
+			     struct wl_seat *wl_seat,
+			     uint32_t capabilities);
+	/**
+	 * unique identifier for this seat
+	 *
+	 * In a multi-seat configuration the seat name can be used by
+	 * clients to help identify which physical devices the seat
+	 * represents.
+	 *
+	 * The seat name is a UTF-8 string with no convention defined for
+	 * its contents. Each name is unique among all wl_seat globals. The
+	 * name is only guaranteed to be unique for the current compositor
+	 * instance.
+	 *
+	 * The same seat names are used for all clients. Thus, the name can
+	 * be shared across processes to refer to a specific wl_seat
+	 * global.
+	 *
+	 * The name event is sent after binding to the seat global. This
+	 * event is only sent once per seat object, and the name does not
+	 * change over the lifetime of the wl_seat global.
+	 *
+	 * Compositors may re-use the same seat name if the wl_seat global
+	 * is destroyed and re-created later.
+	 * @param name seat identifier
+	 * @since 2
+	 */
+	void (*name)(void *data,
+		     struct wl_seat *wl_seat,
+		     const char *name);
+};
+
+/**
+ * @ingroup iface_wl_seat
+ */
+static inline int
+wl_seat_add_listener(struct wl_seat *wl_seat,
+		     const struct wl_seat_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_seat,
+				     (void (**)(void)) listener, data);
+}
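+
+/*
+ * Illustrative sketch, not part of the generated protocol: binding input
+ * devices from the capabilities bitmask. Releasing the objects again when a
+ * capability disappears is omitted; pointer_listener and keyboard_listener
+ * are hypothetical listeners defined elsewhere.
+ *
+ *   static void seat_capabilities(void *data, struct wl_seat *seat,
+ *                                 uint32_t capabilities)
+ *   {
+ *       if (capabilities & WL_SEAT_CAPABILITY_POINTER) {
+ *           struct wl_pointer *pointer = wl_seat_get_pointer(seat);
+ *           wl_pointer_add_listener(pointer, &pointer_listener, data);
+ *       }
+ *       if (capabilities & WL_SEAT_CAPABILITY_KEYBOARD) {
+ *           struct wl_keyboard *keyboard = wl_seat_get_keyboard(seat);
+ *           wl_keyboard_add_listener(keyboard, &keyboard_listener, data);
+ *       }
+ *   }
+ */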
+
+#define WL_SEAT_GET_POINTER 0
+#define WL_SEAT_GET_KEYBOARD 1
+#define WL_SEAT_GET_TOUCH 2
+#define WL_SEAT_RELEASE 3
+
+/**
+ * @ingroup iface_wl_seat
+ */
+#define WL_SEAT_CAPABILITIES_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_seat
+ */
+#define WL_SEAT_NAME_SINCE_VERSION 2
+
+/**
+ * @ingroup iface_wl_seat
+ */
+#define WL_SEAT_GET_POINTER_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_seat
+ */
+#define WL_SEAT_GET_KEYBOARD_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_seat
+ */
+#define WL_SEAT_GET_TOUCH_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_seat
+ */
+#define WL_SEAT_RELEASE_SINCE_VERSION 5
+
+/** @ingroup iface_wl_seat */
+static inline void
+wl_seat_set_user_data(struct wl_seat *wl_seat, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_seat, user_data);
+}
+
+/** @ingroup iface_wl_seat */
+static inline void *
+wl_seat_get_user_data(struct wl_seat *wl_seat)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_seat);
+}
+
+static inline uint32_t
+wl_seat_get_version(struct wl_seat *wl_seat)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_seat);
+}
+
+/** @ingroup iface_wl_seat */
+static inline void
+wl_seat_destroy(struct wl_seat *wl_seat)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_seat);
+}
+
+/**
+ * @ingroup iface_wl_seat
+ *
+ * The ID provided will be initialized to the wl_pointer interface
+ * for this seat.
+ *
+ * This request only takes effect if the seat has the pointer
+ * capability, or has had the pointer capability in the past.
+ * It is a protocol violation to issue this request on a seat that has
+ * never had the pointer capability. The missing_capability error will
+ * be sent in this case.
+ */
+static inline struct wl_pointer *
+wl_seat_get_pointer(struct wl_seat *wl_seat)
+{
+	struct wl_proxy *id;
+
+	id = wl_proxy_marshal_flags((struct wl_proxy *) wl_seat,
+			 WL_SEAT_GET_POINTER, &wl_pointer_interface, wl_proxy_get_version((struct wl_proxy *) wl_seat), 0, NULL);
+
+	return (struct wl_pointer *) id;
+}
+
+/**
+ * @ingroup iface_wl_seat
+ *
+ * The ID provided will be initialized to the wl_keyboard interface
+ * for this seat.
+ *
+ * This request only takes effect if the seat has the keyboard
+ * capability, or has had the keyboard capability in the past.
+ * It is a protocol violation to issue this request on a seat that has
+ * never had the keyboard capability. The missing_capability error will
+ * be sent in this case.
+ */
+static inline struct wl_keyboard *
+wl_seat_get_keyboard(struct wl_seat *wl_seat)
+{
+	struct wl_proxy *id;
+
+	id = wl_proxy_marshal_flags((struct wl_proxy *) wl_seat,
+			 WL_SEAT_GET_KEYBOARD, &wl_keyboard_interface, wl_proxy_get_version((struct wl_proxy *) wl_seat), 0, NULL);
+
+	return (struct wl_keyboard *) id;
+}
+
+/**
+ * @ingroup iface_wl_seat
+ *
+ * The ID provided will be initialized to the wl_touch interface
+ * for this seat.
+ *
+ * This request only takes effect if the seat has the touch
+ * capability, or has had the touch capability in the past.
+ * It is a protocol violation to issue this request on a seat that has
+ * never had the touch capability. The missing_capability error will
+ * be sent in this case.
+ */
+static inline struct wl_touch *
+wl_seat_get_touch(struct wl_seat *wl_seat)
+{
+	struct wl_proxy *id;
+
+	id = wl_proxy_marshal_flags((struct wl_proxy *) wl_seat,
+			 WL_SEAT_GET_TOUCH, &wl_touch_interface, wl_proxy_get_version((struct wl_proxy *) wl_seat), 0, NULL);
+
+	return (struct wl_touch *) id;
+}
+
+/**
+ * @ingroup iface_wl_seat
+ *
+ * Using this request a client can tell the server that it is not going to
+ * use the seat object anymore.
+ */
+static inline void
+wl_seat_release(struct wl_seat *wl_seat)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_seat,
+			 WL_SEAT_RELEASE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_seat), WL_MARSHAL_FLAG_DESTROY);
+}
+
+#ifndef WL_POINTER_ERROR_ENUM
+#define WL_POINTER_ERROR_ENUM
+enum wl_pointer_error {
+	/**
+	 * given wl_surface has another role
+	 */
+	WL_POINTER_ERROR_ROLE = 0,
+};
+#endif /* WL_POINTER_ERROR_ENUM */
+
+#ifndef WL_POINTER_BUTTON_STATE_ENUM
+#define WL_POINTER_BUTTON_STATE_ENUM
+/**
+ * @ingroup iface_wl_pointer
+ * physical button state
+ *
+ * Describes the physical state of a button that produced the button
+ * event.
+ */
+enum wl_pointer_button_state {
+	/**
+	 * the button is not pressed
+	 */
+	WL_POINTER_BUTTON_STATE_RELEASED = 0,
+	/**
+	 * the button is pressed
+	 */
+	WL_POINTER_BUTTON_STATE_PRESSED = 1,
+};
+#endif /* WL_POINTER_BUTTON_STATE_ENUM */
+
+#ifndef WL_POINTER_AXIS_ENUM
+#define WL_POINTER_AXIS_ENUM
+/**
+ * @ingroup iface_wl_pointer
+ * axis types
+ *
+ * Describes the axis types of scroll events.
+ */
+enum wl_pointer_axis {
+	/**
+	 * vertical axis
+	 */
+	WL_POINTER_AXIS_VERTICAL_SCROLL = 0,
+	/**
+	 * horizontal axis
+	 */
+	WL_POINTER_AXIS_HORIZONTAL_SCROLL = 1,
+};
+#endif /* WL_POINTER_AXIS_ENUM */
+
+#ifndef WL_POINTER_AXIS_SOURCE_ENUM
+#define WL_POINTER_AXIS_SOURCE_ENUM
+/**
+ * @ingroup iface_wl_pointer
+ * axis source types
+ *
+ * Describes the source types for axis events. This indicates to the
+ * client how an axis event was physically generated; a client may
+ * adjust the user interface accordingly. For example, scroll events
+ * from a "finger" source may be in a smooth coordinate space with
+ * kinetic scrolling whereas a "wheel" source may be in discrete steps
+ * of a number of lines.
+ *
+ * The "continuous" axis source is a device generating events in a
+ * continuous coordinate space, but using something other than a
+ * finger. One example for this source is button-based scrolling where
+ * the vertical motion of a device is converted to scroll events while
+ * a button is held down.
+ *
+ * The "wheel tilt" axis source indicates that the actual device is a
+ * wheel but the scroll event is not caused by a rotation but a
+ * (usually sideways) tilt of the wheel.
+ */
+enum wl_pointer_axis_source {
+	/**
+	 * a physical wheel rotation
+	 */
+	WL_POINTER_AXIS_SOURCE_WHEEL = 0,
+	/**
+	 * finger on a touch surface
+	 */
+	WL_POINTER_AXIS_SOURCE_FINGER = 1,
+	/**
+	 * continuous coordinate space
+	 */
+	WL_POINTER_AXIS_SOURCE_CONTINUOUS = 2,
+	/**
+	 * a physical wheel tilt
+	 * @since 6
+	 */
+	WL_POINTER_AXIS_SOURCE_WHEEL_TILT = 3,
+};
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_AXIS_SOURCE_WHEEL_TILT_SINCE_VERSION 6
+#endif /* WL_POINTER_AXIS_SOURCE_ENUM */
+
+/**
+ * @ingroup iface_wl_pointer
+ * @struct wl_pointer_listener
+ */
+struct wl_pointer_listener {
+	/**
+	 * enter event
+	 *
+	 * Notification that this seat's pointer is focused on a certain
+	 * surface.
+	 *
+	 * When a seat's focus enters a surface, the pointer image is
+	 * undefined and a client should respond to this event by setting
+	 * an appropriate pointer image with the set_cursor request.
+	 * @param serial serial number of the enter event
+	 * @param surface surface entered by the pointer
+	 * @param surface_x surface-local x coordinate
+	 * @param surface_y surface-local y coordinate
+	 */
+	void (*enter)(void *data,
+		      struct wl_pointer *wl_pointer,
+		      uint32_t serial,
+		      struct wl_surface *surface,
+		      wl_fixed_t surface_x,
+		      wl_fixed_t surface_y);
+	/**
+	 * leave event
+	 *
+	 * Notification that this seat's pointer is no longer focused on
+	 * a certain surface.
+	 *
+	 * The leave notification is sent before the enter notification for
+	 * the new focus.
+	 * @param serial serial number of the leave event
+	 * @param surface surface left by the pointer
+	 */
+	void (*leave)(void *data,
+		      struct wl_pointer *wl_pointer,
+		      uint32_t serial,
+		      struct wl_surface *surface);
+	/**
+	 * pointer motion event
+	 *
+	 * Notification of pointer location change. The arguments
+	 * surface_x and surface_y are the location relative to the focused
+	 * surface.
+	 * @param time timestamp with millisecond granularity
+	 * @param surface_x surface-local x coordinate
+	 * @param surface_y surface-local y coordinate
+	 */
+	void (*motion)(void *data,
+		       struct wl_pointer *wl_pointer,
+		       uint32_t time,
+		       wl_fixed_t surface_x,
+		       wl_fixed_t surface_y);
+	/**
+	 * pointer button event
+	 *
+	 * Mouse button click and release notifications.
+	 *
+	 * The location of the click is given by the last motion or enter
+	 * event. The time argument is a timestamp with millisecond
+	 * granularity, with an undefined base.
+	 *
+	 * The button is a button code as defined in the Linux kernel's
+	 * linux/input-event-codes.h header file, e.g. BTN_LEFT.
+	 *
+	 * Any 16-bit button code value is reserved for future additions to
+	 * the kernel's event code list. All other button codes above
+	 * 0xFFFF are currently undefined but may be used in future
+	 * versions of this protocol.
+	 * @param serial serial number of the button event
+	 * @param time timestamp with millisecond granularity
+	 * @param button button that produced the event
+	 * @param state physical state of the button
+	 */
+	void (*button)(void *data,
+		       struct wl_pointer *wl_pointer,
+		       uint32_t serial,
+		       uint32_t time,
+		       uint32_t button,
+		       uint32_t state);
+	/**
+	 * axis event
+	 *
+	 * Scroll and other axis notifications.
+	 *
+	 * For scroll events (vertical and horizontal scroll axes), the
+	 * value parameter is the length of a vector along the specified
+	 * axis in a coordinate space identical to those of motion events,
+	 * representing a relative movement along the specified axis.
+	 *
+	 * For devices that support movements non-parallel to axes, multiple
+	 * axis events will be emitted.
+	 *
+	 * When applicable, for example for touch pads, the server can
+	 * choose to emit scroll events where the motion vector is
+	 * equivalent to a motion event vector.
+	 *
+	 * When applicable, a client can transform its content relative to
+	 * the scroll distance.
+	 * @param time timestamp with millisecond granularity
+	 * @param axis axis type
+	 * @param value length of vector in surface-local coordinate space
+	 */
+	void (*axis)(void *data,
+		     struct wl_pointer *wl_pointer,
+		     uint32_t time,
+		     uint32_t axis,
+		     wl_fixed_t value);
+	/**
+	 * end of a pointer event sequence
+	 *
+	 * Indicates the end of a set of events that logically belong
+	 * together. A client is expected to accumulate the data in all
+	 * events within the frame before proceeding.
+	 *
+	 * All wl_pointer events before a wl_pointer.frame event belong
+	 * logically together. For example, in a diagonal scroll motion the
+	 * compositor will send an optional wl_pointer.axis_source event,
+	 * two wl_pointer.axis events (horizontal and vertical) and finally
+	 * a wl_pointer.frame event. The client may use this information to
+	 * calculate a diagonal vector for scrolling.
+	 *
+	 * When multiple wl_pointer.axis events occur within the same
+	 * frame, the motion vector is the combined motion of all events.
+	 * When a wl_pointer.axis and a wl_pointer.axis_stop event occur
+	 * within the same frame, this indicates that axis movement in one
+	 * axis has stopped but continues in the other axis. When multiple
+	 * wl_pointer.axis_stop events occur within the same frame, this
+	 * indicates that these axes stopped in the same instance.
+	 *
+	 * A wl_pointer.frame event is sent for every logical event group,
+	 * even if the group only contains a single wl_pointer event.
+	 * Specifically, a client may get a sequence: motion, frame,
+	 * button, frame, axis, frame, axis_stop, frame.
+	 *
+	 * The wl_pointer.enter and wl_pointer.leave events are logical
+	 * events generated by the compositor and not the hardware. These
+	 * events are also grouped by a wl_pointer.frame. When a pointer
+	 * moves from one surface to another, a compositor should group the
+	 * wl_pointer.leave event within the same wl_pointer.frame.
+	 * However, a client must not rely on wl_pointer.leave and
+	 * wl_pointer.enter being in the same wl_pointer.frame.
+	 * Compositor-specific policies may require the wl_pointer.leave
+	 * and wl_pointer.enter event being split across multiple
+	 * wl_pointer.frame groups.
+	 * @since 5
+	 */
+	void (*frame)(void *data,
+		      struct wl_pointer *wl_pointer);
+	/**
+	 * axis source event
+	 *
+	 * Source information for scroll and other axes.
+	 *
+	 * This event does not occur on its own. It is sent before a
+	 * wl_pointer.frame event and carries the source information for
+	 * all events within that frame.
+	 *
+	 * The source specifies how this event was generated. If the source
+	 * is wl_pointer.axis_source.finger, a wl_pointer.axis_stop event
+	 * will be sent when the user lifts the finger off the device.
+	 *
+	 * If the source is wl_pointer.axis_source.wheel,
+	 * wl_pointer.axis_source.wheel_tilt or
+	 * wl_pointer.axis_source.continuous, a wl_pointer.axis_stop event
+	 * may or may not be sent. Whether a compositor sends an axis_stop
+	 * event for these sources is hardware-specific and
+	 * implementation-dependent; clients must not rely on receiving an
+	 * axis_stop event for these scroll sources and should treat scroll
+	 * sequences from these scroll sources as unterminated by default.
+	 *
+	 * This event is optional. If the source is unknown for a
+	 * particular axis event sequence, no event is sent. Only one
+	 * wl_pointer.axis_source event is permitted per frame.
+	 *
+	 * The order of wl_pointer.axis_discrete and wl_pointer.axis_source
+	 * is not guaranteed.
+	 * @param axis_source source of the axis event
+	 * @since 5
+	 */
+	void (*axis_source)(void *data,
+			    struct wl_pointer *wl_pointer,
+			    uint32_t axis_source);
+	/**
+	 * axis stop event
+	 *
+	 * Stop notification for scroll and other axes.
+	 *
+	 * For some wl_pointer.axis_source types, a wl_pointer.axis_stop
+	 * event is sent to notify a client that the axis sequence has
+	 * terminated. This enables the client to implement kinetic
+	 * scrolling. See the wl_pointer.axis_source documentation for
+	 * information on when this event may be generated.
+	 *
+	 * Any wl_pointer.axis events with the same axis_source after this
+	 * event should be considered as the start of a new axis motion.
+	 *
+	 * The timestamp is to be interpreted identical to the timestamp in
+	 * the wl_pointer.axis event. The timestamp value may be the same
+	 * as a preceding wl_pointer.axis event.
+	 * @param time timestamp with millisecond granularity
+	 * @param axis the axis stopped with this event
+	 * @since 5
+	 */
+	void (*axis_stop)(void *data,
+			  struct wl_pointer *wl_pointer,
+			  uint32_t time,
+			  uint32_t axis);
+	/**
+	 * axis click event
+	 *
+	 * Discrete step information for scroll and other axes.
+	 *
+	 * This event carries the axis value of the wl_pointer.axis event
+	 * in discrete steps (e.g. mouse wheel clicks).
+	 *
+	 * This event is deprecated with wl_pointer version 8 - this event
+	 * is not sent to clients supporting version 8 or later.
+	 *
+	 * This event does not occur on its own, it is coupled with a
+	 * wl_pointer.axis event that represents this axis value on a
+	 * continuous scale. The protocol guarantees that each
+	 * axis_discrete event is always followed by exactly one axis event
+	 * with the same axis number within the same wl_pointer.frame. Note
+	 * that the protocol allows for other events to occur between the
+	 * axis_discrete and its coupled axis event, including other
+	 * axis_discrete or axis events. A wl_pointer.frame must not
+	 * contain more than one axis_discrete event per axis type.
+	 *
+	 * This event is optional; continuous scrolling devices like
+	 * two-finger scrolling on touchpads do not have discrete steps and
+	 * do not generate this event.
+	 *
+	 * The discrete value carries the directional information. e.g. a
+	 * value of -2 is two steps towards the negative direction of this
+	 * axis.
+	 *
+	 * The axis number is identical to the axis number in the
+	 * associated axis event.
+	 *
+	 * The order of wl_pointer.axis_discrete and wl_pointer.axis_source
+	 * is not guaranteed.
+	 * @param axis axis type
+	 * @param discrete number of steps
+	 * @since 5
+	 */
+	void (*axis_discrete)(void *data,
+			      struct wl_pointer *wl_pointer,
+			      uint32_t axis,
+			      int32_t discrete);
+	/**
+	 * axis high-resolution scroll event
+	 *
+	 * Discrete high-resolution scroll information.
+	 *
+	 * This event carries high-resolution wheel scroll information,
+	 * with each multiple of 120 representing one logical scroll step
+	 * (a wheel detent). For example, an axis_value120 of 30 is one
+	 * quarter of a logical scroll step in the positive direction, while a
+	 * value120 of -240 is two logical scroll steps in the negative
+	 * direction within the same hardware event. Clients that rely on
+	 * discrete scrolling should accumulate the value120 to multiples
+	 * of 120 before processing the event.
+	 *
+	 * The value120 must not be zero.
+	 *
+	 * This event replaces the wl_pointer.axis_discrete event in
+	 * clients supporting wl_pointer version 8 or later.
+	 *
+	 * Where a wl_pointer.axis_source event occurs in the same
+	 * wl_pointer.frame, the axis source applies to this event.
+	 *
+	 * The order of wl_pointer.axis_value120 and wl_pointer.axis_source
+	 * is not guaranteed.
+	 * @param axis axis type
+	 * @param value120 scroll distance as fraction of 120
+	 * @since 8
+	 */
+	void (*axis_value120)(void *data,
+			      struct wl_pointer *wl_pointer,
+			      uint32_t axis,
+			      int32_t value120);
+};
+
+/**
+ * @ingroup iface_wl_pointer
+ */
+static inline int
+wl_pointer_add_listener(struct wl_pointer *wl_pointer,
+			const struct wl_pointer_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_pointer,
+				     (void (**)(void)) listener, data);
+}
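+
+/*
+ * Illustrative sketch, not part of the generated protocol: a button handler
+ * reacting to a left click. BTN_LEFT comes from
+ * <linux/input-event-codes.h>; begin_move() is a hypothetical application
+ * hook, and the remaining wl_pointer_listener members should also be set to
+ * (possibly empty) handlers.
+ *
+ *   static void pointer_button(void *data, struct wl_pointer *pointer,
+ *                              uint32_t serial, uint32_t time,
+ *                              uint32_t button, uint32_t state)
+ *   {
+ *       if (button == BTN_LEFT &&
+ *           state == WL_POINTER_BUTTON_STATE_PRESSED)
+ *           begin_move(data, serial);
+ *   }
+ */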
+
+#define WL_POINTER_SET_CURSOR 0
+#define WL_POINTER_RELEASE 1
+
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_ENTER_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_LEAVE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_MOTION_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_BUTTON_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_AXIS_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_FRAME_SINCE_VERSION 5
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_AXIS_SOURCE_SINCE_VERSION 5
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_AXIS_STOP_SINCE_VERSION 5
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_AXIS_DISCRETE_SINCE_VERSION 5
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_AXIS_VALUE120_SINCE_VERSION 8
+
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_SET_CURSOR_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_pointer
+ */
+#define WL_POINTER_RELEASE_SINCE_VERSION 3
+
+/** @ingroup iface_wl_pointer */
+static inline void
+wl_pointer_set_user_data(struct wl_pointer *wl_pointer, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_pointer, user_data);
+}
+
+/** @ingroup iface_wl_pointer */
+static inline void *
+wl_pointer_get_user_data(struct wl_pointer *wl_pointer)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_pointer);
+}
+
+static inline uint32_t
+wl_pointer_get_version(struct wl_pointer *wl_pointer)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_pointer);
+}
+
+/** @ingroup iface_wl_pointer */
+static inline void
+wl_pointer_destroy(struct wl_pointer *wl_pointer)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_pointer);
+}
+
+/**
+ * @ingroup iface_wl_pointer
+ *
+ * Set the pointer surface, i.e., the surface that contains the
+ * pointer image (cursor). This request gives the surface the role
+ * of a cursor. If the surface already has another role, it raises
+ * a protocol error.
+ *
+ * The cursor actually changes only if the pointer
+ * focus for this device is one of the requesting client's surfaces
+ * or the surface parameter is the current pointer surface. If
+ * there was a previous surface set with this request it is
+ * replaced. If surface is NULL, the pointer image is hidden.
+ *
+ * The parameters hotspot_x and hotspot_y define the position of
+ * the pointer surface relative to the pointer location. Its
+ * top-left corner is always at (x, y) - (hotspot_x, hotspot_y),
+ * where (x, y) are the coordinates of the pointer location, in
+ * surface-local coordinates.
+ *
+ * On surface.attach requests to the pointer surface, hotspot_x
+ * and hotspot_y are decremented by the x and y parameters
+ * passed to the request. Attach must be confirmed by
+ * wl_surface.commit as usual.
+ *
+ * The hotspot can also be updated by passing the currently set
+ * pointer surface to this request with new values for hotspot_x
+ * and hotspot_y.
+ *
+ * The current and pending input regions of the wl_surface are
+ * cleared, and wl_surface.set_input_region is ignored until the
+ * wl_surface is no longer used as the cursor. When the use as a
+ * cursor ends, the current and pending input regions become
+ * undefined, and the wl_surface is unmapped.
+ *
+ * The serial parameter must match the latest wl_pointer.enter
+ * serial number sent to the client. Otherwise the request will be
+ * ignored.
+ */
+static inline void
+wl_pointer_set_cursor(struct wl_pointer *wl_pointer, uint32_t serial, struct wl_surface *surface, int32_t hotspot_x, int32_t hotspot_y)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_pointer,
+			 WL_POINTER_SET_CURSOR, NULL, wl_proxy_get_version((struct wl_proxy *) wl_pointer), 0, serial, surface, hotspot_x, hotspot_y);
+}
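+
+/*
+ * Illustrative sketch, not part of the generated protocol: installing a
+ * custom cursor from the enter handler, which supplies the serial this
+ * request requires. cursor_surface is a hypothetical wl_surface that already
+ * has the cursor image attached and committed; passing NULL instead hides
+ * the pointer image.
+ *
+ *   static void pointer_enter(void *data, struct wl_pointer *pointer,
+ *                             uint32_t serial, struct wl_surface *surface,
+ *                             wl_fixed_t sx, wl_fixed_t sy)
+ *   {
+ *       struct wl_surface *cursor_surface = data;
+ *
+ *       wl_pointer_set_cursor(pointer, serial, cursor_surface, 0, 0);
+ *   }
+ */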
+
+/**
+ * @ingroup iface_wl_pointer
+ *
+ * Using this request a client can tell the server that it is not going to
+ * use the pointer object anymore.
+ *
+ * This request destroys the pointer proxy object, so clients must not call
+ * wl_pointer_destroy() after using this request.
+ */
+static inline void
+wl_pointer_release(struct wl_pointer *wl_pointer)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_pointer,
+			 WL_POINTER_RELEASE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_pointer), WL_MARSHAL_FLAG_DESTROY);
+}
+
+#ifndef WL_KEYBOARD_KEYMAP_FORMAT_ENUM
+#define WL_KEYBOARD_KEYMAP_FORMAT_ENUM
+/**
+ * @ingroup iface_wl_keyboard
+ * keyboard mapping format
+ *
+ * This specifies the format of the keymap provided to the
+ * client with the wl_keyboard.keymap event.
+ */
+enum wl_keyboard_keymap_format {
+	/**
+	 * no keymap; client must understand how to interpret the raw keycode
+	 */
+	WL_KEYBOARD_KEYMAP_FORMAT_NO_KEYMAP = 0,
+	/**
+	 * libxkbcommon compatible, null-terminated string; to determine the xkb keycode, clients must add 8 to the key event keycode
+	 */
+	WL_KEYBOARD_KEYMAP_FORMAT_XKB_V1 = 1,
+};
+#endif /* WL_KEYBOARD_KEYMAP_FORMAT_ENUM */
+
+#ifndef WL_KEYBOARD_KEY_STATE_ENUM
+#define WL_KEYBOARD_KEY_STATE_ENUM
+/**
+ * @ingroup iface_wl_keyboard
+ * physical key state
+ *
+ * Describes the physical state of a key that produced the key event.
+ */
+enum wl_keyboard_key_state {
+	/**
+	 * key is not pressed
+	 */
+	WL_KEYBOARD_KEY_STATE_RELEASED = 0,
+	/**
+	 * key is pressed
+	 */
+	WL_KEYBOARD_KEY_STATE_PRESSED = 1,
+};
+#endif /* WL_KEYBOARD_KEY_STATE_ENUM */
+
+/**
+ * @ingroup iface_wl_keyboard
+ * @struct wl_keyboard_listener
+ */
+struct wl_keyboard_listener {
+	/**
+	 * keyboard mapping
+	 *
+	 * This event provides a file descriptor to the client which can
+	 * be memory-mapped in read-only mode to provide a keyboard mapping
+	 * description.
+	 *
+	 * From version 7 onwards, the fd must be mapped with MAP_PRIVATE
+	 * by the recipient, as MAP_SHARED may fail.
+	 * @param format keymap format
+	 * @param fd keymap file descriptor
+	 * @param size keymap size, in bytes
+	 */
+	void (*keymap)(void *data,
+		       struct wl_keyboard *wl_keyboard,
+		       uint32_t format,
+		       int32_t fd,
+		       uint32_t size);
+	/**
+	 * enter event
+	 *
+	 * Notification that this seat's keyboard focus is on a certain
+	 * surface.
+	 *
+	 * The compositor must send the wl_keyboard.modifiers event after
+	 * this event.
+	 * @param serial serial number of the enter event
+	 * @param surface surface gaining keyboard focus
+	 * @param keys the currently pressed keys
+	 */
+	void (*enter)(void *data,
+		      struct wl_keyboard *wl_keyboard,
+		      uint32_t serial,
+		      struct wl_surface *surface,
+		      struct wl_array *keys);
+	/**
+	 * leave event
+	 *
+	 * Notification that this seat's keyboard focus is no longer on a
+	 * certain surface.
+	 *
+	 * The leave notification is sent before the enter notification for
+	 * the new focus.
+	 *
+	 * After this event the client must assume that all keys, including
+	 * modifiers, are lifted, and it must stop any key repeating that is
+	 * in progress.
+	 * @param serial serial number of the leave event
+	 * @param surface surface that lost keyboard focus
+	 */
+	void (*leave)(void *data,
+		      struct wl_keyboard *wl_keyboard,
+		      uint32_t serial,
+		      struct wl_surface *surface);
+	/**
+	 * key event
+	 *
+	 * A key was pressed or released. The time argument is a
+	 * timestamp with millisecond granularity, with an undefined base.
+	 *
+	 * The key is a platform-specific key code that can be interpreted
+	 * by feeding it to the keyboard mapping (see the keymap event).
+	 *
+	 * If this event produces a change in modifiers, then the resulting
+	 * wl_keyboard.modifiers event must be sent after this event.
+	 * @param serial serial number of the key event
+	 * @param time timestamp with millisecond granularity
+	 * @param key key that produced the event
+	 * @param state physical state of the key
+	 */
+	void (*key)(void *data,
+		    struct wl_keyboard *wl_keyboard,
+		    uint32_t serial,
+		    uint32_t time,
+		    uint32_t key,
+		    uint32_t state);
+	/**
+	 * modifier and group state
+	 *
+	 * Notifies clients that the modifier and/or group state has
+	 * changed, and they should update their local state.
+	 * @param serial serial number of the modifiers event
+	 * @param mods_depressed depressed modifiers
+	 * @param mods_latched latched modifiers
+	 * @param mods_locked locked modifiers
+	 * @param group keyboard layout
+	 */
+	void (*modifiers)(void *data,
+			  struct wl_keyboard *wl_keyboard,
+			  uint32_t serial,
+			  uint32_t mods_depressed,
+			  uint32_t mods_latched,
+			  uint32_t mods_locked,
+			  uint32_t group);
+	/**
+	 * repeat rate and delay
+	 *
+	 * Informs the client about the keyboard's repeat rate and delay.
+	 *
+	 * This event is sent as soon as the wl_keyboard object has been
+	 * created, and is guaranteed to be received by the client before
+	 * any key press event.
+	 *
+	 * Negative values for either rate or delay are illegal. A rate of
+	 * zero will disable any repeating (regardless of the value of
+	 * delay).
+	 *
+	 * This event can be sent later on as well with a new value if
+	 * necessary, so clients should continue listening for the event
+	 * past the creation of wl_keyboard.
+	 * @param rate the rate of repeating keys in characters per second
+	 * @param delay delay in milliseconds since key down until repeating starts
+	 * @since 4
+	 */
+	void (*repeat_info)(void *data,
+			    struct wl_keyboard *wl_keyboard,
+			    int32_t rate,
+			    int32_t delay);
+};
+
+/**
+ * @ingroup iface_wl_keyboard
+ */
+static inline int
+wl_keyboard_add_listener(struct wl_keyboard *wl_keyboard,
+			 const struct wl_keyboard_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_keyboard,
+				     (void (**)(void)) listener, data);
+}
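+
+/*
+ * Illustrative sketch only (not part of the generated protocol): a client
+ * might attach a keyboard listener whose keymap handler maps the fd
+ * read-only with MAP_PRIVATE, as required from version 7 onwards. Handler
+ * names are placeholders, the libxkbcommon hand-off is omitted, and a real
+ * client must supply every callback in the listener.
+ *
+ *   #include <sys/mman.h>
+ *   #include <unistd.h>
+ *
+ *   static void
+ *   handle_keymap(void *data, struct wl_keyboard *kbd,
+ *                 uint32_t format, int32_t fd, uint32_t size)
+ *   {
+ *       if (format == WL_KEYBOARD_KEYMAP_FORMAT_XKB_V1) {
+ *           char *map = mmap(NULL, size, PROT_READ, MAP_PRIVATE, fd, 0);
+ *           if (map != MAP_FAILED) {
+ *               // map is a null-terminated xkb keymap string; note that the
+ *               // xkb keycode is the key event keycode plus 8
+ *               munmap(map, size);
+ *           }
+ *       }
+ *       close(fd);
+ *   }
+ *
+ *   static const struct wl_keyboard_listener keyboard_listener = {
+ *       .keymap = handle_keymap,
+ *       // .enter, .leave, .key, .modifiers and .repeat_info must also be set
+ *   };
+ *
+ *   // wl_keyboard_add_listener(keyboard, &keyboard_listener, NULL);
+ */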
+
+#define WL_KEYBOARD_RELEASE 0
+
+/**
+ * @ingroup iface_wl_keyboard
+ */
+#define WL_KEYBOARD_KEYMAP_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_keyboard
+ */
+#define WL_KEYBOARD_ENTER_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_keyboard
+ */
+#define WL_KEYBOARD_LEAVE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_keyboard
+ */
+#define WL_KEYBOARD_KEY_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_keyboard
+ */
+#define WL_KEYBOARD_MODIFIERS_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_keyboard
+ */
+#define WL_KEYBOARD_REPEAT_INFO_SINCE_VERSION 4
+
+/**
+ * @ingroup iface_wl_keyboard
+ */
+#define WL_KEYBOARD_RELEASE_SINCE_VERSION 3
+
+/** @ingroup iface_wl_keyboard */
+static inline void
+wl_keyboard_set_user_data(struct wl_keyboard *wl_keyboard, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_keyboard, user_data);
+}
+
+/** @ingroup iface_wl_keyboard */
+static inline void *
+wl_keyboard_get_user_data(struct wl_keyboard *wl_keyboard)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_keyboard);
+}
+
+static inline uint32_t
+wl_keyboard_get_version(struct wl_keyboard *wl_keyboard)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_keyboard);
+}
+
+/** @ingroup iface_wl_keyboard */
+static inline void
+wl_keyboard_destroy(struct wl_keyboard *wl_keyboard)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_keyboard);
+}
+
+/**
+ * @ingroup iface_wl_keyboard
+ */
+static inline void
+wl_keyboard_release(struct wl_keyboard *wl_keyboard)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_keyboard,
+			 WL_KEYBOARD_RELEASE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_keyboard), WL_MARSHAL_FLAG_DESTROY);
+}
+
+/**
+ * @ingroup iface_wl_touch
+ * @struct wl_touch_listener
+ */
+struct wl_touch_listener {
+	/**
+	 * touch down event and beginning of a touch sequence
+	 *
+	 * A new touch point has appeared on the surface. This touch
+	 * point is assigned a unique ID. Future events from this touch
+	 * point reference this ID. The ID ceases to be valid after a touch
+	 * up event and may be reused in the future.
+	 * @param serial serial number of the touch down event
+	 * @param time timestamp with millisecond granularity
+	 * @param surface surface touched
+	 * @param id the unique ID of this touch point
+	 * @param x surface-local x coordinate
+	 * @param y surface-local y coordinate
+	 */
+	void (*down)(void *data,
+		     struct wl_touch *wl_touch,
+		     uint32_t serial,
+		     uint32_t time,
+		     struct wl_surface *surface,
+		     int32_t id,
+		     wl_fixed_t x,
+		     wl_fixed_t y);
+	/**
+	 * end of a touch event sequence
+	 *
+	 * The touch point has disappeared. No further events will be
+	 * sent for this touch point and the touch point's ID is released
+	 * and may be reused in a future touch down event.
+	 * @param serial serial number of the touch up event
+	 * @param time timestamp with millisecond granularity
+	 * @param id the unique ID of this touch point
+	 */
+	void (*up)(void *data,
+		   struct wl_touch *wl_touch,
+		   uint32_t serial,
+		   uint32_t time,
+		   int32_t id);
+	/**
+	 * update of touch point coordinates
+	 *
+	 * A touch point has changed coordinates.
+	 * @param time timestamp with millisecond granularity
+	 * @param id the unique ID of this touch point
+	 * @param x surface-local x coordinate
+	 * @param y surface-local y coordinate
+	 */
+	void (*motion)(void *data,
+		       struct wl_touch *wl_touch,
+		       uint32_t time,
+		       int32_t id,
+		       wl_fixed_t x,
+		       wl_fixed_t y);
+	/**
+	 * end of touch frame event
+	 *
+	 * Indicates the end of a set of events that logically belong
+	 * together. A client is expected to accumulate the data in all
+	 * events within the frame before proceeding.
+	 *
+	 * A wl_touch.frame terminates at least one event but otherwise no
+	 * guarantee is provided about the set of events within a frame. A
+	 * client must assume that any state not updated in a frame is
+	 * unchanged from the previously known state.
+	 */
+	void (*frame)(void *data,
+		      struct wl_touch *wl_touch);
+	/**
+	 * touch session cancelled
+	 *
+	 * Sent if the compositor decides the touch stream is a global
+	 * gesture. No further events are sent to the clients from that
+	 * particular gesture. Touch cancellation applies to all touch
+	 * points currently active on this client's surface. The client is
+	 * responsible for finalizing the touch points; future touch points
+	 * on this surface may reuse the touch point ID.
+	 */
+	void (*cancel)(void *data,
+		       struct wl_touch *wl_touch);
+	/**
+	 * update shape of touch point
+	 *
+	 * Sent when a touchpoint has changed its shape.
+	 *
+	 * This event does not occur on its own. It is sent before a
+	 * wl_touch.frame event and carries the new shape information for
+	 * any previously reported, or new touch points of that frame.
+	 *
+	 * Other events describing the touch point such as wl_touch.down,
+	 * wl_touch.motion or wl_touch.orientation may be sent within the
+	 * same wl_touch.frame. A client should treat these events as a
+	 * single logical touch point update. The order of wl_touch.shape,
+	 * wl_touch.orientation and wl_touch.motion is not guaranteed. A
+	 * wl_touch.down event is guaranteed to occur before the first
+	 * wl_touch.shape event for this touch ID but both events may occur
+	 * within the same wl_touch.frame.
+	 *
+	 * A touchpoint shape is approximated by an ellipse through the
+	 * major and minor axis length. The major axis length describes the
+	 * longer diameter of the ellipse, while the minor axis length
+	 * describes the shorter diameter. Major and minor are orthogonal
+	 * and both are specified in surface-local coordinates. The center
+	 * of the ellipse is always at the touchpoint location as reported
+	 * by wl_touch.down or wl_touch.motion.
+	 *
+	 * This event is only sent by the compositor if the touch device
+	 * supports shape reports. The client has to make reasonable
+	 * assumptions about the shape if it did not receive this event.
+	 * @param id the unique ID of this touch point
+	 * @param major length of the major axis in surface-local coordinates
+	 * @param minor length of the minor axis in surface-local coordinates
+	 * @since 6
+	 */
+	void (*shape)(void *data,
+		      struct wl_touch *wl_touch,
+		      int32_t id,
+		      wl_fixed_t major,
+		      wl_fixed_t minor);
+	/**
+	 * update orientation of touch point
+	 *
+	 * Sent when a touchpoint has changed its orientation.
+	 *
+	 * This event does not occur on its own. It is sent before a
+	 * wl_touch.frame event and carries the new orientation information for
+	 * any previously reported, or new touch points of that frame.
+	 *
+	 * Other events describing the touch point such as wl_touch.down,
+	 * wl_touch.motion or wl_touch.shape may be sent within the same
+	 * wl_touch.frame. A client should treat these events as a single
+	 * logical touch point update. The order of wl_touch.shape,
+	 * wl_touch.orientation and wl_touch.motion is not guaranteed. A
+	 * wl_touch.down event is guaranteed to occur before the first
+	 * wl_touch.orientation event for this touch ID but both events may
+	 * occur within the same wl_touch.frame.
+	 *
+	 * The orientation describes the clockwise angle of a touchpoint's
+	 * major axis to the positive surface y-axis and is normalized to
+	 * the -180 to +180 degree range. The granularity of orientation
+	 * depends on the touch device, some devices only support binary
+	 * rotation values between 0 and 90 degrees.
+	 *
+	 * This event is only sent by the compositor if the touch device
+	 * supports orientation reports.
+	 * @param id the unique ID of this touch point
+	 * @param orientation angle between major axis and positive surface y-axis in degrees
+	 * @since 6
+	 */
+	void (*orientation)(void *data,
+			    struct wl_touch *wl_touch,
+			    int32_t id,
+			    wl_fixed_t orientation);
+};
+
+/**
+ * @ingroup iface_wl_touch
+ */
+static inline int
+wl_touch_add_listener(struct wl_touch *wl_touch,
+		      const struct wl_touch_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_touch,
+				     (void (**)(void)) listener, data);
+}
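+
+/*
+ * Illustrative sketch only (not part of the generated protocol): because
+ * wl_touch events are grouped into frames, a client would typically buffer
+ * down/up/motion data in its handlers and only apply it when the frame
+ * event arrives. The touch_state struct and apply_touch_state() helper are
+ * assumptions for the sketch.
+ *
+ *   static void
+ *   handle_touch_frame(void *data, struct wl_touch *touch)
+ *   {
+ *       struct touch_state *state = data;
+ *
+ *       // all events of the frame have been received; apply them atomically
+ *       apply_touch_state(state);
+ *   }
+ */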
+
+#define WL_TOUCH_RELEASE 0
+
+/**
+ * @ingroup iface_wl_touch
+ */
+#define WL_TOUCH_DOWN_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_touch
+ */
+#define WL_TOUCH_UP_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_touch
+ */
+#define WL_TOUCH_MOTION_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_touch
+ */
+#define WL_TOUCH_FRAME_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_touch
+ */
+#define WL_TOUCH_CANCEL_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_touch
+ */
+#define WL_TOUCH_SHAPE_SINCE_VERSION 6
+/**
+ * @ingroup iface_wl_touch
+ */
+#define WL_TOUCH_ORIENTATION_SINCE_VERSION 6
+
+/**
+ * @ingroup iface_wl_touch
+ */
+#define WL_TOUCH_RELEASE_SINCE_VERSION 3
+
+/** @ingroup iface_wl_touch */
+static inline void
+wl_touch_set_user_data(struct wl_touch *wl_touch, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_touch, user_data);
+}
+
+/** @ingroup iface_wl_touch */
+static inline void *
+wl_touch_get_user_data(struct wl_touch *wl_touch)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_touch);
+}
+
+static inline uint32_t
+wl_touch_get_version(struct wl_touch *wl_touch)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_touch);
+}
+
+/** @ingroup iface_wl_touch */
+static inline void
+wl_touch_destroy(struct wl_touch *wl_touch)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_touch);
+}
+
+/**
+ * @ingroup iface_wl_touch
+ */
+static inline void
+wl_touch_release(struct wl_touch *wl_touch)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_touch,
+			 WL_TOUCH_RELEASE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_touch), WL_MARSHAL_FLAG_DESTROY);
+}
+
+#ifndef WL_OUTPUT_SUBPIXEL_ENUM
+#define WL_OUTPUT_SUBPIXEL_ENUM
+/**
+ * @ingroup iface_wl_output
+ * subpixel geometry information
+ *
+ * This enumeration describes how the physical
+ * pixels on an output are laid out.
+ */
+enum wl_output_subpixel {
+	/**
+	 * unknown geometry
+	 */
+	WL_OUTPUT_SUBPIXEL_UNKNOWN = 0,
+	/**
+	 * no geometry
+	 */
+	WL_OUTPUT_SUBPIXEL_NONE = 1,
+	/**
+	 * horizontal RGB
+	 */
+	WL_OUTPUT_SUBPIXEL_HORIZONTAL_RGB = 2,
+	/**
+	 * horizontal BGR
+	 */
+	WL_OUTPUT_SUBPIXEL_HORIZONTAL_BGR = 3,
+	/**
+	 * vertical RGB
+	 */
+	WL_OUTPUT_SUBPIXEL_VERTICAL_RGB = 4,
+	/**
+	 * vertical BGR
+	 */
+	WL_OUTPUT_SUBPIXEL_VERTICAL_BGR = 5,
+};
+#endif /* WL_OUTPUT_SUBPIXEL_ENUM */
+
+#ifndef WL_OUTPUT_TRANSFORM_ENUM
+#define WL_OUTPUT_TRANSFORM_ENUM
+/**
+ * @ingroup iface_wl_output
+ * transform from framebuffer to output
+ *
+ * This describes the transform that a compositor will apply to a
+ * surface to compensate for the rotation or mirroring of an
+ * output device.
+ *
+ * The flipped values correspond to an initial flip around a
+ * vertical axis followed by rotation.
+ *
+ * The purpose is mainly to allow clients to render accordingly and
+ * tell the compositor, so that for fullscreen surfaces, the
+ * compositor will still be able to scan out directly from client
+ * surfaces.
+ */
+enum wl_output_transform {
+	/**
+	 * no transform
+	 */
+	WL_OUTPUT_TRANSFORM_NORMAL = 0,
+	/**
+	 * 90 degrees counter-clockwise
+	 */
+	WL_OUTPUT_TRANSFORM_90 = 1,
+	/**
+	 * 180 degrees counter-clockwise
+	 */
+	WL_OUTPUT_TRANSFORM_180 = 2,
+	/**
+	 * 270 degrees counter-clockwise
+	 */
+	WL_OUTPUT_TRANSFORM_270 = 3,
+	/**
+	 * 180 degree flip around a vertical axis
+	 */
+	WL_OUTPUT_TRANSFORM_FLIPPED = 4,
+	/**
+	 * flip and rotate 90 degrees counter-clockwise
+	 */
+	WL_OUTPUT_TRANSFORM_FLIPPED_90 = 5,
+	/**
+	 * flip and rotate 180 degrees counter-clockwise
+	 */
+	WL_OUTPUT_TRANSFORM_FLIPPED_180 = 6,
+	/**
+	 * flip and rotate 270 degrees counter-clockwise
+	 */
+	WL_OUTPUT_TRANSFORM_FLIPPED_270 = 7,
+};
+#endif /* WL_OUTPUT_TRANSFORM_ENUM */
+
+#ifndef WL_OUTPUT_MODE_ENUM
+#define WL_OUTPUT_MODE_ENUM
+/**
+ * @ingroup iface_wl_output
+ * mode information
+ *
+ * These flags describe properties of an output mode.
+ * They are used in the flags bitfield of the mode event.
+ */
+enum wl_output_mode {
+	/**
+	 * indicates this is the current mode
+	 */
+	WL_OUTPUT_MODE_CURRENT = 0x1,
+	/**
+	 * indicates this is the preferred mode
+	 */
+	WL_OUTPUT_MODE_PREFERRED = 0x2,
+};
+#endif /* WL_OUTPUT_MODE_ENUM */
+
+/**
+ * @ingroup iface_wl_output
+ * @struct wl_output_listener
+ */
+struct wl_output_listener {
+	/**
+	 * properties of the output
+	 *
+	 * The geometry event describes geometric properties of the
+	 * output. The event is sent when binding to the output object and
+	 * whenever any of the properties change.
+	 *
+	 * The physical size can be set to zero if it doesn't make sense
+	 * for this output (e.g. for projectors or virtual outputs).
+	 *
+	 * The geometry event will be followed by a done event (starting
+	 * from version 2).
+	 *
+	 * Note: wl_output only advertises partial information about the
+	 * output position and identification. Some compositors, for
+	 * instance those not implementing a desktop-style output layout or
+	 * those exposing virtual outputs, might fake this information.
+	 * Instead of using x and y, clients should use
+	 * xdg_output.logical_position. Instead of using make and model,
+	 * clients should use name and description.
+	 * @param x x position within the global compositor space
+	 * @param y y position within the global compositor space
+	 * @param physical_width width in millimeters of the output
+	 * @param physical_height height in millimeters of the output
+	 * @param subpixel subpixel orientation of the output
+	 * @param make textual description of the manufacturer
+	 * @param model textual description of the model
+	 * @param transform transform that maps framebuffer to output
+	 */
+	void (*geometry)(void *data,
+			 struct wl_output *wl_output,
+			 int32_t x,
+			 int32_t y,
+			 int32_t physical_width,
+			 int32_t physical_height,
+			 int32_t subpixel,
+			 const char *make,
+			 const char *model,
+			 int32_t transform);
+	/**
+	 * advertise available modes for the output
+	 *
+	 * The mode event describes an available mode for the output.
+	 *
+	 * The event is sent when binding to the output object and there
+	 * will always be one mode, the current mode. The event is sent
+	 * again if an output changes mode, for the mode that is now
+	 * current. In other words, the current mode is always the last
+	 * mode that was received with the current flag set.
+	 *
+	 * Non-current modes are deprecated. A compositor can decide to
+	 * only advertise the current mode and never send other modes.
+	 * Clients should not rely on non-current modes.
+	 *
+	 * The size of a mode is given in physical hardware units of the
+	 * output device. This is not necessarily the same as the output
+	 * size in the global compositor space. For instance, the output
+	 * may be scaled, as described in wl_output.scale, or transformed,
+	 * as described in wl_output.transform. Clients willing to retrieve
+	 * the output size in the global compositor space should use
+	 * xdg_output.logical_size instead.
+	 *
+	 * The vertical refresh rate can be set to zero if it doesn't make
+	 * sense for this output (e.g. for virtual outputs).
+	 *
+	 * The mode event will be followed by a done event (starting from
+	 * version 2).
+	 *
+	 * Clients should not use the refresh rate to schedule frames.
+	 * Instead, they should use the wl_surface.frame event or the
+	 * presentation-time protocol.
+	 *
+	 * Note: this information is not always meaningful for all outputs.
+	 * Some compositors, such as those exposing virtual outputs, might
+	 * fake the refresh rate or the size.
+	 * @param flags bitfield of mode flags
+	 * @param width width of the mode in hardware units
+	 * @param height height of the mode in hardware units
+	 * @param refresh vertical refresh rate in mHz
+	 */
+	void (*mode)(void *data,
+		     struct wl_output *wl_output,
+		     uint32_t flags,
+		     int32_t width,
+		     int32_t height,
+		     int32_t refresh);
+	/**
+	 * sent all information about output
+	 *
+	 * This event is sent after all other properties have been sent
+	 * after binding to the output object and after any other property
+	 * changes done after that. This allows changes to the output
+	 * properties to be seen as atomic, even if they happen via
+	 * multiple events.
+	 * @since 2
+	 */
+	void (*done)(void *data,
+		     struct wl_output *wl_output);
+	/**
+	 * output scaling properties
+	 *
+	 * This event contains scaling geometry information that is not
+	 * in the geometry event. It may be sent after binding the output
+	 * object or if the output scale changes later. If it is not sent,
+	 * the client should assume a scale of 1.
+	 *
+	 * A scale larger than 1 means that the compositor will
+	 * automatically scale surface buffers by this amount when
+	 * rendering. This is used for very high resolution displays where
+	 * applications rendering at the native resolution would be too
+	 * small to be legible.
+	 *
+	 * It is intended that scaling aware clients track the current
+	 * output of a surface, and if it is on a scaled output it should
+	 * use wl_surface.set_buffer_scale with the scale of the output.
+	 * That way the compositor can avoid scaling the surface, and the
+	 * client can supply a higher detail image.
+	 *
+	 * The scale event will be followed by a done event.
+	 * @param factor scaling factor of output
+	 * @since 2
+	 */
+	void (*scale)(void *data,
+		      struct wl_output *wl_output,
+		      int32_t factor);
+	/**
+	 * name of this output
+	 *
+	 * Many compositors will assign user-friendly names to their
+	 * outputs, show them to the user, allow the user to refer to an
+	 * output, etc. The client may wish to know this name as well to
+	 * offer the user similar behaviors.
+	 *
+	 * The name is a UTF-8 string with no convention defined for its
+	 * contents. Each name is unique among all wl_output globals. The
+	 * name is only guaranteed to be unique for the compositor
+	 * instance.
+	 *
+	 * The same output name is used for all clients for a given
+	 * wl_output global. Thus, the name can be shared across processes
+	 * to refer to a specific wl_output global.
+	 *
+	 * The name is not guaranteed to be persistent across sessions,
+	 * thus cannot be used to reliably identify an output in e.g.
+	 * configuration files.
+	 *
+	 * Examples of names include 'HDMI-A-1', 'WL-1', 'X11-1', etc.
+	 * However, do not assume that the name is a reflection of an
+	 * underlying DRM connector, X11 connection, etc.
+	 *
+	 * The name event is sent after binding the output object. This
+	 * event is only sent once per output object, and the name does not
+	 * change over the lifetime of the wl_output global.
+	 *
+	 * Compositors may re-use the same output name if the wl_output
+	 * global is destroyed and re-created later. Compositors should
+	 * avoid re-using the same name if possible.
+	 *
+	 * The name event will be followed by a done event.
+	 * @param name output name
+	 * @since 4
+	 */
+	void (*name)(void *data,
+		     struct wl_output *wl_output,
+		     const char *name);
+	/**
+	 * human-readable description of this output
+	 *
+	 * Many compositors can produce human-readable descriptions of
+	 * their outputs. The client may wish to know this description as
+	 * well, e.g. for output selection purposes.
+	 *
+	 * The description is a UTF-8 string with no convention defined for
+	 * its contents. The description is not guaranteed to be unique
+	 * among all wl_output globals. Examples might include 'Foocorp 11"
+	 * Display' or 'Virtual X11 output via :1'.
+	 *
+	 * The description event is sent after binding the output object
+	 * and whenever the description changes. The description is
+	 * optional, and may not be sent at all.
+	 *
+	 * The description event will be followed by a done event.
+	 * @param description output description
+	 * @since 4
+	 */
+	void (*description)(void *data,
+			    struct wl_output *wl_output,
+			    const char *description);
+};
+
+/**
+ * @ingroup iface_wl_output
+ */
+static inline int
+wl_output_add_listener(struct wl_output *wl_output,
+		       const struct wl_output_listener *listener, void *data)
+{
+	return wl_proxy_add_listener((struct wl_proxy *) wl_output,
+				     (void (**)(void)) listener, data);
+}
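+
+/*
+ * Illustrative sketch only (not part of the generated protocol): since
+ * version 2 the output properties are bracketed by a done event, so a
+ * client can cache values in its geometry/mode/scale handlers and treat
+ * them as one atomic update in the done handler. The output_state struct
+ * and its fields are assumptions for the sketch.
+ *
+ *   static void
+ *   handle_output_scale(void *data, struct wl_output *output, int32_t factor)
+ *   {
+ *       struct output_state *state = data;
+ *       state->pending_scale = factor;
+ *   }
+ *
+ *   static void
+ *   handle_output_done(void *data, struct wl_output *output)
+ *   {
+ *       struct output_state *state = data;
+ *
+ *       // apply the cached properties as a single atomic change, e.g. by
+ *       // calling wl_surface_set_buffer_scale() with the new scale
+ *       state->scale = state->pending_scale;
+ *   }
+ */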
+
+#define WL_OUTPUT_RELEASE 0
+
+/**
+ * @ingroup iface_wl_output
+ */
+#define WL_OUTPUT_GEOMETRY_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_output
+ */
+#define WL_OUTPUT_MODE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_output
+ */
+#define WL_OUTPUT_DONE_SINCE_VERSION 2
+/**
+ * @ingroup iface_wl_output
+ */
+#define WL_OUTPUT_SCALE_SINCE_VERSION 2
+/**
+ * @ingroup iface_wl_output
+ */
+#define WL_OUTPUT_NAME_SINCE_VERSION 4
+/**
+ * @ingroup iface_wl_output
+ */
+#define WL_OUTPUT_DESCRIPTION_SINCE_VERSION 4
+
+/**
+ * @ingroup iface_wl_output
+ */
+#define WL_OUTPUT_RELEASE_SINCE_VERSION 3
+
+/** @ingroup iface_wl_output */
+static inline void
+wl_output_set_user_data(struct wl_output *wl_output, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_output, user_data);
+}
+
+/** @ingroup iface_wl_output */
+static inline void *
+wl_output_get_user_data(struct wl_output *wl_output)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_output);
+}
+
+static inline uint32_t
+wl_output_get_version(struct wl_output *wl_output)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_output);
+}
+
+/** @ingroup iface_wl_output */
+static inline void
+wl_output_destroy(struct wl_output *wl_output)
+{
+	wl_proxy_destroy((struct wl_proxy *) wl_output);
+}
+
+/**
+ * @ingroup iface_wl_output
+ *
+ * Using this request a client can tell the server that it is not going to
+ * use the output object anymore.
+ */
+static inline void
+wl_output_release(struct wl_output *wl_output)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_output,
+			 WL_OUTPUT_RELEASE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_output), WL_MARSHAL_FLAG_DESTROY);
+}
+
+#define WL_REGION_DESTROY 0
+#define WL_REGION_ADD 1
+#define WL_REGION_SUBTRACT 2
+
+
+/**
+ * @ingroup iface_wl_region
+ */
+#define WL_REGION_DESTROY_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_region
+ */
+#define WL_REGION_ADD_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_region
+ */
+#define WL_REGION_SUBTRACT_SINCE_VERSION 1
+
+/** @ingroup iface_wl_region */
+static inline void
+wl_region_set_user_data(struct wl_region *wl_region, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_region, user_data);
+}
+
+/** @ingroup iface_wl_region */
+static inline void *
+wl_region_get_user_data(struct wl_region *wl_region)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_region);
+}
+
+static inline uint32_t
+wl_region_get_version(struct wl_region *wl_region)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_region);
+}
+
+/**
+ * @ingroup iface_wl_region
+ *
+ * Destroy the region.  This will invalidate the object ID.
+ */
+static inline void
+wl_region_destroy(struct wl_region *wl_region)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_region,
+			 WL_REGION_DESTROY, NULL, wl_proxy_get_version((struct wl_proxy *) wl_region), WL_MARSHAL_FLAG_DESTROY);
+}
+
+/**
+ * @ingroup iface_wl_region
+ *
+ * Add the specified rectangle to the region.
+ */
+static inline void
+wl_region_add(struct wl_region *wl_region, int32_t x, int32_t y, int32_t width, int32_t height)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_region,
+			 WL_REGION_ADD, NULL, wl_proxy_get_version((struct wl_proxy *) wl_region), 0, x, y, width, height);
+}
+
+/**
+ * @ingroup iface_wl_region
+ *
+ * Subtract the specified rectangle from the region.
+ */
+static inline void
+wl_region_subtract(struct wl_region *wl_region, int32_t x, int32_t y, int32_t width, int32_t height)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_region,
+			 WL_REGION_SUBTRACT, NULL, wl_proxy_get_version((struct wl_proxy *) wl_region), 0, x, y, width, height);
+}
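+
+/*
+ * Illustrative sketch only (not part of the generated protocol): a region
+ * is typically built up with add/subtract and then attached to a surface.
+ * The compositor and surface objects, wl_compositor_create_region() and
+ * wl_surface_set_opaque_region() are assumed to come from elsewhere in
+ * this protocol header.
+ *
+ *   struct wl_region *region = wl_compositor_create_region(compositor);
+ *   wl_region_add(region, 0, 0, 256, 256);
+ *   wl_region_subtract(region, 10, 10, 32, 32);   // punch a hole
+ *   wl_surface_set_opaque_region(surface, region);
+ *   wl_region_destroy(region);
+ */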
+
+#ifndef WL_SUBCOMPOSITOR_ERROR_ENUM
+#define WL_SUBCOMPOSITOR_ERROR_ENUM
+enum wl_subcompositor_error {
+	/**
+	 * the to-be sub-surface is invalid
+	 */
+	WL_SUBCOMPOSITOR_ERROR_BAD_SURFACE = 0,
+};
+#endif /* WL_SUBCOMPOSITOR_ERROR_ENUM */
+
+#define WL_SUBCOMPOSITOR_DESTROY 0
+#define WL_SUBCOMPOSITOR_GET_SUBSURFACE 1
+
+
+/**
+ * @ingroup iface_wl_subcompositor
+ */
+#define WL_SUBCOMPOSITOR_DESTROY_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_subcompositor
+ */
+#define WL_SUBCOMPOSITOR_GET_SUBSURFACE_SINCE_VERSION 1
+
+/** @ingroup iface_wl_subcompositor */
+static inline void
+wl_subcompositor_set_user_data(struct wl_subcompositor *wl_subcompositor, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_subcompositor, user_data);
+}
+
+/** @ingroup iface_wl_subcompositor */
+static inline void *
+wl_subcompositor_get_user_data(struct wl_subcompositor *wl_subcompositor)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_subcompositor);
+}
+
+static inline uint32_t
+wl_subcompositor_get_version(struct wl_subcompositor *wl_subcompositor)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_subcompositor);
+}
+
+/**
+ * @ingroup iface_wl_subcompositor
+ *
+ * Informs the server that the client will not be using this
+ * protocol object anymore. This does not affect any other
+ * objects, wl_subsurface objects included.
+ */
+static inline void
+wl_subcompositor_destroy(struct wl_subcompositor *wl_subcompositor)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_subcompositor,
+			 WL_SUBCOMPOSITOR_DESTROY, NULL, wl_proxy_get_version((struct wl_proxy *) wl_subcompositor), WL_MARSHAL_FLAG_DESTROY);
+}
+
+/**
+ * @ingroup iface_wl_subcompositor
+ *
+ * Create a sub-surface interface for the given surface, and
+ * associate it with the given parent surface. This turns a
+ * plain wl_surface into a sub-surface.
+ *
+ * The to-be sub-surface must not already have another role, and it
+ * must not have an existing wl_subsurface object. Otherwise a protocol
+ * error is raised.
+ *
+ * Adding sub-surfaces to a parent is a double-buffered operation on the
+ * parent (see wl_surface.commit). The effect of adding a sub-surface
+ * becomes visible the next time the state of the parent surface is
+ * applied.
+ *
+ * This request modifies the behaviour of the wl_surface.commit request on
+ * the sub-surface; see the documentation of the wl_subsurface interface.
+ */
+static inline struct wl_subsurface *
+wl_subcompositor_get_subsurface(struct wl_subcompositor *wl_subcompositor, struct wl_surface *surface, struct wl_surface *parent)
+{
+	struct wl_proxy *id;
+
+	id = wl_proxy_marshal_flags((struct wl_proxy *) wl_subcompositor,
+			 WL_SUBCOMPOSITOR_GET_SUBSURFACE, &wl_subsurface_interface, wl_proxy_get_version((struct wl_proxy *) wl_subcompositor), 0, NULL, surface, parent);
+
+	return (struct wl_subsurface *) id;
+}
+
+#ifndef WL_SUBSURFACE_ERROR_ENUM
+#define WL_SUBSURFACE_ERROR_ENUM
+enum wl_subsurface_error {
+	/**
+	 * wl_surface is not a sibling or the parent
+	 */
+	WL_SUBSURFACE_ERROR_BAD_SURFACE = 0,
+};
+#endif /* WL_SUBSURFACE_ERROR_ENUM */
+
+#define WL_SUBSURFACE_DESTROY 0
+#define WL_SUBSURFACE_SET_POSITION 1
+#define WL_SUBSURFACE_PLACE_ABOVE 2
+#define WL_SUBSURFACE_PLACE_BELOW 3
+#define WL_SUBSURFACE_SET_SYNC 4
+#define WL_SUBSURFACE_SET_DESYNC 5
+
+
+/**
+ * @ingroup iface_wl_subsurface
+ */
+#define WL_SUBSURFACE_DESTROY_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_subsurface
+ */
+#define WL_SUBSURFACE_SET_POSITION_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_subsurface
+ */
+#define WL_SUBSURFACE_PLACE_ABOVE_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_subsurface
+ */
+#define WL_SUBSURFACE_PLACE_BELOW_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_subsurface
+ */
+#define WL_SUBSURFACE_SET_SYNC_SINCE_VERSION 1
+/**
+ * @ingroup iface_wl_subsurface
+ */
+#define WL_SUBSURFACE_SET_DESYNC_SINCE_VERSION 1
+
+/** @ingroup iface_wl_subsurface */
+static inline void
+wl_subsurface_set_user_data(struct wl_subsurface *wl_subsurface, void *user_data)
+{
+	wl_proxy_set_user_data((struct wl_proxy *) wl_subsurface, user_data);
+}
+
+/** @ingroup iface_wl_subsurface */
+static inline void *
+wl_subsurface_get_user_data(struct wl_subsurface *wl_subsurface)
+{
+	return wl_proxy_get_user_data((struct wl_proxy *) wl_subsurface);
+}
+
+static inline uint32_t
+wl_subsurface_get_version(struct wl_subsurface *wl_subsurface)
+{
+	return wl_proxy_get_version((struct wl_proxy *) wl_subsurface);
+}
+
+/**
+ * @ingroup iface_wl_subsurface
+ *
+ * The sub-surface interface is removed from the wl_surface object
+ * that was turned into a sub-surface with a
+ * wl_subcompositor.get_subsurface request. The wl_surface's association
+ * to the parent is deleted, and the wl_surface loses its role as
+ * a sub-surface. The wl_surface is unmapped immediately.
+ */
+static inline void
+wl_subsurface_destroy(struct wl_subsurface *wl_subsurface)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_subsurface,
+			 WL_SUBSURFACE_DESTROY, NULL, wl_proxy_get_version((struct wl_proxy *) wl_subsurface), WL_MARSHAL_FLAG_DESTROY);
+}
+
+/**
+ * @ingroup iface_wl_subsurface
+ *
+ * This schedules a sub-surface position change.
+ * The sub-surface will be moved so that its origin (top left
+ * corner pixel) will be at the location x, y of the parent surface
+ * coordinate system. The coordinates are not restricted to the parent
+ * surface area. Negative values are allowed.
+ *
+ * The scheduled coordinates will take effect whenever the state of the
+ * parent surface is applied. When this happens depends on whether the
+ * parent surface is in synchronized mode or not. See
+ * wl_subsurface.set_sync and wl_subsurface.set_desync for details.
+ *
+ * If more than one set_position request is invoked by the client before
+ * the commit of the parent surface, the position of a new request always
+ * replaces the scheduled position from any previous request.
+ *
+ * The initial position is 0, 0.
+ */
+static inline void
+wl_subsurface_set_position(struct wl_subsurface *wl_subsurface, int32_t x, int32_t y)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_subsurface,
+			 WL_SUBSURFACE_SET_POSITION, NULL, wl_proxy_get_version((struct wl_proxy *) wl_subsurface), 0, x, y);
+}
+
+/**
+ * @ingroup iface_wl_subsurface
+ *
+ * This sub-surface is taken from the stack, and put back just
+ * above the reference surface, changing the z-order of the sub-surfaces.
+ * The reference surface must be one of the sibling surfaces, or the
+ * parent surface. Using any other surface, including this sub-surface,
+ * will cause a protocol error.
+ *
+ * The z-order is double-buffered. Requests are handled in order and
+ * applied immediately to a pending state. The final pending state is
+ * copied to the active state the next time the state of the parent
+ * surface is applied. When this happens depends on whether the parent
+ * surface is in synchronized mode or not. See wl_subsurface.set_sync and
+ * wl_subsurface.set_desync for details.
+ *
+ * A new sub-surface is initially added as the top-most in the stack
+ * of its siblings and parent.
+ */
+static inline void
+wl_subsurface_place_above(struct wl_subsurface *wl_subsurface, struct wl_surface *sibling)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_subsurface,
+			 WL_SUBSURFACE_PLACE_ABOVE, NULL, wl_proxy_get_version((struct wl_proxy *) wl_subsurface), 0, sibling);
+}
+
+/**
+ * @ingroup iface_wl_subsurface
+ *
+ * The sub-surface is placed just below the reference surface.
+ * See wl_subsurface.place_above.
+ */
+static inline void
+wl_subsurface_place_below(struct wl_subsurface *wl_subsurface, struct wl_surface *sibling)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_subsurface,
+			 WL_SUBSURFACE_PLACE_BELOW, NULL, wl_proxy_get_version((struct wl_proxy *) wl_subsurface), 0, sibling);
+}
+
+/**
+ * @ingroup iface_wl_subsurface
+ *
+ * Change the commit behaviour of the sub-surface to synchronized
+ * mode, also described as the parent dependent mode.
+ *
+ * In synchronized mode, wl_surface.commit on a sub-surface will
+ * accumulate the committed state in a cache, but the state will
+ * not be applied and hence will not change the compositor output.
+ * The cached state is applied to the sub-surface immediately after
+ * the parent surface's state is applied. This ensures atomic
+ * updates of the parent and all its synchronized sub-surfaces.
+ * Applying the cached state will invalidate the cache, so further
+ * parent surface commits do not (re-)apply old state.
+ *
+ * See wl_subsurface for the recursive effect of this mode.
+ */
+static inline void
+wl_subsurface_set_sync(struct wl_subsurface *wl_subsurface)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_subsurface,
+			 WL_SUBSURFACE_SET_SYNC, NULL, wl_proxy_get_version((struct wl_proxy *) wl_subsurface), 0);
+}
+
+/**
+ * @ingroup iface_wl_subsurface
+ *
+ * Change the commit behaviour of the sub-surface to desynchronized
+ * mode, also described as independent or freely running mode.
+ *
+ * In desynchronized mode, wl_surface.commit on a sub-surface will
+ * apply the pending state directly, without caching, as happens
+ * normally with a wl_surface. Calling wl_surface.commit on the
+ * parent surface has no effect on the sub-surface's wl_surface
+ * state. This mode allows a sub-surface to be updated on its own.
+ *
+ * If cached state exists when wl_surface.commit is called in
+ * desynchronized mode, the pending state is added to the cached
+ * state, and applied as a whole. This invalidates the cache.
+ *
+ * Note: even if a sub-surface is set to desynchronized, a parent
+ * sub-surface may override it to behave as synchronized. For details,
+ * see wl_subsurface.
+ *
+ * If a surface's parent surface behaves as desynchronized, then
+ * the cached state is applied on set_desync.
+ */
+static inline void
+wl_subsurface_set_desync(struct wl_subsurface *wl_subsurface)
+{
+	wl_proxy_marshal_flags((struct wl_proxy *) wl_subsurface,
+			 WL_SUBSURFACE_SET_DESYNC, NULL, wl_proxy_get_version((struct wl_proxy *) wl_subsurface), 0);
+}
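+
+/*
+ * Illustrative sketch only (not part of the generated protocol): putting
+ * the sub-surface requests together, a plain wl_surface is given the
+ * sub-surface role, positioned relative to its parent, and switched to
+ * desynchronized mode. The subcompositor, child and parent objects are
+ * assumed to exist already.
+ *
+ *   struct wl_subsurface *sub =
+ *       wl_subcompositor_get_subsurface(subcompositor, child, parent);
+ *   wl_subsurface_set_position(sub, 32, 32);   // takes effect on parent commit
+ *   wl_subsurface_set_desync(sub);             // child commits apply directly
+ *   wl_surface_commit(parent);                 // applies the scheduled position
+ */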
+
+#ifdef  __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/Wayland/wayland-client.h b/include/Wayland/wayland-client.h
new file mode 100644
index 0000000..9f70fa3
--- /dev/null
+++ b/include/Wayland/wayland-client.h
@@ -0,0 +1,42 @@
+/*
+ * Copyright © 2008 Kristian Høgsberg
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files (the
+ * "Software"), to deal in the Software without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Software, and to
+ * permit persons to whom the Software is furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice (including the
+ * next paragraph) shall be included in all copies or substantial
+ * portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT.  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+ * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+/** \file
+ *
+ *  \brief Include the client API and protocol C API.
+ *
+ *  \warning Use of this header file is discouraged. Prefer including
+ *  wayland-client-core.h instead, which does not include the
+ *  client protocol header and as such only defines the library
+ *  API.
+ */
+
+#ifndef WAYLAND_CLIENT_H
+#define WAYLAND_CLIENT_H
+
+#include "wayland-client-core.h"
+#include "wayland-client-protocol.h"
+
+#endif
diff --git a/include/Wayland/wayland-util.h b/include/Wayland/wayland-util.h
new file mode 100644
index 0000000..18c512e
--- /dev/null
+++ b/include/Wayland/wayland-util.h
@@ -0,0 +1,765 @@
+/*
+ * Copyright © 2008 Kristian Høgsberg
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files (the
+ * "Software"), to deal in the Software without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Software, and to
+ * permit persons to whom the Software is furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice (including the
+ * next paragraph) shall be included in all copies or substantial
+ * portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT.  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+ * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+/** \file wayland-util.h
+ *
+ * \brief Utility classes, functions, and macros.
+ */
+
+#ifndef WAYLAND_UTIL_H
+#define WAYLAND_UTIL_H
+
+#include <math.h>
+#include <stddef.h>
+#include <inttypes.h>
+#include <stdarg.h>
+
+#ifdef  __cplusplus
+extern "C" {
+#endif
+
+/** Visibility attribute */
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define WL_EXPORT __attribute__ ((visibility("default")))
+#else
+#define WL_EXPORT
+#endif
+
+/** Deprecated attribute */
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define WL_DEPRECATED __attribute__ ((deprecated))
+#else
+#define WL_DEPRECATED
+#endif
+
+/**
+ * Printf-style argument attribute
+ *
+ * \param x Ordinality of the format string argument
+ * \param y Ordinality of the argument to check against the format string
+ *
+ * \sa https://gcc.gnu.org/onlinedocs/gcc-3.2.1/gcc/Function-Attributes.html
+ */
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define WL_PRINTF(x, y) __attribute__((__format__(__printf__, x, y)))
+#else
+#define WL_PRINTF(x, y)
+#endif
+
+/** \class wl_object
+ *
+ * \brief A protocol object.
+ *
+ * A `wl_object` is an opaque struct identifying the protocol object
+ * underlying a `wl_proxy` or `wl_resource`.
+ *
+ * \note Functions accessing a `wl_object` are not normally used by client code.
+ * Clients should normally use the higher level interface generated by the
+ * scanner to interact with compositor objects.
+ *
+ */
+struct wl_object;
+
+/**
+ * Protocol message signature
+ *
+ * A wl_message describes the signature of an actual protocol message, such as a
+ * request or event, that adheres to the Wayland protocol wire format. The
+ * protocol implementation uses a wl_message within its demarshal machinery for
+ * decoding messages between a compositor and its clients. In a sense, a
+ * wl_message is to a protocol message like a class is to an object.
+ *
+ * The `name` of a wl_message is the name of the corresponding protocol message.
+ *
+ * The `signature` is an ordered list of symbols representing the data types
+ * of message arguments and, optionally, a protocol version and indicators for
+ * nullability. A leading integer in the `signature` indicates the _since_
+ * version of the protocol message. A `?` preceding a data type symbol indicates
+ * that the following argument type is nullable. While it is a protocol violation
+ * to send messages with non-nullable arguments set to `NULL`, event handlers in
+ * clients might still get called with non-nullable object arguments set to
+ * `NULL`. This can happen when the client destroyed the object being used as
+ * argument on its side and an event referencing that object was sent before the
+ * server knew about its destruction. As this race cannot be prevented, clients
+ * should - as a general rule - program their event handlers such that they can
+ * handle object arguments declared non-nullable being `NULL` gracefully.
+ *
+ * When no arguments accompany a message, `signature` is an empty string.
+ *
+ * Symbols:
+ *
+ * * `i`: int
+ * * `u`: uint
+ * * `f`: fixed
+ * * `s`: string
+ * * `o`: object
+ * * `n`: new_id
+ * * `a`: array
+ * * `h`: fd
+ * * `?`: following argument is nullable
+ *
+ * While demarshaling primitive arguments is straightforward, when demarshaling
+ * messages containing `object` or `new_id` arguments, the protocol
+ * implementation often must determine the type of the object. The `types` of a
+ * wl_message is an array of wl_interface references that correspond to `o` and
+ * `n` arguments in `signature`, with `NULL` placeholders for arguments with
+ * non-object types.
+ *
+ * Consider the protocol event wl_display `delete_id` that has a single `uint`
+ * argument. The wl_message is:
+ *
+ * \code
+ * { "delete_id", "u", [NULL] }
+ * \endcode
+ *
+ * Here, the message `name` is `"delete_id"`, the `signature` is `"u"`, and the
+ * argument `types` is `[NULL]`, indicating that the `uint` argument has no
+ * corresponding wl_interface since it is a primitive argument.
+ *
+ * In contrast, consider a `wl_foo` interface supporting protocol request `bar`
+ * that has existed since version 2, and has two arguments: a `uint` and an
+ * object of type `wl_baz_interface` that may be `NULL`. Such a `wl_message`
+ * might be:
+ *
+ * \code
+ * { "bar", "2u?o", [NULL, &wl_baz_interface] }
+ * \endcode
+ *
+ * Here, the message `name` is `"bar"`, and the `signature` is `"2u?o"`. Notice
+ * how the `2` indicates the protocol version, the `u` indicates the first
+ * argument type is `uint`, and the `?o` indicates that the second argument
+ * is an object that may be `NULL`. Lastly, the argument `types` array indicates
+ * that no wl_interface corresponds to the first argument, while the type
+ * `wl_baz_interface` corresponds to the second argument.
+ *
+ * \sa wl_argument
+ * \sa wl_interface
+ * \sa <a href="https://wayland.freedesktop.org/docs/html/ch04.html#sect-Protocol-Wire-Format">Wire Format</a>
+ */
+struct wl_message {
+	/** Message name */
+	const char *name;
+	/** Message signature */
+	const char *signature;
+	/** Object argument interfaces */
+	const struct wl_interface **types;
+};
+
+/**
+ * Protocol object interface
+ *
+ * A wl_interface describes the API of a protocol object defined in the Wayland
+ * protocol specification. The protocol implementation uses a wl_interface
+ * within its marshalling machinery for encoding client requests.
+ *
+ * The `name` of a wl_interface is the name of the corresponding protocol
+ * interface, and `version` represents the version of the interface. The members
+ * `method_count` and `event_count` represent the number of `methods` (requests)
+ * and `events` in the respective wl_message members.
+ *
+ * For example, consider a protocol interface `foo`, marked as version `1`, with
+ * two requests and one event.
+ *
+ * \code{.xml}
+ * <interface name="foo" version="1">
+ *   <request name="a"></request>
+ *   <request name="b"></request>
+ *   <event name="c"></event>
+ * </interface>
+ * \endcode
+ *
+ * Given two wl_message arrays `foo_requests` and `foo_events`, a wl_interface
+ * for `foo` might be:
+ *
+ * \code
+ * struct wl_interface foo_interface = {
+ *         "foo", 1,
+ *         2, foo_requests,
+ *         1, foo_events
+ * };
+ * \endcode
+ *
+ * \note The server side of the protocol may define interface <em>implementation
+ *       types</em> that incorporate the term `interface` in their name. Take
+ *       care to not confuse these server-side `struct`s with a wl_interface
+ *       variable whose name also ends in `interface`. For example, while the
+ *       server may define a type `struct wl_foo_interface`, the client may
+ *       define a `struct wl_interface wl_foo_interface`.
+ *
+ * \sa wl_message
+ * \sa wl_proxy
+ * \sa <a href="https://wayland.freedesktop.org/docs/html/ch04.html#sect-Protocol-Interfaces">Interfaces</a>
+ * \sa <a href="https://wayland.freedesktop.org/docs/html/ch04.html#sect-Protocol-Versioning">Versioning</a>
+ */
+struct wl_interface {
+	/** Interface name */
+	const char *name;
+	/** Interface version */
+	int version;
+	/** Number of methods (requests) */
+	int method_count;
+	/** Method (request) signatures */
+	const struct wl_message *methods;
+	/** Number of events */
+	int event_count;
+	/** Event signatures */
+	const struct wl_message *events;
+};
+
+/** \class wl_list
+ *
+ * \brief Doubly-linked list
+ *
+ * On its own, an instance of `struct wl_list` represents the sentinel head of
+ * a doubly-linked list, and must be initialized using wl_list_init().
+ * When empty, the list head's `next` and `prev` members point to the list head
+ * itself, otherwise `next` references the first element in the list, and `prev`
+ * refers to the last element in the list.
+ *
+ * Use the `struct wl_list` type to represent both the list head and the links
+ * between elements within the list. Use wl_list_empty() to determine if the
+ * list is empty in O(1).
+ *
+ * All elements in the list must be of the same type. The element type must have
+ * a `struct wl_list` member, often named `link` by convention. Prior to
+ * insertion, there is no need to initialize an element's `link` - invoking
+ * wl_list_init() on an individual list element's `struct wl_list` member is
+ * unnecessary if the very next operation is wl_list_insert(). However, a
+ * common idiom is to initialize an element's `link` prior to removal - ensure
+ * safety by invoking wl_list_init() before wl_list_remove().
+ *
+ * Consider a list reference `struct wl_list foo_list`, an element type as
+ * `struct element`, and an element's link member as `struct wl_list link`.
+ *
+ * The following code initializes a list and adds three elements to it.
+ *
+ * \code
+ * struct wl_list foo_list;
+ *
+ * struct element {
+ *         int foo;
+ *         struct wl_list link;
+ * };
+ * struct element e1, e2, e3;
+ *
+ * wl_list_init(&foo_list);
+ * wl_list_insert(&foo_list, &e1.link);   // e1 is the first element
+ * wl_list_insert(&foo_list, &e2.link);   // e2 is now the first element
+ * wl_list_insert(&e2.link, &e3.link); // insert e3 after e2
+ * \endcode
+ *
+ * The list now looks like <em>[e2, e3, e1]</em>.
+ *
+ * The `wl_list` API provides some iterator macros. For example, to iterate
+ * a list in ascending order:
+ *
+ * \code
+ * struct element *e;
+ * wl_list_for_each(e, &foo_list, link) {
+ *         do_something_with_element(e);
+ * }
+ * \endcode
+ *
+ * See the documentation of each iterator for details.
+ * \sa http://git.kernel.org/cgit/linux/kernel/git/torvalds/linux.git/tree/include/linux/list.h
+ */
+struct wl_list {
+	/** Previous list element */
+	struct wl_list *prev;
+	/** Next list element */
+	struct wl_list *next;
+};
+
+/**
+ * Initializes the list.
+ *
+ * \param list List to initialize
+ *
+ * \memberof wl_list
+ */
+void
+wl_list_init(struct wl_list *list);
+
+/**
+ * Inserts an element into the list, after the element represented by \p list.
+ * When \p list is a reference to the list itself (the head), set the containing
+ * struct of \p elm as the first element in the list.
+ *
+ * \note If \p elm is already part of a list, inserting it again will lead to
+ *       list corruption.
+ *
+ * \param list List element after which the new element is inserted
+ * \param elm Link of the containing struct to insert into the list
+ *
+ * \memberof wl_list
+ */
+void
+wl_list_insert(struct wl_list *list, struct wl_list *elm);
+
+/**
+ * Removes an element from the list.
+ *
+ * \note This operation leaves \p elm in an invalid state.
+ *
+ * \param elm Link of the containing struct to remove from the list
+ *
+ * \memberof wl_list
+ */
+void
+wl_list_remove(struct wl_list *elm);
+
+/**
+ * Determines the length of the list.
+ *
+ * \note This is an O(n) operation.
+ *
+ * \param list List whose length is to be determined
+ *
+ * \return Number of elements in the list
+ *
+ * \memberof wl_list
+ */
+int
+wl_list_length(const struct wl_list *list);
+
+/**
+ * Determines if the list is empty.
+ *
+ * \param list List whose emptiness is to be determined
+ *
+ * \return 1 if empty, or 0 if not empty
+ *
+ * \memberof wl_list
+ */
+int
+wl_list_empty(const struct wl_list *list);
+
+/**
+ * Inserts all of the elements of one list into another, after the element
+ * represented by \p list.
+ *
+ * \note This leaves \p other in an invalid state.
+ *
+ * \param list List element after which the other list elements will be inserted
+ * \param other List of elements to insert
+ *
+ * \memberof wl_list
+ */
+void
+wl_list_insert_list(struct wl_list *list, struct wl_list *other);
+
+/**
+ * Retrieves a pointer to a containing struct, given a member name.
+ *
+ * This macro allows "conversion" from a pointer to a member to its containing
+ * struct. This is useful if you have a contained item like a wl_list,
+ * wl_listener, or wl_signal, provided via a callback or other means, and would
+ * like to retrieve the struct that contains it.
+ *
+ * To demonstrate, the following example retrieves a pointer to
+ * `example_container` given only its `destroy_listener` member:
+ *
+ * \code
+ * struct example_container {
+ *         struct wl_listener destroy_listener;
+ *         // other members...
+ * };
+ *
+ * void example_container_destroy(struct wl_listener *listener, void *data)
+ * {
+ *         struct example_container *ctr;
+ *
+ *         ctr = wl_container_of(listener, ctr, destroy_listener);
+ *         // destroy ctr...
+ * }
+ * \endcode
+ *
+ * \note `sample` need not be a valid pointer. A null or uninitialised pointer
+ *       is sufficient.
+ *
+ * \param ptr Valid pointer to the contained member
+ * \param sample Pointer to a struct whose type contains \p ptr
+ * \param member Named location of \p ptr within the \p sample type
+ *
+ * \return The container for the specified pointer
+ */
+#define wl_container_of(ptr, sample, member)				\
+	(__typeof__(sample))((char *)(ptr) -				\
+			     offsetof(__typeof__(*sample), member))
+
+/**
+ * Iterates over a list.
+ *
+ * This macro expresses a for-each iterator for wl_list. Given a list and
+ * wl_list link member name (often named `link` by convention), this macro
+ * assigns each element in the list to \p pos, which can then be referenced in
+ * a trailing code block. For example, given a wl_list of `struct message`
+ * elements:
+ *
+ * \code
+ * struct message {
+ *         char *contents;
+ *         wl_list link;
+ * };
+ *
+ * struct wl_list *message_list;
+ * // Assume message_list now "contains" many messages
+ *
+ * struct message *m;
+ * wl_list_for_each(m, message_list, link) {
+ *         do_something_with_message(m);
+ * }
+ * \endcode
+ *
+ * \param pos Cursor that each list element will be assigned to
+ * \param head Head of the list to iterate over
+ * \param member Name of the link member within the element struct
+ *
+ * \relates wl_list
+ */
+#define wl_list_for_each(pos, head, member)				\
+	for (pos = wl_container_of((head)->next, pos, member);	\
+	     &pos->member != (head);					\
+	     pos = wl_container_of(pos->member.next, pos, member))
+
+/**
+ * Iterates over a list, safe against removal of the list element.
+ *
+ * \note Only removal of the current element, \p pos, is safe. Removing
+ *       any other element during traversal may lead to a loop malfunction.
+ *
+ * \sa wl_list_for_each()
+ *
+ * \param pos Cursor that each list element will be assigned to
+ * \param tmp Temporary pointer of the same type as \p pos
+ * \param head Head of the list to iterate over
+ * \param member Name of the link member within the element struct
+ *
+ * \relates wl_list
+ */
+#define wl_list_for_each_safe(pos, tmp, head, member)			\
+	for (pos = wl_container_of((head)->next, pos, member),		\
+	     tmp = wl_container_of((pos)->member.next, tmp, member);	\
+	     &pos->member != (head);					\
+	     pos = tmp,							\
+	     tmp = wl_container_of(pos->member.next, tmp, member))
+
+/**
+ * Iterates backwards over a list.
+ *
+ * \sa wl_list_for_each()
+ *
+ * \param pos Cursor that each list element will be assigned to
+ * \param head Head of the list to iterate over
+ * \param member Name of the link member within the element struct
+ *
+ * \relates wl_list
+ */
+#define wl_list_for_each_reverse(pos, head, member)			\
+	for (pos = wl_container_of((head)->prev, pos, member);	\
+	     &pos->member != (head);					\
+	     pos = wl_container_of(pos->member.prev, pos, member))
+
+/**
+ * Iterates backwards over a list, safe against removal of the list element.
+ *
+ * \note Only removal of the current element, \p pos, is safe. Removing
+ *       any other element during traversal may lead to a loop malfunction.
+ *
+ * \sa wl_list_for_each()
+ *
+ * \param pos Cursor that each list element will be assigned to
+ * \param tmp Temporary pointer of the same type as \p pos
+ * \param head Head of the list to iterate over
+ * \param member Name of the link member within the element struct
+ *
+ * \relates wl_list
+ */
+#define wl_list_for_each_reverse_safe(pos, tmp, head, member)		\
+	for (pos = wl_container_of((head)->prev, pos, member),	\
+	     tmp = wl_container_of((pos)->member.prev, tmp, member);	\
+	     &pos->member != (head);					\
+	     pos = tmp,							\
+	     tmp = wl_container_of(pos->member.prev, tmp, member))
+
+/**
+ * \class wl_array
+ *
+ * Dynamic array
+ *
+ * A wl_array is a dynamic array that can only grow until released. It is
+ * intended for relatively small allocations whose size is variable or not known
+ * in advance. While construction of a wl_array does not require all elements to
+ * be of the same size, wl_array_for_each() does require all elements to have
+ * the same type and size.
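+ *
+ * As a rough illustrative sketch (do_something_with_int() is a placeholder
+ * helper, not part of this API), a client might grow and then iterate an
+ * array of ints like this:
+ *
+ * \code
+ * struct wl_array a;
+ * int *p;
+ *
+ * wl_array_init(&a);
+ * p = wl_array_add(&a, sizeof *p);   // reserve space for one int
+ * if (p)
+ *         *p = 42;
+ * wl_array_for_each(p, &a)
+ *         do_something_with_int(*p);
+ * wl_array_release(&a);
+ * \endcode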
+ *
+ */
+struct wl_array {
+	/** Array size */
+	size_t size;
+	/** Allocated space */
+	size_t alloc;
+	/** Array data */
+	void *data;
+};
+
+/**
+ * Initializes the array.
+ *
+ * \param array Array to initialize
+ *
+ * \memberof wl_array
+ */
+void
+wl_array_init(struct wl_array *array);
+
+/**
+ * Releases the array data.
+ *
+ * \note Leaves the array in an invalid state.
+ *
+ * \param array Array whose data is to be released
+ *
+ * \memberof wl_array
+ */
+void
+wl_array_release(struct wl_array *array);
+
+/**
+ * Increases the size of the array by \p size bytes.
+ *
+ * \param array Array whose size is to be increased
+ * \param size Number of bytes to increase the size of the array by
+ *
+ * \return A pointer to the beginning of the newly appended space, or NULL when
+ *         resizing fails.
+ *
+ * \memberof wl_array
+ */
+void *
+wl_array_add(struct wl_array *array, size_t size);
+
+/**
+ * Copies the contents of \p source to \p array.
+ *
+ * \param array Destination array to copy to
+ * \param source Source array to copy from
+ *
+ * \return 0 on success, or -1 on failure
+ *
+ * \memberof wl_array
+ */
+int
+wl_array_copy(struct wl_array *array, struct wl_array *source);
+
+/**
+ * Iterates over an array.
+ *
+ * This macro expresses a for-each iterator for wl_array. It assigns each
+ * element in the array to \p pos, which can then be referenced in a trailing
+ * code block. \p pos must be a pointer to the array element type, and all
+ * array elements must be of the same type and size.
+ *
+ * \param pos Cursor that each array element will be assigned to
+ * \param array Array to iterate over
+ *
+ * \relates wl_array
+ * \sa wl_list_for_each()
+ */
+#define wl_array_for_each(pos, array)					\
+	for (pos = (array)->data;					\
+	     (const char *) pos < ((const char *) (array)->data + (array)->size); \
+	     (pos)++)
+
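Tying the wl_array pieces together, an illustrative sketch (the function name is ours): grow the array with wl_array_add(), walk it with wl_array_for_each(), and release it when done.

    static int sum_squares(int n, int *out)
    {
        struct wl_array a;
        int i, *slot, sum = 0;

        wl_array_init(&a);
        for (i = 0; i < n; i++) {
            slot = wl_array_add(&a, sizeof *slot);
            if (!slot) {                /* resizing failed */
                wl_array_release(&a);
                return -1;
            }
            *slot = i * i;
        }

        /* all elements have the same type and size, as required */
        wl_array_for_each(slot, &a)
            sum += *slot;

        wl_array_release(&a);
        *out = sum;
        return 0;
    }
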
+/**
+ * Fixed-point number
+ *
+ * A `wl_fixed_t` is a 24.8 signed fixed-point number with a sign bit, 23 bits
+ * of integer precision and 8 bits of decimal precision. Consider `wl_fixed_t`
+ * as an opaque struct with methods that facilitate conversion to and from
+ * `double` and `int` types.
+ */
+typedef int32_t wl_fixed_t;
+
+/**
+ * Converts a fixed-point number to a floating-point number.
+ *
+ * \param f Fixed-point number to convert
+ *
+ * \return Floating-point representation of the fixed-point argument
+ */
+static inline double
+wl_fixed_to_double(wl_fixed_t f)
+{
+	union {
+		double d;
+		int64_t i;
+	} u;
+
+	u.i = ((1023LL + 44LL) << 52) + (1LL << 51) + f;
+
+	return u.d - (3LL << 43);
+}
+
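The union arithmetic above is a bit-level shortcut: the magic exponent makes one mantissa LSB worth 2^-8, so after the addition the double equals 3*2^43 + f/256, and the final subtraction leaves f/256. A plain-arithmetic reference that computes the same value (the helper name is ours, for comparison only):

    /* Reference-only equivalent of wl_fixed_to_double(): one fixed-point
     * unit is 1/256, so the conversion is just a scale by 2^-8. */
    static inline double
    wl_fixed_to_double_ref(wl_fixed_t f)
    {
        return f / 256.0;
    }
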
+/**
+ * Converts a floating-point number to a fixed-point number.
+ *
+ * \param d Floating-point number to convert
+ *
+ * \return Fixed-point representation of the floating-point argument
+ */
+static inline wl_fixed_t
+wl_fixed_from_double(double d)
+{
+	union {
+		double d;
+		int64_t i;
+	} u;
+
+	u.d = d + (3LL << (51 - 8));
+
+	return (wl_fixed_t)u.i;
+}
+
+/**
+ * Converts a fixed-point number to an integer.
+ *
+ * \param f Fixed-point number to convert
+ *
+ * \return Integer component of the fixed-point argument
+ */
+static inline int
+wl_fixed_to_int(wl_fixed_t f)
+{
+	return f / 256;
+}
+
+/**
+ * Converts an integer to a fixed-point number.
+ *
+ * \param i Integer to convert
+ *
+ * \return Fixed-point representation of the integer argument
+ */
+static inline wl_fixed_t
+wl_fixed_from_int(int i)
+{
+	return i * 256;
+}
+
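A few worked values make the 24.8 scaling concrete (assert-style sketch; each value follows from the *256 / /256 definitions above, and fractions finer than 1/256 are rounded by wl_fixed_from_double()):

    #include <assert.h>

    static void fixed_point_examples(void)
    {
        assert(wl_fixed_from_int(2) == 512);         /* 2 * 256   */
        assert(wl_fixed_to_int(512) == 2);           /* 512 / 256 */
        assert(wl_fixed_from_double(1.5) == 384);    /* 1.5 * 256 */
        assert(wl_fixed_to_double(384) == 1.5);      /* 384 / 256 */
        assert(wl_fixed_to_int(wl_fixed_from_double(1.75)) == 1); /* truncates */
    }
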
+/**
+ * Protocol message argument data types
+ *
+ * This union represents all of the argument types in the Wayland protocol wire
+ * format. The protocol implementation uses wl_argument within its marshalling
+ * machinery for dispatching messages between a client and a compositor.
+ *
+ * \sa wl_message
+ * \sa wl_interface
+ * \sa <a href="https://wayland.freedesktop.org/docs/html/ch04.html#sect-Protocol-wire-Format">Wire Format</a>
+ */
+union wl_argument {
+	int32_t i;           /**< `int`    */
+	uint32_t u;          /**< `uint`   */
+	wl_fixed_t f;        /**< `fixed`  */
+	const char *s;       /**< `string` */
+	struct wl_object *o; /**< `object` */
+	uint32_t n;          /**< `new_id` */
+	struct wl_array *a;  /**< `array`  */
+	int32_t h;           /**< `fd`     */
+};
+
+/**
+ * Dispatcher function type alias
+ *
+ * A dispatcher is a function that handles the emitting of callbacks in client
+ * code. For programs directly using the C library, this is done by using
+ * libffi to call function pointers. When binding to languages other than C,
+ * dispatchers provide a way to abstract the function calling process to be
+ * friendlier to other function calling systems.
+ *
+ * A dispatcher takes five arguments: The first is the dispatcher-specific
+ * implementation associated with the target object. The second is the object
+ * upon which the callback is being invoked (either wl_proxy or wl_resource).
+ * The third and fourth arguments are the opcode and the wl_message
+ * corresponding to the callback. The final argument is an array of arguments
+ * received from the other process via the wire protocol.
+ *
+ * \param "const void *" Dispatcher-specific implementation data
+ * \param "void *" Callback invocation target (wl_proxy or wl_resource)
+ * \param uint32_t Callback opcode
+ * \param "const struct wl_message *" Callback message signature
+ * \param "union wl_argument *" Array of received arguments
+ *
+ * \return 0 on success, or -1 on failure
+ */
+typedef int (*wl_dispatcher_func_t)(const void *, void *, uint32_t,
+				    const struct wl_message *,
+				    union wl_argument *);
+
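As a sketch of the abstraction this enables, a dispatcher skeleton for a hypothetical non-C binding — binding_method, lookup_method() and invoke_method() are invented for illustration; only the signature comes from the typedef above:

    struct binding_method;   /* hypothetical per-callback handle */

    extern struct binding_method *lookup_method(const void *impl,
                                                uint32_t opcode);
    extern int invoke_method(struct binding_method *m, void *target,
                             const struct wl_message *msg,
                             union wl_argument *args);

    static int
    example_dispatcher(const void *impl, void *target, uint32_t opcode,
                       const struct wl_message *msg, union wl_argument *args)
    {
        /* Resolve the binding-side callback for this opcode and forward
         * the decoded wire arguments to it instead of going through libffi. */
        struct binding_method *m = lookup_method(impl, opcode);

        if (!m)
            return -1;
        return invoke_method(m, target, msg, args);
    }
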
+/**
+ * Log function type alias
+ *
+ * The C implementation of the Wayland protocol abstracts the details of
+ * logging. Users may customize the logging behavior, with a function conforming
+ * to the `wl_log_func_t` type, via `wl_log_set_handler_client` and
+ * `wl_log_set_handler_server`.
+ *
+ * A `wl_log_func_t` must conform to the expectations of `vprintf`, and
+ * expects two arguments: a string to write and a corresponding variable
+ * argument list. While the string to write may contain format specifiers and
+ * use values in the variable argument list, the behavior of any `wl_log_func_t`
+ * depends on the implementation.
+ *
+ * \note Take care to not confuse this with `wl_protocol_logger_func_t`, which
+ *       is a specific server-side logger for requests and events.
+ *
+ * \param "const char *" String to write to the log, containing optional format
+ *                       specifiers
+ * \param "va_list" Variable argument list
+ *
+ * \sa wl_log_set_handler_client
+ * \sa wl_log_set_handler_server
+ */
+typedef void (*wl_log_func_t)(const char *, va_list) WL_PRINTF(1, 0);
+
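For instance, a minimal handler conforming to this type, installed on the client side (the prefix string is just an example; wl_log_set_handler_client() is the installer referenced above, declared by the client headers):

    #include <stdarg.h>
    #include <stdio.h>
    #include <wayland-client.h>

    static void
    log_to_stderr(const char *fmt, va_list ap)
    {
        fprintf(stderr, "[wayland] ");
        vfprintf(stderr, fmt, ap);
    }

    static void
    install_log_handler(void)
    {
        wl_log_set_handler_client(log_to_stderr);
    }
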
+/**
+ * Return value of an iterator function
+ *
+ * \sa wl_client_for_each_resource_iterator_func_t
+ * \sa wl_client_for_each_resource
+ */
+enum wl_iterator_result {
+	/** Stop the iteration */
+	WL_ITERATOR_STOP,
+	/** Continue the iteration */
+	WL_ITERATOR_CONTINUE
+};
+
+#ifdef  __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/Wayland/wayland-version.h b/include/Wayland/wayland-version.h
new file mode 100644
index 0000000..38f53e5
--- /dev/null
+++ b/include/Wayland/wayland-version.h
@@ -0,0 +1,34 @@
+/*
+ * Copyright © 2012 Intel Corporation
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining
+ * a copy of this software and associated documentation files (the
+ * "Software"), to deal in the Software without restriction, including
+ * without limitation the rights to use, copy, modify, merge, publish,
+ * distribute, sublicense, and/or sell copies of the Software, and to
+ * permit persons to whom the Software is furnished to do so, subject to
+ * the following conditions:
+ *
+ * The above copyright notice and this permission notice (including the
+ * next paragraph) shall be included in all copies or substantial
+ * portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ * NONINFRINGEMENT.  IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+ * BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ * ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+ * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+#ifndef WAYLAND_VERSION_H
+#define WAYLAND_VERSION_H
+
+#define WAYLAND_VERSION_MAJOR 1
+#define WAYLAND_VERSION_MINOR 21
+#define WAYLAND_VERSION_MICRO 0
+#define WAYLAND_VERSION "1.21.0"
+
+#endif
diff --git a/include/marl/thread_local.h b/include/marl/thread_local.h
deleted file mode 100644
index 375ba5f..0000000
--- a/include/marl/thread_local.h
+++ /dev/null
@@ -1,67 +0,0 @@
-// Copyright 2023 The Marl Authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//     https://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// A wrapper around a thread_local variable, or a pthread key
-
-#ifndef marl_thread_local_h
-#define marl_thread_local_h
-
-#ifdef MARL_USE_PTHREAD_THREAD_LOCAL
-#include "debug.h"
-
-#include <pthread.h>
-#include <type_traits>
-
-template <typename T>
-class ThreadLocal {
-  static_assert(std::is_pointer<T>::value,
-                "The current implementation of ThreadLocal requires that T "
-                "must be a pointer");
-
- public:
-  inline ThreadLocal(T v) {
-    pthread_key_create(&key, NULL);
-    pthread_setspecific(key, v);
-  }
-  inline ~ThreadLocal() { pthread_key_delete(key); }
-  inline operator T() const { return static_cast<T>(pthread_getspecific(key)); }
-  inline ThreadLocal& operator=(T v) {
-    pthread_setspecific(key, v);
-    return *this;
-  }
-
- private:
-  ThreadLocal(const ThreadLocal&) = delete;
-  ThreadLocal& operator=(const ThreadLocal&) = delete;
-
-  pthread_key_t key;
-};
-
-#define MARL_DECLARE_THREAD_LOCAL(TYPE, NAME) static ThreadLocal<TYPE> NAME
-#define MARL_INSTANTIATE_THREAD_LOCAL(TYPE, NAME, VALUE) \
-  ThreadLocal<TYPE> NAME {                               \
-    VALUE                                                \
-  }
-
-#else
-
-#define MARL_DECLARE_THREAD_LOCAL(TYPE, NAME) static thread_local TYPE NAME
-#define MARL_INSTANTIATE_THREAD_LOCAL(TYPE, NAME, VALUE) \
-  thread_local TYPE NAME {                               \
-    VALUE                                                \
-  }
-
-#endif
-
-#endif  // marl_thread_local_h
diff --git a/include/vk_video/vulkan_video_codec_h264std.h b/include/vk_video/vulkan_video_codec_h264std.h
new file mode 100644
index 0000000..d3ebec6
--- /dev/null
+++ b/include/vk_video/vulkan_video_codec_h264std.h
@@ -0,0 +1,310 @@
+#ifndef VULKAN_VIDEO_CODEC_H264STD_H_
+#define VULKAN_VIDEO_CODEC_H264STD_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codec_h264std 1
+#include <stdint.h>
+#define STD_VIDEO_H264_CPB_CNT_LIST_SIZE  32
+#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS 6
+#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS 16
+#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS 6
+#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS 64
+#define STD_VIDEO_H264_MAX_NUM_LIST_REF   32
+#define STD_VIDEO_H264_MAX_CHROMA_PLANES  2
+
+typedef enum StdVideoH264ChromaFormatIdc {
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME = 0,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_420 = 1,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_422 = 2,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_444 = 3,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264ChromaFormatIdc;
+
+typedef enum StdVideoH264ProfileIdc {
+    STD_VIDEO_H264_PROFILE_IDC_BASELINE = 66,
+    STD_VIDEO_H264_PROFILE_IDC_MAIN = 77,
+    STD_VIDEO_H264_PROFILE_IDC_HIGH = 100,
+    STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE = 244,
+    STD_VIDEO_H264_PROFILE_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264ProfileIdc;
+
+typedef enum StdVideoH264LevelIdc {
+    STD_VIDEO_H264_LEVEL_IDC_1_0 = 0,
+    STD_VIDEO_H264_LEVEL_IDC_1_1 = 1,
+    STD_VIDEO_H264_LEVEL_IDC_1_2 = 2,
+    STD_VIDEO_H264_LEVEL_IDC_1_3 = 3,
+    STD_VIDEO_H264_LEVEL_IDC_2_0 = 4,
+    STD_VIDEO_H264_LEVEL_IDC_2_1 = 5,
+    STD_VIDEO_H264_LEVEL_IDC_2_2 = 6,
+    STD_VIDEO_H264_LEVEL_IDC_3_0 = 7,
+    STD_VIDEO_H264_LEVEL_IDC_3_1 = 8,
+    STD_VIDEO_H264_LEVEL_IDC_3_2 = 9,
+    STD_VIDEO_H264_LEVEL_IDC_4_0 = 10,
+    STD_VIDEO_H264_LEVEL_IDC_4_1 = 11,
+    STD_VIDEO_H264_LEVEL_IDC_4_2 = 12,
+    STD_VIDEO_H264_LEVEL_IDC_5_0 = 13,
+    STD_VIDEO_H264_LEVEL_IDC_5_1 = 14,
+    STD_VIDEO_H264_LEVEL_IDC_5_2 = 15,
+    STD_VIDEO_H264_LEVEL_IDC_6_0 = 16,
+    STD_VIDEO_H264_LEVEL_IDC_6_1 = 17,
+    STD_VIDEO_H264_LEVEL_IDC_6_2 = 18,
+    STD_VIDEO_H264_LEVEL_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264LevelIdc;
+
+typedef enum StdVideoH264PocType {
+    STD_VIDEO_H264_POC_TYPE_0 = 0,
+    STD_VIDEO_H264_POC_TYPE_1 = 1,
+    STD_VIDEO_H264_POC_TYPE_2 = 2,
+    STD_VIDEO_H264_POC_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_POC_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264PocType;
+
+typedef enum StdVideoH264AspectRatioIdc {
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED = 0,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE = 1,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11 = 2,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11 = 3,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11 = 4,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33 = 5,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11 = 6,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11 = 7,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11 = 8,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33 = 9,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11 = 10,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11 = 11,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33 = 12,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99 = 13,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3 = 14,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2 = 15,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1 = 16,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR = 255,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264AspectRatioIdc;
+
+typedef enum StdVideoH264WeightedBipredIdc {
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT = 0,
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT = 1,
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT = 2,
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264WeightedBipredIdc;
+
+typedef enum StdVideoH264ModificationOfPicNumsIdc {
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT = 0,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD = 1,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM = 2,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END = 3,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264ModificationOfPicNumsIdc;
+
+typedef enum StdVideoH264MemMgmtControlOp {
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END = 0,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM = 1,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM = 2,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM = 3,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX = 4,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL = 5,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM = 6,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264MemMgmtControlOp;
+
+typedef enum StdVideoH264CabacInitIdc {
+    STD_VIDEO_H264_CABAC_INIT_IDC_0 = 0,
+    STD_VIDEO_H264_CABAC_INIT_IDC_1 = 1,
+    STD_VIDEO_H264_CABAC_INIT_IDC_2 = 2,
+    STD_VIDEO_H264_CABAC_INIT_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_CABAC_INIT_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264CabacInitIdc;
+
+typedef enum StdVideoH264DisableDeblockingFilterIdc {
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED = 0,
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED = 1,
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL = 2,
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264DisableDeblockingFilterIdc;
+
+typedef enum StdVideoH264SliceType {
+    STD_VIDEO_H264_SLICE_TYPE_P = 0,
+    STD_VIDEO_H264_SLICE_TYPE_B = 1,
+    STD_VIDEO_H264_SLICE_TYPE_I = 2,
+    STD_VIDEO_H264_SLICE_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264SliceType;
+
+typedef enum StdVideoH264PictureType {
+    STD_VIDEO_H264_PICTURE_TYPE_P = 0,
+    STD_VIDEO_H264_PICTURE_TYPE_B = 1,
+    STD_VIDEO_H264_PICTURE_TYPE_I = 2,
+    STD_VIDEO_H264_PICTURE_TYPE_IDR = 5,
+    STD_VIDEO_H264_PICTURE_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264PictureType;
+
+typedef enum StdVideoH264NonVclNaluType {
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS = 0,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS = 1,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD = 2,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX = 3,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE = 4,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM = 5,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED = 6,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264NonVclNaluType;
+typedef struct StdVideoH264SpsVuiFlags {
+    uint32_t    aspect_ratio_info_present_flag : 1;
+    uint32_t    overscan_info_present_flag : 1;
+    uint32_t    overscan_appropriate_flag : 1;
+    uint32_t    video_signal_type_present_flag : 1;
+    uint32_t    video_full_range_flag : 1;
+    uint32_t    color_description_present_flag : 1;
+    uint32_t    chroma_loc_info_present_flag : 1;
+    uint32_t    timing_info_present_flag : 1;
+    uint32_t    fixed_frame_rate_flag : 1;
+    uint32_t    bitstream_restriction_flag : 1;
+    uint32_t    nal_hrd_parameters_present_flag : 1;
+    uint32_t    vcl_hrd_parameters_present_flag : 1;
+} StdVideoH264SpsVuiFlags;
+
+typedef struct StdVideoH264HrdParameters {
+    uint8_t     cpb_cnt_minus1;
+    uint8_t     bit_rate_scale;
+    uint8_t     cpb_size_scale;
+    uint8_t     reserved1;
+    uint32_t    bit_rate_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
+    uint32_t    cpb_size_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
+    uint8_t     cbr_flag[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
+    uint32_t    initial_cpb_removal_delay_length_minus1;
+    uint32_t    cpb_removal_delay_length_minus1;
+    uint32_t    dpb_output_delay_length_minus1;
+    uint32_t    time_offset_length;
+} StdVideoH264HrdParameters;
+
+typedef struct StdVideoH264SequenceParameterSetVui {
+    StdVideoH264SpsVuiFlags             flags;
+    StdVideoH264AspectRatioIdc          aspect_ratio_idc;
+    uint16_t                            sar_width;
+    uint16_t                            sar_height;
+    uint8_t                             video_format;
+    uint8_t                             colour_primaries;
+    uint8_t                             transfer_characteristics;
+    uint8_t                             matrix_coefficients;
+    uint32_t                            num_units_in_tick;
+    uint32_t                            time_scale;
+    uint8_t                             max_num_reorder_frames;
+    uint8_t                             max_dec_frame_buffering;
+    uint8_t                             chroma_sample_loc_type_top_field;
+    uint8_t                             chroma_sample_loc_type_bottom_field;
+    uint32_t                            reserved1;
+    const StdVideoH264HrdParameters*    pHrdParameters;
+} StdVideoH264SequenceParameterSetVui;
+
+typedef struct StdVideoH264SpsFlags {
+    uint32_t    constraint_set0_flag : 1;
+    uint32_t    constraint_set1_flag : 1;
+    uint32_t    constraint_set2_flag : 1;
+    uint32_t    constraint_set3_flag : 1;
+    uint32_t    constraint_set4_flag : 1;
+    uint32_t    constraint_set5_flag : 1;
+    uint32_t    direct_8x8_inference_flag : 1;
+    uint32_t    mb_adaptive_frame_field_flag : 1;
+    uint32_t    frame_mbs_only_flag : 1;
+    uint32_t    delta_pic_order_always_zero_flag : 1;
+    uint32_t    separate_colour_plane_flag : 1;
+    uint32_t    gaps_in_frame_num_value_allowed_flag : 1;
+    uint32_t    qpprime_y_zero_transform_bypass_flag : 1;
+    uint32_t    frame_cropping_flag : 1;
+    uint32_t    seq_scaling_matrix_present_flag : 1;
+    uint32_t    vui_parameters_present_flag : 1;
+} StdVideoH264SpsFlags;
+
+typedef struct StdVideoH264ScalingLists {
+    uint16_t    scaling_list_present_mask;
+    uint16_t    use_default_scaling_matrix_mask;
+    uint8_t     ScalingList4x4[STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS];
+    uint8_t     ScalingList8x8[STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS];
+} StdVideoH264ScalingLists;
+
+typedef struct StdVideoH264SequenceParameterSet {
+    StdVideoH264SpsFlags                          flags;
+    StdVideoH264ProfileIdc                        profile_idc;
+    StdVideoH264LevelIdc                          level_idc;
+    StdVideoH264ChromaFormatIdc                   chroma_format_idc;
+    uint8_t                                       seq_parameter_set_id;
+    uint8_t                                       bit_depth_luma_minus8;
+    uint8_t                                       bit_depth_chroma_minus8;
+    uint8_t                                       log2_max_frame_num_minus4;
+    StdVideoH264PocType                           pic_order_cnt_type;
+    int32_t                                       offset_for_non_ref_pic;
+    int32_t                                       offset_for_top_to_bottom_field;
+    uint8_t                                       log2_max_pic_order_cnt_lsb_minus4;
+    uint8_t                                       num_ref_frames_in_pic_order_cnt_cycle;
+    uint8_t                                       max_num_ref_frames;
+    uint8_t                                       reserved1;
+    uint32_t                                      pic_width_in_mbs_minus1;
+    uint32_t                                      pic_height_in_map_units_minus1;
+    uint32_t                                      frame_crop_left_offset;
+    uint32_t                                      frame_crop_right_offset;
+    uint32_t                                      frame_crop_top_offset;
+    uint32_t                                      frame_crop_bottom_offset;
+    uint32_t                                      reserved2;
+    const int32_t*                                pOffsetForRefFrame;
+    const StdVideoH264ScalingLists*               pScalingLists;
+    const StdVideoH264SequenceParameterSetVui*    pSequenceParameterSetVui;
+} StdVideoH264SequenceParameterSet;
+
+typedef struct StdVideoH264PpsFlags {
+    uint32_t    transform_8x8_mode_flag : 1;
+    uint32_t    redundant_pic_cnt_present_flag : 1;
+    uint32_t    constrained_intra_pred_flag : 1;
+    uint32_t    deblocking_filter_control_present_flag : 1;
+    uint32_t    weighted_pred_flag : 1;
+    uint32_t    bottom_field_pic_order_in_frame_present_flag : 1;
+    uint32_t    entropy_coding_mode_flag : 1;
+    uint32_t    pic_scaling_matrix_present_flag : 1;
+} StdVideoH264PpsFlags;
+
+typedef struct StdVideoH264PictureParameterSet {
+    StdVideoH264PpsFlags               flags;
+    uint8_t                            seq_parameter_set_id;
+    uint8_t                            pic_parameter_set_id;
+    uint8_t                            num_ref_idx_l0_default_active_minus1;
+    uint8_t                            num_ref_idx_l1_default_active_minus1;
+    StdVideoH264WeightedBipredIdc      weighted_bipred_idc;
+    int8_t                             pic_init_qp_minus26;
+    int8_t                             pic_init_qs_minus26;
+    int8_t                             chroma_qp_index_offset;
+    int8_t                             second_chroma_qp_index_offset;
+    const StdVideoH264ScalingLists*    pScalingLists;
+} StdVideoH264PictureParameterSet;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/vk_video/vulkan_video_codec_h264std_decode.h b/include/vk_video/vulkan_video_codec_h264std_decode.h
new file mode 100644
index 0000000..b1e7942
--- /dev/null
+++ b/include/vk_video/vulkan_video_codec_h264std_decode.h
@@ -0,0 +1,75 @@
+#ifndef VULKAN_VIDEO_CODEC_H264STD_DECODE_H_
+#define VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codec_h264std_decode 1
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0)
+
+#define STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE 2
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_decode"
+
+typedef enum StdVideoDecodeH264FieldOrderCount {
+    STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP = 0,
+    STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM = 1,
+    STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_MAX_ENUM = 0x7FFFFFFF
+} StdVideoDecodeH264FieldOrderCount;
+typedef struct StdVideoDecodeH264PictureInfoFlags {
+    uint32_t    field_pic_flag : 1;
+    uint32_t    is_intra : 1;
+    uint32_t    IdrPicFlag : 1;
+    uint32_t    bottom_field_flag : 1;
+    uint32_t    is_reference : 1;
+    uint32_t    complementary_field_pair : 1;
+} StdVideoDecodeH264PictureInfoFlags;
+
+typedef struct StdVideoDecodeH264PictureInfo {
+    StdVideoDecodeH264PictureInfoFlags    flags;
+    uint8_t                               seq_parameter_set_id;
+    uint8_t                               pic_parameter_set_id;
+    uint8_t                               reserved1;
+    uint8_t                               reserved2;
+    uint16_t                              frame_num;
+    uint16_t                              idr_pic_id;
+    int32_t                               PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE];
+} StdVideoDecodeH264PictureInfo;
+
+typedef struct StdVideoDecodeH264ReferenceInfoFlags {
+    uint32_t    top_field_flag : 1;
+    uint32_t    bottom_field_flag : 1;
+    uint32_t    used_for_long_term_reference : 1;
+    uint32_t    is_non_existing : 1;
+} StdVideoDecodeH264ReferenceInfoFlags;
+
+typedef struct StdVideoDecodeH264ReferenceInfo {
+    StdVideoDecodeH264ReferenceInfoFlags    flags;
+    uint16_t                                FrameNum;
+    uint16_t                                reserved;
+    int32_t                                 PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE];
+} StdVideoDecodeH264ReferenceInfo;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/vk_video/vulkan_video_codec_h264std_encode.h b/include/vk_video/vulkan_video_codec_h264std_encode.h
new file mode 100644
index 0000000..7bd96aa
--- /dev/null
+++ b/include/vk_video/vulkan_video_codec_h264std_encode.h
@@ -0,0 +1,132 @@
+#ifndef VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_
+#define VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codec_h264std_encode 1
+// Vulkan 0.9 provisional Vulkan video H.264 encode std specification version number
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_8 VK_MAKE_VIDEO_STD_VERSION(0, 9, 8)
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_8
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_encode"
+typedef struct StdVideoEncodeH264WeightTableFlags {
+    uint32_t    luma_weight_l0_flag;
+    uint32_t    chroma_weight_l0_flag;
+    uint32_t    luma_weight_l1_flag;
+    uint32_t    chroma_weight_l1_flag;
+} StdVideoEncodeH264WeightTableFlags;
+
+typedef struct StdVideoEncodeH264WeightTable {
+    StdVideoEncodeH264WeightTableFlags    flags;
+    uint8_t                               luma_log2_weight_denom;
+    uint8_t                               chroma_log2_weight_denom;
+    int8_t                                luma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    int8_t                                luma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    int8_t                                chroma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+    int8_t                                chroma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+    int8_t                                luma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    int8_t                                luma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    int8_t                                chroma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+    int8_t                                chroma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+} StdVideoEncodeH264WeightTable;
+
+typedef struct StdVideoEncodeH264SliceHeaderFlags {
+    uint32_t    direct_spatial_mv_pred_flag : 1;
+    uint32_t    num_ref_idx_active_override_flag : 1;
+    uint32_t    no_output_of_prior_pics_flag : 1;
+    uint32_t    adaptive_ref_pic_marking_mode_flag : 1;
+    uint32_t    no_prior_references_available_flag : 1;
+} StdVideoEncodeH264SliceHeaderFlags;
+
+typedef struct StdVideoEncodeH264PictureInfoFlags {
+    uint32_t    idr_flag : 1;
+    uint32_t    is_reference_flag : 1;
+    uint32_t    used_for_long_term_reference : 1;
+} StdVideoEncodeH264PictureInfoFlags;
+
+typedef struct StdVideoEncodeH264ReferenceInfoFlags {
+    uint32_t    used_for_long_term_reference : 1;
+} StdVideoEncodeH264ReferenceInfoFlags;
+
+typedef struct StdVideoEncodeH264RefMgmtFlags {
+    uint32_t    ref_pic_list_modification_l0_flag : 1;
+    uint32_t    ref_pic_list_modification_l1_flag : 1;
+} StdVideoEncodeH264RefMgmtFlags;
+
+typedef struct StdVideoEncodeH264RefListModEntry {
+    StdVideoH264ModificationOfPicNumsIdc    modification_of_pic_nums_idc;
+    uint16_t                                abs_diff_pic_num_minus1;
+    uint16_t                                long_term_pic_num;
+} StdVideoEncodeH264RefListModEntry;
+
+typedef struct StdVideoEncodeH264RefPicMarkingEntry {
+    StdVideoH264MemMgmtControlOp    operation;
+    uint16_t                        difference_of_pic_nums_minus1;
+    uint16_t                        long_term_pic_num;
+    uint16_t                        long_term_frame_idx;
+    uint16_t                        max_long_term_frame_idx_plus1;
+} StdVideoEncodeH264RefPicMarkingEntry;
+
+typedef struct StdVideoEncodeH264RefMemMgmtCtrlOperations {
+    StdVideoEncodeH264RefMgmtFlags                 flags;
+    uint8_t                                        refList0ModOpCount;
+    const StdVideoEncodeH264RefListModEntry*       pRefList0ModOperations;
+    uint8_t                                        refList1ModOpCount;
+    const StdVideoEncodeH264RefListModEntry*       pRefList1ModOperations;
+    uint8_t                                        refPicMarkingOpCount;
+    const StdVideoEncodeH264RefPicMarkingEntry*    pRefPicMarkingOperations;
+} StdVideoEncodeH264RefMemMgmtCtrlOperations;
+
+typedef struct StdVideoEncodeH264PictureInfo {
+    StdVideoEncodeH264PictureInfoFlags    flags;
+    uint8_t                               seq_parameter_set_id;
+    uint8_t                               pic_parameter_set_id;
+    StdVideoH264PictureType               pictureType;
+    uint32_t                              frame_num;
+    int32_t                               PicOrderCnt;
+} StdVideoEncodeH264PictureInfo;
+
+typedef struct StdVideoEncodeH264ReferenceInfo {
+    StdVideoEncodeH264ReferenceInfoFlags    flags;
+    uint32_t                                FrameNum;
+    int32_t                                 PicOrderCnt;
+    uint16_t                                long_term_pic_num;
+    uint16_t                                long_term_frame_idx;
+} StdVideoEncodeH264ReferenceInfo;
+
+typedef struct StdVideoEncodeH264SliceHeader {
+    StdVideoEncodeH264SliceHeaderFlags        flags;
+    uint32_t                                  first_mb_in_slice;
+    StdVideoH264SliceType                     slice_type;
+    uint16_t                                  idr_pic_id;
+    uint8_t                                   num_ref_idx_l0_active_minus1;
+    uint8_t                                   num_ref_idx_l1_active_minus1;
+    StdVideoH264CabacInitIdc                  cabac_init_idc;
+    StdVideoH264DisableDeblockingFilterIdc    disable_deblocking_filter_idc;
+    int8_t                                    slice_alpha_c0_offset_div2;
+    int8_t                                    slice_beta_offset_div2;
+    const StdVideoEncodeH264WeightTable*      pWeightTable;
+} StdVideoEncodeH264SliceHeader;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/vk_video/vulkan_video_codec_h265std.h b/include/vk_video/vulkan_video_codec_h265std.h
new file mode 100644
index 0000000..862f881
--- /dev/null
+++ b/include/vk_video/vulkan_video_codec_h265std.h
@@ -0,0 +1,443 @@
+#ifndef VULKAN_VIDEO_CODEC_H265STD_H_
+#define VULKAN_VIDEO_CODEC_H265STD_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codec_h265std 1
+#define STD_VIDEO_H265_SUBLAYERS_LIST_SIZE 7
+#define STD_VIDEO_H265_CPB_CNT_LIST_SIZE  32
+#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS 6
+#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS 16
+#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS 6
+#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS 64
+#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS 6
+#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS 64
+#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS 2
+#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS 64
+#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE 3
+#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE 128
+#define STD_VIDEO_H265_MAX_DPB_SIZE       16
+#define STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS 32
+#define STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE 6
+#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE 19
+#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE 21
+#define STD_VIDEO_H265_MAX_NUM_LIST_REF   15
+#define STD_VIDEO_H265_MAX_CHROMA_PLANES  2
+#define STD_VIDEO_H265_MAX_SHORT_TERM_REF_PIC_SETS 64
+#define STD_VIDEO_H265_MAX_LONG_TERM_PICS 16
+#define STD_VIDEO_H265_MAX_DELTA_POC      48
+
+typedef enum StdVideoH265ChromaFormatIdc {
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME = 0,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_420 = 1,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_422 = 2,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_444 = 3,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265ChromaFormatIdc;
+
+typedef enum StdVideoH265ProfileIdc {
+    STD_VIDEO_H265_PROFILE_IDC_MAIN = 1,
+    STD_VIDEO_H265_PROFILE_IDC_MAIN_10 = 2,
+    STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE = 3,
+    STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS = 4,
+    STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS = 9,
+    STD_VIDEO_H265_PROFILE_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265ProfileIdc;
+
+typedef enum StdVideoH265LevelIdc {
+    STD_VIDEO_H265_LEVEL_IDC_1_0 = 0,
+    STD_VIDEO_H265_LEVEL_IDC_2_0 = 1,
+    STD_VIDEO_H265_LEVEL_IDC_2_1 = 2,
+    STD_VIDEO_H265_LEVEL_IDC_3_0 = 3,
+    STD_VIDEO_H265_LEVEL_IDC_3_1 = 4,
+    STD_VIDEO_H265_LEVEL_IDC_4_0 = 5,
+    STD_VIDEO_H265_LEVEL_IDC_4_1 = 6,
+    STD_VIDEO_H265_LEVEL_IDC_5_0 = 7,
+    STD_VIDEO_H265_LEVEL_IDC_5_1 = 8,
+    STD_VIDEO_H265_LEVEL_IDC_5_2 = 9,
+    STD_VIDEO_H265_LEVEL_IDC_6_0 = 10,
+    STD_VIDEO_H265_LEVEL_IDC_6_1 = 11,
+    STD_VIDEO_H265_LEVEL_IDC_6_2 = 12,
+    STD_VIDEO_H265_LEVEL_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265LevelIdc;
+
+typedef enum StdVideoH265SliceType {
+    STD_VIDEO_H265_SLICE_TYPE_B = 0,
+    STD_VIDEO_H265_SLICE_TYPE_P = 1,
+    STD_VIDEO_H265_SLICE_TYPE_I = 2,
+    STD_VIDEO_H265_SLICE_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265SliceType;
+
+typedef enum StdVideoH265PictureType {
+    STD_VIDEO_H265_PICTURE_TYPE_P = 0,
+    STD_VIDEO_H265_PICTURE_TYPE_B = 1,
+    STD_VIDEO_H265_PICTURE_TYPE_I = 2,
+    STD_VIDEO_H265_PICTURE_TYPE_IDR = 3,
+    STD_VIDEO_H265_PICTURE_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265PictureType;
+
+typedef enum StdVideoH265AspectRatioIdc {
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED = 0,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE = 1,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11 = 2,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11 = 3,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11 = 4,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33 = 5,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11 = 6,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11 = 7,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11 = 8,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33 = 9,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11 = 10,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11 = 11,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33 = 12,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99 = 13,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3 = 14,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2 = 15,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1 = 16,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR = 255,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265AspectRatioIdc;
+typedef struct StdVideoH265DecPicBufMgr {
+    uint32_t    max_latency_increase_plus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+    uint8_t     max_dec_pic_buffering_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+    uint8_t     max_num_reorder_pics[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+} StdVideoH265DecPicBufMgr;
+
+typedef struct StdVideoH265SubLayerHrdParameters {
+    uint32_t    bit_rate_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE];
+    uint32_t    cpb_size_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE];
+    uint32_t    cpb_size_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE];
+    uint32_t    bit_rate_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE];
+    uint32_t    cbr_flag;
+} StdVideoH265SubLayerHrdParameters;
+
+typedef struct StdVideoH265HrdFlags {
+    uint32_t    nal_hrd_parameters_present_flag : 1;
+    uint32_t    vcl_hrd_parameters_present_flag : 1;
+    uint32_t    sub_pic_hrd_params_present_flag : 1;
+    uint32_t    sub_pic_cpb_params_in_pic_timing_sei_flag : 1;
+    uint32_t    fixed_pic_rate_general_flag : 8;
+    uint32_t    fixed_pic_rate_within_cvs_flag : 8;
+    uint32_t    low_delay_hrd_flag : 8;
+} StdVideoH265HrdFlags;
+
+typedef struct StdVideoH265HrdParameters {
+    StdVideoH265HrdFlags                        flags;
+    uint8_t                                     tick_divisor_minus2;
+    uint8_t                                     du_cpb_removal_delay_increment_length_minus1;
+    uint8_t                                     dpb_output_delay_du_length_minus1;
+    uint8_t                                     bit_rate_scale;
+    uint8_t                                     cpb_size_scale;
+    uint8_t                                     cpb_size_du_scale;
+    uint8_t                                     initial_cpb_removal_delay_length_minus1;
+    uint8_t                                     au_cpb_removal_delay_length_minus1;
+    uint8_t                                     dpb_output_delay_length_minus1;
+    uint8_t                                     cpb_cnt_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+    uint16_t                                    elemental_duration_in_tc_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+    uint16_t                                    reserved[3];
+    const StdVideoH265SubLayerHrdParameters*    pSubLayerHrdParametersNal;
+    const StdVideoH265SubLayerHrdParameters*    pSubLayerHrdParametersVcl;
+} StdVideoH265HrdParameters;
+
+typedef struct StdVideoH265VpsFlags {
+    uint32_t    vps_temporal_id_nesting_flag : 1;
+    uint32_t    vps_sub_layer_ordering_info_present_flag : 1;
+    uint32_t    vps_timing_info_present_flag : 1;
+    uint32_t    vps_poc_proportional_to_timing_flag : 1;
+} StdVideoH265VpsFlags;
+
+typedef struct StdVideoH265ProfileTierLevelFlags {
+    uint32_t    general_tier_flag : 1;
+    uint32_t    general_progressive_source_flag : 1;
+    uint32_t    general_interlaced_source_flag : 1;
+    uint32_t    general_non_packed_constraint_flag : 1;
+    uint32_t    general_frame_only_constraint_flag : 1;
+} StdVideoH265ProfileTierLevelFlags;
+
+typedef struct StdVideoH265ProfileTierLevel {
+    StdVideoH265ProfileTierLevelFlags    flags;
+    StdVideoH265ProfileIdc               general_profile_idc;
+    StdVideoH265LevelIdc                 general_level_idc;
+} StdVideoH265ProfileTierLevel;
+
+typedef struct StdVideoH265VideoParameterSet {
+    StdVideoH265VpsFlags                   flags;
+    uint8_t                                vps_video_parameter_set_id;
+    uint8_t                                vps_max_sub_layers_minus1;
+    uint8_t                                reserved1;
+    uint8_t                                reserved2;
+    uint32_t                               vps_num_units_in_tick;
+    uint32_t                               vps_time_scale;
+    uint32_t                               vps_num_ticks_poc_diff_one_minus1;
+    uint32_t                               reserved3;
+    const StdVideoH265DecPicBufMgr*        pDecPicBufMgr;
+    const StdVideoH265HrdParameters*       pHrdParameters;
+    const StdVideoH265ProfileTierLevel*    pProfileTierLevel;
+} StdVideoH265VideoParameterSet;
+
+typedef struct StdVideoH265ScalingLists {
+    uint8_t    ScalingList4x4[STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS];
+    uint8_t    ScalingList8x8[STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS];
+    uint8_t    ScalingList16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS];
+    uint8_t    ScalingList32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS];
+    uint8_t    ScalingListDCCoef16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS];
+    uint8_t    ScalingListDCCoef32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS];
+} StdVideoH265ScalingLists;
+
+typedef struct StdVideoH265SpsVuiFlags {
+    uint32_t    aspect_ratio_info_present_flag : 1;
+    uint32_t    overscan_info_present_flag : 1;
+    uint32_t    overscan_appropriate_flag : 1;
+    uint32_t    video_signal_type_present_flag : 1;
+    uint32_t    video_full_range_flag : 1;
+    uint32_t    colour_description_present_flag : 1;
+    uint32_t    chroma_loc_info_present_flag : 1;
+    uint32_t    neutral_chroma_indication_flag : 1;
+    uint32_t    field_seq_flag : 1;
+    uint32_t    frame_field_info_present_flag : 1;
+    uint32_t    default_display_window_flag : 1;
+    uint32_t    vui_timing_info_present_flag : 1;
+    uint32_t    vui_poc_proportional_to_timing_flag : 1;
+    uint32_t    vui_hrd_parameters_present_flag : 1;
+    uint32_t    bitstream_restriction_flag : 1;
+    uint32_t    tiles_fixed_structure_flag : 1;
+    uint32_t    motion_vectors_over_pic_boundaries_flag : 1;
+    uint32_t    restricted_ref_pic_lists_flag : 1;
+} StdVideoH265SpsVuiFlags;
+
+typedef struct StdVideoH265SequenceParameterSetVui {
+    StdVideoH265SpsVuiFlags             flags;
+    StdVideoH265AspectRatioIdc          aspect_ratio_idc;
+    uint16_t                            sar_width;
+    uint16_t                            sar_height;
+    uint8_t                             video_format;
+    uint8_t                             colour_primaries;
+    uint8_t                             transfer_characteristics;
+    uint8_t                             matrix_coeffs;
+    uint8_t                             chroma_sample_loc_type_top_field;
+    uint8_t                             chroma_sample_loc_type_bottom_field;
+    uint8_t                             reserved1;
+    uint8_t                             reserved2;
+    uint16_t                            def_disp_win_left_offset;
+    uint16_t                            def_disp_win_right_offset;
+    uint16_t                            def_disp_win_top_offset;
+    uint16_t                            def_disp_win_bottom_offset;
+    uint32_t                            vui_num_units_in_tick;
+    uint32_t                            vui_time_scale;
+    uint32_t                            vui_num_ticks_poc_diff_one_minus1;
+    uint16_t                            min_spatial_segmentation_idc;
+    uint16_t                            reserved3;
+    uint8_t                             max_bytes_per_pic_denom;
+    uint8_t                             max_bits_per_min_cu_denom;
+    uint8_t                             log2_max_mv_length_horizontal;
+    uint8_t                             log2_max_mv_length_vertical;
+    const StdVideoH265HrdParameters*    pHrdParameters;
+} StdVideoH265SequenceParameterSetVui;
+
+typedef struct StdVideoH265PredictorPaletteEntries {
+    uint16_t    PredictorPaletteEntries[STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE][STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE];
+} StdVideoH265PredictorPaletteEntries;
+
+typedef struct StdVideoH265SpsFlags {
+    uint32_t    sps_temporal_id_nesting_flag : 1;
+    uint32_t    separate_colour_plane_flag : 1;
+    uint32_t    conformance_window_flag : 1;
+    uint32_t    sps_sub_layer_ordering_info_present_flag : 1;
+    uint32_t    scaling_list_enabled_flag : 1;
+    uint32_t    sps_scaling_list_data_present_flag : 1;
+    uint32_t    amp_enabled_flag : 1;
+    uint32_t    sample_adaptive_offset_enabled_flag : 1;
+    uint32_t    pcm_enabled_flag : 1;
+    uint32_t    pcm_loop_filter_disabled_flag : 1;
+    uint32_t    long_term_ref_pics_present_flag : 1;
+    uint32_t    sps_temporal_mvp_enabled_flag : 1;
+    uint32_t    strong_intra_smoothing_enabled_flag : 1;
+    uint32_t    vui_parameters_present_flag : 1;
+    uint32_t    sps_extension_present_flag : 1;
+    uint32_t    sps_range_extension_flag : 1;
+    uint32_t    transform_skip_rotation_enabled_flag : 1;
+    uint32_t    transform_skip_context_enabled_flag : 1;
+    uint32_t    implicit_rdpcm_enabled_flag : 1;
+    uint32_t    explicit_rdpcm_enabled_flag : 1;
+    uint32_t    extended_precision_processing_flag : 1;
+    uint32_t    intra_smoothing_disabled_flag : 1;
+    uint32_t    high_precision_offsets_enabled_flag : 1;
+    uint32_t    persistent_rice_adaptation_enabled_flag : 1;
+    uint32_t    cabac_bypass_alignment_enabled_flag : 1;
+    uint32_t    sps_scc_extension_flag : 1;
+    uint32_t    sps_curr_pic_ref_enabled_flag : 1;
+    uint32_t    palette_mode_enabled_flag : 1;
+    uint32_t    sps_palette_predictor_initializers_present_flag : 1;
+    uint32_t    intra_boundary_filtering_disabled_flag : 1;
+} StdVideoH265SpsFlags;
+
+typedef struct StdVideoH265ShortTermRefPicSetFlags {
+    uint32_t    inter_ref_pic_set_prediction_flag : 1;
+    uint32_t    delta_rps_sign : 1;
+} StdVideoH265ShortTermRefPicSetFlags;
+
+typedef struct StdVideoH265ShortTermRefPicSet {
+    StdVideoH265ShortTermRefPicSetFlags    flags;
+    uint32_t                               delta_idx_minus1;
+    uint16_t                               use_delta_flag;
+    uint16_t                               abs_delta_rps_minus1;
+    uint16_t                               used_by_curr_pic_flag;
+    uint16_t                               used_by_curr_pic_s0_flag;
+    uint16_t                               used_by_curr_pic_s1_flag;
+    uint16_t                               reserved1;
+    uint8_t                                reserved2;
+    uint8_t                                reserved3;
+    uint8_t                                num_negative_pics;
+    uint8_t                                num_positive_pics;
+    uint16_t                               delta_poc_s0_minus1[STD_VIDEO_H265_MAX_DPB_SIZE];
+    uint16_t                               delta_poc_s1_minus1[STD_VIDEO_H265_MAX_DPB_SIZE];
+} StdVideoH265ShortTermRefPicSet;
+
+typedef struct StdVideoH265LongTermRefPicsSps {
+    uint32_t    used_by_curr_pic_lt_sps_flag;
+    uint32_t    lt_ref_pic_poc_lsb_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS];
+} StdVideoH265LongTermRefPicsSps;
+
+typedef struct StdVideoH265SequenceParameterSet {
+    StdVideoH265SpsFlags                          flags;
+    StdVideoH265ChromaFormatIdc                   chroma_format_idc;
+    uint32_t                                      pic_width_in_luma_samples;
+    uint32_t                                      pic_height_in_luma_samples;
+    uint8_t                                       sps_video_parameter_set_id;
+    uint8_t                                       sps_max_sub_layers_minus1;
+    uint8_t                                       sps_seq_parameter_set_id;
+    uint8_t                                       bit_depth_luma_minus8;
+    uint8_t                                       bit_depth_chroma_minus8;
+    uint8_t                                       log2_max_pic_order_cnt_lsb_minus4;
+    uint8_t                                       log2_min_luma_coding_block_size_minus3;
+    uint8_t                                       log2_diff_max_min_luma_coding_block_size;
+    uint8_t                                       log2_min_luma_transform_block_size_minus2;
+    uint8_t                                       log2_diff_max_min_luma_transform_block_size;
+    uint8_t                                       max_transform_hierarchy_depth_inter;
+    uint8_t                                       max_transform_hierarchy_depth_intra;
+    uint8_t                                       num_short_term_ref_pic_sets;
+    uint8_t                                       num_long_term_ref_pics_sps;
+    uint8_t                                       pcm_sample_bit_depth_luma_minus1;
+    uint8_t                                       pcm_sample_bit_depth_chroma_minus1;
+    uint8_t                                       log2_min_pcm_luma_coding_block_size_minus3;
+    uint8_t                                       log2_diff_max_min_pcm_luma_coding_block_size;
+    uint8_t                                       reserved1;
+    uint8_t                                       reserved2;
+    uint8_t                                       palette_max_size;
+    uint8_t                                       delta_palette_max_predictor_size;
+    uint8_t                                       motion_vector_resolution_control_idc;
+    uint8_t                                       sps_num_palette_predictor_initializers_minus1;
+    uint32_t                                      conf_win_left_offset;
+    uint32_t                                      conf_win_right_offset;
+    uint32_t                                      conf_win_top_offset;
+    uint32_t                                      conf_win_bottom_offset;
+    const StdVideoH265ProfileTierLevel*           pProfileTierLevel;
+    const StdVideoH265DecPicBufMgr*               pDecPicBufMgr;
+    const StdVideoH265ScalingLists*               pScalingLists;
+    const StdVideoH265ShortTermRefPicSet*         pShortTermRefPicSet;
+    const StdVideoH265LongTermRefPicsSps*         pLongTermRefPicsSps;
+    const StdVideoH265SequenceParameterSetVui*    pSequenceParameterSetVui;
+    const StdVideoH265PredictorPaletteEntries*    pPredictorPaletteEntries;
+} StdVideoH265SequenceParameterSet;
+
+typedef struct StdVideoH265PpsFlags {
+    uint32_t    dependent_slice_segments_enabled_flag : 1;
+    uint32_t    output_flag_present_flag : 1;
+    uint32_t    sign_data_hiding_enabled_flag : 1;
+    uint32_t    cabac_init_present_flag : 1;
+    uint32_t    constrained_intra_pred_flag : 1;
+    uint32_t    transform_skip_enabled_flag : 1;
+    uint32_t    cu_qp_delta_enabled_flag : 1;
+    uint32_t    pps_slice_chroma_qp_offsets_present_flag : 1;
+    uint32_t    weighted_pred_flag : 1;
+    uint32_t    weighted_bipred_flag : 1;
+    uint32_t    transquant_bypass_enabled_flag : 1;
+    uint32_t    tiles_enabled_flag : 1;
+    uint32_t    entropy_coding_sync_enabled_flag : 1;
+    uint32_t    uniform_spacing_flag : 1;
+    uint32_t    loop_filter_across_tiles_enabled_flag : 1;
+    uint32_t    pps_loop_filter_across_slices_enabled_flag : 1;
+    uint32_t    deblocking_filter_control_present_flag : 1;
+    uint32_t    deblocking_filter_override_enabled_flag : 1;
+    uint32_t    pps_deblocking_filter_disabled_flag : 1;
+    uint32_t    pps_scaling_list_data_present_flag : 1;
+    uint32_t    lists_modification_present_flag : 1;
+    uint32_t    slice_segment_header_extension_present_flag : 1;
+    uint32_t    pps_extension_present_flag : 1;
+    uint32_t    cross_component_prediction_enabled_flag : 1;
+    uint32_t    chroma_qp_offset_list_enabled_flag : 1;
+    uint32_t    pps_curr_pic_ref_enabled_flag : 1;
+    uint32_t    residual_adaptive_colour_transform_enabled_flag : 1;
+    uint32_t    pps_slice_act_qp_offsets_present_flag : 1;
+    uint32_t    pps_palette_predictor_initializers_present_flag : 1;
+    uint32_t    monochrome_palette_flag : 1;
+    uint32_t    pps_range_extension_flag : 1;
+} StdVideoH265PpsFlags;
+
+typedef struct StdVideoH265PictureParameterSet {
+    StdVideoH265PpsFlags                          flags;
+    uint8_t                                       pps_pic_parameter_set_id;
+    uint8_t                                       pps_seq_parameter_set_id;
+    uint8_t                                       sps_video_parameter_set_id;
+    uint8_t                                       num_extra_slice_header_bits;
+    uint8_t                                       num_ref_idx_l0_default_active_minus1;
+    uint8_t                                       num_ref_idx_l1_default_active_minus1;
+    int8_t                                        init_qp_minus26;
+    uint8_t                                       diff_cu_qp_delta_depth;
+    int8_t                                        pps_cb_qp_offset;
+    int8_t                                        pps_cr_qp_offset;
+    int8_t                                        pps_beta_offset_div2;
+    int8_t                                        pps_tc_offset_div2;
+    uint8_t                                       log2_parallel_merge_level_minus2;
+    uint8_t                                       log2_max_transform_skip_block_size_minus2;
+    uint8_t                                       diff_cu_chroma_qp_offset_depth;
+    uint8_t                                       chroma_qp_offset_list_len_minus1;
+    int8_t                                        cb_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE];
+    int8_t                                        cr_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE];
+    uint8_t                                       log2_sao_offset_scale_luma;
+    uint8_t                                       log2_sao_offset_scale_chroma;
+    int8_t                                        pps_act_y_qp_offset_plus5;
+    int8_t                                        pps_act_cb_qp_offset_plus5;
+    int8_t                                        pps_act_cr_qp_offset_plus3;
+    uint8_t                                       pps_num_palette_predictor_initializers;
+    uint8_t                                       luma_bit_depth_entry_minus8;
+    uint8_t                                       chroma_bit_depth_entry_minus8;
+    uint8_t                                       num_tile_columns_minus1;
+    uint8_t                                       num_tile_rows_minus1;
+    uint8_t                                       reserved1;
+    uint8_t                                       reserved2;
+    uint16_t                                      column_width_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE];
+    uint16_t                                      row_height_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE];
+    uint32_t                                      reserved3;
+    const StdVideoH265ScalingLists*               pScalingLists;
+    const StdVideoH265PredictorPaletteEntries*    pPredictorPaletteEntries;
+} StdVideoH265PictureParameterSet;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/vk_video/vulkan_video_codec_h265std_decode.h b/include/vk_video/vulkan_video_codec_h265std_decode.h
new file mode 100644
index 0000000..d8660d1
--- /dev/null
+++ b/include/vk_video/vulkan_video_codec_h265std_decode.h
@@ -0,0 +1,65 @@
+#ifndef VULKAN_VIDEO_CODEC_H265STD_DECODE_H_
+#define VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codec_h265std_decode 1
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0)
+
+#define STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE 8
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_decode"
+typedef struct StdVideoDecodeH265PictureInfoFlags {
+    uint32_t    IrapPicFlag : 1;
+    uint32_t    IdrPicFlag  : 1;
+    uint32_t    IsReference : 1;
+    uint32_t    short_term_ref_pic_set_sps_flag : 1;
+} StdVideoDecodeH265PictureInfoFlags;
+
+typedef struct StdVideoDecodeH265PictureInfo {
+    StdVideoDecodeH265PictureInfoFlags    flags;
+    uint8_t                               sps_video_parameter_set_id;
+    uint8_t                               pps_seq_parameter_set_id;
+    uint8_t                               pps_pic_parameter_set_id;
+    uint8_t                               NumDeltaPocsOfRefRpsIdx;
+    int32_t                               PicOrderCntVal;
+    uint16_t                              NumBitsForSTRefPicSetInSlice;
+    uint16_t                              reserved;
+    uint8_t                               RefPicSetStCurrBefore[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
+    uint8_t                               RefPicSetStCurrAfter[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
+    uint8_t                               RefPicSetLtCurr[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
+} StdVideoDecodeH265PictureInfo;
+
+typedef struct StdVideoDecodeH265ReferenceInfoFlags {
+    uint32_t    used_for_long_term_reference : 1;
+    uint32_t    unused_for_reference : 1;
+} StdVideoDecodeH265ReferenceInfoFlags;
+
+typedef struct StdVideoDecodeH265ReferenceInfo {
+    StdVideoDecodeH265ReferenceInfoFlags    flags;
+    int32_t                                 PicOrderCntVal;
+} StdVideoDecodeH265ReferenceInfo;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/vk_video/vulkan_video_codec_h265std_encode.h b/include/vk_video/vulkan_video_codec_h265std_encode.h
new file mode 100644
index 0000000..5a419b1
--- /dev/null
+++ b/include/vk_video/vulkan_video_codec_h265std_encode.h
@@ -0,0 +1,146 @@
+#ifndef VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_
+#define VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codec_h265std_encode 1
+// Vulkan 0.9 provisional Vulkan video H.265 encode std specification version number
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_9 VK_MAKE_VIDEO_STD_VERSION(0, 9, 9)
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_9
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_encode"
+typedef struct StdVideoEncodeH265WeightTableFlags {
+    uint16_t    luma_weight_l0_flag;
+    uint16_t    chroma_weight_l0_flag;
+    uint16_t    luma_weight_l1_flag;
+    uint16_t    chroma_weight_l1_flag;
+} StdVideoEncodeH265WeightTableFlags;
+
+typedef struct StdVideoEncodeH265WeightTable {
+    StdVideoEncodeH265WeightTableFlags    flags;
+    uint8_t                               luma_log2_weight_denom;
+    int8_t                                delta_chroma_log2_weight_denom;
+    int8_t                                delta_luma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    int8_t                                luma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    int8_t                                delta_chroma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+    int8_t                                delta_chroma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+    int8_t                                delta_luma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    int8_t                                luma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    int8_t                                delta_chroma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+    int8_t                                delta_chroma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+} StdVideoEncodeH265WeightTable;
+
+typedef struct StdVideoEncodeH265SliceSegmentHeaderFlags {
+    uint32_t    first_slice_segment_in_pic_flag : 1;
+    uint32_t    no_output_of_prior_pics_flag : 1;
+    uint32_t    dependent_slice_segment_flag : 1;
+    uint32_t    pic_output_flag : 1;
+    uint32_t    short_term_ref_pic_set_sps_flag : 1;
+    uint32_t    slice_temporal_mvp_enable_flag : 1;
+    uint32_t    slice_sao_luma_flag : 1;
+    uint32_t    slice_sao_chroma_flag : 1;
+    uint32_t    num_ref_idx_active_override_flag : 1;
+    uint32_t    mvd_l1_zero_flag : 1;
+    uint32_t    cabac_init_flag : 1;
+    uint32_t    cu_chroma_qp_offset_enabled_flag : 1;
+    uint32_t    deblocking_filter_override_flag : 1;
+    uint32_t    slice_deblocking_filter_disabled_flag : 1;
+    uint32_t    collocated_from_l0_flag : 1;
+    uint32_t    slice_loop_filter_across_slices_enabled_flag : 1;
+} StdVideoEncodeH265SliceSegmentHeaderFlags;
+
+typedef struct StdVideoEncodeH265SliceSegmentLongTermRefPics {
+    uint8_t     num_long_term_sps;
+    uint8_t     num_long_term_pics;
+    uint8_t     lt_idx_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS];
+    uint8_t     poc_lsb_lt[STD_VIDEO_H265_MAX_LONG_TERM_PICS];
+    uint16_t    used_by_curr_pic_lt_flag;
+    uint8_t     delta_poc_msb_present_flag[STD_VIDEO_H265_MAX_DELTA_POC];
+    uint8_t     delta_poc_msb_cycle_lt[STD_VIDEO_H265_MAX_DELTA_POC];
+} StdVideoEncodeH265SliceSegmentLongTermRefPics;
+
+typedef struct StdVideoEncodeH265SliceSegmentHeader {
+    StdVideoEncodeH265SliceSegmentHeaderFlags               flags;
+    StdVideoH265SliceType                                   slice_type;
+    uint32_t                                                slice_segment_address;
+    uint8_t                                                 short_term_ref_pic_set_idx;
+    uint8_t                                                 collocated_ref_idx;
+    uint8_t                                                 num_ref_idx_l0_active_minus1;
+    uint8_t                                                 num_ref_idx_l1_active_minus1;
+    uint8_t                                                 MaxNumMergeCand;
+    int8_t                                                  slice_cb_qp_offset;
+    int8_t                                                  slice_cr_qp_offset;
+    int8_t                                                  slice_beta_offset_div2;
+    int8_t                                                  slice_tc_offset_div2;
+    int8_t                                                  slice_act_y_qp_offset;
+    int8_t                                                  slice_act_cb_qp_offset;
+    int8_t                                                  slice_act_cr_qp_offset;
+    const StdVideoH265ShortTermRefPicSet*                   pShortTermRefPicSet;
+    const StdVideoEncodeH265SliceSegmentLongTermRefPics*    pLongTermRefPics;
+    const StdVideoEncodeH265WeightTable*                    pWeightTable;
+} StdVideoEncodeH265SliceSegmentHeader;
+
+typedef struct StdVideoEncodeH265ReferenceModificationFlags {
+    uint32_t    ref_pic_list_modification_flag_l0 : 1;
+    uint32_t    ref_pic_list_modification_flag_l1 : 1;
+} StdVideoEncodeH265ReferenceModificationFlags;
+
+typedef struct StdVideoEncodeH265ReferenceModifications {
+    StdVideoEncodeH265ReferenceModificationFlags    flags;
+    uint8_t                                         referenceList0ModificationsCount;
+    const uint8_t*                                  pReferenceList0Modifications;
+    uint8_t                                         referenceList1ModificationsCount;
+    const uint8_t*                                  pReferenceList1Modifications;
+} StdVideoEncodeH265ReferenceModifications;
+
+typedef struct StdVideoEncodeH265PictureInfoFlags {
+    uint32_t    is_reference_flag : 1;
+    uint32_t    IrapPicFlag : 1;
+    uint32_t    long_term_flag : 1;
+    uint32_t    discardable_flag : 1;
+    uint32_t    cross_layer_bla_flag : 1;
+} StdVideoEncodeH265PictureInfoFlags;
+
+typedef struct StdVideoEncodeH265PictureInfo {
+    StdVideoEncodeH265PictureInfoFlags    flags;
+    StdVideoH265PictureType               PictureType;
+    uint8_t                               sps_video_parameter_set_id;
+    uint8_t                               pps_seq_parameter_set_id;
+    uint8_t                               pps_pic_parameter_set_id;
+    int32_t                               PicOrderCntVal;
+    uint8_t                               TemporalId;
+} StdVideoEncodeH265PictureInfo;
+
+typedef struct StdVideoEncodeH265ReferenceInfoFlags {
+    uint32_t    used_for_long_term_reference : 1;
+    uint32_t    unused_for_reference : 1;
+} StdVideoEncodeH265ReferenceInfoFlags;
+
+typedef struct StdVideoEncodeH265ReferenceInfo {
+    StdVideoEncodeH265ReferenceInfoFlags    flags;
+    int32_t                                 PicOrderCntVal;
+    uint8_t                                 TemporalId;
+} StdVideoEncodeH265ReferenceInfo;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/vk_video/vulkan_video_codecs_common.h b/include/vk_video/vulkan_video_codecs_common.h
new file mode 100644
index 0000000..1e49826
--- /dev/null
+++ b/include/vk_video/vulkan_video_codecs_common.h
@@ -0,0 +1,31 @@
+#ifndef VULKAN_VIDEO_CODECS_COMMON_H_
+#define VULKAN_VIDEO_CODECS_COMMON_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define vulkan_video_codecs_common 1
+#define VK_MAKE_VIDEO_STD_VERSION(major, minor, patch) \
+    ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))
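Editorial note: the macro above packs a video std header version exactly the way the original VK_MAKE_VERSION macro packs a core version, with the major number in bits 31:22, the minor in bits 21:12 and the patch in bits 11:0. The unpacking macros below are an illustrative sketch only; they are not defined by this header, and the _EXAMPLE suffix marks them as hypothetical.

#define VK_VIDEO_STD_VERSION_MAJOR_EXAMPLE(version) (((uint32_t)(version)) >> 22)
#define VK_VIDEO_STD_VERSION_MINOR_EXAMPLE(version) ((((uint32_t)(version)) >> 12) & 0x3FFU)
#define VK_VIDEO_STD_VERSION_PATCH_EXAMPLE(version) (((uint32_t)(version)) & 0xFFFU)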
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/vulkan/vk_google_filtering_precision.h b/include/vulkan/vk_google_filtering_precision.h
new file mode 100644
index 0000000..65e3bcf
--- /dev/null
+++ b/include/vulkan/vk_google_filtering_precision.h
@@ -0,0 +1,41 @@
+// Copyright 2020 The SwiftShader Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "vulkan_core.h"
+
+// THIS FILE SHOULD BE DELETED IF VK_GOOGLE_sampler_filtering_precision IS EVER ADDED TO THE VULKAN HEADERS
+#ifdef VK_GOOGLE_sampler_filtering_precision
+#error "VK_GOOGLE_sampler_filtering_precision is already defined in the Vulkan headers, you can delete this file"
+#endif
+
+static constexpr VkStructureType VK_STRUCTURE_TYPE_SAMPLER_FILTERING_PRECISION_GOOGLE = static_cast<VkStructureType>(1000264000);
+
+#define VK_GOOGLE_sampler_filtering_precisions 1
+#define VK_GOOGLE_SAMPLER_FILTERING_PRECISION_SPEC_VERSION 1
+#define VK_GOOGLE_SAMPLER_FILTERING_PRECISION_EXTENSION_NAME "VK_GOOGLE_sampler_filtering_precision"
+
+typedef enum VkSamplerFilteringPrecisionModeGOOGLE {
+    VK_SAMPLER_FILTERING_PRECISION_MODE_LOW_GOOGLE = 0,
+    VK_SAMPLER_FILTERING_PRECISION_MODE_HIGH_GOOGLE = 1,
+    VK_SAMPLER_FILTERING_PRECISION_MODE_BEGIN_RANGE_GOOGLE = VK_SAMPLER_FILTERING_PRECISION_MODE_LOW_GOOGLE,
+    VK_SAMPLER_FILTERING_PRECISION_MODE_END_RANGE_GOOGLE = VK_SAMPLER_FILTERING_PRECISION_MODE_HIGH_GOOGLE,
+    VK_SAMPLER_FILTERING_PRECISION_MODE_RANGE_SIZE_GOOGLE = (VK_SAMPLER_FILTERING_PRECISION_MODE_HIGH_GOOGLE - VK_SAMPLER_FILTERING_PRECISION_MODE_LOW_GOOGLE + 1),
+    VK_SAMPLER_FILTERING_PRECISION_MODE_MAX_ENUM_GOOGLE = 0x7FFFFFFF
+} VkSamplerFilteringPrecisionModeGOOGLE;
+
+typedef struct VkSamplerFilteringPrecisionGOOGLE {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkSamplerFilteringPrecisionModeGOOGLE samplerFilteringPrecisionMode;
+} VkSamplerFilteringPrecisionGOOGLE;
diff --git a/include/vulkan/vk_icd.h b/include/vulkan/vk_icd.h
new file mode 100644
index 0000000..fa90fcf
--- /dev/null
+++ b/include/vulkan/vk_icd.h
@@ -0,0 +1,262 @@
+//
+// File: vk_icd.h
+//
+/*
+ * Copyright (c) 2015-2016, 2022 The Khronos Group Inc.
+ * Copyright (c) 2015-2016, 2022 Valve Corporation
+ * Copyright (c) 2015-2016, 2022 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+#ifndef VKICD_H
+#define VKICD_H
+
+#include "vulkan.h"
+#include <stdbool.h>
+
+// Loader-ICD version negotiation API.  Versions add the following features:
+//   Version 0 - Initial.  Doesn't support vk_icdGetInstanceProcAddr
+//               or vk_icdNegotiateLoaderICDInterfaceVersion.
+//   Version 1 - Add support for vk_icdGetInstanceProcAddr.
+//   Version 2 - Add Loader/ICD Interface version negotiation
+//               via vk_icdNegotiateLoaderICDInterfaceVersion.
+//   Version 3 - Add ICD creation/destruction of KHR_surface objects.
+//   Version 4 - Add unknown physical device extension querying via
+//               vk_icdGetPhysicalDeviceProcAddr.
+//   Version 5 - Tells ICDs that the loader is now paying attention to the
+//               application version of Vulkan passed into the ApplicationInfo
+//               structure during vkCreateInstance.  This will tell the ICD
+//               that if the loader is older, it should automatically fail a
+//               call for any API version > 1.0.  Otherwise, the loader will
+//               manually determine if it can support the expected version.
+//   Version 6 - Add support for vk_icdEnumerateAdapterPhysicalDevices.
+//   Version 7 - If an ICD supports any of the following functions, they must be
+//               queryable with vk_icdGetInstanceProcAddr:
+//                   vk_icdNegotiateLoaderICDInterfaceVersion
+//                   vk_icdGetPhysicalDeviceProcAddr
+//                   vk_icdEnumerateAdapterPhysicalDevices (Windows only)
+//               In addition, these functions no longer need to be exported directly.
+//               This version allows drivers provided through the extension
+//               VK_LUNARG_direct_driver_loading to support the entire
+//               Driver-Loader interface.
+
+#define CURRENT_LOADER_ICD_INTERFACE_VERSION 7
+#define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0
+#define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4
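Editorial note: the comment block above describes the loader/ICD handshake, but the header only declares the entry points. Below is a minimal ICD-side sketch of the negotiation, assuming vk_icd.h (and therefore vulkan.h) is included; the clamp-to-the-smaller-version behaviour is the usual policy, and a real driver could additionally reject versions older than the minimum it supports.

VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t *pVersion)
{
    /* Highest loader/ICD interface version this hypothetical driver implements. */
    const uint32_t icd_version = CURRENT_LOADER_ICD_INTERFACE_VERSION;

    if (pVersion == NULL)
        return VK_ERROR_INITIALIZATION_FAILED;

    /* The loader passes in the newest version it understands; both sides
     * settle on the smaller of the two values. */
    if (*pVersion > icd_version)
        *pVersion = icd_version;

    return VK_SUCCESS;
}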
+
+// Old typedefs that don't follow a proper naming convention but are preserved for compatibility
+typedef VkResult(VKAPI_PTR *PFN_vkNegotiateLoaderICDInterfaceVersion)(uint32_t *pVersion);
+// This is defined in vk_layer.h which will be found by the loader, but if an ICD is building against this
+// file directly, it won't be found.
+#ifndef PFN_GetPhysicalDeviceProcAddr
+typedef PFN_vkVoidFunction(VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char *pName);
+#endif
+
+// Typedefs for loader/ICD interface
+typedef VkResult (VKAPI_PTR *PFN_vk_icdNegotiateLoaderICDInterfaceVersion)(uint32_t* pVersion);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetInstanceProcAddr)(VkInstance instance, const char* pName);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vk_icdGetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+typedef VkResult (VKAPI_PTR *PFN_vk_icdEnumerateAdapterPhysicalDevices)(VkInstance instance, LUID adapterLUID,
+    uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
+#endif
+
+// Prototypes for loader/ICD interface
+#if !defined(VK_NO_PROTOTYPES)
+#ifdef __cplusplus
+extern "C" {
+#endif
+    VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion);
+    VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName);
+    VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName);
+#if defined(VK_USE_PLATFORM_WIN32_KHR)
+    VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID,
+        uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
+#endif
+#ifdef __cplusplus
+}
+#endif
+#endif
+
+/*
+ * The ICD must reserve space for a pointer for the loader's dispatch
+ * table, at the start of <each object>.
+ * The ICD must initialize this variable using the SET_LOADER_MAGIC_VALUE macro.
+ */
+
+#define ICD_LOADER_MAGIC 0x01CDC0DE
+
+typedef union {
+    uintptr_t loaderMagic;
+    void *loaderData;
+} VK_LOADER_DATA;
+
+static inline void set_loader_magic_value(void *pNewObject) {
+    VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject;
+    loader_info->loaderMagic = ICD_LOADER_MAGIC;
+}
+
+static inline bool valid_loader_magic_value(void *pNewObject) {
+    const VK_LOADER_DATA *loader_info = (VK_LOADER_DATA *)pNewObject;
+    return (loader_info->loaderMagic & 0xffffffff) == ICD_LOADER_MAGIC;
+}
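Editorial note: to make the requirement above concrete, the sketch below shows how an ICD-defined dispatchable object might reserve the loader's pointer. The IcdQueue type and icd_create_queue helper are hypothetical names used only for illustration; they are not part of vk_icd.h.

#include <stdlib.h>  /* malloc */

typedef struct IcdQueue {
    VK_LOADER_DATA loader_data;   /* must be the first member: the loader
                                     overwrites it with its dispatch pointer */
    uint32_t queue_family_index;  /* ...driver-private state follows... */
} IcdQueue;

static IcdQueue *icd_create_queue(uint32_t family)
{
    IcdQueue *queue = (IcdQueue *)malloc(sizeof(IcdQueue));
    if (queue != NULL) {
        set_loader_magic_value(queue);  /* stores ICD_LOADER_MAGIC in loader_data */
        queue->queue_family_index = family;
    }
    return queue;
}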
+
+/*
+ * Windows and Linux ICDs will treat VkSurfaceKHR as a pointer to a struct that
+ * contains the platform-specific connection and surface information.
+ */
+typedef enum {
+    VK_ICD_WSI_PLATFORM_MIR,
+    VK_ICD_WSI_PLATFORM_WAYLAND,
+    VK_ICD_WSI_PLATFORM_WIN32,
+    VK_ICD_WSI_PLATFORM_XCB,
+    VK_ICD_WSI_PLATFORM_XLIB,
+    VK_ICD_WSI_PLATFORM_ANDROID,
+    VK_ICD_WSI_PLATFORM_MACOS,
+    VK_ICD_WSI_PLATFORM_IOS,
+    VK_ICD_WSI_PLATFORM_DISPLAY,
+    VK_ICD_WSI_PLATFORM_HEADLESS,
+    VK_ICD_WSI_PLATFORM_METAL,
+    VK_ICD_WSI_PLATFORM_DIRECTFB,
+    VK_ICD_WSI_PLATFORM_VI,
+    VK_ICD_WSI_PLATFORM_GGP,
+    VK_ICD_WSI_PLATFORM_SCREEN,
+    VK_ICD_WSI_PLATFORM_FUCHSIA,
+} VkIcdWsiPlatform;
+
+typedef struct {
+    VkIcdWsiPlatform platform;
+} VkIcdSurfaceBase;
+
+#ifdef VK_USE_PLATFORM_MIR_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    MirConnection *connection;
+    MirSurface *mirSurface;
+} VkIcdSurfaceMir;
+#endif  // VK_USE_PLATFORM_MIR_KHR
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    struct wl_display *display;
+    struct wl_surface *surface;
+} VkIcdSurfaceWayland;
+#endif  // VK_USE_PLATFORM_WAYLAND_KHR
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    HINSTANCE hinstance;
+    HWND hwnd;
+} VkIcdSurfaceWin32;
+#endif  // VK_USE_PLATFORM_WIN32_KHR
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    xcb_connection_t *connection;
+    xcb_window_t window;
+} VkIcdSurfaceXcb;
+#endif  // VK_USE_PLATFORM_XCB_KHR
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    Display *dpy;
+    Window window;
+} VkIcdSurfaceXlib;
+#endif  // VK_USE_PLATFORM_XLIB_KHR
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+typedef struct {
+    VkIcdSurfaceBase base;
+    IDirectFB *dfb;
+    IDirectFBSurface *surface;
+} VkIcdSurfaceDirectFB;
+#endif  // VK_USE_PLATFORM_DIRECTFB_EXT
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+typedef struct {
+    VkIcdSurfaceBase base;
+    struct ANativeWindow *window;
+} VkIcdSurfaceAndroid;
+#endif  // VK_USE_PLATFORM_ANDROID_KHR
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+typedef struct {
+    VkIcdSurfaceBase base;
+    const void *pView;
+} VkIcdSurfaceMacOS;
+#endif  // VK_USE_PLATFORM_MACOS_MVK
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+typedef struct {
+    VkIcdSurfaceBase base;
+    const void *pView;
+} VkIcdSurfaceIOS;
+#endif  // VK_USE_PLATFORM_IOS_MVK
+
+#ifdef VK_USE_PLATFORM_GGP
+typedef struct {
+    VkIcdSurfaceBase base;
+    GgpStreamDescriptor streamDescriptor;
+} VkIcdSurfaceGgp;
+#endif  // VK_USE_PLATFORM_GGP
+
+typedef struct {
+    VkIcdSurfaceBase base;
+    VkDisplayModeKHR displayMode;
+    uint32_t planeIndex;
+    uint32_t planeStackIndex;
+    VkSurfaceTransformFlagBitsKHR transform;
+    float globalAlpha;
+    VkDisplayPlaneAlphaFlagBitsKHR alphaMode;
+    VkExtent2D imageExtent;
+} VkIcdSurfaceDisplay;
+
+typedef struct {
+    VkIcdSurfaceBase base;
+} VkIcdSurfaceHeadless;
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+typedef struct {
+    VkIcdSurfaceBase base;
+    const CAMetalLayer *pLayer;
+} VkIcdSurfaceMetal;
+#endif // VK_USE_PLATFORM_METAL_EXT
+
+#ifdef VK_USE_PLATFORM_VI_NN
+typedef struct {
+    VkIcdSurfaceBase base;
+    void *window;
+} VkIcdSurfaceVi;
+#endif // VK_USE_PLATFORM_VI_NN
+
+#ifdef VK_USE_PLATFORM_SCREEN_QNX
+typedef struct {
+    VkIcdSurfaceBase base;
+    struct _screen_context *context;
+    struct _screen_window *window;
+} VkIcdSurfaceScreen;
+#endif  // VK_USE_PLATFORM_SCREEN_QNX
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+typedef struct {
+  VkIcdSurfaceBase base;
+} VkIcdSurfaceImagePipe;
+#endif // VK_USE_PLATFORM_FUCHSIA
+
+#endif  // VKICD_H
diff --git a/include/vulkan/vk_layer.h b/include/vulkan/vk_layer.h
new file mode 100644
index 0000000..0651870
--- /dev/null
+++ b/include/vulkan/vk_layer.h
@@ -0,0 +1,210 @@
+//
+// File: vk_layer.h
+//
+/*
+ * Copyright (c) 2015-2017 The Khronos Group Inc.
+ * Copyright (c) 2015-2017 Valve Corporation
+ * Copyright (c) 2015-2017 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+/* Need to define dispatch table
+ * Core struct can then have ptr to dispatch table at the top
+ * Along with object ptrs for current and next OBJ
+ */
+#pragma once
+
+#include "vulkan.h"
+#if defined(__GNUC__) && __GNUC__ >= 4
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
+#define VK_LAYER_EXPORT __attribute__((visibility("default")))
+#else
+#define VK_LAYER_EXPORT
+#endif
+
+#define MAX_NUM_UNKNOWN_EXTS 250
+
+ // Loader-Layer version negotiation API.  Versions add the following features:
+ //   Versions 0/1 - Initial.  Doesn't support vk_layerGetPhysicalDeviceProcAddr
+ //                  or vk_icdNegotiateLoaderLayerInterfaceVersion.
+ //   Version 2    - Add support for vk_layerGetPhysicalDeviceProcAddr and
+ //                  vk_icdNegotiateLoaderLayerInterfaceVersion.
+#define CURRENT_LOADER_LAYER_INTERFACE_VERSION 2
+#define MIN_SUPPORTED_LOADER_LAYER_INTERFACE_VERSION 1
+
+#define VK_CURRENT_CHAIN_VERSION 1
+
+// Typedef for use in the interfaces below
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_GetPhysicalDeviceProcAddr)(VkInstance instance, const char* pName);
+
+// Version negotiation values
+typedef enum VkNegotiateLayerStructType {
+    LAYER_NEGOTIATE_UNINTIALIZED = 0,
+    LAYER_NEGOTIATE_INTERFACE_STRUCT = 1,
+} VkNegotiateLayerStructType;
+
+// Version negotiation structures
+typedef struct VkNegotiateLayerInterface {
+    VkNegotiateLayerStructType sType;
+    void *pNext;
+    uint32_t loaderLayerInterfaceVersion;
+    PFN_vkGetInstanceProcAddr pfnGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr pfnGetDeviceProcAddr;
+    PFN_GetPhysicalDeviceProcAddr pfnGetPhysicalDeviceProcAddr;
+} VkNegotiateLayerInterface;
+
+// Version negotiation functions
+typedef VkResult (VKAPI_PTR *PFN_vkNegotiateLoaderLayerInterfaceVersion)(VkNegotiateLayerInterface *pVersionStruct);
+
+// Function prototype for unknown physical device extension command
+typedef VkResult(VKAPI_PTR *PFN_PhysDevExt)(VkPhysicalDevice phys_device);
+
+// ------------------------------------------------------------------------------------------------
+// CreateInstance and CreateDevice support structures
+
+/* Sub type of structure for instance and device loader ext of CreateInfo.
+ * When sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO
+ * or sType == VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO
+ * then VkLayerFunction indicates struct type pointed to by pNext
+ */
+typedef enum VkLayerFunction_ {
+    VK_LAYER_LINK_INFO = 0,
+    VK_LOADER_DATA_CALLBACK = 1,
+    VK_LOADER_LAYER_CREATE_DEVICE_CALLBACK = 2,
+    VK_LOADER_FEATURES = 3,
+} VkLayerFunction;
+
+typedef struct VkLayerInstanceLink_ {
+    struct VkLayerInstanceLink_ *pNext;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+    PFN_GetPhysicalDeviceProcAddr pfnNextGetPhysicalDeviceProcAddr;
+} VkLayerInstanceLink;
+
+/*
+ * When creating the device chain the loader needs to pass
+ * down information about its device structure needed at
+ * the end of the chain. Passing the data via the
+ * VkLayerDeviceInfo avoids issues with finding the
+ * exact instance being used.
+ */
+typedef struct VkLayerDeviceInfo_ {
+    void *device_info;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+} VkLayerDeviceInfo;
+
+typedef VkResult (VKAPI_PTR *PFN_vkSetInstanceLoaderData)(VkInstance instance,
+        void *object);
+typedef VkResult (VKAPI_PTR *PFN_vkSetDeviceLoaderData)(VkDevice device,
+        void *object);
+typedef VkResult (VKAPI_PTR *PFN_vkLayerCreateDevice)(VkInstance instance, VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo *pCreateInfo,
+						      const VkAllocationCallbacks *pAllocator, VkDevice *pDevice, PFN_vkGetInstanceProcAddr layerGIPA, PFN_vkGetDeviceProcAddr *nextGDPA);
+typedef void (VKAPI_PTR *PFN_vkLayerDestroyDevice)(VkDevice physicalDevice, const VkAllocationCallbacks *pAllocator, PFN_vkDestroyDevice destroyFunction);
+
+typedef enum VkLoaderFeastureFlagBits {
+    VK_LOADER_FEATURE_PHYSICAL_DEVICE_SORTING = 0x00000001,
+} VkLoaderFlagBits;
+typedef VkFlags VkLoaderFeatureFlags;
+
+typedef struct {
+    VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO
+    const void *pNext;
+    VkLayerFunction function;
+    union {
+        VkLayerInstanceLink *pLayerInfo;
+        PFN_vkSetInstanceLoaderData pfnSetInstanceLoaderData;
+        struct {
+	        PFN_vkLayerCreateDevice pfnLayerCreateDevice;
+	        PFN_vkLayerDestroyDevice pfnLayerDestroyDevice;
+	    } layerDevice;
+        VkLoaderFeatureFlags loaderFeatures;
+    } u;
+} VkLayerInstanceCreateInfo;
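Editorial note: as a hedged illustration of how the structures above are consumed (a common layer pattern, not something defined by vk_layer.h), a layer's vkCreateInstance hook typically walks the VkInstanceCreateInfo pNext chain looking for the loader-provided link info:

static VkLayerInstanceCreateInfo *find_instance_link_info(const VkInstanceCreateInfo *pCreateInfo)
{
    VkLayerInstanceCreateInfo *chain_info = (VkLayerInstanceCreateInfo *)pCreateInfo->pNext;
    while (chain_info != NULL &&
           !(chain_info->sType == VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO &&
             chain_info->function == VK_LAYER_LINK_INFO)) {
        chain_info = (VkLayerInstanceCreateInfo *)chain_info->pNext;
    }
    /* On success the layer reads u.pLayerInfo->pfnNextGetInstanceProcAddr and
     * advances u.pLayerInfo before calling down the chain. */
    return chain_info;
}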
+
+typedef struct VkLayerDeviceLink_ {
+    struct VkLayerDeviceLink_ *pNext;
+    PFN_vkGetInstanceProcAddr pfnNextGetInstanceProcAddr;
+    PFN_vkGetDeviceProcAddr pfnNextGetDeviceProcAddr;
+} VkLayerDeviceLink;
+
+typedef struct {
+    VkStructureType sType; // VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO
+    const void *pNext;
+    VkLayerFunction function;
+    union {
+        VkLayerDeviceLink *pLayerInfo;
+        PFN_vkSetDeviceLoaderData pfnSetDeviceLoaderData;
+    } u;
+} VkLayerDeviceCreateInfo;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+VKAPI_ATTR VkResult VKAPI_CALL vkNegotiateLoaderLayerInterfaceVersion(VkNegotiateLayerInterface *pVersionStruct);
+
+typedef enum VkChainType {
+    VK_CHAIN_TYPE_UNKNOWN = 0,
+    VK_CHAIN_TYPE_ENUMERATE_INSTANCE_EXTENSION_PROPERTIES = 1,
+    VK_CHAIN_TYPE_ENUMERATE_INSTANCE_LAYER_PROPERTIES = 2,
+    VK_CHAIN_TYPE_ENUMERATE_INSTANCE_VERSION = 3,
+} VkChainType;
+
+typedef struct VkChainHeader {
+    VkChainType type;
+    uint32_t version;
+    uint32_t size;
+} VkChainHeader;
+
+typedef struct VkEnumerateInstanceExtensionPropertiesChain {
+    VkChainHeader header;
+    VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceExtensionPropertiesChain *, const char *, uint32_t *,
+                                      VkExtensionProperties *);
+    const struct VkEnumerateInstanceExtensionPropertiesChain *pNextLink;
+
+#if defined(__cplusplus)
+    inline VkResult CallDown(const char *pLayerName, uint32_t *pPropertyCount, VkExtensionProperties *pProperties) const {
+        return pfnNextLayer(pNextLink, pLayerName, pPropertyCount, pProperties);
+    }
+#endif
+} VkEnumerateInstanceExtensionPropertiesChain;
+
+typedef struct VkEnumerateInstanceLayerPropertiesChain {
+    VkChainHeader header;
+    VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceLayerPropertiesChain *, uint32_t *, VkLayerProperties *);
+    const struct VkEnumerateInstanceLayerPropertiesChain *pNextLink;
+
+#if defined(__cplusplus)
+    inline VkResult CallDown(uint32_t *pPropertyCount, VkLayerProperties *pProperties) const {
+        return pfnNextLayer(pNextLink, pPropertyCount, pProperties);
+    }
+#endif
+} VkEnumerateInstanceLayerPropertiesChain;
+
+typedef struct VkEnumerateInstanceVersionChain {
+    VkChainHeader header;
+    VkResult(VKAPI_PTR *pfnNextLayer)(const struct VkEnumerateInstanceVersionChain *, uint32_t *);
+    const struct VkEnumerateInstanceVersionChain *pNextLink;
+
+#if defined(__cplusplus)
+    inline VkResult CallDown(uint32_t *pApiVersion) const {
+        return pfnNextLayer(pNextLink, pApiVersion);
+    }
+#endif
+} VkEnumerateInstanceVersionChain;
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/include/vulkan/vk_platform.h b/include/vulkan/vk_platform.h
new file mode 100644
index 0000000..3ff8c5d
--- /dev/null
+++ b/include/vulkan/vk_platform.h
@@ -0,0 +1,84 @@
+//
+// File: vk_platform.h
+//
+/*
+** Copyright 2014-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+
+#ifndef VK_PLATFORM_H_
+#define VK_PLATFORM_H_
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif // __cplusplus
+
+/*
+***************************************************************************************************
+*   Platform-specific directives and type declarations
+***************************************************************************************************
+*/
+
+/* Platform-specific calling convention macros.
+ *
+ * Platforms should define these so that Vulkan clients call Vulkan commands
+ * with the same calling conventions that the Vulkan implementation expects.
+ *
+ * VKAPI_ATTR - Placed before the return type in function declarations.
+ *              Useful for C++11 and GCC/Clang-style function attribute syntax.
+ * VKAPI_CALL - Placed after the return type in function declarations.
+ *              Useful for MSVC-style calling convention syntax.
+ * VKAPI_PTR  - Placed between the '(' and '*' in function pointer types.
+ *
+ * Function declaration:  VKAPI_ATTR void VKAPI_CALL vkCommand(void);
+ * Function pointer type: typedef void (VKAPI_PTR *PFN_vkCommand)(void);
+ */
+#if defined(_WIN32)
+    // On Windows, Vulkan commands use the stdcall convention
+    #define VKAPI_ATTR
+    #define VKAPI_CALL __stdcall
+    #define VKAPI_PTR  VKAPI_CALL
+#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH < 7
+    #error "Vulkan is not supported for the 'armeabi' NDK ABI"
+#elif defined(__ANDROID__) && defined(__ARM_ARCH) && __ARM_ARCH >= 7 && defined(__ARM_32BIT_STATE)
+    // On Android 32-bit ARM targets, Vulkan functions use the "hardfloat"
+    // calling convention, i.e. float parameters are passed in registers. This
+    // is true even if the rest of the application passes floats on the stack,
+    // as it does by default when compiling for the armeabi-v7a NDK ABI.
+    #define VKAPI_ATTR __attribute__((pcs("aapcs-vfp")))
+    #define VKAPI_CALL
+    #define VKAPI_PTR  VKAPI_ATTR
+#else
+    // On other platforms, use the default calling convention
+    #define VKAPI_ATTR
+    #define VKAPI_CALL
+    #define VKAPI_PTR
+#endif
+
+#if !defined(VK_NO_STDDEF_H)
+    #include <stddef.h>
+#endif // !defined(VK_NO_STDDEF_H)
+
+#if !defined(VK_NO_STDINT_H)
+    #if defined(_MSC_VER) && (_MSC_VER < 1600)
+        typedef signed   __int8  int8_t;
+        typedef unsigned __int8  uint8_t;
+        typedef signed   __int16 int16_t;
+        typedef unsigned __int16 uint16_t;
+        typedef signed   __int32 int32_t;
+        typedef unsigned __int32 uint32_t;
+        typedef signed   __int64 int64_t;
+        typedef unsigned __int64 uint64_t;
+    #else
+        #include <stdint.h>
+    #endif
+#endif // !defined(VK_NO_STDINT_H)
+
+#ifdef __cplusplus
+} // extern "C"
+#endif // __cplusplus
+
+#endif
diff --git a/include/vulkan/vk_sdk_platform.h b/include/vulkan/vk_sdk_platform.h
new file mode 100644
index 0000000..f192c1c
--- /dev/null
+++ b/include/vulkan/vk_sdk_platform.h
@@ -0,0 +1,71 @@
+//
+// File: vk_sdk_platform.h
+//
+/*
+ * Copyright (c) 2015-2016 The Khronos Group Inc.
+ * Copyright (c) 2015-2016 Valve Corporation
+ * Copyright (c) 2015-2016 LunarG, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef VK_SDK_PLATFORM_H
+#define VK_SDK_PLATFORM_H
+
+#if defined(_WIN32)
+#ifndef NOMINMAX
+#define NOMINMAX
+#endif
+#ifndef __cplusplus
+#undef inline
+#define inline __inline
+#endif // __cplusplus
+
+#if (defined(_MSC_VER) && _MSC_VER < 1900 /*vs2015*/)
+// C99:
+// Microsoft didn't implement C99 in Visual Studio; but started adding it with
+// VS2013.  However, VS2013 still didn't have snprintf().  The following is a
+// work-around (Note: The _CRT_SECURE_NO_WARNINGS macro must be set in the
+// "CMakeLists.txt" file).
+// NOTE: This is fixed in Visual Studio 2015.
+#define snprintf _snprintf
+#endif
+
+#define strdup _strdup
+
+#endif // _WIN32
+
+// Check for noexcept support using clang, with fallback to Windows or GCC version numbers
+#ifndef NOEXCEPT
+#if defined(__clang__)
+#if __has_feature(cxx_noexcept)
+#define HAS_NOEXCEPT
+#endif
+#else
+#if defined(__GXX_EXPERIMENTAL_CXX0X__) && __GNUC__ * 10 + __GNUC_MINOR__ >= 46
+#define HAS_NOEXCEPT
+#else
+#if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023026 && defined(_HAS_EXCEPTIONS) && _HAS_EXCEPTIONS
+#define HAS_NOEXCEPT
+#endif
+#endif
+#endif
+
+#ifdef HAS_NOEXCEPT
+#define NOEXCEPT noexcept
+#else
+#define NOEXCEPT
+#endif
+#endif
+
+#endif  // VK_SDK_PLATFORM_H
diff --git a/include/vulkan/vulkan.h b/include/vulkan/vulkan.h
new file mode 100644
index 0000000..3510ac9
--- /dev/null
+++ b/include/vulkan/vulkan.h
@@ -0,0 +1,91 @@
+#ifndef VULKAN_H_
+#define VULKAN_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+#include "vk_platform.h"
+#include "vulkan_core.h"
+
+#ifdef VK_USE_PLATFORM_ANDROID_KHR
+#include "vulkan_android.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_FUCHSIA
+#include <zircon/types.h>
+#include "vulkan_fuchsia.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_IOS_MVK
+#include "vulkan_ios.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_MACOS_MVK
+#include "vulkan_macos.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_METAL_EXT
+#include "vulkan_metal.h"
+#endif
+
+#ifdef VK_USE_PLATFORM_VI_NN
+#include "vulkan_vi.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_WAYLAND_KHR
+#include "vulkan_wayland.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_WIN32_KHR
+#include <windows.h>
+#include "vulkan_win32.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XCB_KHR
+#include <xcb/xcb.h>
+#include "vulkan_xcb.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XLIB_KHR
+#include <X11/Xlib.h>
+#include "vulkan_xlib.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_DIRECTFB_EXT
+#include <directfb.h>
+#include "vulkan_directfb.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_XLIB_XRANDR_EXT
+#include <X11/Xlib.h>
+#include <X11/extensions/Xrandr.h>
+#include "vulkan_xlib_xrandr.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_GGP
+#include <ggp_c/vulkan_types.h>
+#include "vulkan_ggp.h"
+#endif
+
+
+#ifdef VK_USE_PLATFORM_SCREEN_QNX
+#include <screen/screen.h>
+#include "vulkan_screen.h"
+#endif
+
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+#include "vulkan_beta.h"
+#endif
+
+#endif // VULKAN_H_
diff --git a/include/vulkan/vulkan.hpp b/include/vulkan/vulkan.hpp
new file mode 100644
index 0000000..5f4be92
--- /dev/null
+++ b/include/vulkan/vulkan.hpp
@@ -0,0 +1,10357 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_HPP
+#define VULKAN_HPP
+
+#if defined( _MSVC_LANG )
+#  define VULKAN_HPP_CPLUSPLUS _MSVC_LANG
+#else
+#  define VULKAN_HPP_CPLUSPLUS __cplusplus
+#endif
+
+#if 201703L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 20
+#elif 201402L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 17
+#elif 201103L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 14
+#elif 199711L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 11
+#else
+#  error "vulkan.hpp needs at least c++ standard version 11"
+#endif
+
+#include <algorithm>
+#include <array>   // ArrayWrapperND
+#include <string>  // std::string
+#include <vulkan/vulkan.h>
+#if 17 <= VULKAN_HPP_CPP_VERSION
+#  include <string_view>    // std::string_view
+#endif
+
+#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+#  if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+#    define VULKAN_HPP_NO_SMART_HANDLE
+#  endif
+#else
+#  include <tuple>  // std::tie
+#  include <vector> // std::vector
+#endif
+
+#if !defined( VULKAN_HPP_NO_EXCEPTIONS )
+#  include <system_error>  // std::is_error_code_enum
+#endif
+
+#if defined( VULKAN_HPP_NO_CONSTRUCTORS )
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+#    define VULKAN_HPP_NO_STRUCT_CONSTRUCTORS
+#  endif
+#  if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+#    define VULKAN_HPP_NO_UNION_CONSTRUCTORS
+#  endif
+#endif
+
+#if defined( VULKAN_HPP_NO_SETTERS )
+#  if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+#    define VULKAN_HPP_NO_STRUCT_SETTERS
+#  endif
+#  if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+#    define VULKAN_HPP_NO_UNION_SETTERS
+#  endif
+#endif
+
+#if !defined( VULKAN_HPP_ASSERT )
+#  include <cassert>
+#  define VULKAN_HPP_ASSERT assert
+#endif
+
+#if !defined( VULKAN_HPP_ASSERT_ON_RESULT )
+#  define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT
+#endif
+
+#if !defined( VULKAN_HPP_STATIC_ASSERT )
+# define VULKAN_HPP_STATIC_ASSERT static_assert
+#endif
+
+#if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL )
+#  define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
+#endif
+
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined(__Fuchsia__)
+#    include <dlfcn.h>
+#  elif defined( _WIN32 )
+typedef struct HINSTANCE__ * HINSTANCE;
+#    if defined( _WIN64 )
+typedef int64_t( __stdcall * FARPROC )();
+#    else
+typedef int( __stdcall * FARPROC )();
+#    endif
+extern "C" __declspec( dllimport ) HINSTANCE __stdcall LoadLibraryA( char const * lpLibFileName );
+extern "C" __declspec( dllimport ) int __stdcall FreeLibrary( HINSTANCE hLibModule );
+extern "C" __declspec( dllimport ) FARPROC __stdcall GetProcAddress( HINSTANCE hModule, const char * lpProcName );
+#  endif
+#endif
+
+#if !defined( __has_include )
+#  define __has_include( x ) false
+#endif
+
+#if ( 201907 <= __cpp_lib_three_way_comparison ) && __has_include( <compare> ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR )
+#  define VULKAN_HPP_HAS_SPACESHIP_OPERATOR
+#endif
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+#  include <compare>
+#endif
+
+#if ( 201803 <= __cpp_lib_span )
+#  define VULKAN_HPP_SUPPORT_SPAN
+#  include <span>
+#endif
+
+
+static_assert( VK_HEADER_VERSION ==  238, "Wrong VK_HEADER_VERSION!" );
+
+// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default.
+// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
+#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
+#  if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+#    define VULKAN_HPP_TYPESAFE_CONVERSION
+#  endif
+#endif
+
+// <tuple> includes <sys/sysmacros.h> through some other header
+// this results in major(x) being resolved to gnu_dev_major(x)
+// which is an expression in a constructor initializer list.
+#if defined( major )
+#  undef major
+#endif
+#if defined( minor )
+#  undef minor
+#endif
+
+// Windows defines MemoryBarrier which is deprecated and collides
+// with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct.
+#if defined( MemoryBarrier )
+#  undef MemoryBarrier
+#endif
+
+#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS )
+#  if defined( __clang__ )
+#    if __has_feature( cxx_unrestricted_unions )
+#      define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#    endif
+#  elif defined( __GNUC__ )
+#    define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ )
+#    if 40600 <= GCC_VERSION
+#      define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#    endif
+#  elif defined( _MSC_VER )
+#    if 1900 <= _MSC_VER
+#      define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#    endif
+#  endif
+#endif
+
+#if !defined( VULKAN_HPP_INLINE )
+#  if defined( __clang__ )
+#    if __has_attribute( always_inline )
+#      define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__
+#    else
+#      define VULKAN_HPP_INLINE inline
+#    endif
+#  elif defined( __GNUC__ )
+#    define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__
+#  elif defined( _MSC_VER )
+#    define VULKAN_HPP_INLINE inline
+#  else
+#    define VULKAN_HPP_INLINE inline
+#  endif
+#endif
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+#  define VULKAN_HPP_TYPESAFE_EXPLICIT
+#else
+#  define VULKAN_HPP_TYPESAFE_EXPLICIT explicit
+#endif
+
+#if defined( __cpp_constexpr )
+#  define VULKAN_HPP_CONSTEXPR constexpr
+#  if __cpp_constexpr >= 201304
+#    define VULKAN_HPP_CONSTEXPR_14 constexpr
+#  else
+#    define VULKAN_HPP_CONSTEXPR_14
+#  endif
+#  define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr
+#else
+#  define VULKAN_HPP_CONSTEXPR
+#  define VULKAN_HPP_CONSTEXPR_14
+#  define VULKAN_HPP_CONST_OR_CONSTEXPR const
+#endif
+
+#if !defined( VULKAN_HPP_NOEXCEPT )
+#  if defined( _MSC_VER ) && ( _MSC_VER <= 1800 )
+#    define VULKAN_HPP_NOEXCEPT
+#  else
+#    define VULKAN_HPP_NOEXCEPT     noexcept
+#    define VULKAN_HPP_HAS_NOEXCEPT 1
+#    if defined( VULKAN_HPP_NO_EXCEPTIONS )
+#      define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept
+#    else
+#      define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+#    endif
+#  endif
+#endif
+
+#if 14 <= VULKAN_HPP_CPP_VERSION
+#  define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]]
+#else
+#  define VULKAN_HPP_DEPRECATED( msg )
+#endif
+
+#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS )
+#  define VULKAN_HPP_NODISCARD [[nodiscard]]
+#  if defined( VULKAN_HPP_NO_EXCEPTIONS )
+#    define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]]
+#  else
+#    define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+#  endif
+#else
+#  define VULKAN_HPP_NODISCARD
+#  define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+#endif
+
+#if !defined( VULKAN_HPP_NAMESPACE )
+#  define VULKAN_HPP_NAMESPACE vk
+#endif
+
+#define VULKAN_HPP_STRINGIFY2( text ) #text
+#define VULKAN_HPP_STRINGIFY( text )  VULKAN_HPP_STRINGIFY2( text )
+#define VULKAN_HPP_NAMESPACE_STRING   VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE )
+
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  template <typename T, size_t N>
+  class ArrayWrapper1D : public std::array<T, N>
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT
+      : std::array<T, N>()
+    {}
+
+    VULKAN_HPP_CONSTEXPR ArrayWrapper1D( std::array<T, N> const & data ) VULKAN_HPP_NOEXCEPT
+      : std::array<T, N>( data )
+    {}
+
+#if ( VK_USE_64_BIT_PTR_DEFINES == 0 )
+    // on 32 bit compiles, needs overloads on index type int to resolve ambiguities
+    VULKAN_HPP_CONSTEXPR T const & operator[]( int index ) const VULKAN_HPP_NOEXCEPT
+    {
+      return std::array<T, N>::operator[]( index );
+    }
+
+    T & operator[]( int index ) VULKAN_HPP_NOEXCEPT
+    {
+      return std::array<T, N>::operator[]( index );
+    }
+#endif
+
+    operator T const * () const VULKAN_HPP_NOEXCEPT
+    {
+      return this->data();
+    }
+
+    operator T * () VULKAN_HPP_NOEXCEPT
+    {
+      return this->data();
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    operator std::string() const
+    {
+      return std::string( this->data() );
+    }
+
+#if 17 <= VULKAN_HPP_CPP_VERSION
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    operator std::string_view() const
+    {
+      return std::string_view( this->data() );
+    }
+#endif
+
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    std::strong_ordering operator<=>( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) <=> *static_cast<std::array<char, N> const *>( &rhs );
+    }
+#else
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator<( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) < *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator<=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) <= *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator>( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) > *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator>=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) >= *static_cast<std::array<char, N> const *>( &rhs );
+    }
+#endif
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator==( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) == *static_cast<std::array<char, N> const *>( &rhs );
+    }
+
+    template <typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
+    bool operator!=( ArrayWrapper1D<char, N> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return *static_cast<std::array<char, N> const *>( this ) != *static_cast<std::array<char, N> const *>( &rhs );
+    }
+  };
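Editorial usage sketch (not part of the generated header), assuming the full header is installed as <vulkan/vulkan.hpp>: ArrayWrapper1D is what wraps the fixed-size char arrays in Vulkan structs, so members such as vk::PhysicalDeviceProperties::deviceName (defined further down in this header) convert straight to std::string.

#include <string>
#include <vulkan/vulkan.hpp>

static std::string deviceNameOf( vk::PhysicalDeviceProperties const & properties )
{
  return properties.deviceName;  // ArrayWrapper1D<char, N> converts to std::string
}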
+
+  // specialization of relational operators between std::string and arrays of chars
+  template <size_t N>
+  bool operator<( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs < rhs.data();
+  }
+
+  template <size_t N>
+  bool operator<=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs <= rhs.data();
+  }
+
+  template <size_t N>
+  bool operator>( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs > rhs.data();
+  }
+
+  template <size_t N>
+  bool operator>=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs >= rhs.data();
+  }
+
+  template <size_t N>
+  bool operator==( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs == rhs.data();
+  }
+
+  template <size_t N>
+  bool operator!=( std::string const & lhs, ArrayWrapper1D<char, N> const & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    return lhs != rhs.data();
+  }
+
+  template <typename T, size_t N, size_t M>
+  class ArrayWrapper2D : public std::array<ArrayWrapper1D<T, M>, N>
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT
+      : std::array<ArrayWrapper1D<T, M>, N>()
+    {}
+
+    VULKAN_HPP_CONSTEXPR ArrayWrapper2D( std::array<std::array<T, M>, N> const & data ) VULKAN_HPP_NOEXCEPT
+      : std::array<ArrayWrapper1D<T, M>, N>( *reinterpret_cast<std::array<ArrayWrapper1D<T, M>, N> const *>( &data ) )
+    {}
+  };
+
+  template <typename FlagBitsType>
+  struct FlagTraits
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = false;
+  };
+
+  template <typename BitType>
+  class Flags
+  {
+  public:
+    using MaskType = typename std::underlying_type<BitType>::type;
+
+    // constructors
+    VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT
+      : m_mask( 0 )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Flags( BitType bit ) VULKAN_HPP_NOEXCEPT
+      : m_mask( static_cast<MaskType>( bit ) )
+    {}
+
+    VULKAN_HPP_CONSTEXPR Flags( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VULKAN_HPP_CONSTEXPR explicit Flags( MaskType flags ) VULKAN_HPP_NOEXCEPT
+      : m_mask( flags )
+    {}
+
+    // relational operators
+#if defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+    auto operator<=>( Flags<BitType> const & ) const = default;
+#else
+    VULKAN_HPP_CONSTEXPR bool operator<( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask < rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator<=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask <= rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator>( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask > rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator>=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask >= rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator==( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask == rhs.m_mask;
+    }
+
+    VULKAN_HPP_CONSTEXPR bool operator!=( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask != rhs.m_mask;
+    }
+#endif
+
+    // logical operator
+    VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT
+    {
+      return !m_mask;
+    }
+
+    // bitwise operators
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>( m_mask & rhs.m_mask );
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>( m_mask | rhs.m_mask );
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( Flags<BitType> const & rhs ) const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>( m_mask ^ rhs.m_mask );
+    }
+
+    VULKAN_HPP_CONSTEXPR Flags<BitType> operator~() const VULKAN_HPP_NOEXCEPT
+    {
+      return Flags<BitType>( m_mask ^ FlagTraits<BitType>::allFlags.m_mask );
+    }
+
+    // assignment operators
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator|=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask |= rhs.m_mask;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator&=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask &= rhs.m_mask;
+      return *this;
+    }
+
+    VULKAN_HPP_CONSTEXPR_14 Flags<BitType> & operator^=( Flags<BitType> const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      m_mask ^= rhs.m_mask;
+      return *this;
+    }
+
+    // cast operators
+    explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return !!m_mask;
+    }
+
+    explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_mask;
+    }
+
+#if defined( VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC )
+  public:
+#else
+  private:
+#endif
+    MaskType m_mask;
+  };
+
+#if !defined( VULKAN_HPP_HAS_SPACESHIP_OPERATOR )
+  // relational operators only needed for pre C++20
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator<( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator>( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator<=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator>=( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator>( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator<( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator>=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator<=( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator==( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator==( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR bool operator!=( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator!=( bit );
+  }
+#endif
+
+  // bitwise operators
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator&( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator&( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator|( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator|( bit );
+  }
+
+  template <typename BitType>
+  VULKAN_HPP_CONSTEXPR Flags<BitType> operator^( BitType bit, Flags<BitType> const & flags ) VULKAN_HPP_NOEXCEPT
+  {
+    return flags.operator^( bit );
+  }
+
+  // bitwise operators on BitType
+  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(BitType lhs, BitType rhs) VULKAN_HPP_NOEXCEPT
+  {
+    return Flags<BitType>( lhs ) & rhs;
+  }
+
+  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(BitType lhs, BitType rhs) VULKAN_HPP_NOEXCEPT
+  {
+    return Flags<BitType>( lhs ) | rhs;
+  }
+
+  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(BitType lhs, BitType rhs) VULKAN_HPP_NOEXCEPT
+  {
+    return Flags<BitType>( lhs ) ^ rhs;
+  }
+
+  template <typename BitType, typename std::enable_if<FlagTraits<BitType>::isBitmask, bool>::type = true>
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR Flags<BitType> operator~( BitType bit ) VULKAN_HPP_NOEXCEPT
+  {
+    return ~( Flags<BitType>( bit ) );
+  }
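
[ Editorial usage sketch, not part of this diff or of the generated header. Assuming the default vk
  namespace alias and a FlagTraits-enabled bitmask enum such as ImageUsageFlagBits defined elsewhere
  in the header, the operators above let plain enum values combine into a Flags value and be tested
  in boolean context:

    vk::ImageUsageFlags usage = vk::ImageUsageFlagBits::eTransferSrc |  // free operator| on the bit type
                                vk::ImageUsageFlagBits::eSampled;
    usage |= vk::ImageUsageFlagBits::eColorAttachment;                  // member operator|=
    if ( usage & vk::ImageUsageFlagBits::eSampled )                     // operator& feeding explicit operator bool
    {
      usage &= ~vk::ImageUsageFlagBits::eTransferSrc;                   // operator~ on the bit, then operator&=
    }
]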
+
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+  template <typename T>
+  class ArrayProxy
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT
+      : m_count( 0 )
+      , m_ptr( nullptr )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ArrayProxy( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_count( 0 )
+      , m_ptr( nullptr )
+    {}
+
+    ArrayProxy( T const & value ) VULKAN_HPP_NOEXCEPT
+      : m_count( 1 )
+      , m_ptr( &value )
+    {}
+
+    ArrayProxy( uint32_t count, T const * ptr ) VULKAN_HPP_NOEXCEPT
+      : m_count( count )
+      , m_ptr( ptr )
+    {}
+
+    template <std::size_t C>
+    ArrayProxy( T const ( &ptr )[C] ) VULKAN_HPP_NOEXCEPT
+      : m_count( C )
+      , m_ptr( ptr )
+    {}
+
+#  if __GNUC__ >= 9
+#    pragma GCC diagnostic push
+#    pragma GCC diagnostic ignored "-Winit-list-lifetime"
+#  endif
+
+    ArrayProxy( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxy( std::initializer_list<typename std::remove_const<T>::type> const & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {
+    }
+
+#  if __GNUC__ >= 9
+#    pragma GCC diagnostic pop
+#  endif
+
+    // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly
+    // convertible to size_t. The const version can capture temporaries, with lifetime ending at end of statement.
+    template <typename V,
+              typename std::enable_if<
+                std::is_convertible<decltype( std::declval<V>().data() ), T *>::value &&
+                std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr>
+    ArrayProxy( V const & v ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( v.size() ) )
+      , m_ptr( v.data() )
+    {}
+
+    const T * begin() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+    const T * end() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr + m_count;
+    }
+
+    const T & front() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_count && m_ptr );
+      return *m_ptr;
+    }
+
+    const T & back() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_count && m_ptr );
+      return *( m_ptr + m_count - 1 );
+    }
+
+    bool empty() const VULKAN_HPP_NOEXCEPT
+    {
+      return ( m_count == 0 );
+    }
+
+    uint32_t size() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_count;
+    }
+
+    T const * data() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+  private:
+    uint32_t  m_count;
+    T const * m_ptr;
+  };
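
[ Editorial usage sketch: an ArrayProxy<const T> parameter accepts a single element, a C array, an
  initializer list, or any container exposing data()/size(), all without copying. The uploadRegions
  signature below is hypothetical:

    void uploadRegions( vk::ArrayProxy<const VkBufferCopy> regions );   // hypothetical helper

    VkBufferCopy              single = {};
    std::vector<VkBufferCopy> many( 3 );
    uploadRegions( single );              // count == 1, points at &single
    uploadRegions( many );                // uses many.size() and many.data()
    uploadRegions( { single, single } );  // temporary initializer list; lifetime ends with the statement
]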
+
+  template <typename T>
+  class ArrayProxyNoTemporaries
+  {
+  public:
+    VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT
+      : m_count( 0 )
+      , m_ptr( nullptr )
+    {}
+
+    VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+      : m_count( 0 )
+      , m_ptr( nullptr )
+    {}
+
+    ArrayProxyNoTemporaries( T & value ) VULKAN_HPP_NOEXCEPT
+      : m_count( 1 )
+      , m_ptr( &value )
+    {}
+
+    template <typename V>
+    ArrayProxyNoTemporaries( V && value ) = delete;
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type & value ) VULKAN_HPP_NOEXCEPT
+      : m_count( 1 )
+      , m_ptr( &value )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type && value ) = delete;
+
+    ArrayProxyNoTemporaries( uint32_t count, T * ptr ) VULKAN_HPP_NOEXCEPT
+      : m_count( count )
+      , m_ptr( ptr )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( uint32_t count, typename std::remove_const<T>::type * ptr ) VULKAN_HPP_NOEXCEPT
+      : m_count( count )
+      , m_ptr( ptr )
+    {}
+
+    template <std::size_t C>
+    ArrayProxyNoTemporaries( T (& ptr)[C] ) VULKAN_HPP_NOEXCEPT
+      : m_count( C )
+      , m_ptr( ptr )
+    {}
+
+    template <std::size_t C>
+    ArrayProxyNoTemporaries( T (&& ptr)[C] ) = delete;
+
+    template <std::size_t C, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type (& ptr)[C] ) VULKAN_HPP_NOEXCEPT
+      : m_count( C )
+      , m_ptr( ptr )
+    {}
+
+    template <std::size_t C, typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( typename std::remove_const<T>::type (&& ptr)[C] ) = delete;
+
+    ArrayProxyNoTemporaries( std::initializer_list<T> const & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    ArrayProxyNoTemporaries( std::initializer_list<T> const && list ) = delete;
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const & list )
+      VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> const && list ) = delete;
+
+    ArrayProxyNoTemporaries( std::initializer_list<T> & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    ArrayProxyNoTemporaries( std::initializer_list<T> && list ) = delete;
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> & list ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( list.size() ) )
+      , m_ptr( list.begin() )
+    {}
+
+    template <typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
+    ArrayProxyNoTemporaries( std::initializer_list<typename std::remove_const<T>::type> && list ) = delete;
+
+    // Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly convertible to size_t.
+    template <typename V,
+              typename std::enable_if<
+                std::is_convertible<decltype( std::declval<V>().data() ), T *>::value &&
+                std::is_convertible<decltype( std::declval<V>().size() ), std::size_t>::value>::type * = nullptr>
+    ArrayProxyNoTemporaries( V & v ) VULKAN_HPP_NOEXCEPT
+      : m_count( static_cast<uint32_t>( v.size() ) )
+      , m_ptr( v.data() )
+    {}
+
+    const T * begin() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+    const T * end() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr + m_count;
+    }
+
+    const T & front() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_count && m_ptr );
+      return *m_ptr;
+    }
+
+    const T & back() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_count && m_ptr );
+      return *( m_ptr + m_count - 1 );
+    }
+
+    bool empty() const VULKAN_HPP_NOEXCEPT
+    {
+      return ( m_count == 0 );
+    }
+
+    uint32_t size() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_count;
+    }
+
+    T * data() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+
+  private:
+    uint32_t m_count;
+    T *      m_ptr;
+  };
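
[ Editorial note: ArrayProxyNoTemporaries mirrors ArrayProxy but deletes every rvalue-taking
  constructor, so a temporary can never be bound where the referenced storage must outlive the call.
  A sketch with a hypothetical helper:

    void keepViews( vk::ArrayProxyNoTemporaries<const VkImageView> views );  // hypothetical helper

    std::vector<VkImageView> stored;
    keepViews( stored );                          // fine: lvalue container
    // keepViews( std::vector<VkImageView>{} );   // rejected: the deleted V && overload is selected
]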
+
+  template <typename T>
+  class StridedArrayProxy : protected ArrayProxy<T>
+  {
+  public:
+    using ArrayProxy<T>::ArrayProxy;
+
+    StridedArrayProxy( uint32_t count, T const * ptr, uint32_t stride ) VULKAN_HPP_NOEXCEPT
+      : ArrayProxy<T>( count, ptr )
+      , m_stride( stride )
+    {
+      VULKAN_HPP_ASSERT( sizeof( T ) <= stride );
+    }
+
+    using ArrayProxy<T>::begin;
+
+    const T * end() const VULKAN_HPP_NOEXCEPT
+    {
+      return reinterpret_cast<T const *>( reinterpret_cast<uint8_t const *>( begin() ) + size() * m_stride );
+    }
+
+    using ArrayProxy<T>::front;
+
+    const T & back() const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( begin() && size() );
+      return *reinterpret_cast<T const *>( reinterpret_cast<uint8_t const *>( begin() ) + ( size() - 1 ) * m_stride );
+    }
+
+    using ArrayProxy<T>::empty;
+    using ArrayProxy<T>::size;
+    using ArrayProxy<T>::data;
+
+    uint32_t stride() const
+    {
+      return m_stride;
+    }
+
+  private:
+    uint32_t m_stride = sizeof( T );
+  };
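
[ Editorial usage sketch: StridedArrayProxy views count elements that sit a fixed number of bytes
  apart, so one field of a larger per-element record can be walked in place. The Vertex type below
  is hypothetical:

    struct Vertex { float position[3]; float color[4]; };
    std::vector<Vertex> vertices( 8 );

    // consecutive x coordinates, sizeof( Vertex ) bytes apart
    vk::StridedArrayProxy<const float> xs( static_cast<uint32_t>( vertices.size() ),
                                           &vertices[0].position[0],
                                           sizeof( Vertex ) );
    float lastX = xs.back();  // steps ( size() - 1 ) * stride() bytes past begin()
]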
+
+  template <typename RefType>
+  class Optional
+  {
+  public:
+    Optional( RefType & reference ) VULKAN_HPP_NOEXCEPT
+    {
+      m_ptr = &reference;
+    }
+    Optional( RefType * ptr ) VULKAN_HPP_NOEXCEPT
+    {
+      m_ptr = ptr;
+    }
+    Optional( std::nullptr_t ) VULKAN_HPP_NOEXCEPT
+    {
+      m_ptr = nullptr;
+    }
+
+    operator RefType *() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+    RefType const * operator->() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_ptr;
+    }
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return !!m_ptr;
+    }
+
+  private:
+    RefType * m_ptr;
+  };
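
[ Editorial note: Optional<RefType> models the enhanced API's nullable reference parameters, most
  prominently the allocator argument; callers may pass a reference, a pointer, or nullptr. The
  destroyWith helper below is hypothetical:

    void destroyWith( vk::Optional<const vk::AllocationCallbacks> allocator );  // hypothetical helper

    vk::AllocationCallbacks callbacks = {};
    destroyWith( callbacks );  // Optional( RefType & )
    destroyWith( nullptr );    // Optional( std::nullptr_t ), yields a null RefType *
]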
+
+  template <typename X, typename Y>
+  struct StructExtends
+  {
+    enum
+    {
+      value = false
+    };
+  };
+
+  template <typename Type, class...>
+  struct IsPartOfStructureChain
+  {
+    static const bool valid = false;
+  };
+
+  template <typename Type, typename Head, typename... Tail>
+  struct IsPartOfStructureChain<Type, Head, Tail...>
+  {
+    static const bool valid = std::is_same<Type, Head>::value || IsPartOfStructureChain<Type, Tail...>::valid;
+  };
+
+  template <size_t Index, typename T, typename... ChainElements>
+  struct StructureChainContains
+  {
+    static const bool value =
+      std::is_same<T, typename std::tuple_element<Index, std::tuple<ChainElements...>>::type>::value ||
+      StructureChainContains<Index - 1, T, ChainElements...>::value;
+  };
+
+  template <typename T, typename... ChainElements>
+  struct StructureChainContains<0, T, ChainElements...>
+  {
+    static const bool value =
+      std::is_same<T, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value;
+  };
+
+  template <size_t Index, typename... ChainElements>
+  struct StructureChainValidation
+  {
+    using TestType = typename std::tuple_element<Index, std::tuple<ChainElements...>>::type;
+    static const bool valid =
+      StructExtends<TestType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
+      ( TestType::allowDuplicate || !StructureChainContains<Index - 1, TestType, ChainElements...>::value ) &&
+      StructureChainValidation<Index - 1, ChainElements...>::valid;
+  };
+
+  template <typename... ChainElements>
+  struct StructureChainValidation<0, ChainElements...>
+  {
+    static const bool valid = true;
+  };
+
+  template <typename... ChainElements>
+  class StructureChain : public std::tuple<ChainElements...>
+  {
+  public:
+    StructureChain() VULKAN_HPP_NOEXCEPT
+    {
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+                     "The structure chain is not valid!" );
+      link<sizeof...( ChainElements ) - 1>();
+    }
+
+    StructureChain( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( rhs )
+    {
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+                     "The structure chain is not valid!" );
+      link( &std::get<0>( *this ),
+            &std::get<0>( rhs ),
+            reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ),
+            reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) );
+    }
+
+    StructureChain( StructureChain && rhs ) VULKAN_HPP_NOEXCEPT
+      : std::tuple<ChainElements...>( std::forward<std::tuple<ChainElements...>>( rhs ) )
+    {
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+                     "The structure chain is not valid!" );
+      link( &std::get<0>( *this ),
+            &std::get<0>( rhs ),
+            reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ),
+            reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) );
+    }
+
+    StructureChain( ChainElements const &... elems ) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>( elems... )
+    {
+      static_assert( StructureChainValidation<sizeof...( ChainElements ) - 1, ChainElements...>::valid,
+                     "The structure chain is not valid!" );
+      link<sizeof...( ChainElements ) - 1>();
+    }
+
+    StructureChain & operator=( StructureChain const & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::tuple<ChainElements...>::operator=( rhs );
+      link( &std::get<0>( *this ),
+            &std::get<0>( rhs ),
+            reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( *this ) ),
+            reinterpret_cast<VkBaseInStructure const *>( &std::get<0>( rhs ) ) );
+      return *this;
+    }
+
+    StructureChain & operator=( StructureChain && rhs ) = delete;
+
+    template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
+    T & get() VULKAN_HPP_NOEXCEPT
+    {
+      return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>(
+        static_cast<std::tuple<ChainElements...> &>( *this ) );
+    }
+
+    template <typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
+    T const & get() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>(
+        static_cast<std::tuple<ChainElements...> const &>( *this ) );
+    }
+
+    template <typename T0, typename T1, typename... Ts>
+    std::tuple<T0 &, T1 &, Ts &...> get() VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
+    }
+
+    template <typename T0, typename T1, typename... Ts>
+    std::tuple<T0 const &, T1 const &, Ts const &...> get() const VULKAN_HPP_NOEXCEPT
+    {
+      return std::tie( get<T0>(), get<T1>(), get<Ts>()... );
+    }
+
+    template <typename ClassType, size_t Which = 0>
+    typename std::enable_if<
+      std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
+        ( Which == 0 ),
+      bool>::type
+      isLinked() const VULKAN_HPP_NOEXCEPT
+    {
+      return true;
+    }
+
+    template <typename ClassType, size_t Which = 0>
+    typename std::enable_if<
+      !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
+        ( Which != 0 ),
+      bool>::type
+      isLinked() const VULKAN_HPP_NOEXCEPT
+    {
+      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
+                     "Can't unlink Structure that's not part of this StructureChain!" );
+      return isLinked( reinterpret_cast<VkBaseInStructure const *>( &get<ClassType, Which>() ) );
+    }
+
+    template <typename ClassType, size_t Which = 0>
+    typename std::enable_if<
+      !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
+        ( Which != 0 ),
+      void>::type relink() VULKAN_HPP_NOEXCEPT
+    {
+      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
+                     "Can't relink Structure that's not part of this StructureChain!" );
+      auto pNext = reinterpret_cast<VkBaseInStructure *>( &get<ClassType, Which>() );
+      VULKAN_HPP_ASSERT( !isLinked( pNext ) );
+      auto & headElement = std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) );
+      pNext->pNext       = reinterpret_cast<VkBaseInStructure const *>( headElement.pNext );
+      headElement.pNext  = pNext;
+    }
+
+    template <typename ClassType, size_t Which = 0>
+    typename std::enable_if<
+      !std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value ||
+        ( Which != 0 ),
+      void>::type unlink() VULKAN_HPP_NOEXCEPT
+    {
+      static_assert( IsPartOfStructureChain<ClassType, ChainElements...>::valid,
+                     "Can't unlink Structure that's not part of this StructureChain!" );
+      unlink( reinterpret_cast<VkBaseOutStructure const *>( &get<ClassType, Which>() ) );
+    }
+
+  private:
+    template <int Index, typename T, int Which, typename, class First, class... Types>
+    struct ChainElementIndex : ChainElementIndex<Index + 1, T, Which, void, Types...>
+    {};
+
+    template <int Index, typename T, int Which, class First, class... Types>
+    struct ChainElementIndex<Index,
+                             T,
+                             Which,
+                             typename std::enable_if<!std::is_same<T, First>::value, void>::type,
+                             First,
+                             Types...> : ChainElementIndex<Index + 1, T, Which, void, Types...>
+    {};
+
+    template <int Index, typename T, int Which, class First, class... Types>
+    struct ChainElementIndex<Index,
+                             T,
+                             Which,
+                             typename std::enable_if<std::is_same<T, First>::value, void>::type,
+                             First,
+                             Types...> : ChainElementIndex<Index + 1, T, Which - 1, void, Types...>
+    {};
+
+    template <int Index, typename T, class First, class... Types>
+    struct ChainElementIndex<Index,
+                             T,
+                             0,
+                             typename std::enable_if<std::is_same<T, First>::value, void>::type,
+                             First,
+                             Types...> : std::integral_constant<int, Index>
+    {};
+
+    bool isLinked( VkBaseInStructure const * pNext ) const VULKAN_HPP_NOEXCEPT
+    {
+      VkBaseInStructure const * elementPtr = reinterpret_cast<VkBaseInStructure const *>(
+        &std::get<0>( static_cast<std::tuple<ChainElements...> const &>( *this ) ) );
+      while ( elementPtr )
+      {
+        if ( elementPtr->pNext == pNext )
+        {
+          return true;
+        }
+        elementPtr = elementPtr->pNext;
+      }
+      return false;
+    }
+
+    template <size_t Index>
+    typename std::enable_if<Index != 0, void>::type link() VULKAN_HPP_NOEXCEPT
+    {
+      auto & x = std::get<Index - 1>( static_cast<std::tuple<ChainElements...> &>( *this ) );
+      x.pNext  = &std::get<Index>( static_cast<std::tuple<ChainElements...> &>( *this ) );
+      link<Index - 1>();
+    }
+
+    template <size_t Index>
+    typename std::enable_if<Index == 0, void>::type link() VULKAN_HPP_NOEXCEPT
+    {}
+
+    void link( void * dstBase, void const * srcBase, VkBaseOutStructure * dst, VkBaseInStructure const * src )
+    {
+      while ( src->pNext )
+      {
+        std::ptrdiff_t offset =
+          reinterpret_cast<char const *>( src->pNext ) - reinterpret_cast<char const *>( srcBase );
+        dst->pNext = reinterpret_cast<VkBaseOutStructure *>( reinterpret_cast<char *>( dstBase ) + offset );
+        dst        = dst->pNext;
+        src        = src->pNext;
+      }
+      dst->pNext = nullptr;
+    }
+
+    void unlink( VkBaseOutStructure const * pNext ) VULKAN_HPP_NOEXCEPT
+    {
+      VkBaseOutStructure * elementPtr =
+        reinterpret_cast<VkBaseOutStructure *>( &std::get<0>( static_cast<std::tuple<ChainElements...> &>( *this ) ) );
+      while ( elementPtr && ( elementPtr->pNext != pNext ) )
+      {
+        elementPtr = elementPtr->pNext;
+      }
+      if ( elementPtr )
+      {
+        elementPtr->pNext = pNext->pNext;
+      }
+      else
+      {
+        VULKAN_HPP_ASSERT( false );  // fires if the ClassType member has already been unlinked!
+      }
+    }
+  };
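
[ Editorial usage sketch: a StructureChain wires the pNext members of its elements front to back at
  construction, get<T>() retrieves an element by type, and unlink/relink toggle an element's
  membership in the pNext chain without removing it from the tuple. The feature-struct names below
  are assumptions taken from the wider header; the static_assert above rejects any element that does
  not StructExtends the head:

    vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features> chain;

    // the head's pNext already points at the Vulkan 1.2 features element
    auto & features12 = chain.get<vk::PhysicalDeviceVulkan12Features>();
    features12.bufferDeviceAddress = VK_TRUE;

    chain.unlink<vk::PhysicalDeviceVulkan12Features>();  // drop it from the pNext chain
    chain.relink<vk::PhysicalDeviceVulkan12Features>();  // splice it back in behind the head
]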
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  template <typename Type, typename Dispatch>
+  class UniqueHandleTraits;
+
+  template <typename Type, typename Dispatch>
+  class UniqueHandle : public UniqueHandleTraits<Type, Dispatch>::deleter
+  {
+  private:
+    using Deleter = typename UniqueHandleTraits<Type, Dispatch>::deleter;
+
+  public:
+    using element_type = Type;
+
+    UniqueHandle()
+      : Deleter()
+      , m_value()
+    {}
+
+    explicit UniqueHandle( Type const & value, Deleter const & deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
+      : Deleter( deleter )
+      , m_value( value )
+    {}
+
+    UniqueHandle( UniqueHandle const & ) = delete;
+
+    UniqueHandle( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
+      : Deleter( std::move( static_cast<Deleter &>( other ) ) )
+      , m_value( other.release() )
+    {}
+
+    ~UniqueHandle() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_value )
+      {
+        this->destroy( m_value );
+      }
+    }
+
+    UniqueHandle & operator=( UniqueHandle const & ) = delete;
+
+    UniqueHandle & operator=( UniqueHandle && other ) VULKAN_HPP_NOEXCEPT
+    {
+      reset( other.release() );
+      *static_cast<Deleter *>( this ) = std::move( static_cast<Deleter &>( other ) );
+      return *this;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_value.operator bool();
+    }
+
+    Type const * operator->() const VULKAN_HPP_NOEXCEPT
+    {
+      return &m_value;
+    }
+
+    Type * operator->() VULKAN_HPP_NOEXCEPT
+    {
+      return &m_value;
+    }
+
+    Type const & operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    Type & operator*() VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    const Type & get() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    Type & get() VULKAN_HPP_NOEXCEPT
+    {
+      return m_value;
+    }
+
+    void reset( Type const & value = Type() ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_value != value )
+      {
+        if ( m_value )
+        {
+          this->destroy( m_value );
+        }
+        m_value = value;
+      }
+    }
+
+    Type release() VULKAN_HPP_NOEXCEPT
+    {
+      Type value = m_value;
+      m_value    = nullptr;
+      return value;
+    }
+
+    void swap( UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_value, rhs.m_value );
+      std::swap( static_cast<Deleter &>( *this ), static_cast<Deleter &>( rhs ) );
+    }
+
+  private:
+    Type m_value;
+  };
+
+  template <typename UniqueType>
+  VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type>
+                    uniqueToRaw( std::vector<UniqueType> const & handles )
+  {
+    std::vector<typename UniqueType::element_type> newBuffer( handles.size() );
+    std::transform( handles.begin(), handles.end(), newBuffer.begin(), []( UniqueType const & handle ) { return handle.get(); } );
+    return newBuffer;
+  }
+
+  template <typename Type, typename Dispatch>
+  VULKAN_HPP_INLINE void swap( UniqueHandle<Type, Dispatch> & lhs,
+                               UniqueHandle<Type, Dispatch> & rhs ) VULKAN_HPP_NOEXCEPT
+  {
+    lhs.swap( rhs );
+  }
+#endif
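
[ Editorial usage sketch: UniqueHandle behaves like std::unique_ptr for a Vulkan handle, with the
  deleter carried as a base class, and uniqueToRaw converts a vector of unique handles back into raw
  handles when a plain array is needed. The createBufferUnique / allocateCommandBuffersUnique calls
  and the device, bufferCreateInfo and allocInfo objects are assumptions from the enhanced API:

    vk::UniqueBuffer buffer = device.createBufferUnique( bufferCreateInfo );
    if ( buffer )                      // explicit operator bool on the held handle
    {
      vk::Buffer raw = buffer.get();   // borrow the handle without releasing ownership
    }
    // the deleter ( device.destroyBuffer ) runs when buffer leaves scope

    auto unique = device.allocateCommandBuffersUnique( allocInfo );
    std::vector<vk::CommandBuffer> rawBuffers = vk::uniqueToRaw( unique );
]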
+#endif  // VULKAN_HPP_DISABLE_ENHANCED_MODE
+
+  class DispatchLoaderBase
+  {
+  public:
+    DispatchLoaderBase() = default;
+    DispatchLoaderBase( std::nullptr_t )
+#if !defined( NDEBUG )
+      : m_valid( false )
+#endif
+    {}
+
+#if !defined( NDEBUG )
+    size_t getVkHeaderVersion() const
+    {
+      VULKAN_HPP_ASSERT( m_valid );
+      return vkHeaderVersion;
+    }
+
+  private:
+    size_t vkHeaderVersion = VK_HEADER_VERSION;
+    bool   m_valid         = true;
+#endif
+  };
+
+
+#if !defined( VK_NO_PROTOTYPES )
+  class DispatchLoaderStatic : public DispatchLoaderBase
+  {
+  public:
+
+    //=== VK_VERSION_1_0 ===
+
+
+    VkResult vkCreateInstance( const VkInstanceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkInstance * pInstance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateInstance( pCreateInfo, pAllocator, pInstance );
+    }
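
[ Editorial note: every member of DispatchLoaderStatic follows the same pattern as vkCreateInstance
  above - a stateless, noexcept wrapper that forwards to the identically named prototype from
  vulkan.h, available only while VK_NO_PROTOTYPES is left undefined. A sketch of how such a
  dispatcher object is threaded through the enhanced API (the enumerateInstanceLayerProperties
  wrapper is an assumption from the wider header):

    vk::DispatchLoaderStatic dispatch;
    auto layers = vk::enumerateInstanceLayerProperties( dispatch );  // forwards to ::vkEnumerateInstanceLayerProperties
]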
+
+
+    void vkDestroyInstance( VkInstance instance, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyInstance( instance, pAllocator );
+    }
+
+
+    VkResult vkEnumeratePhysicalDevices( VkInstance instance, uint32_t * pPhysicalDeviceCount, VkPhysicalDevice * pPhysicalDevices ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDevices( instance, pPhysicalDeviceCount, pPhysicalDevices );
+    }
+
+
+    void vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures * pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures( physicalDevice, pFeatures );
+    }
+
+
+    void vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties( physicalDevice, format, pFormatProperties );
+    }
+
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties( physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties );
+    }
+
+
+    void vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties( physicalDevice, pProperties );
+    }
+
+
+    void vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t * pQueueFamilyPropertyCount, VkQueueFamilyProperties * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }
+
+
+    void vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties( physicalDevice, pMemoryProperties );
+    }
+
+
+    PFN_vkVoidFunction vkGetInstanceProcAddr( VkInstance instance, const char * pName ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetInstanceProcAddr( instance, pName );
+    }
+
+
+    PFN_vkVoidFunction vkGetDeviceProcAddr( VkDevice device, const char * pName ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceProcAddr( device, pName );
+    }
+
+
+    VkResult vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDevice * pDevice ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDevice( physicalDevice, pCreateInfo, pAllocator, pDevice );
+    }
+
+
+    void vkDestroyDevice( VkDevice device, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDevice( device, pAllocator );
+    }
+
+
+    VkResult vkEnumerateInstanceExtensionProperties( const char * pLayerName, uint32_t * pPropertyCount, VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char * pLayerName, uint32_t * pPropertyCount, VkExtensionProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateDeviceExtensionProperties( physicalDevice, pLayerName, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkEnumerateInstanceLayerProperties( uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceLayerProperties( pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkLayerProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateDeviceLayerProperties( physicalDevice, pPropertyCount, pProperties );
+    }
+
+
+    void vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceQueue( device, queueFamilyIndex, queueIndex, pQueue );
+    }
+
+
+    VkResult vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSubmit( queue, submitCount, pSubmits, fence );
+    }
+
+
+    VkResult vkQueueWaitIdle( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueWaitIdle( queue );
+    }
+
+
+    VkResult vkDeviceWaitIdle( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDeviceWaitIdle( device );
+    }
+
+
+    VkResult vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo * pAllocateInfo, const VkAllocationCallbacks * pAllocator, VkDeviceMemory * pMemory ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateMemory( device, pAllocateInfo, pAllocator, pMemory );
+    }
+
+
+    void vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeMemory( device, memory, pAllocator );
+    }
+
+
+    VkResult vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void ** ppData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMapMemory( device, memory, offset, size, flags, ppData );
+    }
+
+
+    void vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUnmapMemory( device, memory );
+    }
+
+
+    VkResult vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFlushMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
+    }
+
+
+    VkResult vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange * pMemoryRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkInvalidateMappedMemoryRanges( device, memoryRangeCount, pMemoryRanges );
+    }
+
+
+    void vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize * pCommittedMemoryInBytes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryCommitment( device, memory, pCommittedMemoryInBytes );
+    }
+
+
+    VkResult vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory( device, buffer, memory, memoryOffset );
+    }
+
+
+    VkResult vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory( device, image, memory, memoryOffset );
+    }
+
+
+    void vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements( device, buffer, pMemoryRequirements );
+    }
+
+
+    void vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements( device, image, pMemoryRequirements );
+    }
+
+
+    void vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t * pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements( device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+
+    void vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t * pPropertyCount, VkSparseImageFormatProperties * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties( physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo * pBindInfo, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueBindSparse( queue, bindInfoCount, pBindInfo, fence );
+    }
+
+
+    VkResult vkCreateFence( VkDevice device, const VkFenceCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkFence * pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateFence( device, pCreateInfo, pAllocator, pFence );
+    }
+
+
+    void vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyFence( device, fence, pAllocator );
+    }
+
+
+    VkResult vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetFences( device, fenceCount, pFences );
+    }
+
+
+    VkResult vkGetFenceStatus( VkDevice device, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceStatus( device, fence );
+    }
+
+
+    VkResult vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence * pFences, VkBool32 waitAll, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitForFences( device, fenceCount, pFences, waitAll, timeout );
+    }
+
+
+    VkResult vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSemaphore * pSemaphore ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSemaphore( device, pCreateInfo, pAllocator, pSemaphore );
+    }
+
+
+    void vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySemaphore( device, semaphore, pAllocator );
+    }
+
+
+    VkResult vkCreateEvent( VkDevice device, const VkEventCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkEvent * pEvent ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateEvent( device, pCreateInfo, pAllocator, pEvent );
+    }
+
+
+    void vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyEvent( device, event, pAllocator );
+    }
+
+
+    VkResult vkGetEventStatus( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetEventStatus( device, event );
+    }
+
+
+    VkResult vkSetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetEvent( device, event );
+    }
+
+
+    VkResult vkResetEvent( VkDevice device, VkEvent event ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetEvent( device, event );
+    }
+
+
+    VkResult vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkQueryPool * pQueryPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateQueryPool( device, pCreateInfo, pAllocator, pQueryPool );
+    }
+
+
+    void vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyQueryPool( device, queryPool, pAllocator );
+    }
+
+
+    VkResult vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void * pData, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetQueryPoolResults( device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags );
+    }
+
+
+    VkResult vkCreateBuffer( VkDevice device, const VkBufferCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkBuffer * pBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateBuffer( device, pCreateInfo, pAllocator, pBuffer );
+    }
+
+
+    void vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyBuffer( device, buffer, pAllocator );
+    }
+
+
+    VkResult vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkBufferView * pView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateBufferView( device, pCreateInfo, pAllocator, pView );
+    }
+
+
+    void vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyBufferView( device, bufferView, pAllocator );
+    }
+
+
+    VkResult vkCreateImage( VkDevice device, const VkImageCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkImage * pImage ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImage( device, pCreateInfo, pAllocator, pImage );
+    }
+
+
+    void vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyImage( device, image, pAllocator );
+    }
+
+
+    void vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource * pSubresource, VkSubresourceLayout * pLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSubresourceLayout( device, image, pSubresource, pLayout );
+    }
+
+
+    VkResult vkCreateImageView( VkDevice device, const VkImageViewCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkImageView * pView ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImageView( device, pCreateInfo, pAllocator, pView );
+    }
+
+
+    void vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyImageView( device, imageView, pAllocator );
+    }
+
+
+    VkResult vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkShaderModule * pShaderModule ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateShaderModule( device, pCreateInfo, pAllocator, pShaderModule );
+    }
+
+
+    void vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyShaderModule( device, shaderModule, pAllocator );
+    }
+
+
+    VkResult vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkPipelineCache * pPipelineCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePipelineCache( device, pCreateInfo, pAllocator, pPipelineCache );
+    }
+
+
+    void vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipelineCache( device, pipelineCache, pAllocator );
+    }
+
+
+    VkResult vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineCacheData( device, pipelineCache, pDataSize, pData );
+    }
+
+
+    VkResult vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache * pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMergePipelineCaches( device, dstCache, srcCacheCount, pSrcCaches );
+    }
+
+
+    VkResult vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo * pCreateInfos, const VkAllocationCallbacks * pAllocator, VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateGraphicsPipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+
+    VkResult vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo * pCreateInfos, const VkAllocationCallbacks * pAllocator, VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateComputePipelines( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+
+    void vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipeline( device, pipeline, pAllocator );
+    }
+
+
+    VkResult vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkPipelineLayout * pPipelineLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePipelineLayout( device, pCreateInfo, pAllocator, pPipelineLayout );
+    }
+
+
+    void vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPipelineLayout( device, pipelineLayout, pAllocator );
+    }
+
+
+    VkResult vkCreateSampler( VkDevice device, const VkSamplerCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSampler * pSampler ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSampler( device, pCreateInfo, pAllocator, pSampler );
+    }
+
+
+    void vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySampler( device, sampler, pAllocator );
+    }
+
+
+    VkResult vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDescriptorSetLayout * pSetLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorSetLayout( device, pCreateInfo, pAllocator, pSetLayout );
+    }
+
+
+    void vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorSetLayout( device, descriptorSetLayout, pAllocator );
+    }
+
+
+    VkResult vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDescriptorPool * pDescriptorPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorPool( device, pCreateInfo, pAllocator, pDescriptorPool );
+    }
+
+
+    void vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorPool( device, descriptorPool, pAllocator );
+    }
+
+
+    VkResult vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetDescriptorPool( device, descriptorPool, flags );
+    }
+
+
+    VkResult vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo * pAllocateInfo, VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateDescriptorSets( device, pAllocateInfo, pDescriptorSets );
+    }
+
+
+    VkResult vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet * pDescriptorSets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeDescriptorSets( device, descriptorPool, descriptorSetCount, pDescriptorSets );
+    }
+
+
+    void vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet * pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet * pDescriptorCopies ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSets( device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies );
+    }
+
+
+    VkResult vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkFramebuffer * pFramebuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateFramebuffer( device, pCreateInfo, pAllocator, pFramebuffer );
+    }
+
+
+    void vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyFramebuffer( device, framebuffer, pAllocator );
+    }
+
+
+    VkResult vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+
+    void vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyRenderPass( device, renderPass, pAllocator );
+    }
+
+
+    void vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D * pGranularity ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRenderAreaGranularity( device, renderPass, pGranularity );
+    }
+
+
+    VkResult vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkCommandPool * pCommandPool ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateCommandPool( device, pCreateInfo, pAllocator, pCommandPool );
+    }
+
+
+    void vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyCommandPool( device, commandPool, pAllocator );
+    }
+
+
+    VkResult vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetCommandPool( device, commandPool, flags );
+    }
+
+
+    VkResult vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo * pAllocateInfo, VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAllocateCommandBuffers( device, pAllocateInfo, pCommandBuffers );
+    }
+
+
+    void vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkFreeCommandBuffers( device, commandPool, commandBufferCount, pCommandBuffers );
+    }
+
+
+    VkResult vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBeginCommandBuffer( commandBuffer, pBeginInfo );
+    }
+
+
+    VkResult vkEndCommandBuffer( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEndCommandBuffer( commandBuffer );
+    }
+
+
+    VkResult vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetCommandBuffer( commandBuffer, flags );
+    }
+
+
+    void vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindPipeline( commandBuffer, pipelineBindPoint, pipeline );
+    }
+
+
+    void vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewport( commandBuffer, firstViewport, viewportCount, pViewports );
+    }
+
+
+    void vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetScissor( commandBuffer, firstScissor, scissorCount, pScissors );
+    }
+
+
+    void vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineWidth( commandBuffer, lineWidth );
+    }
+
+
+    void vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBias( commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+    }
+
+
+    void vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetBlendConstants( commandBuffer, blendConstants );
+    }
+
+
+    void vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBounds( commandBuffer, minDepthBounds, maxDepthBounds );
+    }
+
+
+    void vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilCompareMask( commandBuffer, faceMask, compareMask );
+    }
+
+
+    void vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilWriteMask( commandBuffer, faceMask, writeMask );
+    }
+
+
+    void vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilReference( commandBuffer, faceMask, reference );
+    }
+
+
+    void vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet * pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t * pDynamicOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindDescriptorSets( commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets );
+    }
+
+
+    void vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindIndexBuffer( commandBuffer, buffer, offset, indexType );
+    }
+
+
+    void vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer * pBuffers, const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindVertexBuffers( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets );
+    }
+
+
+    void vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDraw( commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+    }
+
+
+    void vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexed( commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+    }
+
+
+    void vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirect( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+
+    void vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirect( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+
+    void vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatch( commandBuffer, groupCountX, groupCountY, groupCountZ );
+    }
+
+
+    void vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchIndirect( commandBuffer, buffer, offset );
+    }
+
+
+    void vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBuffer( commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions );
+    }
+
+
+    void vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+
+    void vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit * pRegions, VkFilter filter ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBlitImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter );
+    }
+
+
+    void vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBufferToImage( commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+
+    void vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy * pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImageToBuffer( commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions );
+    }
+
+
+    void vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdUpdateBuffer( commandBuffer, dstBuffer, dstOffset, dataSize, pData );
+    }
+
+
+    void vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdFillBuffer( commandBuffer, dstBuffer, dstOffset, size, data );
+    }
+
+
+    void vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue * pColor, uint32_t rangeCount, const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearColorImage( commandBuffer, image, imageLayout, pColor, rangeCount, pRanges );
+    }
+
+
+    void vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue * pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange * pRanges ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearDepthStencilImage( commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges );
+    }
+
+
+    void vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment * pAttachments, uint32_t rectCount, const VkClearRect * pRects ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdClearAttachments( commandBuffer, attachmentCount, pAttachments, rectCount, pRects );
+    }
+
+
+    void vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve * pRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResolveImage( commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions );
+    }
+
+
+    void vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetEvent( commandBuffer, event, stageMask );
+    }
+
+
+    void vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetEvent( commandBuffer, event, stageMask );
+    }
+
+
+    void vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent * pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWaitEvents( commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }
+
+
+    void vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier * pImageMemoryBarriers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPipelineBarrier( commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers );
+    }
+
+
+    void vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginQuery( commandBuffer, queryPool, query, flags );
+    }
+
+
+    void vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndQuery( commandBuffer, queryPool, query );
+    }
+
+
+    void vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetQueryPool( commandBuffer, queryPool, firstQuery, queryCount );
+    }
+
+
+    void vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteTimestamp( commandBuffer, pipelineStage, queryPool, query );
+    }
+
+
+    void vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyQueryPoolResults( commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags );
+    }
+
+
+    void vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void * pValues ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushConstants( commandBuffer, layout, stageFlags, offset, size, pValues );
+    }
+
+
+    void vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo * pRenderPassBegin, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass( commandBuffer, pRenderPassBegin, contents );
+    }
+
+
+    void vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass( commandBuffer, contents );
+    }
+
+
+    void vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass( commandBuffer );
+    }
+
+
+    void vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer * pCommandBuffers ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdExecuteCommands( commandBuffer, commandBufferCount, pCommandBuffers );
+    }
+
+  //=== VK_VERSION_1_1 ===
+
+
+    VkResult vkEnumerateInstanceVersion( uint32_t * pApiVersion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumerateInstanceVersion( pApiVersion );
+    }
+
+
+    VkResult vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory2( device, bindInfoCount, pBindInfos );
+    }
+
+
+    VkResult vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory2( device, bindInfoCount, pBindInfos );
+    }
+
+
+    void vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPeerMemoryFeatures( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+    }
+
+
+    void vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDeviceMask( commandBuffer, deviceMask );
+    }
+
+
+    void vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchBase( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    }
+
+
+    VkResult vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t * pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceGroups( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+    }
+
+
+    void vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements2( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements2( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements2( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+
+    void vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures2( physicalDevice, pFeatures );
+    }
+
+
+    void vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties2( physicalDevice, pProperties );
+    }
+
+
+    void vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties2( physicalDevice, format, pFormatProperties );
+    }
+
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties2( physicalDevice, pImageFormatInfo, pImageFormatProperties );
+    }
+
+
+    void vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t * pQueueFamilyPropertyCount, VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties2( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }
+
+
+    void vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties2( physicalDevice, pMemoryProperties );
+    }
+
+
+    void vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties2( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+    }
+
+
+    void vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkTrimCommandPool( device, commandPool, flags );
+    }
+
+
+    void vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2 * pQueueInfo, VkQueue * pQueue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceQueue2( device, pQueueInfo, pQueue );
+    }
+
+
+    VkResult vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSamplerYcbcrConversion( device, pCreateInfo, pAllocator, pYcbcrConversion );
+    }
+
+
+    void vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySamplerYcbcrConversion( device, ycbcrConversion, pAllocator );
+    }
+
+
+    VkResult vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorUpdateTemplate( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+    }
+
+
+    void vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorUpdateTemplate( device, descriptorUpdateTemplate, pAllocator );
+    }
+
+
+    void vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSetWithTemplate( device, descriptorSet, descriptorUpdateTemplate, pData );
+    }
+
+
+    void vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalBufferProperties( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+    }
+
+
+    void vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalFenceProperties( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+    }
+
+
+    void vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalSemaphoreProperties( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+    }
+
+
+    void vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo * pCreateInfo, VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSupport( device, pCreateInfo, pSupport );
+    }
+
+  //=== VK_VERSION_1_2 ===
+
+
+    void vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+
+    void vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirectCount( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+
+    VkResult vkCreateRenderPass2( VkDevice device, const VkRenderPassCreateInfo2 * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass2( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+
+    void vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo * pRenderPassBegin, const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass2( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
+    }
+
+
+    void vkCmdNextSubpass2( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo * pSubpassBeginInfo, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass2( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
+    }
+
+
+    void vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass2( commandBuffer, pSubpassEndInfo );
+    }
+
+
+    void vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetQueryPool( device, queryPool, firstQuery, queryCount );
+    }
+
+
+    VkResult vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreCounterValue( device, semaphore, pValue );
+    }
+
+
+    VkResult vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitSemaphores( device, pWaitInfo, timeout );
+    }
+
+
+    VkResult vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSignalSemaphore( device, pSignalInfo );
+    }
+
+
+    VkDeviceAddress vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddress( device, pInfo );
+    }
+
+
+    uint64_t vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferOpaqueCaptureAddress( device, pInfo );
+    }
+
+
+    uint64_t vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryOpaqueCaptureAddress( device, pInfo );
+    }
+
+  //=== VK_VERSION_1_3 ===
+
+
+    VkResult vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, uint32_t * pToolCount, VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceToolProperties( physicalDevice, pToolCount, pToolProperties );
+    }
+
+
+    VkResult vkCreatePrivateDataSlot( VkDevice device, const VkPrivateDataSlotCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePrivateDataSlot( device, pCreateInfo, pAllocator, pPrivateDataSlot );
+    }
+
+
+    void vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPrivateDataSlot( device, privateDataSlot, pAllocator );
+    }
+
+
+    VkResult vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetPrivateData( device, objectType, objectHandle, privateDataSlot, data );
+    }
+
+
+    void vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPrivateData( device, objectType, objectHandle, privateDataSlot, pData );
+    }
+
+
+    void vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetEvent2( commandBuffer, event, pDependencyInfo );
+    }
+
+
+    void vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetEvent2( commandBuffer, event, stageMask );
+    }
+
+
+    void vkCmdWaitEvents2( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent * pEvents, const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWaitEvents2( commandBuffer, eventCount, pEvents, pDependencyInfos );
+    }
+
+
+    void vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPipelineBarrier2( commandBuffer, pDependencyInfo );
+    }
+
+
+    void vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteTimestamp2( commandBuffer, stage, queryPool, query );
+    }
+
+
+    VkResult vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSubmit2( queue, submitCount, pSubmits, fence );
+    }
+
+
+    void vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBuffer2( commandBuffer, pCopyBufferInfo );
+    }
+
+
+    void vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImage2( commandBuffer, pCopyImageInfo );
+    }
+
+
+    void vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBufferToImage2( commandBuffer, pCopyBufferToImageInfo );
+    }
+
+
+    void vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImageToBuffer2( commandBuffer, pCopyImageToBufferInfo );
+    }
+
+
+    void vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBlitImage2( commandBuffer, pBlitImageInfo );
+    }
+
+
+    void vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResolveImage2( commandBuffer, pResolveImageInfo );
+    }
+
+
+    void vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRendering( commandBuffer, pRenderingInfo );
+    }
+
+
+    void vkCmdEndRendering( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRendering( commandBuffer );
+    }
+
+
+    void vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCullMode( commandBuffer, cullMode );
+    }
+
+
+    void vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetFrontFace( commandBuffer, frontFace );
+    }
+
+
+    void vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPrimitiveTopology( commandBuffer, primitiveTopology );
+    }
+
+
+    void vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportWithCount( commandBuffer, viewportCount, pViewports );
+    }
+
+
+    void vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetScissorWithCount( commandBuffer, scissorCount, pScissors );
+    }
+
+
+    void vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer * pBuffers, const VkDeviceSize * pOffsets, const VkDeviceSize * pSizes, const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindVertexBuffers2( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
+    }
+
+
+    void vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthTestEnable( commandBuffer, depthTestEnable );
+    }
+
+
+    void vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthWriteEnable( commandBuffer, depthWriteEnable );
+    }
+
+
+    void vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthCompareOp( commandBuffer, depthCompareOp );
+    }
+
+
+    void vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBoundsTestEnable( commandBuffer, depthBoundsTestEnable );
+    }
+
+
+    void vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilTestEnable( commandBuffer, stencilTestEnable );
+    }
+
+
+    void vkCmdSetStencilOp( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilOp( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp );
+    }
+
+
+    void vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetRasterizerDiscardEnable( commandBuffer, rasterizerDiscardEnable );
+    }
+
+
+    void vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBiasEnable( commandBuffer, depthBiasEnable );
+    }
+
+
+    void vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPrimitiveRestartEnable( commandBuffer, primitiveRestartEnable );
+    }
+
+
+    void vkGetDeviceBufferMemoryRequirements( VkDevice device, const VkDeviceBufferMemoryRequirements * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceBufferMemoryRequirements( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetDeviceImageMemoryRequirements( VkDevice device, const VkDeviceImageMemoryRequirements * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceImageMemoryRequirements( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetDeviceImageSparseMemoryRequirements( VkDevice device, const VkDeviceImageMemoryRequirements * pInfo, uint32_t * pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceImageSparseMemoryRequirements( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+  //=== VK_KHR_surface ===
+
+
+    void vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySurfaceKHR( instance, surface, pAllocator );
+    }
+
+
+    VkResult vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32 * pSupported ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceSupportKHR( physicalDevice, queueFamilyIndex, surface, pSupported );
+    }
+
+
+    VkResult vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilitiesKHR( physicalDevice, surface, pSurfaceCapabilities );
+    }
+
+
+    VkResult vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t * pSurfaceFormatCount, VkSurfaceFormatKHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceFormatsKHR( physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats );
+    }
+
+
+    VkResult vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t * pPresentModeCount, VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfacePresentModesKHR( physicalDevice, surface, pPresentModeCount, pPresentModes );
+    }
+
+  //=== VK_KHR_swapchain ===
+
+
+    VkResult vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSwapchainKHR * pSwapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSwapchainKHR( device, pCreateInfo, pAllocator, pSwapchain );
+    }
+
+
+    void vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySwapchainKHR( device, swapchain, pAllocator );
+    }
+
+
+    VkResult vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t * pSwapchainImageCount, VkImage * pSwapchainImages ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainImagesKHR( device, swapchain, pSwapchainImageCount, pSwapchainImages );
+    }
+
+
+    VkResult vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireNextImageKHR( device, swapchain, timeout, semaphore, fence, pImageIndex );
+    }
+
+
+    VkResult vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR * pPresentInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueuePresentKHR( queue, pPresentInfo );
+    }
+
+
+    VkResult vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPresentCapabilitiesKHR( device, pDeviceGroupPresentCapabilities );
+    }
+
+
+    VkResult vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupSurfacePresentModesKHR( device, surface, pModes );
+    }
+
+
+    VkResult vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t * pRectCount, VkRect2D * pRects ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDevicePresentRectanglesKHR( physicalDevice, surface, pRectCount, pRects );
+    }
+
+
+    VkResult vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireNextImage2KHR( device, pAcquireInfo, pImageIndex );
+    }
+
+  //=== VK_KHR_display ===
+
+
+    VkResult vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayPropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayPlanePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPlanePropertiesKHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t * pDisplayCount, VkDisplayKHR * pDisplays ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneSupportedDisplaysKHR( physicalDevice, planeIndex, pDisplayCount, pDisplays );
+    }
+
+
+    VkResult vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t * pPropertyCount, VkDisplayModePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayModePropertiesKHR( physicalDevice, display, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDisplayModeKHR * pMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDisplayModeKHR( physicalDevice, display, pCreateInfo, pAllocator, pMode );
+    }
+
+
+    VkResult vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneCapabilitiesKHR( physicalDevice, mode, planeIndex, pCapabilities );
+    }
+
+
+    VkResult vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDisplayPlaneSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+  //=== VK_KHR_display_swapchain ===
+
+
+    VkResult vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR * pCreateInfos, const VkAllocationCallbacks * pAllocator, VkSwapchainKHR * pSwapchains ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSharedSwapchainsKHR( device, swapchainCount, pCreateInfos, pAllocator, pSwapchains );
+    }
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+
+    VkResult vkCreateXlibSurfaceKHR( VkInstance instance, const VkXlibSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateXlibSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+
+    VkBool32 vkGetPhysicalDeviceXlibPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, Display * dpy, VisualID visualID ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceXlibPresentationSupportKHR( physicalDevice, queueFamilyIndex, dpy, visualID );
+    }
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+
+    VkResult vkCreateXcbSurfaceKHR( VkInstance instance, const VkXcbSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateXcbSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+
+    VkBool32 vkGetPhysicalDeviceXcbPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, xcb_connection_t * connection, xcb_visualid_t visual_id ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceXcbPresentationSupportKHR( physicalDevice, queueFamilyIndex, connection, visual_id );
+    }
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+
+    VkResult vkCreateWaylandSurfaceKHR( VkInstance instance, const VkWaylandSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateWaylandSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+
+    VkBool32 vkGetPhysicalDeviceWaylandPresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct wl_display * display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceWaylandPresentationSupportKHR( physicalDevice, queueFamilyIndex, display );
+    }
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+
+    VkResult vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAndroidSurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+
+    VkResult vkCreateWin32SurfaceKHR( VkInstance instance, const VkWin32SurfaceCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateWin32SurfaceKHR( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+
+    VkBool32 vkGetPhysicalDeviceWin32PresentationSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceWin32PresentationSupportKHR( physicalDevice, queueFamilyIndex );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+
+    VkResult vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDebugReportCallbackEXT * pCallback ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDebugReportCallbackEXT( instance, pCreateInfo, pAllocator, pCallback );
+    }
+
+
+    void vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDebugReportCallbackEXT( instance, callback, pAllocator );
+    }
+
+
+    void vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char * pLayerPrefix, const char * pMessage ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugReportMessageEXT( instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage );
+    }
+
+  //=== VK_EXT_debug_marker ===
+
+
+    VkResult vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugMarkerSetObjectTagEXT( device, pTagInfo );
+    }
+
+
+    VkResult vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDebugMarkerSetObjectNameEXT( device, pNameInfo );
+    }
+
+
+    void vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDebugMarkerBeginEXT( commandBuffer, pMarkerInfo );
+    }
+
+
+    void vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDebugMarkerEndEXT( commandBuffer );
+    }
+
+
+    void vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDebugMarkerInsertEXT( commandBuffer, pMarkerInfo );
+    }
+
+  //=== VK_KHR_video_queue ===
+
+
+    VkResult vkGetPhysicalDeviceVideoCapabilitiesKHR( VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR * pVideoProfile, VkVideoCapabilitiesKHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceVideoCapabilitiesKHR( physicalDevice, pVideoProfile, pCapabilities );
+    }
+
+
+    VkResult vkGetPhysicalDeviceVideoFormatPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, uint32_t * pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR * pVideoFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceVideoFormatPropertiesKHR( physicalDevice, pVideoFormatInfo, pVideoFormatPropertyCount, pVideoFormatProperties );
+    }
+
+
+    VkResult vkCreateVideoSessionKHR( VkDevice device, const VkVideoSessionCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkVideoSessionKHR * pVideoSession ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateVideoSessionKHR( device, pCreateInfo, pAllocator, pVideoSession );
+    }
+
+
+    void vkDestroyVideoSessionKHR( VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyVideoSessionKHR( device, videoSession, pAllocator );
+    }
+
+
+    VkResult vkGetVideoSessionMemoryRequirementsKHR( VkDevice device, VkVideoSessionKHR videoSession, uint32_t * pMemoryRequirementsCount, VkVideoSessionMemoryRequirementsKHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetVideoSessionMemoryRequirementsKHR( device, videoSession, pMemoryRequirementsCount, pMemoryRequirements );
+    }
+
+
+    VkResult vkBindVideoSessionMemoryKHR( VkDevice device, VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VkBindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindVideoSessionMemoryKHR( device, videoSession, bindSessionMemoryInfoCount, pBindSessionMemoryInfos );
+    }
+
+
+    VkResult vkCreateVideoSessionParametersKHR( VkDevice device, const VkVideoSessionParametersCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkVideoSessionParametersKHR * pVideoSessionParameters ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateVideoSessionParametersKHR( device, pCreateInfo, pAllocator, pVideoSessionParameters );
+    }
+
+
+    VkResult vkUpdateVideoSessionParametersKHR( VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR * pUpdateInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateVideoSessionParametersKHR( device, videoSessionParameters, pUpdateInfo );
+    }
+
+
+    void vkDestroyVideoSessionParametersKHR( VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyVideoSessionParametersKHR( device, videoSessionParameters, pAllocator );
+    }
+
+
+    void vkCmdBeginVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR * pBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginVideoCodingKHR( commandBuffer, pBeginInfo );
+    }
+
+
+    void vkCmdEndVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR * pEndCodingInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndVideoCodingKHR( commandBuffer, pEndCodingInfo );
+    }
+
+
+    void vkCmdControlVideoCodingKHR( VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR * pCodingControlInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdControlVideoCodingKHR( commandBuffer, pCodingControlInfo );
+    }
+
+  //=== VK_KHR_video_decode_queue ===
+
+
+    void vkCmdDecodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR * pDecodeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDecodeVideoKHR( commandBuffer, pDecodeInfo );
+    }
+
+  //=== VK_EXT_transform_feedback ===
+
+
+    void vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer * pBuffers, const VkDeviceSize * pOffsets, const VkDeviceSize * pSizes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindTransformFeedbackBuffersEXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes );
+    }
+
+
+    void vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer * pCounterBuffers, const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
+    }
+
+
+    void vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer * pCounterBuffers, const VkDeviceSize * pCounterBufferOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndTransformFeedbackEXT( commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets );
+    }
+
+
+    void vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginQueryIndexedEXT( commandBuffer, queryPool, query, flags, index );
+    }
+
+
+    void vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndQueryIndexedEXT( commandBuffer, queryPool, query, index );
+    }
+
+
+    void vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectByteCountEXT( commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride );
+    }
+
+  //=== VK_NVX_binary_import ===
+
+
+    VkResult vkCreateCuModuleNVX( VkDevice device, const VkCuModuleCreateInfoNVX * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkCuModuleNVX * pModule ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateCuModuleNVX( device, pCreateInfo, pAllocator, pModule );
+    }
+
+
+    VkResult vkCreateCuFunctionNVX( VkDevice device, const VkCuFunctionCreateInfoNVX * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkCuFunctionNVX * pFunction ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateCuFunctionNVX( device, pCreateInfo, pAllocator, pFunction );
+    }
+
+
+    void vkDestroyCuModuleNVX( VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyCuModuleNVX( device, module, pAllocator );
+    }
+
+
+    void vkDestroyCuFunctionNVX( VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyCuFunctionNVX( device, function, pAllocator );
+    }
+
+
+    void vkCmdCuLaunchKernelNVX( VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX * pLaunchInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCuLaunchKernelNVX( commandBuffer, pLaunchInfo );
+    }
+
+  //=== VK_NVX_image_view_handle ===
+
+
+    uint32_t vkGetImageViewHandleNVX( VkDevice device, const VkImageViewHandleInfoNVX * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageViewHandleNVX( device, pInfo );
+    }
+
+
+    VkResult vkGetImageViewAddressNVX( VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageViewAddressNVX( device, imageView, pProperties );
+    }
+
+  //=== VK_AMD_draw_indirect_count ===
+
+
+    void vkCmdDrawIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+
+    void vkCmdDrawIndexedIndirectCountAMD( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirectCountAMD( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+  //=== VK_AMD_shader_info ===
+
+
+    VkResult vkGetShaderInfoAMD( VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t * pInfoSize, void * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetShaderInfoAMD( device, pipeline, shaderStage, infoType, pInfoSize, pInfo );
+    }
+
+  //=== VK_KHR_dynamic_rendering ===
+
+
+    void vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo * pRenderingInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderingKHR( commandBuffer, pRenderingInfo );
+    }
+
+
+    void vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderingKHR( commandBuffer );
+    }
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+
+    VkResult vkCreateStreamDescriptorSurfaceGGP( VkInstance instance, const VkStreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateStreamDescriptorSurfaceGGP( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+
+
+    VkResult vkGetPhysicalDeviceExternalImageFormatPropertiesNV( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV * pExternalImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalImageFormatPropertiesNV( physicalDevice, format, type, tiling, usage, flags, externalHandleType, pExternalImageFormatProperties );
+    }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+
+
+    VkResult vkGetMemoryWin32HandleNV( VkDevice device, VkDeviceMemory memory, VkExternalMemoryHandleTypeFlagsNV handleType, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandleNV( device, memory, handleType, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+
+
+    void vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 * pFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFeatures2KHR( physicalDevice, pFeatures );
+    }
+
+
+    void vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceProperties2KHR( physicalDevice, pProperties );
+    }
+
+
+    void vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2 * pFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFormatProperties2KHR( physicalDevice, format, pFormatProperties );
+    }
+
+
+    VkResult vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VkImageFormatProperties2 * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceImageFormatProperties2KHR( physicalDevice, pImageFormatInfo, pImageFormatProperties );
+    }
+
+
+    void vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t * pQueueFamilyPropertyCount, VkQueueFamilyProperties2 * pQueueFamilyProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyProperties2KHR( physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties );
+    }
+
+
+    void vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2 * pMemoryProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMemoryProperties2KHR( physicalDevice, pMemoryProperties );
+    }
+
+
+    void vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VkSparseImageFormatProperties2 * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSparseImageFormatProperties2KHR( physicalDevice, pFormatInfo, pPropertyCount, pProperties );
+    }
+
+  //=== VK_KHR_device_group ===
+
+
+    void vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags * pPeerMemoryFeatures ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupPeerMemoryFeaturesKHR( device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures );
+    }
+
+
+    void vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDeviceMaskKHR( commandBuffer, deviceMask );
+    }
+
+
+    void vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDispatchBaseKHR( commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+    }
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+
+    VkResult vkCreateViSurfaceNN( VkInstance instance, const VkViSurfaceCreateInfoNN * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateViSurfaceNN( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_KHR_maintenance1 ===
+
+
+    void vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkTrimCommandPoolKHR( device, commandPool, flags );
+    }
+
+  //=== VK_KHR_device_group_creation ===
+
+
+    VkResult vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t * pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceGroupsKHR( instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties );
+    }
+
+  //=== VK_KHR_external_memory_capabilities ===
+
+
+    void vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VkExternalBufferProperties * pExternalBufferProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalBufferPropertiesKHR( physicalDevice, pExternalBufferInfo, pExternalBufferProperties );
+    }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+
+
+    VkResult vkGetMemoryWin32HandleKHR( VkDevice device, const VkMemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
+
+
+    VkResult vkGetMemoryWin32HandlePropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VkMemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryWin32HandlePropertiesKHR( device, handleType, handle, pMemoryWin32HandleProperties );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+
+
+    VkResult vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryFdKHR( device, pGetFdInfo, pFd );
+    }
+
+
+    VkResult vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR * pMemoryFdProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryFdPropertiesKHR( device, handleType, fd, pMemoryFdProperties );
+    }
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+
+
+    void vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VkExternalSemaphoreProperties * pExternalSemaphoreProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties );
+    }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+
+
+    VkResult vkImportSemaphoreWin32HandleKHR( VkDevice device, const VkImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportSemaphoreWin32HandleKHR( device, pImportSemaphoreWin32HandleInfo );
+    }
+
+
+    VkResult vkGetSemaphoreWin32HandleKHR( VkDevice device, const VkSemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+
+
+    VkResult vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportSemaphoreFdKHR( device, pImportSemaphoreFdInfo );
+    }
+
+
+    VkResult vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreFdKHR( device, pGetFdInfo, pFd );
+    }
+
+  //=== VK_KHR_push_descriptor ===
+
+
+    void vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet * pDescriptorWrites ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushDescriptorSetKHR( commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites );
+    }
+
+
+    void vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPushDescriptorSetWithTemplateKHR( commandBuffer, descriptorUpdateTemplate, layout, set, pData );
+    }
+
+  //=== VK_EXT_conditional_rendering ===
+
+
+    void vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginConditionalRenderingEXT( commandBuffer, pConditionalRenderingBegin );
+    }
+
+
+    void vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndConditionalRenderingEXT( commandBuffer );
+    }
+
+  //=== VK_KHR_descriptor_update_template ===
+
+
+    VkResult vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDescriptorUpdateTemplateKHR( device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate );
+    }
+
+
+    void vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDescriptorUpdateTemplateKHR( device, descriptorUpdateTemplate, pAllocator );
+    }
+
+
+    void vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUpdateDescriptorSetWithTemplateKHR( device, descriptorSet, descriptorUpdateTemplate, pData );
+    }
+
+  //=== VK_NV_clip_space_w_scaling ===
+
+
+    void vkCmdSetViewportWScalingNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV * pViewportWScalings ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportWScalingNV( commandBuffer, firstViewport, viewportCount, pViewportWScalings );
+    }
+
+  //=== VK_EXT_direct_mode_display ===
+
+
+    VkResult vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseDisplayEXT( physicalDevice, display );
+    }
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+
+
+    VkResult vkAcquireXlibDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireXlibDisplayEXT( physicalDevice, dpy, display );
+    }
+
+
+    VkResult vkGetRandROutputDisplayEXT( VkPhysicalDevice physicalDevice, Display * dpy, RROutput rrOutput, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRandROutputDisplayEXT( physicalDevice, dpy, rrOutput, pDisplay );
+    }
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  //=== VK_EXT_display_surface_counter ===
+
+
+    VkResult vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilities2EXT( physicalDevice, surface, pSurfaceCapabilities );
+    }
+
+  //=== VK_EXT_display_control ===
+
+
+    VkResult vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT * pDisplayPowerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDisplayPowerControlEXT( device, display, pDisplayPowerInfo );
+    }
+
+
+    VkResult vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT * pDeviceEventInfo, const VkAllocationCallbacks * pAllocator, VkFence * pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkRegisterDeviceEventEXT( device, pDeviceEventInfo, pAllocator, pFence );
+    }
+
+
+    VkResult vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT * pDisplayEventInfo, const VkAllocationCallbacks * pAllocator, VkFence * pFence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkRegisterDisplayEventEXT( device, display, pDisplayEventInfo, pAllocator, pFence );
+    }
+
+
+    VkResult vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t * pCounterValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainCounterEXT( device, swapchain, counter, pCounterValue );
+    }
+
+  //=== VK_GOOGLE_display_timing ===
+
+
+    VkResult vkGetRefreshCycleDurationGOOGLE( VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE * pDisplayTimingProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRefreshCycleDurationGOOGLE( device, swapchain, pDisplayTimingProperties );
+    }
+
+
+    VkResult vkGetPastPresentationTimingGOOGLE( VkDevice device, VkSwapchainKHR swapchain, uint32_t * pPresentationTimingCount, VkPastPresentationTimingGOOGLE * pPresentationTimings ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPastPresentationTimingGOOGLE( device, swapchain, pPresentationTimingCount, pPresentationTimings );
+    }
+
+  //=== VK_EXT_discard_rectangles ===
+
+
+    void vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D * pDiscardRectangles ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDiscardRectangleEXT( commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles );
+    }
+
+  //=== VK_EXT_hdr_metadata ===
+
+
+    void vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR * pSwapchains, const VkHdrMetadataEXT * pMetadata ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetHdrMetadataEXT( device, swapchainCount, pSwapchains, pMetadata );
+    }
+
+  //=== VK_KHR_create_renderpass2 ===
+
+
+    VkResult vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2 * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkRenderPass * pRenderPass ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRenderPass2KHR( device, pCreateInfo, pAllocator, pRenderPass );
+    }
+
+
+    void vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo * pRenderPassBegin, const VkSubpassBeginInfo * pSubpassBeginInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginRenderPass2KHR( commandBuffer, pRenderPassBegin, pSubpassBeginInfo );
+    }
+
+
+    void vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo * pSubpassBeginInfo, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdNextSubpass2KHR( commandBuffer, pSubpassBeginInfo, pSubpassEndInfo );
+    }
+
+
+    void vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo * pSubpassEndInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndRenderPass2KHR( commandBuffer, pSubpassEndInfo );
+    }
+
+  //=== VK_KHR_shared_presentable_image ===
+
+
+    VkResult vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSwapchainStatusKHR( device, swapchain );
+    }
+
+  //=== VK_KHR_external_fence_capabilities ===
+
+
+    void vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VkExternalFenceProperties * pExternalFenceProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceExternalFencePropertiesKHR( physicalDevice, pExternalFenceInfo, pExternalFenceProperties );
+    }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+
+
+    VkResult vkImportFenceWin32HandleKHR( VkDevice device, const VkImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportFenceWin32HandleKHR( device, pImportFenceWin32HandleInfo );
+    }
+
+
+    VkResult vkGetFenceWin32HandleKHR( VkDevice device, const VkFenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceWin32HandleKHR( device, pGetWin32HandleInfo, pHandle );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+
+
+    VkResult vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR * pImportFenceFdInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportFenceFdKHR( device, pImportFenceFdInfo );
+    }
+
+
+    VkResult vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR * pGetFdInfo, int * pFd ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFenceFdKHR( device, pGetFdInfo, pFd );
+    }
+
+  //=== VK_KHR_performance_query ===
+
+
+    VkResult vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t * pCounterCount, VkPerformanceCounterKHR * pCounters, VkPerformanceCounterDescriptionKHR * pCounterDescriptions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions );
+    }
+
+
+    void vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, uint32_t * pNumPasses ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( physicalDevice, pPerformanceQueryCreateInfo, pNumPasses );
+    }
+
+
+    VkResult vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireProfilingLockKHR( device, pInfo );
+    }
+
+
+    void vkReleaseProfilingLockKHR( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseProfilingLockKHR( device );
+    }
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+
+
+    VkResult vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VkSurfaceCapabilities2KHR * pSurfaceCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceCapabilities2KHR( physicalDevice, pSurfaceInfo, pSurfaceCapabilities );
+    }
+
+
+    VkResult vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pSurfaceFormatCount, VkSurfaceFormat2KHR * pSurfaceFormats ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfaceFormats2KHR( physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats );
+    }
+
+  //=== VK_KHR_get_display_properties2 ===
+
+
+    VkResult vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayProperties2KHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkDisplayPlaneProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDisplayPlaneProperties2KHR( physicalDevice, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t * pPropertyCount, VkDisplayModeProperties2KHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayModeProperties2KHR( physicalDevice, display, pPropertyCount, pProperties );
+    }
+
+
+    VkResult vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR * pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR * pCapabilities ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDisplayPlaneCapabilities2KHR( physicalDevice, pDisplayPlaneInfo, pCapabilities );
+    }
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+
+
+    VkResult vkCreateIOSSurfaceMVK( VkInstance instance, const VkIOSSurfaceCreateInfoMVK * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateIOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+
+    VkResult vkCreateMacOSSurfaceMVK( VkInstance instance, const VkMacOSSurfaceCreateInfoMVK * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateMacOSSurfaceMVK( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+
+    VkResult vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT * pNameInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetDebugUtilsObjectNameEXT( device, pNameInfo );
+    }
+
+
+    VkResult vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT * pTagInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetDebugUtilsObjectTagEXT( device, pTagInfo );
+    }
+
+
+    void vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueBeginDebugUtilsLabelEXT( queue, pLabelInfo );
+    }
+
+
+    void vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueEndDebugUtilsLabelEXT( queue );
+    }
+
+
+    void vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueInsertDebugUtilsLabelEXT( queue, pLabelInfo );
+    }
+
+
+    void vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBeginDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
+    }
+
+
+    void vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEndDebugUtilsLabelEXT( commandBuffer );
+    }
+
+
+    void vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT * pLabelInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdInsertDebugUtilsLabelEXT( commandBuffer, pLabelInfo );
+    }
+
+
+    VkResult vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkDebugUtilsMessengerEXT * pMessenger ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDebugUtilsMessengerEXT( instance, pCreateInfo, pAllocator, pMessenger );
+    }
+
+
+    void vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDebugUtilsMessengerEXT( instance, messenger, pAllocator );
+    }
+
+
+    void vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT * pCallbackData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSubmitDebugUtilsMessageEXT( instance, messageSeverity, messageTypes, pCallbackData );
+    }
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+
+    VkResult vkGetAndroidHardwareBufferPropertiesANDROID( VkDevice device, const struct AHardwareBuffer * buffer, VkAndroidHardwareBufferPropertiesANDROID * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAndroidHardwareBufferPropertiesANDROID( device, buffer, pProperties );
+    }
+
+
+    VkResult vkGetMemoryAndroidHardwareBufferANDROID( VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo, struct AHardwareBuffer ** pBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryAndroidHardwareBufferANDROID( device, pInfo, pBuffer );
+    }
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+
+
+    void vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT * pSampleLocationsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetSampleLocationsEXT( commandBuffer, pSampleLocationsInfo );
+    }
+
+
+    void vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT * pMultisampleProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceMultisamplePropertiesEXT( physicalDevice, samples, pMultisampleProperties );
+    }
+
+  //=== VK_KHR_get_memory_requirements2 ===
+
+
+    void vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2 * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferMemoryRequirements2KHR( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSparseMemoryRequirements2KHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+  //=== VK_KHR_acceleration_structure ===
+
+
+    VkResult vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkAccelerationStructureKHR * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAccelerationStructureKHR( device, pCreateInfo, pAllocator, pAccelerationStructure );
+    }
+
+
+    void vkDestroyAccelerationStructureKHR( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyAccelerationStructureKHR( device, accelerationStructure, pAllocator );
+    }
+
+
+    void vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBuildAccelerationStructuresKHR( commandBuffer, infoCount, pInfos, ppBuildRangeInfos );
+    }
+
+
+    void vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, const VkDeviceAddress * pIndirectDeviceAddresses, const uint32_t * pIndirectStrides, const uint32_t * const * ppMaxPrimitiveCounts ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBuildAccelerationStructuresIndirectKHR( commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts );
+    }
+
+
+    VkResult vkBuildAccelerationStructuresKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR * pInfos, const VkAccelerationStructureBuildRangeInfoKHR * const * ppBuildRangeInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBuildAccelerationStructuresKHR( device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos );
+    }
+
+
+    VkResult vkCopyAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyAccelerationStructureKHR( device, deferredOperation, pInfo );
+    }
+
+
+    VkResult vkCopyAccelerationStructureToMemoryKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyAccelerationStructureToMemoryKHR( device, deferredOperation, pInfo );
+    }
+
+
+    VkResult vkCopyMemoryToAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyMemoryToAccelerationStructureKHR( device, deferredOperation, pInfo );
+    }
+
+
+    VkResult vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR * pAccelerationStructures, VkQueryType queryType, size_t dataSize, void * pData, size_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWriteAccelerationStructuresPropertiesKHR( device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride );
+    }
+
+
+    void vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyAccelerationStructureKHR( commandBuffer, pInfo );
+    }
+
+
+    void vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyAccelerationStructureToMemoryKHR( commandBuffer, pInfo );
+    }
+
+
+    void vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMemoryToAccelerationStructureKHR( commandBuffer, pInfo );
+    }
+
+
+    VkDeviceAddress vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureDeviceAddressKHR( device, pInfo );
+    }
+
+
+    void vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR * pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteAccelerationStructuresPropertiesKHR( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
+    }
+
+
+    void vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, const VkAccelerationStructureVersionInfoKHR * pVersionInfo, VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceAccelerationStructureCompatibilityKHR( device, pVersionInfo, pCompatibility );
+    }
+
+
+    void vkGetAccelerationStructureBuildSizesKHR( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR * pBuildInfo, const uint32_t * pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR * pSizeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureBuildSizesKHR( device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo );
+    }
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+
+
+    VkResult vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSamplerYcbcrConversion * pYcbcrConversion ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateSamplerYcbcrConversionKHR( device, pCreateInfo, pAllocator, pYcbcrConversion );
+    }
+
+
+    void vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroySamplerYcbcrConversionKHR( device, ycbcrConversion, pAllocator );
+    }
+
+  //=== VK_KHR_bind_memory2 ===
+
+
+    VkResult vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindBufferMemory2KHR( device, bindInfoCount, pBindInfos );
+    }
+
+
+    VkResult vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindImageMemory2KHR( device, bindInfoCount, pBindInfos );
+    }
+
+  //=== VK_EXT_image_drm_format_modifier ===
+
+
+    VkResult vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageDrmFormatModifierPropertiesEXT( device, image, pProperties );
+    }
+
+  //=== VK_EXT_validation_cache ===
+
+
+    VkResult vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkValidationCacheEXT * pValidationCache ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateValidationCacheEXT( device, pCreateInfo, pAllocator, pValidationCache );
+    }
+
+
+    void vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyValidationCacheEXT( device, validationCache, pAllocator );
+    }
+
+
+    VkResult vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT * pSrcCaches ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkMergeValidationCachesEXT( device, dstCache, srcCacheCount, pSrcCaches );
+    }
+
+
+    VkResult vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t * pDataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetValidationCacheDataEXT( device, validationCache, pDataSize, pData );
+    }
+
+  //=== VK_NV_shading_rate_image ===
+
+
+    void vkCmdBindShadingRateImageNV( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindShadingRateImageNV( commandBuffer, imageView, imageLayout );
+    }
+
+
+    void vkCmdSetViewportShadingRatePaletteNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV * pShadingRatePalettes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportShadingRatePaletteNV( commandBuffer, firstViewport, viewportCount, pShadingRatePalettes );
+    }
+
+
+    void vkCmdSetCoarseSampleOrderNV( VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV * pCustomSampleOrders ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoarseSampleOrderNV( commandBuffer, sampleOrderType, customSampleOrderCount, pCustomSampleOrders );
+    }
+
+  //=== VK_NV_ray_tracing ===
+
+
+    VkResult vkCreateAccelerationStructureNV( VkDevice device, const VkAccelerationStructureCreateInfoNV * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkAccelerationStructureNV * pAccelerationStructure ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateAccelerationStructureNV( device, pCreateInfo, pAllocator, pAccelerationStructure );
+    }
+
+
+    void vkDestroyAccelerationStructureNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyAccelerationStructureNV( device, accelerationStructure, pAllocator );
+    }
+
+
+    void vkGetAccelerationStructureMemoryRequirementsNV( VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV * pInfo, VkMemoryRequirements2KHR * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
+    }
+
+
+    VkResult vkBindAccelerationStructureMemoryNV( VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV * pBindInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindAccelerationStructureMemoryNV( device, bindInfoCount, pBindInfos );
+    }
+
+
+    void vkCmdBuildAccelerationStructureNV( VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV * pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBuildAccelerationStructureNV( commandBuffer, pInfo, instanceData, instanceOffset, update, dst, src, scratch, scratchOffset );
+    }
+
+
+    void vkCmdCopyAccelerationStructureNV( VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyAccelerationStructureNV( commandBuffer, dst, src, mode );
+    }
+
+
+    void vkCmdTraceRaysNV( VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysNV( commandBuffer, raygenShaderBindingTableBuffer, raygenShaderBindingOffset, missShaderBindingTableBuffer, missShaderBindingOffset, missShaderBindingStride, hitShaderBindingTableBuffer, hitShaderBindingOffset, hitShaderBindingStride, callableShaderBindingTableBuffer, callableShaderBindingOffset, callableShaderBindingStride, width, height, depth );
+    }
+
+
+    VkResult vkCreateRayTracingPipelinesNV( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV * pCreateInfos, const VkAllocationCallbacks * pAllocator, VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRayTracingPipelinesNV( device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+
+    VkResult vkGetRayTracingShaderGroupHandlesNV( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingShaderGroupHandlesNV( device, pipeline, firstGroup, groupCount, dataSize, pData );
+    }
+
+
+    VkResult vkGetAccelerationStructureHandleNV( VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureHandleNV( device, accelerationStructure, dataSize, pData );
+    }
+
+
+    void vkCmdWriteAccelerationStructuresPropertiesNV( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV * pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteAccelerationStructuresPropertiesNV( commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery );
+    }
+
+
+    VkResult vkCompileDeferredNV( VkDevice device, VkPipeline pipeline, uint32_t shader ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCompileDeferredNV( device, pipeline, shader );
+    }
+
+  //=== VK_KHR_maintenance3 ===
+
+
+    void vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo * pCreateInfo, VkDescriptorSetLayoutSupport * pSupport ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSupportKHR( device, pCreateInfo, pSupport );
+    }
+
+  //=== VK_KHR_draw_indirect_count ===
+
+
+    void vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+
+    void vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawIndexedIndirectCountKHR( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+  //=== VK_EXT_external_memory_host ===
+
+
+    VkResult vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void * pHostPointer, VkMemoryHostPointerPropertiesEXT * pMemoryHostPointerProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryHostPointerPropertiesEXT( device, handleType, pHostPointer, pMemoryHostPointerProperties );
+    }
+
+  //=== VK_AMD_buffer_marker ===
+
+
+    void vkCmdWriteBufferMarkerAMD( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteBufferMarkerAMD( commandBuffer, pipelineStage, dstBuffer, dstOffset, marker );
+    }
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+
+    VkResult vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t * pTimeDomainCount, VkTimeDomainEXT * pTimeDomains ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( physicalDevice, pTimeDomainCount, pTimeDomains );
+    }
+
+
+    VkResult vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT * pTimestampInfos, uint64_t * pTimestamps, uint64_t * pMaxDeviation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetCalibratedTimestampsEXT( device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation );
+    }
+
+  //=== VK_NV_mesh_shader ===
+
+
+    void vkCmdDrawMeshTasksNV( VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksNV( commandBuffer, taskCount, firstTask );
+    }
+
+
+    void vkCmdDrawMeshTasksIndirectNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksIndirectNV( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+
+    void vkCmdDrawMeshTasksIndirectCountNV( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksIndirectCountNV( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+  //=== VK_NV_scissor_exclusive ===
+
+
+    void vkCmdSetExclusiveScissorNV( VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D * pExclusiveScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetExclusiveScissorNV( commandBuffer, firstExclusiveScissor, exclusiveScissorCount, pExclusiveScissors );
+    }
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+
+
+    void vkCmdSetCheckpointNV( VkCommandBuffer commandBuffer, const void * pCheckpointMarker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCheckpointNV( commandBuffer, pCheckpointMarker );
+    }
+
+
+    void vkGetQueueCheckpointDataNV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointDataNV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetQueueCheckpointDataNV( queue, pCheckpointDataCount, pCheckpointData );
+    }
+
+  //=== VK_KHR_timeline_semaphore ===
+
+
+    VkResult vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t * pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreCounterValueKHR( device, semaphore, pValue );
+    }
+
+
+    VkResult vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo * pWaitInfo, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitSemaphoresKHR( device, pWaitInfo, timeout );
+    }
+
+
+    VkResult vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo * pSignalInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSignalSemaphoreKHR( device, pSignalInfo );
+    }
+
+  //=== VK_INTEL_performance_query ===
+
+
+    VkResult vkInitializePerformanceApiINTEL( VkDevice device, const VkInitializePerformanceApiInfoINTEL * pInitializeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkInitializePerformanceApiINTEL( device, pInitializeInfo );
+    }
+
+
+    void vkUninitializePerformanceApiINTEL( VkDevice device ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkUninitializePerformanceApiINTEL( device );
+    }
+
+
+    VkResult vkCmdSetPerformanceMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceMarkerINTEL( commandBuffer, pMarkerInfo );
+    }
+
+
+    VkResult vkCmdSetPerformanceStreamMarkerINTEL( VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL * pMarkerInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceStreamMarkerINTEL( commandBuffer, pMarkerInfo );
+    }
+
+
+    VkResult vkCmdSetPerformanceOverrideINTEL( VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL * pOverrideInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPerformanceOverrideINTEL( commandBuffer, pOverrideInfo );
+    }
+
+
+    VkResult vkAcquirePerformanceConfigurationINTEL( VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL * pAcquireInfo, VkPerformanceConfigurationINTEL * pConfiguration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquirePerformanceConfigurationINTEL( device, pAcquireInfo, pConfiguration );
+    }
+
+
+    VkResult vkReleasePerformanceConfigurationINTEL( VkDevice device, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleasePerformanceConfigurationINTEL( device, configuration );
+    }
+
+
+    VkResult vkQueueSetPerformanceConfigurationINTEL( VkQueue queue, VkPerformanceConfigurationINTEL configuration ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSetPerformanceConfigurationINTEL( queue, configuration );
+    }
+
+
+    VkResult vkGetPerformanceParameterINTEL( VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL * pValue ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPerformanceParameterINTEL( device, parameter, pValue );
+    }
+
+  //=== VK_AMD_display_native_hdr ===
+
+
+    void vkSetLocalDimmingAMD( VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetLocalDimmingAMD( device, swapChain, localDimmingEnable );
+    }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+
+
+    VkResult vkCreateImagePipeSurfaceFUCHSIA( VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateImagePipeSurfaceFUCHSIA( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+
+
+    VkResult vkCreateMetalSurfaceEXT( VkInstance instance, const VkMetalSurfaceCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateMetalSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+
+    VkResult vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, uint32_t * pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR * pFragmentShadingRates ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceFragmentShadingRatesKHR( physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates );
+    }
+
+
+    void vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, const VkExtent2D * pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetFragmentShadingRateKHR( commandBuffer, pFragmentSize, combinerOps );
+    }
+
+  //=== VK_EXT_buffer_device_address ===
+
+
+    VkDeviceAddress vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddressEXT( device, pInfo );
+    }
+
+  //=== VK_EXT_tooling_info ===
+
+
+    VkResult vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t * pToolCount, VkPhysicalDeviceToolProperties * pToolProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceToolPropertiesEXT( physicalDevice, pToolCount, pToolProperties );
+    }
+
+  //=== VK_KHR_present_wait ===
+
+
+    VkResult vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWaitForPresentKHR( device, swapchain, presentId, timeout );
+    }
+
+  //=== VK_NV_cooperative_matrix ===
+
+
+    VkResult vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( VkPhysicalDevice physicalDevice, uint32_t * pPropertyCount, VkCooperativeMatrixPropertiesNV * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( physicalDevice, pPropertyCount, pProperties );
+    }
+
+  //=== VK_NV_coverage_reduction_mode ===
+
+
+    VkResult vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( VkPhysicalDevice physicalDevice, uint32_t * pCombinationCount, VkFramebufferMixedSamplesCombinationNV * pCombinations ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( physicalDevice, pCombinationCount, pCombinations );
+    }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+
+    VkResult vkGetPhysicalDeviceSurfacePresentModes2EXT( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pPresentModeCount, VkPresentModeKHR * pPresentModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceSurfacePresentModes2EXT( physicalDevice, pSurfaceInfo, pPresentModeCount, pPresentModes );
+    }
+
+
+    VkResult vkAcquireFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireFullScreenExclusiveModeEXT( device, swapchain );
+    }
+
+
+    VkResult vkReleaseFullScreenExclusiveModeEXT( VkDevice device, VkSwapchainKHR swapchain ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseFullScreenExclusiveModeEXT( device, swapchain );
+    }
+
+
+    VkResult vkGetDeviceGroupSurfacePresentModes2EXT( VkDevice device, const VkPhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VkDeviceGroupPresentModeFlagsKHR * pModes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceGroupSurfacePresentModes2EXT( device, pSurfaceInfo, pModes );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+
+
+    VkResult vkCreateHeadlessSurfaceEXT( VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateHeadlessSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+  //=== VK_KHR_buffer_device_address ===
+
+
+    VkDeviceAddress vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferDeviceAddressKHR( device, pInfo );
+    }
+
+
+    uint64_t vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferOpaqueCaptureAddressKHR( device, pInfo );
+    }
+
+
+    uint64_t vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMemoryOpaqueCaptureAddressKHR( device, pInfo );
+    }
+
+  //=== VK_EXT_line_rasterization ===
+
+
+    void vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineStippleEXT( commandBuffer, lineStippleFactor, lineStipplePattern );
+    }
+
+  //=== VK_EXT_host_query_reset ===
+
+
+    void vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkResetQueryPoolEXT( device, queryPool, firstQuery, queryCount );
+    }
+
+  //=== VK_EXT_extended_dynamic_state ===
+
+
+    void vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCullModeEXT( commandBuffer, cullMode );
+    }
+
+
+    void vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetFrontFaceEXT( commandBuffer, frontFace );
+    }
+
+
+    void vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPrimitiveTopologyEXT( commandBuffer, primitiveTopology );
+    }
+
+
+    void vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport * pViewports ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportWithCountEXT( commandBuffer, viewportCount, pViewports );
+    }
+
+
+    void vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D * pScissors ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetScissorWithCountEXT( commandBuffer, scissorCount, pScissors );
+    }
+
+
+    void vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer * pBuffers, const VkDeviceSize * pOffsets, const VkDeviceSize * pSizes, const VkDeviceSize * pStrides ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindVertexBuffers2EXT( commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides );
+    }
+
+
+    void vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthTestEnableEXT( commandBuffer, depthTestEnable );
+    }
+
+
+    void vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthWriteEnableEXT( commandBuffer, depthWriteEnable );
+    }
+
+
+    void vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthCompareOpEXT( commandBuffer, depthCompareOp );
+    }
+
+
+    void vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBoundsTestEnableEXT( commandBuffer, depthBoundsTestEnable );
+    }
+
+
+    void vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilTestEnableEXT( commandBuffer, stencilTestEnable );
+    }
+
+
+    void vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetStencilOpEXT( commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp );
+    }
+
+  //=== VK_KHR_deferred_host_operations ===
+
+
+    VkResult vkCreateDeferredOperationKHR( VkDevice device, const VkAllocationCallbacks * pAllocator, VkDeferredOperationKHR * pDeferredOperation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDeferredOperationKHR( device, pAllocator, pDeferredOperation );
+    }
+
+
+    void vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyDeferredOperationKHR( device, operation, pAllocator );
+    }
+
+
+    uint32_t vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeferredOperationMaxConcurrencyKHR( device, operation );
+    }
+
+
+    VkResult vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeferredOperationResultKHR( device, operation );
+    }
+
+
+    VkResult vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDeferredOperationJoinKHR( device, operation );
+    }
+
+  //=== VK_KHR_pipeline_executable_properties ===
+
+
+    VkResult vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR * pPipelineInfo, uint32_t * pExecutableCount, VkPipelineExecutablePropertiesKHR * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutablePropertiesKHR( device, pPipelineInfo, pExecutableCount, pProperties );
+    }
+
+
+    VkResult vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR * pExecutableInfo, uint32_t * pStatisticCount, VkPipelineExecutableStatisticKHR * pStatistics ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutableStatisticsKHR( device, pExecutableInfo, pStatisticCount, pStatistics );
+    }
+
+
+    VkResult vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR * pExecutableInfo, uint32_t * pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR * pInternalRepresentations ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelineExecutableInternalRepresentationsKHR( device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations );
+    }
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+
+
+    VkResult vkReleaseSwapchainImagesEXT( VkDevice device, const VkReleaseSwapchainImagesInfoEXT * pReleaseInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkReleaseSwapchainImagesEXT( device, pReleaseInfo );
+    }
+
+  //=== VK_NV_device_generated_commands ===
+
+
+    void vkGetGeneratedCommandsMemoryRequirementsNV( VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetGeneratedCommandsMemoryRequirementsNV( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkCmdPreprocessGeneratedCommandsNV( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPreprocessGeneratedCommandsNV( commandBuffer, pGeneratedCommandsInfo );
+    }
+
+
+    void vkCmdExecuteGeneratedCommandsNV( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV * pGeneratedCommandsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdExecuteGeneratedCommandsNV( commandBuffer, isPreprocessed, pGeneratedCommandsInfo );
+    }
+
+
+    void vkCmdBindPipelineShaderGroupNV( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindPipelineShaderGroupNV( commandBuffer, pipelineBindPoint, pipeline, groupIndex );
+    }
+
+
+    VkResult vkCreateIndirectCommandsLayoutNV( VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkIndirectCommandsLayoutNV * pIndirectCommandsLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateIndirectCommandsLayoutNV( device, pCreateInfo, pAllocator, pIndirectCommandsLayout );
+    }
+
+
+    void vkDestroyIndirectCommandsLayoutNV( VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyIndirectCommandsLayoutNV( device, indirectCommandsLayout, pAllocator );
+    }
+
+  //=== VK_EXT_acquire_drm_display ===
+
+
+    VkResult vkAcquireDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireDrmDisplayEXT( physicalDevice, drmFd, display );
+    }
+
+
+    VkResult vkGetDrmDisplayEXT( VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR * display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDrmDisplayEXT( physicalDevice, drmFd, connectorId, display );
+    }
+
+  //=== VK_EXT_private_data ===
+
+
+    VkResult vkCreatePrivateDataSlotEXT( VkDevice device, const VkPrivateDataSlotCreateInfo * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkPrivateDataSlot * pPrivateDataSlot ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreatePrivateDataSlotEXT( device, pCreateInfo, pAllocator, pPrivateDataSlot );
+    }
+
+
+    void vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyPrivateDataSlotEXT( device, privateDataSlot, pAllocator );
+    }
+
+
+    VkResult vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, data );
+    }
+
+
+    void vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPrivateDataEXT( device, objectType, objectHandle, privateDataSlot, pData );
+    }
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+
+
+    void vkCmdEncodeVideoKHR( VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR * pEncodeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdEncodeVideoKHR( commandBuffer, pEncodeInfo );
+    }
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+
+
+    void vkExportMetalObjectsEXT( VkDevice device, VkExportMetalObjectsInfoEXT * pMetalObjectsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkExportMetalObjectsEXT( device, pMetalObjectsInfo );
+    }
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+
+
+    void vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetEvent2KHR( commandBuffer, event, pDependencyInfo );
+    }
+
+
+    void vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResetEvent2KHR( commandBuffer, event, stageMask );
+    }
+
+
+    void vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent * pEvents, const VkDependencyInfo * pDependencyInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWaitEvents2KHR( commandBuffer, eventCount, pEvents, pDependencyInfos );
+    }
+
+
+    void vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo * pDependencyInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdPipelineBarrier2KHR( commandBuffer, pDependencyInfo );
+    }
+
+
+    void vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteTimestamp2KHR( commandBuffer, stage, queryPool, query );
+    }
+
+
+    VkResult vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2 * pSubmits, VkFence fence ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkQueueSubmit2KHR( queue, submitCount, pSubmits, fence );
+    }
+
+
+    void vkCmdWriteBufferMarker2AMD( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteBufferMarker2AMD( commandBuffer, stage, dstBuffer, dstOffset, marker );
+    }
+
+
+    void vkGetQueueCheckpointData2NV( VkQueue queue, uint32_t * pCheckpointDataCount, VkCheckpointData2NV * pCheckpointData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetQueueCheckpointData2NV( queue, pCheckpointDataCount, pCheckpointData );
+    }
+
+  //=== VK_EXT_descriptor_buffer ===
+
+
+    void vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize * pLayoutSizeInBytes ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutSizeEXT( device, layout, pLayoutSizeInBytes );
+    }
+
+
+    void vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, VkDescriptorSetLayout layout, uint32_t binding, VkDeviceSize * pOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutBindingOffsetEXT( device, layout, binding, pOffset );
+    }
+
+
+    void vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT * pDescriptorInfo, size_t dataSize, void * pDescriptor ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorEXT( device, pDescriptorInfo, dataSize, pDescriptor );
+    }
+
+
+    void vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT * pBindingInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindDescriptorBuffersEXT( commandBuffer, bufferCount, pBindingInfos );
+    }
+
+
+    void vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t * pBufferIndices, const VkDeviceSize * pOffsets ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDescriptorBufferOffsetsEXT( commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets );
+    }
+
+
+    void vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindDescriptorBufferEmbeddedSamplersEXT( commandBuffer, pipelineBindPoint, layout, set );
+    }
+
+
+    VkResult vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferOpaqueCaptureDescriptorDataEXT( device, pInfo, pData );
+    }
+
+
+    VkResult vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageOpaqueCaptureDescriptorDataEXT( device, pInfo, pData );
+    }
+
+
+    VkResult vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageViewCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageViewOpaqueCaptureDescriptorDataEXT( device, pInfo, pData );
+    }
+
+
+    VkResult vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSamplerOpaqueCaptureDescriptorDataEXT( device, pInfo, pData );
+    }
+
+
+    VkResult vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkAccelerationStructureCaptureDescriptorDataInfoEXT * pInfo, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( device, pInfo, pData );
+    }
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+
+
+    void vkCmdSetFragmentShadingRateEnumNV( VkCommandBuffer commandBuffer, VkFragmentShadingRateNV shadingRate, const VkFragmentShadingRateCombinerOpKHR combinerOps[2] ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetFragmentShadingRateEnumNV( commandBuffer, shadingRate, combinerOps );
+    }
+
+  //=== VK_EXT_mesh_shader ===
+
+
+    void vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksEXT( commandBuffer, groupCountX, groupCountY, groupCountZ );
+    }
+
+
+    void vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksIndirectEXT( commandBuffer, buffer, offset, drawCount, stride );
+    }
+
+
+    void vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMeshTasksIndirectCountEXT( commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride );
+    }
+
+  //=== VK_KHR_copy_commands2 ===
+
+
+    void vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2 * pCopyBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBuffer2KHR( commandBuffer, pCopyBufferInfo );
+    }
+
+
+    void vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2 * pCopyImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImage2KHR( commandBuffer, pCopyImageInfo );
+    }
+
+
+    void vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2 * pCopyBufferToImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyBufferToImage2KHR( commandBuffer, pCopyBufferToImageInfo );
+    }
+
+
+    void vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2 * pCopyImageToBufferInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyImageToBuffer2KHR( commandBuffer, pCopyImageToBufferInfo );
+    }
+
+
+    void vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2 * pBlitImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBlitImage2KHR( commandBuffer, pBlitImageInfo );
+    }
+
+
+    void vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2 * pResolveImageInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdResolveImage2KHR( commandBuffer, pResolveImageInfo );
+    }
+
+  //=== VK_EXT_image_compression_control ===
+
+
+    void vkGetImageSubresourceLayout2EXT( VkDevice device, VkImage image, const VkImageSubresource2EXT * pSubresource, VkSubresourceLayout2EXT * pLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetImageSubresourceLayout2EXT( device, image, pSubresource, pLayout );
+    }
+
+  //=== VK_EXT_device_fault ===
+
+
+    VkResult vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT * pFaultCounts, VkDeviceFaultInfoEXT * pFaultInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceFaultInfoEXT( device, pFaultCounts, pFaultInfo );
+    }
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+
+
+    VkResult vkAcquireWinrtDisplayNV( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkAcquireWinrtDisplayNV( physicalDevice, display );
+    }
+
+
+    VkResult vkGetWinrtDisplayNV( VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR * pDisplay ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetWinrtDisplayNV( physicalDevice, deviceRelativeId, pDisplay );
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+
+    VkResult vkCreateDirectFBSurfaceEXT( VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateDirectFBSurfaceEXT( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+
+    VkBool32 vkGetPhysicalDeviceDirectFBPresentationSupportEXT( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB * dfb ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceDirectFBPresentationSupportEXT( physicalDevice, queueFamilyIndex, dfb );
+    }
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+
+    void vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth );
+    }
+
+
+    VkResult vkCreateRayTracingPipelinesKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR * pCreateInfos, const VkAllocationCallbacks * pAllocator, VkPipeline * pPipelines ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateRayTracingPipelinesKHR( device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines );
+    }
+
+
+    VkResult vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
+    }
+
+
+    VkResult vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void * pData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( device, pipeline, firstGroup, groupCount, dataSize, pData );
+    }
+
+
+    void vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR * pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR * pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysIndirectKHR( commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress );
+    }
+
+
+    VkDeviceSize vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetRayTracingShaderGroupStackSizeKHR( device, pipeline, group, groupShader );
+    }
+
+
+    void vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetRayTracingPipelineStackSizeKHR( commandBuffer, pipelineStackSize );
+    }
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+
+
+    void vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT * pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT * pVertexAttributeDescriptions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetVertexInputEXT( commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions );
+    }
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+
+
+    VkResult vkGetMemoryZirconHandleFUCHSIA( VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle );
+    }
+
+
+    VkResult vkGetMemoryZirconHandlePropertiesFUCHSIA( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VkMemoryZirconHandlePropertiesFUCHSIA * pMemoryZirconHandleProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryZirconHandlePropertiesFUCHSIA( device, handleType, zirconHandle, pMemoryZirconHandleProperties );
+    }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+
+
+    VkResult vkImportSemaphoreZirconHandleFUCHSIA( VkDevice device, const VkImportSemaphoreZirconHandleInfoFUCHSIA * pImportSemaphoreZirconHandleInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkImportSemaphoreZirconHandleFUCHSIA( device, pImportSemaphoreZirconHandleInfo );
+    }
+
+
+    VkResult vkGetSemaphoreZirconHandleFUCHSIA( VkDevice device, const VkSemaphoreGetZirconHandleInfoFUCHSIA * pGetZirconHandleInfo, zx_handle_t * pZirconHandle ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetSemaphoreZirconHandleFUCHSIA( device, pGetZirconHandleInfo, pZirconHandle );
+    }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+
+
+    VkResult vkCreateBufferCollectionFUCHSIA( VkDevice device, const VkBufferCollectionCreateInfoFUCHSIA * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkBufferCollectionFUCHSIA * pCollection ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateBufferCollectionFUCHSIA( device, pCreateInfo, pAllocator, pCollection );
+    }
+
+
+    VkResult vkSetBufferCollectionImageConstraintsFUCHSIA( VkDevice device, VkBufferCollectionFUCHSIA collection, const VkImageConstraintsInfoFUCHSIA * pImageConstraintsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetBufferCollectionImageConstraintsFUCHSIA( device, collection, pImageConstraintsInfo );
+    }
+
+
+    VkResult vkSetBufferCollectionBufferConstraintsFUCHSIA( VkDevice device, VkBufferCollectionFUCHSIA collection, const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetBufferCollectionBufferConstraintsFUCHSIA( device, collection, pBufferConstraintsInfo );
+    }
+
+
+    void vkDestroyBufferCollectionFUCHSIA( VkDevice device, VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyBufferCollectionFUCHSIA( device, collection, pAllocator );
+    }
+
+
+    VkResult vkGetBufferCollectionPropertiesFUCHSIA( VkDevice device, VkBufferCollectionFUCHSIA collection, VkBufferCollectionPropertiesFUCHSIA * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetBufferCollectionPropertiesFUCHSIA( device, collection, pProperties );
+    }
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+
+
+    VkResult vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( VkDevice device, VkRenderPass renderpass, VkExtent2D * pMaxWorkgroupSize ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( device, renderpass, pMaxWorkgroupSize );
+    }
+
+
+    void vkCmdSubpassShadingHUAWEI( VkCommandBuffer commandBuffer ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSubpassShadingHUAWEI( commandBuffer );
+    }
+
+  //=== VK_HUAWEI_invocation_mask ===
+
+
+    void vkCmdBindInvocationMaskHUAWEI( VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBindInvocationMaskHUAWEI( commandBuffer, imageView, imageLayout );
+    }
+
+  //=== VK_NV_external_memory_rdma ===
+
+
+    VkResult vkGetMemoryRemoteAddressNV( VkDevice device, const VkMemoryGetRemoteAddressInfoNV * pMemoryGetRemoteAddressInfo, VkRemoteAddressNV * pAddress ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMemoryRemoteAddressNV( device, pMemoryGetRemoteAddressInfo, pAddress );
+    }
+
+  //=== VK_EXT_pipeline_properties ===
+
+
+    VkResult vkGetPipelinePropertiesEXT( VkDevice device, const VkPipelineInfoEXT * pPipelineInfo, VkBaseOutStructure * pPipelineProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPipelinePropertiesEXT( device, pPipelineInfo, pPipelineProperties );
+    }
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+
+
+    void vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPatchControlPointsEXT( commandBuffer, patchControlPoints );
+    }
+
+
+    void vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetRasterizerDiscardEnableEXT( commandBuffer, rasterizerDiscardEnable );
+    }
+
+
+    void vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthBiasEnableEXT( commandBuffer, depthBiasEnable );
+    }
+
+
+    void vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLogicOpEXT( commandBuffer, logicOp );
+    }
+
+
+    void vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPrimitiveRestartEnableEXT( commandBuffer, primitiveRestartEnable );
+    }
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+
+    VkResult vkCreateScreenSurfaceQNX( VkInstance instance, const VkScreenSurfaceCreateInfoQNX * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkSurfaceKHR * pSurface ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateScreenSurfaceQNX( instance, pCreateInfo, pAllocator, pSurface );
+    }
+
+
+    VkBool32 vkGetPhysicalDeviceScreenPresentationSupportQNX( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, struct _screen_window * window ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceScreenPresentationSupportQNX( physicalDevice, queueFamilyIndex, window );
+    }
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+
+
+    void vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32 * pColorWriteEnables ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetColorWriteEnableEXT( commandBuffer, attachmentCount, pColorWriteEnables );
+    }
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+
+
+    void vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdTraceRaysIndirect2KHR( commandBuffer, indirectDeviceAddress );
+    }
+
+  //=== VK_EXT_multi_draw ===
+
+
+    void vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT * pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMultiEXT( commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride );
+    }
+
+
+    void vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT * pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t * pVertexOffset ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDrawMultiIndexedEXT( commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset );
+    }
+
+  //=== VK_EXT_opacity_micromap ===
+
+
+    VkResult vkCreateMicromapEXT( VkDevice device, const VkMicromapCreateInfoEXT * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkMicromapEXT * pMicromap ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateMicromapEXT( device, pCreateInfo, pAllocator, pMicromap );
+    }
+
+
+    void vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyMicromapEXT( device, micromap, pAllocator );
+    }
+
+
+    void vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdBuildMicromapsEXT( commandBuffer, infoCount, pInfos );
+    }
+
+
+    VkResult vkBuildMicromapsEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT * pInfos ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBuildMicromapsEXT( device, deferredOperation, infoCount, pInfos );
+    }
+
+
+    VkResult vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyMicromapEXT( device, deferredOperation, pInfo );
+    }
+
+
+    VkResult vkCopyMicromapToMemoryEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyMicromapToMemoryEXT( device, deferredOperation, pInfo );
+    }
+
+
+    VkResult vkCopyMemoryToMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCopyMemoryToMicromapEXT( device, deferredOperation, pInfo );
+    }
+
+
+    VkResult vkWriteMicromapsPropertiesEXT( VkDevice device, uint32_t micromapCount, const VkMicromapEXT * pMicromaps, VkQueryType queryType, size_t dataSize, void * pData, size_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkWriteMicromapsPropertiesEXT( device, micromapCount, pMicromaps, queryType, dataSize, pData, stride );
+    }
+
+
+    void vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMicromapEXT( commandBuffer, pInfo );
+    }
+
+
+    void vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMicromapToMemoryEXT( commandBuffer, pInfo );
+    }
+
+
+    void vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT * pInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMemoryToMicromapEXT( commandBuffer, pInfo );
+    }
+
+
+    void vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT * pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdWriteMicromapsPropertiesEXT( commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery );
+    }
+
+
+    void vkGetDeviceMicromapCompatibilityEXT( VkDevice device, const VkMicromapVersionInfoEXT * pVersionInfo, VkAccelerationStructureCompatibilityKHR * pCompatibility ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceMicromapCompatibilityEXT( device, pVersionInfo, pCompatibility );
+    }
+
+
+    void vkGetMicromapBuildSizesEXT( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT * pBuildInfo, VkMicromapBuildSizesInfoEXT * pSizeInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetMicromapBuildSizesEXT( device, buildType, pBuildInfo, pSizeInfo );
+    }
+
+  //=== VK_EXT_pageable_device_local_memory ===
+
+
+    void vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkSetDeviceMemoryPriorityEXT( device, memory, priority );
+    }
+
+  //=== VK_KHR_maintenance4 ===
+
+
+    void vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, const VkDeviceBufferMemoryRequirements * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceBufferMemoryRequirementsKHR( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, const VkDeviceImageMemoryRequirements * pInfo, VkMemoryRequirements2 * pMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceImageMemoryRequirementsKHR( device, pInfo, pMemoryRequirements );
+    }
+
+
+    void vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, const VkDeviceImageMemoryRequirements * pInfo, uint32_t * pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2 * pSparseMemoryRequirements ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDeviceImageSparseMemoryRequirementsKHR( device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements );
+    }
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+
+
+    void vkGetDescriptorSetLayoutHostMappingInfoVALVE( VkDevice device, const VkDescriptorSetBindingReferenceVALVE * pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE * pHostMapping ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetLayoutHostMappingInfoVALVE( device, pBindingReference, pHostMapping );
+    }
+
+
+    void vkGetDescriptorSetHostMappingVALVE( VkDevice device, VkDescriptorSet descriptorSet, void ** ppData ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDescriptorSetHostMappingVALVE( device, descriptorSet, ppData );
+    }
+
+  //=== VK_NV_copy_memory_indirect ===
+
+
+    void vkCmdCopyMemoryIndirectNV( VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMemoryIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride );
+    }
+
+
+    void vkCmdCopyMemoryToImageIndirectNV( VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VkImage dstImage, VkImageLayout dstImageLayout, const VkImageSubresourceLayers * pImageSubresources ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdCopyMemoryToImageIndirectNV( commandBuffer, copyBufferAddress, copyCount, stride, dstImage, dstImageLayout, pImageSubresources );
+    }
+
+  //=== VK_NV_memory_decompression ===
+
+
+    void vkCmdDecompressMemoryNV( VkCommandBuffer commandBuffer, uint32_t decompressRegionCount, const VkDecompressMemoryRegionNV * pDecompressMemoryRegions ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDecompressMemoryNV( commandBuffer, decompressRegionCount, pDecompressMemoryRegions );
+    }
+
+
+    void vkCmdDecompressMemoryIndirectCountNV( VkCommandBuffer commandBuffer, VkDeviceAddress indirectCommandsAddress, VkDeviceAddress indirectCommandsCountAddress, uint32_t stride ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdDecompressMemoryIndirectCountNV( commandBuffer, indirectCommandsAddress, indirectCommandsCountAddress, stride );
+    }
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+
+
+    void vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetTessellationDomainOriginEXT( commandBuffer, domainOrigin );
+    }
+
+
+    void vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthClampEnableEXT( commandBuffer, depthClampEnable );
+    }
+
+
+    void vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetPolygonModeEXT( commandBuffer, polygonMode );
+    }
+
+
+    void vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetRasterizationSamplesEXT( commandBuffer, rasterizationSamples );
+    }
+
+
+    void vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask * pSampleMask ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetSampleMaskEXT( commandBuffer, samples, pSampleMask );
+    }
+
+
+    void vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetAlphaToCoverageEnableEXT( commandBuffer, alphaToCoverageEnable );
+    }
+
+
+    void vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetAlphaToOneEnableEXT( commandBuffer, alphaToOneEnable );
+    }
+
+
+    void vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLogicOpEnableEXT( commandBuffer, logicOpEnable );
+    }
+
+
+    void vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32 * pColorBlendEnables ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetColorBlendEnableEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables );
+    }
+
+
+    void vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT * pColorBlendEquations ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetColorBlendEquationEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations );
+    }
+
+
+    void vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags * pColorWriteMasks ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetColorWriteMaskEXT( commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks );
+    }
+
+
+    void vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetRasterizationStreamEXT( commandBuffer, rasterizationStream );
+    }
+
+
+    void vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetConservativeRasterizationModeEXT( commandBuffer, conservativeRasterizationMode );
+    }
+
+
+    void vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetExtraPrimitiveOverestimationSizeEXT( commandBuffer, extraPrimitiveOverestimationSize );
+    }
+
+
+    void vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthClipEnableEXT( commandBuffer, depthClipEnable );
+    }
+
+
+    void vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetSampleLocationsEnableEXT( commandBuffer, sampleLocationsEnable );
+    }
+
+
+    void vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT * pColorBlendAdvanced ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetColorBlendAdvancedEXT( commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced );
+    }
+
+
+    void vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetProvokingVertexModeEXT( commandBuffer, provokingVertexMode );
+    }
+
+
+    void vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineRasterizationModeEXT( commandBuffer, lineRasterizationMode );
+    }
+
+
+    void vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetLineStippleEnableEXT( commandBuffer, stippledLineEnable );
+    }
+
+
+    void vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetDepthClipNegativeOneToOneEXT( commandBuffer, negativeOneToOne );
+    }
+
+
+    void vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportWScalingEnableNV( commandBuffer, viewportWScalingEnable );
+    }
+
+
+    void vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV * pViewportSwizzles ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetViewportSwizzleNV( commandBuffer, firstViewport, viewportCount, pViewportSwizzles );
+    }
+
+
+    void vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoverageToColorEnableNV( commandBuffer, coverageToColorEnable );
+    }
+
+
+    void vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoverageToColorLocationNV( commandBuffer, coverageToColorLocation );
+    }
+
+
+    void vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoverageModulationModeNV( commandBuffer, coverageModulationMode );
+    }
+
+
+    void vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoverageModulationTableEnableNV( commandBuffer, coverageModulationTableEnable );
+    }
+
+
+    void vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float * pCoverageModulationTable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoverageModulationTableNV( commandBuffer, coverageModulationTableCount, pCoverageModulationTable );
+    }
+
+
+    void vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetShadingRateImageEnableNV( commandBuffer, shadingRateImageEnable );
+    }
+
+
+    void vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetRepresentativeFragmentTestEnableNV( commandBuffer, representativeFragmentTestEnable );
+    }
+
+
+    void vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdSetCoverageReductionModeNV( commandBuffer, coverageReductionMode );
+    }
+
+  //=== VK_EXT_shader_module_identifier ===
+
+
+    void vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetShaderModuleIdentifierEXT( device, shaderModule, pIdentifier );
+    }
+
+
+    void vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, const VkShaderModuleCreateInfo * pCreateInfo, VkShaderModuleIdentifierEXT * pIdentifier ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetShaderModuleCreateInfoIdentifierEXT( device, pCreateInfo, pIdentifier );
+    }
+
+  //=== VK_NV_optical_flow ===
+
+
+    VkResult vkGetPhysicalDeviceOpticalFlowImageFormatsNV( VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV * pOpticalFlowImageFormatInfo, uint32_t * pFormatCount, VkOpticalFlowImageFormatPropertiesNV * pImageFormatProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetPhysicalDeviceOpticalFlowImageFormatsNV( physicalDevice, pOpticalFlowImageFormatInfo, pFormatCount, pImageFormatProperties );
+    }
+
+
+    VkResult vkCreateOpticalFlowSessionNV( VkDevice device, const VkOpticalFlowSessionCreateInfoNV * pCreateInfo, const VkAllocationCallbacks * pAllocator, VkOpticalFlowSessionNV * pSession ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCreateOpticalFlowSessionNV( device, pCreateInfo, pAllocator, pSession );
+    }
+
+
+    void vkDestroyOpticalFlowSessionNV( VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks * pAllocator ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkDestroyOpticalFlowSessionNV( device, session, pAllocator );
+    }
+
+
+    VkResult vkBindOpticalFlowSessionImageNV( VkDevice device, VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, VkImageView view, VkImageLayout layout ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkBindOpticalFlowSessionImageNV( device, session, bindingPoint, view, layout );
+    }
+
+
+    void vkCmdOpticalFlowExecuteNV( VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV * pExecuteInfo ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkCmdOpticalFlowExecuteNV( commandBuffer, session, pExecuteInfo );
+    }
+
+  //=== VK_QCOM_tile_properties ===
+
+
+    VkResult vkGetFramebufferTilePropertiesQCOM( VkDevice device, VkFramebuffer framebuffer, uint32_t * pPropertiesCount, VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetFramebufferTilePropertiesQCOM( device, framebuffer, pPropertiesCount, pProperties );
+    }
+
+
+    VkResult vkGetDynamicRenderingTilePropertiesQCOM( VkDevice device, const VkRenderingInfo * pRenderingInfo, VkTilePropertiesQCOM * pProperties ) const VULKAN_HPP_NOEXCEPT
+    {
+      return ::vkGetDynamicRenderingTilePropertiesQCOM( device, pRenderingInfo, pProperties );
+    }
+
+  };
+#endif
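+
+  // Every DispatchLoaderStatic member above is a thin forwarder to the identically named
+  // global Vulkan prototype. Illustrative use only (assumes a valid, recorded command
+  // buffer and, for dynamic dispatch, an already initialized dispatcher):
+  //   VkCommandBuffer cmd = /* acquired elsewhere */;
+  //   VULKAN_HPP_DEFAULT_DISPATCHER.vkCmdSetLogicOpEXT( cmd, VK_LOGIC_OP_COPY );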
+
+  class DispatchLoaderDynamic;
+#if !defined(VULKAN_HPP_DISPATCH_LOADER_DYNAMIC)
+# if defined( VK_NO_PROTOTYPES )
+#  define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
+# else
+#  define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0
+# endif
+#endif
+
+#if !defined( VULKAN_HPP_STORAGE_API )
+#  if defined( VULKAN_HPP_STORAGE_SHARED )
+#    if defined( _MSC_VER )
+#      if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
+#        define VULKAN_HPP_STORAGE_API __declspec( dllexport )
+#      else
+#        define VULKAN_HPP_STORAGE_API __declspec( dllimport )
+#      endif
+#    elif defined( __clang__ ) || defined( __GNUC__ )
+#      if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
+#        define VULKAN_HPP_STORAGE_API __attribute__( ( visibility( "default" ) ) )
+#      else
+#        define VULKAN_HPP_STORAGE_API
+#      endif
+#    else
+#      define VULKAN_HPP_STORAGE_API
+#      pragma warning Unknown import / export semantics
+#    endif
+#  else
+#    define VULKAN_HPP_STORAGE_API
+#  endif
+#endif
+
+#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER )
+#  if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+#    define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic
+#    define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE                     \
+      namespace VULKAN_HPP_NAMESPACE                                               \
+      {                                                                            \
+        VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic; \
+      }
+  extern VULKAN_HPP_STORAGE_API DispatchLoaderDynamic defaultDispatchLoaderDynamic;
+#  else
+  static inline ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic & getDispatchLoaderStatic()
+  {
+    static ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic dls;
+    return dls;
+  }
+#    define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::getDispatchLoaderStatic()
+#    define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
+#  endif
+#endif
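+
+  // With dynamic dispatch selected, exactly one translation unit is expected to expand
+  // VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE, which defines the object behind the
+  // extern declaration above; every other translation unit only sees that declaration.
+  // Illustrative sketch (populating the table via DispatchLoaderDynamic's init() members is
+  // assumed and not shown in this section):
+  //   // in exactly one .cpp file:
+  //   VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE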
+
+#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE )
+#  if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+#    define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic
+#  else
+#    define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic
+#  endif
+#endif
+
+#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER )
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT
+#else
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT         = {}
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT       = VULKAN_HPP_DEFAULT_DISPATCHER
+#endif
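+
+  // When VULKAN_HPP_NO_DEFAULT_DISPATCHER is defined, the three macros above expand to
+  // nothing, so the default arguments disappear (e.g. in the ObjectDestroy / ObjectFree
+  // constructors below) and an allocator / dispatcher must be passed explicitly.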
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  struct AllocationCallbacks;
+
+  template <typename OwnerType, typename Dispatch>
+  class ObjectDestroy
+  {
+  public:
+    ObjectDestroy() = default;
+
+    ObjectDestroy( OwnerType owner,
+                   Optional<const AllocationCallbacks> allocationCallbacks
+                                             VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                   Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+      : m_owner( owner )
+      , m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
+
+    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
+    Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
+
+  protected:
+    template <typename T>
+    void destroy(T t) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+      m_owner.destroy( t, m_allocationCallbacks, *m_dispatch );
+    }
+
+  private:
+    OwnerType                           m_owner               = {};
+    Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+    Dispatch const *                    m_dispatch            = nullptr;
+  };
+
+  class NoParent;
+
+  template <typename Dispatch>
+  class ObjectDestroy<NoParent, Dispatch>
+  {
+  public:
+    ObjectDestroy() = default;
+
+    ObjectDestroy( Optional<const AllocationCallbacks> allocationCallbacks,
+                   Dispatch const & dispatch           VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+      : m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
+
+    Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT { return m_allocationCallbacks; }
+
+  protected:
+    template <typename T>
+    void destroy(T t) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_dispatch );
+      t.destroy( m_allocationCallbacks, *m_dispatch );
+    }
+
+  private:
+    Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+    Dispatch const *                    m_dispatch            = nullptr;
+  };
+
+  template <typename OwnerType, typename Dispatch>
+  class ObjectFree
+  {
+  public:
+    ObjectFree() = default;
+
+    ObjectFree( OwnerType                                               owner,
+                Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+      : m_owner( owner )
+      , m_allocationCallbacks( allocationCallbacks )
+      , m_dispatch( &dispatch )
+    {}
+
+    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_owner;
+    }
+
+    Optional<const AllocationCallbacks> getAllocator() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_allocationCallbacks;
+    }
+
+  protected:
+    template <typename T>
+    void destroy( T t ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+      ( m_owner.free )( t, m_allocationCallbacks, *m_dispatch );
+    }
+
+  private:
+    OwnerType                           m_owner               = {};
+    Optional<const AllocationCallbacks> m_allocationCallbacks = nullptr;
+    Dispatch const *                    m_dispatch            = nullptr;
+  };
+
+  template <typename OwnerType, typename Dispatch>
+  class ObjectRelease
+  {
+  public:
+    ObjectRelease() = default;
+
+    ObjectRelease( OwnerType                 owner,
+                   Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+      : m_owner( owner )
+      , m_dispatch( &dispatch )
+    {}
+
+    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_owner;
+    }
+
+  protected:
+    template <typename T>
+    void destroy( T t ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_owner && m_dispatch );
+      m_owner.release( t, *m_dispatch );
+    }
+
+  private:
+    OwnerType        m_owner    = {};
+    Dispatch const * m_dispatch = nullptr;
+  };
+
+  template <typename OwnerType, typename PoolType, typename Dispatch>
+  class PoolFree
+  {
+  public:
+    PoolFree() = default;
+
+    PoolFree( OwnerType                 owner,
+              PoolType                  pool,
+              Dispatch const & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT ) VULKAN_HPP_NOEXCEPT
+      : m_owner( owner )
+      , m_pool( pool )
+      , m_dispatch( &dispatch )
+    {}
+
+    OwnerType getOwner() const VULKAN_HPP_NOEXCEPT { return m_owner; }
+    PoolType getPool() const VULKAN_HPP_NOEXCEPT { return m_pool; }
+
+  protected:
+    template <typename T>
+    void destroy(T t) VULKAN_HPP_NOEXCEPT
+    {
+      ( m_owner.free )( m_pool, t, *m_dispatch );
+    }
+
+  private:
+    OwnerType        m_owner    = OwnerType();
+    PoolType         m_pool     = PoolType();
+    Dispatch const * m_dispatch = nullptr;
+  };
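+
+  // ObjectDestroy, ObjectFree, ObjectRelease and PoolFree are the deleter policies used by
+  // the unique-handle wrappers defined elsewhere in these headers; they differ only in which
+  // owner call reclaims the resource (destroy, free, release, or a pool-based free).
+  // Illustrative example (assuming the usual vk::UniqueBuffer alias): its deleter is
+  // ObjectDestroy<Device, Dispatch>, so destruction ends up in device.destroy( buffer, ... ).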
+
+#endif // !VULKAN_HPP_NO_SMART_HANDLE
+
+  //==================
+  //=== BASE TYPEs ===
+  //==================
+
+  using Bool32 = uint32_t;
+  using DeviceAddress = uint64_t;
+  using DeviceSize = uint64_t;
+  using RemoteAddressNV = void *;
+  using SampleMask = uint32_t;
+
+
+} // namespace VULKAN_HPP_NAMESPACE
+
+#include <vulkan/vulkan_enums.hpp>
+#if !defined( VULKAN_HPP_NO_TO_STRING )
+#include <vulkan/vulkan_to_string.hpp>
+#endif
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+namespace std
+{
+  template <>
+  struct is_error_code_enum<VULKAN_HPP_NAMESPACE::Result> : public true_type
+  {};
+}  // namespace std
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+  class ErrorCategoryImpl : public std::error_category
+  {
+    public:
+    virtual const char* name() const VULKAN_HPP_NOEXCEPT override { return VULKAN_HPP_NAMESPACE_STRING"::Result"; }
+    virtual std::string message(int ev) const override
+    {
+#if defined( VULKAN_HPP_NO_TO_STRING )
+      return std::to_string( ev );
+#else
+      return VULKAN_HPP_NAMESPACE::to_string(static_cast<VULKAN_HPP_NAMESPACE::Result>(ev));
+#endif
+    }
+  };
+
+  class Error
+  {
+    public:
+    Error() VULKAN_HPP_NOEXCEPT = default;
+    Error(const Error&) VULKAN_HPP_NOEXCEPT = default;
+    virtual ~Error() VULKAN_HPP_NOEXCEPT = default;
+
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT = 0;
+  };
+
+  class LogicError : public Error, public std::logic_error
+  {
+    public:
+    explicit LogicError( const std::string& what )
+      : Error(), std::logic_error(what) {}
+    explicit LogicError( char const * what )
+      : Error(), std::logic_error(what) {}
+
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::logic_error::what(); }
+  };
+
+  class SystemError : public Error, public std::system_error
+  {
+    public:
+    SystemError( std::error_code ec )
+      : Error(), std::system_error(ec) {}
+    SystemError( std::error_code ec, std::string const & what )
+      : Error(), std::system_error(ec, what) {}
+    SystemError( std::error_code ec, char const * what )
+      : Error(), std::system_error(ec, what) {}
+    SystemError( int ev, std::error_category const & ecat )
+      : Error(), std::system_error(ev, ecat) {}
+    SystemError( int ev, std::error_category const & ecat, std::string const & what)
+      : Error(), std::system_error(ev, ecat, what) {}
+    SystemError( int ev, std::error_category const & ecat, char const * what)
+      : Error(), std::system_error(ev, ecat, what) {}
+
+    virtual const char* what() const VULKAN_HPP_NOEXCEPT { return std::system_error::what(); }
+  };
+
+  VULKAN_HPP_INLINE const std::error_category& errorCategory() VULKAN_HPP_NOEXCEPT
+  {
+    static ErrorCategoryImpl instance;
+    return instance;
+  }
+
+  VULKAN_HPP_INLINE std::error_code make_error_code(Result e) VULKAN_HPP_NOEXCEPT
+  {
+    return std::error_code(static_cast<int>(e), errorCategory());
+  }
+
+  VULKAN_HPP_INLINE std::error_condition make_error_condition(Result e) VULKAN_HPP_NOEXCEPT
+  {
+    return std::error_condition(static_cast<int>(e), errorCategory());
+  }
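+
+  // The std::is_error_code_enum specialization above plus make_error_code() let Result be
+  // used wherever a std::error_code is expected; the SystemError-derived exception types
+  // below carry exactly such codes. Illustrative only:
+  //   std::error_code ec = make_error_code( Result::eErrorDeviceLost );
+  //   // ec.category() == errorCategory(); ec.message() goes through ErrorCategoryImpl::message()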
+
+
+  class OutOfHostMemoryError : public SystemError
+  {
+  public:
+    OutOfHostMemoryError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+    OutOfHostMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfHostMemory ), message ) {}
+  };
+
+  class OutOfDeviceMemoryError : public SystemError
+  {
+  public:
+    OutOfDeviceMemoryError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+    OutOfDeviceMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfDeviceMemory ), message ) {}
+  };
+
+  class InitializationFailedError : public SystemError
+  {
+  public:
+    InitializationFailedError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+    InitializationFailedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInitializationFailed ), message ) {}
+  };
+
+  class DeviceLostError : public SystemError
+  {
+  public:
+    DeviceLostError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+    DeviceLostError( char const * message )
+      : SystemError( make_error_code( Result::eErrorDeviceLost ), message ) {}
+  };
+
+  class MemoryMapFailedError : public SystemError
+  {
+  public:
+    MemoryMapFailedError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+    MemoryMapFailedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorMemoryMapFailed ), message ) {}
+  };
+
+  class LayerNotPresentError : public SystemError
+  {
+  public:
+    LayerNotPresentError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+    LayerNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorLayerNotPresent ), message ) {}
+  };
+
+  class ExtensionNotPresentError : public SystemError
+  {
+  public:
+    ExtensionNotPresentError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+    ExtensionNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorExtensionNotPresent ), message ) {}
+  };
+
+  class FeatureNotPresentError : public SystemError
+  {
+  public:
+    FeatureNotPresentError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+    FeatureNotPresentError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFeatureNotPresent ), message ) {}
+  };
+
+  class IncompatibleDriverError : public SystemError
+  {
+  public:
+    IncompatibleDriverError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+    IncompatibleDriverError( char const * message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDriver ), message ) {}
+  };
+
+  class TooManyObjectsError : public SystemError
+  {
+  public:
+    TooManyObjectsError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+    TooManyObjectsError( char const * message )
+      : SystemError( make_error_code( Result::eErrorTooManyObjects ), message ) {}
+  };
+
+  class FormatNotSupportedError : public SystemError
+  {
+  public:
+    FormatNotSupportedError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+    FormatNotSupportedError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFormatNotSupported ), message ) {}
+  };
+
+  class FragmentedPoolError : public SystemError
+  {
+  public:
+    FragmentedPoolError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+    FragmentedPoolError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFragmentedPool ), message ) {}
+  };
+
+  class UnknownError : public SystemError
+  {
+  public:
+    UnknownError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
+    UnknownError( char const * message )
+      : SystemError( make_error_code( Result::eErrorUnknown ), message ) {}
+  };
+
+  class OutOfPoolMemoryError : public SystemError
+  {
+  public:
+    OutOfPoolMemoryError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
+    OutOfPoolMemoryError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfPoolMemory ), message ) {}
+  };
+
+  class InvalidExternalHandleError : public SystemError
+  {
+  public:
+    InvalidExternalHandleError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
+    InvalidExternalHandleError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidExternalHandle ), message ) {}
+  };
+
+  class FragmentationError : public SystemError
+  {
+  public:
+    FragmentationError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
+    FragmentationError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFragmentation ), message ) {}
+  };
+
+  class InvalidOpaqueCaptureAddressError : public SystemError
+  {
+  public:
+    InvalidOpaqueCaptureAddressError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
+    InvalidOpaqueCaptureAddressError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidOpaqueCaptureAddress ), message ) {}
+  };
+
+  class SurfaceLostKHRError : public SystemError
+  {
+  public:
+    SurfaceLostKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+    SurfaceLostKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorSurfaceLostKHR ), message ) {}
+  };
+
+  class NativeWindowInUseKHRError : public SystemError
+  {
+  public:
+    NativeWindowInUseKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
+    NativeWindowInUseKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorNativeWindowInUseKHR ), message ) {}
+  };
+
+  class OutOfDateKHRError : public SystemError
+  {
+  public:
+    OutOfDateKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
+    OutOfDateKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorOutOfDateKHR ), message ) {}
+  };
+
+  class IncompatibleDisplayKHRError : public SystemError
+  {
+  public:
+    IncompatibleDisplayKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
+    IncompatibleDisplayKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorIncompatibleDisplayKHR ), message ) {}
+  };
+
+  class ValidationFailedEXTError : public SystemError
+  {
+  public:
+    ValidationFailedEXTError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
+    ValidationFailedEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorValidationFailedEXT ), message ) {}
+  };
+
+  class InvalidShaderNVError : public SystemError
+  {
+  public:
+    InvalidShaderNVError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+    InvalidShaderNVError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidShaderNV ), message ) {}
+  };
+
+  class ImageUsageNotSupportedKHRError : public SystemError
+  {
+  public:
+    ImageUsageNotSupportedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {}
+    ImageUsageNotSupportedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorImageUsageNotSupportedKHR ), message ) {}
+  };
+
+  class VideoPictureLayoutNotSupportedKHRError : public SystemError
+  {
+  public:
+    VideoPictureLayoutNotSupportedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message ) {}
+    VideoPictureLayoutNotSupportedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorVideoPictureLayoutNotSupportedKHR ), message ) {}
+  };
+
+  class VideoProfileOperationNotSupportedKHRError : public SystemError
+  {
+  public:
+    VideoProfileOperationNotSupportedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message ) {}
+    VideoProfileOperationNotSupportedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorVideoProfileOperationNotSupportedKHR ), message ) {}
+  };
+
+  class VideoProfileFormatNotSupportedKHRError : public SystemError
+  {
+  public:
+    VideoProfileFormatNotSupportedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message ) {}
+    VideoProfileFormatNotSupportedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorVideoProfileFormatNotSupportedKHR ), message ) {}
+  };
+
+  class VideoProfileCodecNotSupportedKHRError : public SystemError
+  {
+  public:
+    VideoProfileCodecNotSupportedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) {}
+    VideoProfileCodecNotSupportedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorVideoProfileCodecNotSupportedKHR ), message ) {}
+  };
+
+  class VideoStdVersionNotSupportedKHRError : public SystemError
+  {
+  public:
+    VideoStdVersionNotSupportedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) {}
+    VideoStdVersionNotSupportedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorVideoStdVersionNotSupportedKHR ), message ) {}
+  };
+
+  class InvalidDrmFormatModifierPlaneLayoutEXTError : public SystemError
+  {
+  public:
+    InvalidDrmFormatModifierPlaneLayoutEXTError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
+    InvalidDrmFormatModifierPlaneLayoutEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT ), message ) {}
+  };
+
+  class NotPermittedKHRError : public SystemError
+  {
+  public:
+    NotPermittedKHRError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message ) {}
+    NotPermittedKHRError( char const * message )
+      : SystemError( make_error_code( Result::eErrorNotPermittedKHR ), message ) {}
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  class FullScreenExclusiveModeLostEXTError : public SystemError
+  {
+  public:
+    FullScreenExclusiveModeLostEXTError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
+    FullScreenExclusiveModeLostEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorFullScreenExclusiveModeLostEXT ), message ) {}
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  class CompressionExhaustedEXTError : public SystemError
+  {
+  public:
+    CompressionExhaustedEXTError( std::string const & message )
+      : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {}
+    CompressionExhaustedEXTError( char const * message )
+      : SystemError( make_error_code( Result::eErrorCompressionExhaustedEXT ), message ) {}
+  };
+
+
+  namespace
+  {
+    [[noreturn]] void throwResultException( Result result, char const * message )
+    {
+      switch ( result )
+      {
+      case Result::eErrorOutOfHostMemory: throw OutOfHostMemoryError( message );
+      case Result::eErrorOutOfDeviceMemory: throw OutOfDeviceMemoryError( message );
+      case Result::eErrorInitializationFailed: throw InitializationFailedError( message );
+      case Result::eErrorDeviceLost: throw DeviceLostError( message );
+      case Result::eErrorMemoryMapFailed: throw MemoryMapFailedError( message );
+      case Result::eErrorLayerNotPresent: throw LayerNotPresentError( message );
+      case Result::eErrorExtensionNotPresent: throw ExtensionNotPresentError( message );
+      case Result::eErrorFeatureNotPresent: throw FeatureNotPresentError( message );
+      case Result::eErrorIncompatibleDriver: throw IncompatibleDriverError( message );
+      case Result::eErrorTooManyObjects: throw TooManyObjectsError( message );
+      case Result::eErrorFormatNotSupported: throw FormatNotSupportedError( message );
+      case Result::eErrorFragmentedPool: throw FragmentedPoolError( message );
+      case Result::eErrorUnknown: throw UnknownError( message );
+      case Result::eErrorOutOfPoolMemory: throw OutOfPoolMemoryError( message );
+      case Result::eErrorInvalidExternalHandle: throw InvalidExternalHandleError( message );
+      case Result::eErrorFragmentation: throw FragmentationError( message );
+      case Result::eErrorInvalidOpaqueCaptureAddress: throw InvalidOpaqueCaptureAddressError( message );
+      case Result::eErrorSurfaceLostKHR: throw SurfaceLostKHRError( message );
+      case Result::eErrorNativeWindowInUseKHR: throw NativeWindowInUseKHRError( message );
+      case Result::eErrorOutOfDateKHR: throw OutOfDateKHRError( message );
+      case Result::eErrorIncompatibleDisplayKHR: throw IncompatibleDisplayKHRError( message );
+      case Result::eErrorValidationFailedEXT: throw ValidationFailedEXTError( message );
+      case Result::eErrorInvalidShaderNV: throw InvalidShaderNVError( message );
+      case Result::eErrorImageUsageNotSupportedKHR: throw ImageUsageNotSupportedKHRError( message );
+      case Result::eErrorVideoPictureLayoutNotSupportedKHR: throw VideoPictureLayoutNotSupportedKHRError( message );
+      case Result::eErrorVideoProfileOperationNotSupportedKHR: throw VideoProfileOperationNotSupportedKHRError( message );
+      case Result::eErrorVideoProfileFormatNotSupportedKHR: throw VideoProfileFormatNotSupportedKHRError( message );
+      case Result::eErrorVideoProfileCodecNotSupportedKHR: throw VideoProfileCodecNotSupportedKHRError( message );
+      case Result::eErrorVideoStdVersionNotSupportedKHR: throw VideoStdVersionNotSupportedKHRError( message );
+      case Result::eErrorInvalidDrmFormatModifierPlaneLayoutEXT: throw InvalidDrmFormatModifierPlaneLayoutEXTError( message );
+      case Result::eErrorNotPermittedKHR: throw NotPermittedKHRError( message );
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+      case Result::eErrorFullScreenExclusiveModeLostEXT: throw FullScreenExclusiveModeLostEXTError( message );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+      case Result::eErrorCompressionExhaustedEXT: throw CompressionExhaustedEXTError( message );
+      default: throw SystemError( make_error_code( result ) );
+      }
+    }
+  }
+#endif
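+
+  // NOTE (editorial sketch, not part of the generated header): with exceptions enabled,
+  // resultCheck() below routes every non-success VkResult through throwResultException(),
+  // so enhanced-mode calls surface failures as the typed errors defined above. A minimal,
+  // hypothetical usage pattern (variable and helper names are placeholders):
+  //
+  //   try
+  //   {
+  //     uint32_t imageIndex = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, {} ).value;
+  //   }
+  //   catch ( VULKAN_HPP_NAMESPACE::OutOfDateKHRError const & )
+  //   {
+  //     recreateSwapchain();  // hypothetical helper
+  //   }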
+
+  template <typename T> void ignore(T const &) VULKAN_HPP_NOEXCEPT {}
+
+  template <typename T>
+  struct ResultValue
+  {
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, T & v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(v)))
+#else
+    ResultValue( Result r, T & v )
+#endif
+      : result( r )
+      , value( v )
+    {}
+
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, T && v ) VULKAN_HPP_NOEXCEPT(VULKAN_HPP_NOEXCEPT(T(std::move(v))))
+#else
+    ResultValue( Result r, T && v )
+#endif
+      : result( r )
+      , value( std::move( v ) )
+    {}
+
+    Result  result;
+    T       value;
+
+    operator std::tuple<Result&, T&>() VULKAN_HPP_NOEXCEPT { return std::tuple<Result&, T&>(result, value); }
+  };
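+
+  // NOTE (editorial sketch, not part of the generated header): the tuple conversion operator
+  // above lets a caller unpack a returned ResultValue<T> into existing variables, e.g.
+  // (hypothetical names):
+  //
+  //   VULKAN_HPP_NAMESPACE::Result result;
+  //   uint32_t                     imageIndex;
+  //   std::tie( result, imageIndex ) = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, {} );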
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  template <typename Type, typename Dispatch>
+  struct ResultValue<UniqueHandle<Type, Dispatch>>
+  {
+#ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue(Result r, UniqueHandle<Type, Dispatch> && v) VULKAN_HPP_NOEXCEPT
+#else
+    ResultValue(Result r, UniqueHandle<Type, Dispatch> && v)
+#endif
+      : result(r)
+      , value(std::move(v))
+    {}
+
+    std::tuple<Result, UniqueHandle<Type, Dispatch>> asTuple()
+    {
+      return std::make_tuple( result, std::move( value ) );
+    }
+
+    Result                        result;
+    UniqueHandle<Type, Dispatch>  value;
+  };
+
+  template <typename Type, typename Dispatch>
+  struct ResultValue<std::vector<UniqueHandle<Type, Dispatch>>>
+  {
+#  ifdef VULKAN_HPP_HAS_NOEXCEPT
+    ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v ) VULKAN_HPP_NOEXCEPT
+#  else
+    ResultValue( Result r, std::vector<UniqueHandle<Type, Dispatch>> && v )
+#  endif
+      : result( r )
+      , value( std::move( v ) )
+    {}
+
+    std::tuple<Result, std::vector<UniqueHandle<Type, Dispatch>>> asTuple()
+    {
+      return std::make_tuple( result, std::move( value ) );
+    }
+
+    Result                                    result;
+    std::vector<UniqueHandle<Type, Dispatch>> value;
+  };
+#endif
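+
+  // NOTE (editorial sketch, not part of the generated header): the UniqueHandle specializations
+  // above expose asTuple() because the move-only 'value' member cannot be unpacked through the
+  // std::tuple conversion operator. A hypothetical use with structured bindings:
+  //
+  //   auto [result, pipeline] =
+  //     device.createGraphicsPipelineUnique( pipelineCache, pipelineCreateInfo ).asTuple();
+  //   // 'result' may carry a non-error success code; 'pipeline' owns the handle.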
+
+  template <typename T>
+  struct ResultValueType
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef ResultValue<T>  type;
+#else
+    typedef T               type;
+#endif
+  };
+
+  template <>
+  struct ResultValueType<void>
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    typedef Result type;
+#else
+    typedef void   type;
+#endif
+  };
+
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type createResultValueType( Result result )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    return result;
+#else
+    ignore( result );
+#endif
+  }
+
+  template <typename T>
+  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValueType( Result result, T & data )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    return ResultValue<T>( result, data );
+#else
+    ignore( result );
+    return data;
+#endif
+  }
+
+  template <typename T>
+  VULKAN_HPP_INLINE typename ResultValueType<T>::type createResultValueType( Result result, T && data )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    return ResultValue<T>( result, std::move( data ) );
+#else
+    ignore( result );
+    return std::move( data );
+#endif
+  }
+
+  VULKAN_HPP_INLINE void resultCheck( Result result, char const * message )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( result );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+    ignore( message );
+    VULKAN_HPP_ASSERT_ON_RESULT( result == Result::eSuccess );
+#else
+    if ( result != Result::eSuccess )
+    {
+      throwResultException( result, message );
+    }
+#endif
+  }
+
+  VULKAN_HPP_INLINE void resultCheck( Result result, char const * message, std::initializer_list<Result> successCodes )
+  {
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    ignore( result );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+    ignore( message );
+    ignore( successCodes );  // just in case VULKAN_HPP_ASSERT_ON_RESULT is empty
+    VULKAN_HPP_ASSERT_ON_RESULT( std::find( successCodes.begin(), successCodes.end(), result ) != successCodes.end() );
+#else
+    if ( std::find( successCodes.begin(), successCodes.end(), result ) == successCodes.end() )
+    {
+      throwResultException( result, message );
+    }
+#endif
+  }
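+
+  // NOTE (editorial sketch, not part of the generated header): with VULKAN_HPP_NO_EXCEPTIONS
+  // defined, resultCheck() only asserts and createResultValueType() wraps the payload in a
+  // ResultValue<T>, so the caller branches on 'result' explicitly. A hypothetical example:
+  //
+  //   auto rv = device.createBuffer( bufferCreateInfo );  // ResultValue<Buffer> in this mode
+  //   if ( rv.result == VULKAN_HPP_NAMESPACE::Result::eSuccess )
+  //   {
+  //     VULKAN_HPP_NAMESPACE::Buffer buffer = rv.value;
+  //   }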
+} // namespace VULKAN_HPP_NAMESPACE
+
+// clang-format off
+#include <vulkan/vulkan_handles.hpp>
+#include <vulkan/vulkan_structs.hpp>
+#include <vulkan/vulkan_funcs.hpp>
+// clang-format on
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+
+  //=======================
+  //=== STRUCTS EXTENDS ===
+  //=======================
+
+
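+  // NOTE (editorial sketch, not part of the generated header): each StructExtends specialization
+  // below records that the first struct may legally appear in the pNext chain of the second, and
+  // StructureChain uses these traits to reject invalid chains at compile time. A hypothetical query:
+  //
+  //   auto chain = physicalDevice.getFeatures2<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2,
+  //                                            VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>();
+  //   bool timelineSemaphore = chain.get<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>().timelineSemaphore;
+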
+  //=== VK_VERSION_1_0 ===
+  template <> struct StructExtends<ShaderModuleCreateInfo, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+
+  //=== VK_VERSION_1_1 ===
+  template <> struct StructExtends<PhysicalDeviceSubgroupProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice16BitStorageFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryDedicatedRequirements, MemoryRequirements2>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryDedicatedAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryAllocateFlagsInfo, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupRenderPassBeginInfo, RenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupRenderPassBeginInfo, RenderingInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupCommandBufferBeginInfo, CommandBufferBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupSubmitInfo, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupBindSparseInfo, BindSparseInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BindBufferMemoryDeviceGroupInfo, BindBufferMemoryInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BindImageMemoryDeviceGroupInfo, BindImageMemoryInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupDeviceCreateInfo, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFeatures2, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePointClippingProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassInputAttachmentAspectCreateInfo, RenderPassCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageViewUsageCreateInfo, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineTessellationDomainOriginStateCreateInfo, PipelineTessellationStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassMultiviewCreateInfo, RenderPassCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiviewFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiviewFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiviewProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVariablePointersFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVariablePointersFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProtectedMemoryFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProtectedMemoryFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProtectedMemoryProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<ProtectedSubmitInfo, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerYcbcrConversionInfo, SamplerCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerYcbcrConversionInfo, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BindImagePlaneMemoryInfo, BindImageMemoryInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImagePlaneMemoryRequirementsInfo, ImageMemoryRequirementsInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSamplerYcbcrConversionFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSamplerYcbcrConversionFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerYcbcrConversionImageFormatProperties, ImageFormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExternalImageFormatInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalImageFormatProperties, ImageFormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceIDProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalMemoryImageCreateInfo, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalMemoryBufferCreateInfo, BufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMemoryAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportFenceCreateInfo, FenceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportSemaphoreCreateInfo, SemaphoreCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMaintenance3Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderDrawParametersFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderDrawParametersFeatures, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_VERSION_1_2 ===
+  template <> struct StructExtends<PhysicalDeviceVulkan11Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan11Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan11Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan12Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan12Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan12Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<ImageFormatListCreateInfo, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageFormatListCreateInfo, SwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<ImageFormatListCreateInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice8BitStorageFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice8BitStorageFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDriverProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicInt64Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicInt64Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderFloat16Int8Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderFloat16Int8Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFloatControlsProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorSetLayoutBindingFlagsCreateInfo, DescriptorSetLayoutCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorIndexingFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorIndexingFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorIndexingProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorSetVariableDescriptorCountAllocateInfo, DescriptorSetAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorSetVariableDescriptorCountLayoutSupport, DescriptorSetLayoutSupport>{ enum { value = true }; };
+  template <> struct StructExtends<SubpassDescriptionDepthStencilResolve, SubpassDescription2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDepthStencilResolveProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceScalarBlockLayoutFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceScalarBlockLayoutFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageStencilUsageCreateInfo, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageStencilUsageCreateInfo, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerReductionModeCreateInfo, SamplerCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSamplerFilterMinmaxProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkanMemoryModelFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkanMemoryModelFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImagelessFramebufferFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImagelessFramebufferFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<FramebufferAttachmentsCreateInfo, FramebufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassAttachmentBeginInfo, RenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceUniformBufferStandardLayoutFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceUniformBufferStandardLayoutFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSubgroupExtendedTypesFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSubgroupExtendedTypesFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSeparateDepthStencilLayoutsFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSeparateDepthStencilLayoutsFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<AttachmentReferenceStencilLayout, AttachmentReference2>{ enum { value = true }; };
+  template <> struct StructExtends<AttachmentDescriptionStencilLayout, AttachmentDescription2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceHostQueryResetFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceHostQueryResetFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTimelineSemaphoreProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<SemaphoreTypeCreateInfo, SemaphoreCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SemaphoreTypeCreateInfo, PhysicalDeviceExternalSemaphoreInfo>{ enum { value = true }; };
+  template <> struct StructExtends<TimelineSemaphoreSubmitInfo, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<TimelineSemaphoreSubmitInfo, BindSparseInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BufferOpaqueCaptureAddressCreateInfo, BufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryOpaqueCaptureAddressAllocateInfo, MemoryAllocateInfo>{ enum { value = true }; };
+
+  //=== VK_VERSION_1_3 ===
+  template <> struct StructExtends<PhysicalDeviceVulkan13Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan13Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVulkan13Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCreationFeedbackCreateInfo, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCreationFeedbackCreateInfo, ComputePipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCreationFeedbackCreateInfo, RayTracingPipelineCreateInfoNV>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCreationFeedbackCreateInfo, RayTracingPipelineCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderTerminateInvocationFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderDemoteToHelperInvocationFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderDemoteToHelperInvocationFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePrivateDataFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePrivateDataFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DevicePrivateDataCreateInfo, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineCreationCacheControlFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineCreationCacheControlFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryBarrier2, SubpassDependency2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSynchronization2Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSynchronization2Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageRobustnessFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageRobustnessFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubgroupSizeControlProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineShaderStageRequiredSubgroupSizeCreateInfo, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInlineUniformBlockFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInlineUniformBlockFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInlineUniformBlockProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<WriteDescriptorSetInlineUniformBlock, WriteDescriptorSet>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorPoolInlineUniformBlockCreateInfo, DescriptorPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTextureCompressionASTCHDRFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTextureCompressionASTCHDRFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRenderingCreateInfo, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDynamicRenderingFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDynamicRenderingFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<CommandBufferInheritanceRenderingInfo, CommandBufferInheritanceInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderIntegerDotProductFeatures, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderIntegerDotProductFeatures, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderIntegerDotProductProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentProperties, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<FormatProperties3, FormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMaintenance4Features, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMaintenance4Features, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMaintenance4Properties, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_KHR_swapchain ===
+  template <> struct StructExtends<ImageSwapchainCreateInfoKHR, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BindImageMemorySwapchainInfoKHR, BindImageMemoryInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupPresentInfoKHR, PresentInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceGroupSwapchainCreateInfoKHR, SwapchainCreateInfoKHR>{ enum { value = true }; };
+
+  //=== VK_KHR_display_swapchain ===
+  template <> struct StructExtends<DisplayPresentInfoKHR, PresentInfoKHR>{ enum { value = true }; };
+
+  //=== VK_EXT_debug_report ===
+  template <> struct StructExtends<DebugReportCallbackCreateInfoEXT, InstanceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_AMD_rasterization_order ===
+  template <> struct StructExtends<PipelineRasterizationStateRasterizationOrderAMD, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_video_queue ===
+  template <> struct StructExtends<QueueFamilyQueryResultStatusPropertiesKHR, QueueFamilyProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<QueueFamilyVideoPropertiesKHR, QueueFamilyProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<VideoProfileInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoProfileListInfoKHR, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<VideoProfileListInfoKHR, PhysicalDeviceVideoFormatInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoProfileListInfoKHR, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoProfileListInfoKHR, BufferCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_video_decode_queue ===
+  template <> struct StructExtends<VideoDecodeCapabilitiesKHR, VideoCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeUsageInfoKHR, VideoProfileInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeUsageInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_dedicated_allocation ===
+  template <> struct StructExtends<DedicatedAllocationImageCreateInfoNV, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DedicatedAllocationBufferCreateInfoNV, BufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DedicatedAllocationMemoryAllocateInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_transform_feedback ===
+  template <> struct StructExtends<PhysicalDeviceTransformFeedbackFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTransformFeedbackFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTransformFeedbackPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationStateStreamCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h264 ===
+  template <> struct StructExtends<VideoEncodeH264CapabilitiesEXT, VideoCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264SessionParametersCreateInfoEXT, VideoSessionParametersCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264SessionParametersAddInfoEXT, VideoSessionParametersUpdateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264VclFrameInfoEXT, VideoEncodeInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264EmitPictureParametersInfoEXT, VideoEncodeInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264ProfileInfoEXT, VideoProfileInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264ProfileInfoEXT, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264RateControlInfoEXT, VideoCodingControlInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264RateControlLayerInfoEXT, VideoCodingControlInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH264RateControlLayerInfoEXT, VideoEncodeRateControlLayerInfoKHR>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h265 ===
+  template <> struct StructExtends<VideoEncodeH265CapabilitiesEXT, VideoCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265SessionParametersCreateInfoEXT, VideoSessionParametersCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265SessionParametersAddInfoEXT, VideoSessionParametersUpdateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265VclFrameInfoEXT, VideoEncodeInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265EmitPictureParametersInfoEXT, VideoEncodeInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265ProfileInfoEXT, VideoProfileInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265ProfileInfoEXT, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265RateControlInfoEXT, VideoCodingControlInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265RateControlLayerInfoEXT, VideoCodingControlInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeH265RateControlLayerInfoEXT, VideoEncodeRateControlLayerInfoKHR>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_decode_h264 ===
+  template <> struct StructExtends<VideoDecodeH264ProfileInfoKHR, VideoProfileInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH264ProfileInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH264CapabilitiesKHR, VideoCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH264SessionParametersCreateInfoKHR, VideoSessionParametersCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH264SessionParametersAddInfoKHR, VideoSessionParametersUpdateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH264PictureInfoKHR, VideoDecodeInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH264DpbSlotInfoKHR, VideoReferenceSlotInfoKHR>{ enum { value = true }; };
+
+  //=== VK_AMD_texture_gather_bias_lod ===
+  template <> struct StructExtends<TextureLODGatherFormatPropertiesAMD, ImageFormatProperties2>{ enum { value = true }; };
+
+  //=== VK_KHR_dynamic_rendering ===
+  template <> struct StructExtends<RenderingFragmentShadingRateAttachmentInfoKHR, RenderingInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderingFragmentDensityMapAttachmentInfoEXT, RenderingInfo>{ enum { value = true }; };
+  template <> struct StructExtends<AttachmentSampleCountInfoAMD, CommandBufferInheritanceInfo>{ enum { value = true }; };
+  template <> struct StructExtends<AttachmentSampleCountInfoAMD, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MultiviewPerViewAttributesInfoNVX, CommandBufferInheritanceInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MultiviewPerViewAttributesInfoNVX, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MultiviewPerViewAttributesInfoNVX, RenderingInfo>{ enum { value = true }; };
+
+  //=== VK_NV_corner_sampled_image ===
+  template <> struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCornerSampledImageFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_external_memory ===
+  template <> struct StructExtends<ExternalMemoryImageCreateInfoNV, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMemoryAllocateInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+  template <> struct StructExtends<ImportMemoryWin32HandleInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMemoryWin32HandleInfoNV, MemoryAllocateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_win32_keyed_mutex ===
+  template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoNV, SubmitInfo2>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_validation_flags ===
+  template <> struct StructExtends<ValidationFlagsEXT, InstanceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_astc_decode_mode ===
+  template <> struct StructExtends<ImageViewASTCDecodeModeEXT, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceASTCDecodeFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_pipeline_robustness ===
+  template <> struct StructExtends<PhysicalDevicePipelineRobustnessFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineRobustnessFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineRobustnessPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRobustnessCreateInfoEXT, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRobustnessCreateInfoEXT, ComputePipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRobustnessCreateInfoEXT, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRobustnessCreateInfoEXT, RayTracingPipelineCreateInfoKHR>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+  template <> struct StructExtends<ImportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMemoryWin32HandleInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+  template <> struct StructExtends<ImportMemoryFdInfoKHR, MemoryAllocateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_keyed_mutex ===
+  template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<Win32KeyedMutexAcquireReleaseInfoKHR, SubmitInfo2>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+  template <> struct StructExtends<ExportSemaphoreWin32HandleInfoKHR, SemaphoreCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<D3D12FenceSubmitInfoKHR, SubmitInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_push_descriptor ===
+  template <> struct StructExtends<PhysicalDevicePushDescriptorPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_conditional_rendering ===
+  template <> struct StructExtends<PhysicalDeviceConditionalRenderingFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceConditionalRenderingFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<CommandBufferInheritanceConditionalRenderingInfoEXT, CommandBufferInheritanceInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_incremental_present ===
+  template <> struct StructExtends<PresentRegionsKHR, PresentInfoKHR>{ enum { value = true }; };
+
+  //=== VK_NV_clip_space_w_scaling ===
+  template <> struct StructExtends<PipelineViewportWScalingStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_display_control ===
+  template <> struct StructExtends<SwapchainCounterCreateInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+
+  //=== VK_GOOGLE_display_timing ===
+  template <> struct StructExtends<PresentTimesInfoGOOGLE, PresentInfoKHR>{ enum { value = true }; };
+
+  //=== VK_NVX_multiview_per_view_attributes ===
+  template <> struct StructExtends<PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_viewport_swizzle ===
+  template <> struct StructExtends<PipelineViewportSwizzleStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_discard_rectangles ===
+  template <> struct StructExtends<PhysicalDeviceDiscardRectanglePropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineDiscardRectangleStateCreateInfoEXT, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_conservative_rasterization ===
+  template <> struct StructExtends<PhysicalDeviceConservativeRasterizationPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationConservativeStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_depth_clip_enable ===
+  template <> struct StructExtends<PhysicalDeviceDepthClipEnableFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDepthClipEnableFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationDepthClipStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_shared_presentable_image ===
+  template <> struct StructExtends<SharedPresentSurfaceCapabilitiesKHR, SurfaceCapabilities2KHR>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+  template <> struct StructExtends<ExportFenceWin32HandleInfoKHR, FenceCreateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_performance_query ===
+  template <> struct StructExtends<PhysicalDevicePerformanceQueryFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePerformanceQueryFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePerformanceQueryPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<QueryPoolPerformanceCreateInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PerformanceQuerySubmitInfoKHR, SubmitInfo2>{ enum { value = true }; };
+
+  //=== VK_EXT_debug_utils ===
+  template <> struct StructExtends<DebugUtilsMessengerCreateInfoEXT, InstanceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DebugUtilsObjectNameInfoEXT, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+  template <> struct StructExtends<AndroidHardwareBufferUsageANDROID, ImageFormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<AndroidHardwareBufferFormatPropertiesANDROID, AndroidHardwareBufferPropertiesANDROID>{ enum { value = true }; };
+  template <> struct StructExtends<ImportAndroidHardwareBufferInfoANDROID, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalFormatANDROID, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExternalFormatANDROID, SamplerYcbcrConversionCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<AndroidHardwareBufferFormatProperties2ANDROID, AndroidHardwareBufferPropertiesANDROID>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+  template <> struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier>{ enum { value = true }; };
+  template <> struct StructExtends<SampleLocationsInfoEXT, ImageMemoryBarrier2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassSampleLocationsBeginInfoEXT, RenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineSampleLocationsStateCreateInfoEXT, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSampleLocationsPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_blend_operation_advanced ===
+  template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBlendOperationAdvancedPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineColorBlendAdvancedStateCreateInfoEXT, PipelineColorBlendStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_fragment_coverage_to_color ===
+  template <> struct StructExtends<PipelineCoverageToColorStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_acceleration_structure ===
+  template <> struct StructExtends<WriteDescriptorSetAccelerationStructureKHR, WriteDescriptorSet>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceAccelerationStructureFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceAccelerationStructureFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceAccelerationStructurePropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_framebuffer_mixed_samples ===
+  template <> struct StructExtends<PipelineCoverageModulationStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_shader_sm_builtins ===
+  template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSMBuiltinsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_image_drm_format_modifier ===
+  template <> struct StructExtends<DrmFormatModifierPropertiesListEXT, FormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageDrmFormatModifierInfoEXT, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<ImageDrmFormatModifierListCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageDrmFormatModifierExplicitCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DrmFormatModifierPropertiesList2EXT, FormatProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_validation_cache ===
+  template <> struct StructExtends<ShaderModuleValidationCacheCreateInfoEXT, ShaderModuleCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ShaderModuleValidationCacheCreateInfoEXT, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_portability_subset ===
+  template <> struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePortabilitySubsetFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePortabilitySubsetPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_shading_rate_image ===
+  template <> struct StructExtends<PipelineViewportShadingRateImageStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShadingRateImageFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShadingRateImageFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShadingRateImagePropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineViewportCoarseSampleOrderStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_ray_tracing ===
+  template <> struct StructExtends<WriteDescriptorSetAccelerationStructureNV, WriteDescriptorSet>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_representative_fragment_test ===
+  template <> struct StructExtends<PhysicalDeviceRepresentativeFragmentTestFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRepresentativeFragmentTestFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRepresentativeFragmentTestStateCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_filter_cubic ===
+  template <> struct StructExtends<PhysicalDeviceImageViewImageFormatInfoEXT, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<FilterCubicImageViewImageFormatPropertiesEXT, ImageFormatProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_external_memory_host ===
+  template <> struct StructExtends<ImportMemoryHostPointerInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExternalMemoryHostPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_KHR_shader_clock ===
+  template <> struct StructExtends<PhysicalDeviceShaderClockFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderClockFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_AMD_pipeline_compiler_control ===
+  template <> struct StructExtends<PipelineCompilerControlCreateInfoAMD, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCompilerControlCreateInfoAMD, ComputePipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_AMD_shader_core_properties ===
+  template <> struct StructExtends<PhysicalDeviceShaderCorePropertiesAMD, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_KHR_video_decode_h265 ===
+  template <> struct StructExtends<VideoDecodeH265ProfileInfoKHR, VideoProfileInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH265ProfileInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH265CapabilitiesKHR, VideoCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH265SessionParametersCreateInfoKHR, VideoSessionParametersCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH265SessionParametersAddInfoKHR, VideoSessionParametersUpdateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH265PictureInfoKHR, VideoDecodeInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoDecodeH265DpbSlotInfoKHR, VideoReferenceSlotInfoKHR>{ enum { value = true }; };
+
+  //=== VK_KHR_global_priority ===
+  template <> struct StructExtends<DeviceQueueGlobalPriorityCreateInfoKHR, DeviceQueueCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceGlobalPriorityQueryFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceGlobalPriorityQueryFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<QueueFamilyGlobalPriorityPropertiesKHR, QueueFamilyProperties2>{ enum { value = true }; };
+
+  //=== VK_AMD_memory_overallocation_behavior ===
+  template <> struct StructExtends<DeviceMemoryOverallocationCreateInfoAMD, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_vertex_attribute_divisor ===
+  template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineVertexInputDivisorStateCreateInfoEXT, PipelineVertexInputStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVertexAttributeDivisorFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_frame_token ===
+  template <> struct StructExtends<PresentFrameTokenGGP, PresentInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_compute_shader_derivatives ===
+  template <> struct StructExtends<PhysicalDeviceComputeShaderDerivativesFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceComputeShaderDerivativesFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_mesh_shader ===
+  template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMeshShaderPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_shader_image_footprint ===
+  template <> struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderImageFootprintFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_scissor_exclusive ===
+  template <> struct StructExtends<PipelineViewportExclusiveScissorStateCreateInfoNV, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExclusiveScissorFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExclusiveScissorFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+  template <> struct StructExtends<QueueFamilyCheckpointPropertiesNV, QueueFamilyProperties2>{ enum { value = true }; };
+
+  //=== VK_INTEL_shader_integer_functions2 ===
+  template <> struct StructExtends<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_INTEL_performance_query ===
+  template <> struct StructExtends<QueryPoolPerformanceQueryCreateInfoINTEL, QueryPoolCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_pci_bus_info ===
+  template <> struct StructExtends<PhysicalDevicePCIBusInfoPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_AMD_display_native_hdr ===
+  template <> struct StructExtends<DisplayNativeHdrSurfaceCapabilitiesAMD, SurfaceCapabilities2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainDisplayNativeHdrCreateInfoAMD, SwapchainCreateInfoKHR>{ enum { value = true }; };
+
+  //=== VK_EXT_fragment_density_map ===
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassFragmentDensityMapCreateInfoEXT, RenderPassCreateInfo2>{ enum { value = true }; };
+
+  //=== VK_KHR_fragment_shading_rate ===
+  template <> struct StructExtends<FragmentShadingRateAttachmentInfoKHR, SubpassDescription2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineFragmentShadingRateStateCreateInfoKHR, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRateFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRateFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRatePropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_AMD_shader_core_properties2 ===
+  template <> struct StructExtends<PhysicalDeviceShaderCoreProperties2AMD, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_AMD_device_coherent_memory ===
+  template <> struct StructExtends<PhysicalDeviceCoherentMemoryFeaturesAMD, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCoherentMemoryFeaturesAMD, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_shader_image_atomic_int64 ===
+  template <> struct StructExtends<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderImageAtomicInt64FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_memory_budget ===
+  template <> struct StructExtends<PhysicalDeviceMemoryBudgetPropertiesEXT, PhysicalDeviceMemoryProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_memory_priority ===
+  template <> struct StructExtends<PhysicalDeviceMemoryPriorityFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMemoryPriorityFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MemoryPriorityAllocateInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_surface_protected_capabilities ===
+  template <> struct StructExtends<SurfaceProtectedCapabilitiesKHR, SurfaceCapabilities2KHR>{ enum { value = true }; };
+
+  //=== VK_NV_dedicated_allocation_image_aliasing ===
+  template <> struct StructExtends<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_buffer_device_address ===
+  template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBufferDeviceAddressFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BufferDeviceAddressCreateInfoEXT, BufferCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_validation_features ===
+  template <> struct StructExtends<ValidationFeaturesEXT, InstanceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_present_wait ===
+  template <> struct StructExtends<PhysicalDevicePresentWaitFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePresentWaitFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_cooperative_matrix ===
+  template <> struct StructExtends<PhysicalDeviceCooperativeMatrixFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCooperativeMatrixFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCooperativeMatrixPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_coverage_reduction_mode ===
+  template <> struct StructExtends<PhysicalDeviceCoverageReductionModeFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCoverageReductionModeFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineCoverageReductionStateCreateInfoNV, PipelineMultisampleStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_fragment_shader_interlock ===
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderInterlockFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderInterlockFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_ycbcr_image_arrays ===
+  template <> struct StructExtends<PhysicalDeviceYcbcrImageArraysFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceYcbcrImageArraysFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_provoking_vertex ===
+  template <> struct StructExtends<PhysicalDeviceProvokingVertexFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProvokingVertexFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceProvokingVertexPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationProvokingVertexStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+  template <> struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, PhysicalDeviceSurfaceInfo2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfaceFullScreenExclusiveInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfaceCapabilitiesFullScreenExclusiveEXT, SurfaceCapabilities2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfaceFullScreenExclusiveWin32InfoEXT, PhysicalDeviceSurfaceInfo2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfaceFullScreenExclusiveWin32InfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_line_rasterization ===
+  template <> struct StructExtends<PhysicalDeviceLineRasterizationFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceLineRasterizationFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceLineRasterizationPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineRasterizationLineStateCreateInfoEXT, PipelineRasterizationStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_shader_atomic_float ===
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicFloatFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicFloatFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_index_type_uint8 ===
+  template <> struct StructExtends<PhysicalDeviceIndexTypeUint8FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceIndexTypeUint8FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_extended_dynamic_state ===
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicStateFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicStateFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_pipeline_executable_properties ===
+  template <> struct StructExtends<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineExecutablePropertiesFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_shader_atomic_float2 ===
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicFloat2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderAtomicFloat2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_surface_maintenance1 ===
+  template <> struct StructExtends<SurfacePresentModeEXT, PhysicalDeviceSurfaceInfo2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfacePresentScalingCapabilitiesEXT, SurfaceCapabilities2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SurfacePresentModeCompatibilityEXT, SurfaceCapabilities2KHR>{ enum { value = true }; };
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+  template <> struct StructExtends<PhysicalDeviceSwapchainMaintenance1FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSwapchainMaintenance1FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainPresentFenceInfoEXT, PresentInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainPresentModesCreateInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainPresentModeInfoEXT, PresentInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainPresentScalingCreateInfoEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+
+  //=== VK_NV_device_generated_commands ===
+  template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDeviceGeneratedCommandsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<GraphicsPipelineShaderGroupsCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_inherited_viewport_scissor ===
+  template <> struct StructExtends<PhysicalDeviceInheritedViewportScissorFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInheritedViewportScissorFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<CommandBufferInheritanceViewportScissorInfoNV, CommandBufferInheritanceInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_texel_buffer_alignment ===
+  template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTexelBufferAlignmentFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_QCOM_render_pass_transform ===
+  template <> struct StructExtends<RenderPassTransformBeginInfoQCOM, RenderPassBeginInfo>{ enum { value = true }; };
+  template <> struct StructExtends<CommandBufferInheritanceRenderPassTransformInfoQCOM, CommandBufferInheritanceInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_device_memory_report ===
+  template <> struct StructExtends<PhysicalDeviceDeviceMemoryReportFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDeviceMemoryReportFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceDeviceMemoryReportCreateInfoEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_robustness2 ===
+  template <> struct StructExtends<PhysicalDeviceRobustness2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRobustness2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRobustness2PropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_custom_border_color ===
+  template <> struct StructExtends<SamplerCustomBorderColorCreateInfoEXT, SamplerCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCustomBorderColorPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCustomBorderColorFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCustomBorderColorFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_pipeline_library ===
+  template <> struct StructExtends<PipelineLibraryCreateInfoKHR, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_present_barrier ===
+  template <> struct StructExtends<PhysicalDevicePresentBarrierFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePresentBarrierFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SurfaceCapabilitiesPresentBarrierNV, SurfaceCapabilities2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<SwapchainPresentBarrierCreateInfoNV, SwapchainCreateInfoKHR>{ enum { value = true }; };
+
+  //=== VK_KHR_present_id ===
+  template <> struct StructExtends<PresentIdKHR, PresentInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePresentIdFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePresentIdFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+  template <> struct StructExtends<VideoEncodeCapabilitiesKHR, VideoCapabilitiesKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeUsageInfoKHR, VideoProfileInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeUsageInfoKHR, QueryPoolCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeRateControlInfoKHR, VideoCodingControlInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<VideoEncodeRateControlLayerInfoKHR, VideoCodingControlInfoKHR>{ enum { value = true }; };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_device_diagnostics_config ===
+  template <> struct StructExtends<PhysicalDeviceDiagnosticsConfigFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDiagnosticsConfigFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceDiagnosticsConfigCreateInfoNV, DeviceCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, InstanceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, BufferViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, SemaphoreCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalObjectCreateInfoEXT, EventCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalDeviceInfoEXT, ExportMetalObjectsInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalCommandQueueInfoEXT, ExportMetalObjectsInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalBufferInfoEXT, ExportMetalObjectsInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<ImportMetalBufferInfoEXT, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalTextureInfoEXT, ExportMetalObjectsInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<ImportMetalTextureInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalIOSurfaceInfoEXT, ExportMetalObjectsInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<ImportMetalIOSurfaceInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ExportMetalSharedEventInfoEXT, ExportMetalObjectsInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<ImportMetalSharedEventInfoEXT, SemaphoreCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImportMetalSharedEventInfoEXT, EventCreateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+  template <> struct StructExtends<QueueFamilyCheckpointProperties2NV, QueueFamilyProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_descriptor_buffer ===
+  template <> struct StructExtends<PhysicalDeviceDescriptorBufferPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorBufferFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorBufferFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DescriptorBufferBindingPushDescriptorBufferHandleEXT, DescriptorBufferBindingInfoEXT>{ enum { value = true }; };
+  template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, BufferCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, SamplerCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, AccelerationStructureCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<OpaqueCaptureDescriptorDataCreateInfoEXT, AccelerationStructureCreateInfoNV>{ enum { value = true }; };
+
+  //=== VK_EXT_graphics_pipeline_library ===
+  template <> struct StructExtends<PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<GraphicsPipelineLibraryCreateInfoEXT, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_AMD_shader_early_and_late_fragment_tests ===
+  template <> struct StructExtends<PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_fragment_shader_barycentric ===
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderBarycentricFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShaderBarycentricPropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_KHR_shader_subgroup_uniform_control_flow ===
+  template <> struct StructExtends<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRateEnumsFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentShadingRateEnumsPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineFragmentShadingRateEnumStateCreateInfoNV, GraphicsPipelineCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_ray_tracing_motion_blur ===
+  template <> struct StructExtends<AccelerationStructureGeometryMotionTrianglesDataNV, AccelerationStructureGeometryTrianglesDataKHR>{ enum { value = true }; };
+  template <> struct StructExtends<AccelerationStructureMotionInfoNV, AccelerationStructureCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingMotionBlurFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingMotionBlurFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_mesh_shader ===
+  template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMeshShaderFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMeshShaderPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_ycbcr_2plane_444_formats ===
+  template <> struct StructExtends<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_fragment_density_map2 ===
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMap2PropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_QCOM_rotated_copy_commands ===
+  template <> struct StructExtends<CopyCommandTransformInfoQCOM, BufferImageCopy2>{ enum { value = true }; };
+  template <> struct StructExtends<CopyCommandTransformInfoQCOM, ImageBlit2>{ enum { value = true }; };
+
+  //=== VK_KHR_workgroup_memory_explicit_layout ===
+  template <> struct StructExtends<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_image_compression_control ===
+  template <> struct StructExtends<PhysicalDeviceImageCompressionControlFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageCompressionControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageCompressionControlEXT, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageCompressionControlEXT, SwapchainCreateInfoKHR>{ enum { value = true }; };
+  template <> struct StructExtends<ImageCompressionControlEXT, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<ImageCompressionPropertiesEXT, ImageFormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<ImageCompressionPropertiesEXT, SurfaceFormat2KHR>{ enum { value = true }; };
+  template <> struct StructExtends<ImageCompressionPropertiesEXT, SubresourceLayout2EXT>{ enum { value = true }; };
+
+  //=== VK_EXT_attachment_feedback_loop_layout ===
+  template <> struct StructExtends<PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_4444_formats ===
+  template <> struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevice4444FormatsFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_device_fault ===
+  template <> struct StructExtends<PhysicalDeviceFaultFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFaultFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_rgba10x6_formats ===
+  template <> struct StructExtends<PhysicalDeviceRGBA10X6FormatsFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRGBA10X6FormatsFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+  template <> struct StructExtends<PhysicalDeviceRayTracingPipelineFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingPipelineFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingPipelinePropertiesKHR, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_KHR_ray_query ===
+  template <> struct StructExtends<PhysicalDeviceRayQueryFeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayQueryFeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+  template <> struct StructExtends<PhysicalDeviceVertexInputDynamicStateFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceVertexInputDynamicStateFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_physical_device_drm ===
+  template <> struct StructExtends<PhysicalDeviceDrmPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_device_address_binding_report ===
+  template <> struct StructExtends<PhysicalDeviceAddressBindingReportFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceAddressBindingReportFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<DeviceAddressBindingCallbackDataEXT, DebugUtilsMessengerCallbackDataEXT>{ enum { value = true }; };
+
+  //=== VK_EXT_depth_clip_control ===
+  template <> struct StructExtends<PhysicalDeviceDepthClipControlFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDepthClipControlFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineViewportDepthClipControlCreateInfoEXT, PipelineViewportStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_primitive_topology_list_restart ===
+  template <> struct StructExtends<PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+  template <> struct StructExtends<ImportMemoryZirconHandleInfoFUCHSIA, MemoryAllocateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  template <> struct StructExtends<ImportMemoryBufferCollectionFUCHSIA, MemoryAllocateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BufferCollectionImageCreateInfoFUCHSIA, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<BufferCollectionBufferCreateInfoFUCHSIA, BufferCreateInfo>{ enum { value = true }; };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+  template <> struct StructExtends<SubpassShadingPipelineCreateInfoHUAWEI, ComputePipelineCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubpassShadingFeaturesHUAWEI, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubpassShadingFeaturesHUAWEI, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubpassShadingPropertiesHUAWEI, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_HUAWEI_invocation_mask ===
+  template <> struct StructExtends<PhysicalDeviceInvocationMaskFeaturesHUAWEI, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceInvocationMaskFeaturesHUAWEI, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_external_memory_rdma ===
+  template <> struct StructExtends<PhysicalDeviceExternalMemoryRDMAFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExternalMemoryRDMAFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_pipeline_properties ===
+  template <> struct StructExtends<PhysicalDevicePipelinePropertiesFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelinePropertiesFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_multisampled_render_to_single_sampled ===
+  template <> struct StructExtends<PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SubpassResolvePerformanceQueryEXT, FormatProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<MultisampledRenderToSingleSampledInfoEXT, SubpassDescription2>{ enum { value = true }; };
+  template <> struct StructExtends<MultisampledRenderToSingleSampledInfoEXT, RenderingInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicState2FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicState2FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_color_write_enable ===
+  template <> struct StructExtends<PhysicalDeviceColorWriteEnableFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceColorWriteEnableFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineColorWriteCreateInfoEXT, PipelineColorBlendStateCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_primitives_generated_query ===
+  template <> struct StructExtends<PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+  template <> struct StructExtends<PhysicalDeviceRayTracingMaintenance1FeaturesKHR, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingMaintenance1FeaturesKHR, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_image_view_min_lod ===
+  template <> struct StructExtends<PhysicalDeviceImageViewMinLodFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageViewMinLodFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<ImageViewMinLodCreateInfoEXT, ImageViewCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_multi_draw ===
+  template <> struct StructExtends<PhysicalDeviceMultiDrawFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiDrawFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiDrawPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_image_2d_view_of_3d ===
+  template <> struct StructExtends<PhysicalDeviceImage2DViewOf3DFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImage2DViewOf3DFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_opacity_micromap ===
+  template <> struct StructExtends<PhysicalDeviceOpacityMicromapFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceOpacityMicromapFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceOpacityMicromapPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<AccelerationStructureTrianglesOpacityMicromapEXT, AccelerationStructureGeometryTrianglesDataKHR>{ enum { value = true }; };
+
+  //=== VK_EXT_border_color_swizzle ===
+  template <> struct StructExtends<PhysicalDeviceBorderColorSwizzleFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceBorderColorSwizzleFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<SamplerBorderColorComponentMappingCreateInfoEXT, SamplerCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_pageable_device_local_memory ===
+  template <> struct StructExtends<PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+  template <> struct StructExtends<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_depth_clamp_zero_one ===
+  template <> struct StructExtends<PhysicalDeviceDepthClampZeroOneFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceDepthClampZeroOneFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_non_seamless_cube_map ===
+  template <> struct StructExtends<PhysicalDeviceNonSeamlessCubeMapFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceNonSeamlessCubeMapFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_QCOM_fragment_density_map_offset ===
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<SubpassFragmentDensityMapOffsetEndInfoQCOM, SubpassEndInfo>{ enum { value = true }; };
+
+  //=== VK_NV_copy_memory_indirect ===
+  template <> struct StructExtends<PhysicalDeviceCopyMemoryIndirectFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCopyMemoryIndirectFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceCopyMemoryIndirectPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_memory_decompression ===
+  template <> struct StructExtends<PhysicalDeviceMemoryDecompressionFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMemoryDecompressionFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMemoryDecompressionPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_NV_linear_color_attachment ===
+  template <> struct StructExtends<PhysicalDeviceLinearColorAttachmentFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceLinearColorAttachmentFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_image_compression_control_swapchain ===
+  template <> struct StructExtends<PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_QCOM_image_processing ===
+  template <> struct StructExtends<ImageViewSampleWeightCreateInfoQCOM, ImageViewCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageProcessingFeaturesQCOM, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageProcessingFeaturesQCOM, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceImageProcessingPropertiesQCOM, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicState3FeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicState3FeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceExtendedDynamicState3PropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+  //=== VK_EXT_subpass_merge_feedback ===
+  template <> struct StructExtends<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceSubpassMergeFeedbackFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassCreationControlEXT, RenderPassCreateInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassCreationControlEXT, SubpassDescription2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassCreationFeedbackCreateInfoEXT, RenderPassCreateInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<RenderPassSubpassFeedbackCreateInfoEXT, SubpassDescription2>{ enum { value = true }; };
+
+  //=== VK_LUNARG_direct_driver_loading ===
+  template <> struct StructExtends<DirectDriverLoadingListLUNARG, InstanceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_shader_module_identifier ===
+  template <> struct StructExtends<PhysicalDeviceShaderModuleIdentifierFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderModuleIdentifierFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderModuleIdentifierPropertiesEXT, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PipelineShaderStageModuleIdentifierCreateInfoEXT, PipelineShaderStageCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_rasterization_order_attachment_access ===
+  template <> struct StructExtends<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_optical_flow ===
+  template <> struct StructExtends<PhysicalDeviceOpticalFlowFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceOpticalFlowFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceOpticalFlowPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<OpticalFlowImageFormatInfoNV, PhysicalDeviceImageFormatInfo2>{ enum { value = true }; };
+  template <> struct StructExtends<OpticalFlowImageFormatInfoNV, ImageCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<OpticalFlowSessionCreatePrivateDataInfoNV, OpticalFlowSessionCreateInfoNV>{ enum { value = true }; };
+
+  //=== VK_EXT_legacy_dithering ===
+  template <> struct StructExtends<PhysicalDeviceLegacyDitheringFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceLegacyDitheringFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_pipeline_protected_access ===
+  template <> struct StructExtends<PhysicalDevicePipelineProtectedAccessFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDevicePipelineProtectedAccessFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_QCOM_tile_properties ===
+  template <> struct StructExtends<PhysicalDeviceTilePropertiesFeaturesQCOM, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceTilePropertiesFeaturesQCOM, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_SEC_amigo_profiling ===
+  template <> struct StructExtends<PhysicalDeviceAmigoProfilingFeaturesSEC, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceAmigoProfilingFeaturesSEC, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<AmigoProfilingSubmitInfoSEC, SubmitInfo>{ enum { value = true }; };
+
+  //=== VK_QCOM_multiview_per_view_viewports ===
+  template <> struct StructExtends<PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+  template <> struct StructExtends<PhysicalDeviceRayTracingInvocationReorderPropertiesNV, PhysicalDeviceProperties2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingInvocationReorderFeaturesNV, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceRayTracingInvocationReorderFeaturesNV, DeviceCreateInfo>{ enum { value = true }; };
+
+  //=== VK_EXT_mutable_descriptor_type ===
+  template <> struct StructExtends<PhysicalDeviceMutableDescriptorTypeFeaturesEXT, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceMutableDescriptorTypeFeaturesEXT, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MutableDescriptorTypeCreateInfoEXT, DescriptorSetLayoutCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<MutableDescriptorTypeCreateInfoEXT, DescriptorPoolCreateInfo>{ enum { value = true }; };
+
+  //=== VK_ARM_shader_core_builtins ===
+  template <> struct StructExtends<PhysicalDeviceShaderCoreBuiltinsFeaturesARM, PhysicalDeviceFeatures2>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderCoreBuiltinsFeaturesARM, DeviceCreateInfo>{ enum { value = true }; };
+  template <> struct StructExtends<PhysicalDeviceShaderCoreBuiltinsPropertiesARM, PhysicalDeviceProperties2>{ enum { value = true }; };
+
+#endif // VULKAN_HPP_DISABLE_ENHANCED_MODE
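
The StructExtends specializations above are what vk::StructureChain consults to check, at compile time, that every structure appended to a pNext chain is actually allowed to extend its base structure. A minimal sketch of how an application exercises them, assuming enhanced mode is enabled, the default dispatcher has vkGetPhysicalDeviceFeatures2 resolved, and the physicalDevice handle is obtained elsewhere:

    #include <vulkan/vulkan.hpp>

    // Compiles only because StructExtends<PhysicalDeviceMeshShaderFeaturesNV, PhysicalDeviceFeatures2>
    // is specialized to true above; chaining a structure without such a specialization is a compile error.
    bool supportsMeshShaders( vk::PhysicalDevice physicalDevice )
    {
      auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2,
                                               vk::PhysicalDeviceMeshShaderFeaturesNV>();
      return chain.get<vk::PhysicalDeviceMeshShaderFeaturesNV>().meshShader == VK_TRUE;
    }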
+
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+  class DynamicLoader
+  {
+  public:
+#  ifdef VULKAN_HPP_NO_EXCEPTIONS
+    DynamicLoader( std::string const & vulkanLibraryName = {} ) VULKAN_HPP_NOEXCEPT
+#  else
+    DynamicLoader( std::string const & vulkanLibraryName = {} )
+#  endif
+    {
+      if ( !vulkanLibraryName.empty() )
+      {
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined(__Fuchsia__)
+        m_library = dlopen( vulkanLibraryName.c_str(), RTLD_NOW | RTLD_LOCAL );
+#  elif defined( _WIN32 )
+        m_library = ::LoadLibraryA( vulkanLibraryName.c_str() );
+#  else
+#    error unsupported platform
+#  endif
+      }
+      else
+      {
+#  if defined( __unix__ ) || defined( __QNXNTO__ ) || defined(__Fuchsia__)
+        m_library = dlopen( "libvulkan.so", RTLD_NOW | RTLD_LOCAL );
+        if ( m_library == nullptr )
+        {
+          m_library = dlopen( "libvulkan.so.1", RTLD_NOW | RTLD_LOCAL );
+        }
+#  elif defined( __APPLE__ )
+        m_library = dlopen( "libvulkan.dylib", RTLD_NOW | RTLD_LOCAL );
+#  elif defined( _WIN32 )
+        m_library = ::LoadLibraryA( "vulkan-1.dll" );
+#  else
+#    error unsupported platform
+#  endif
+      }
+
+#ifndef VULKAN_HPP_NO_EXCEPTIONS
+      if ( m_library == nullptr )
+      {
+        // NOTE there should be an InitializationFailedError, but msvc insists that the symbol does not exist within the scope of this function.
+        throw std::runtime_error( "Failed to load vulkan library!" );
+      }
+#endif
+    }
+
+    DynamicLoader( DynamicLoader const & ) = delete;
+
+    DynamicLoader( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT : m_library(other.m_library)
+    {
+      other.m_library = nullptr;
+    }
+
+    DynamicLoader &operator=( DynamicLoader const & ) = delete;
+
+    DynamicLoader &operator=( DynamicLoader && other ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap(m_library, other.m_library);
+      return *this;
+    }
+
+    ~DynamicLoader() VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_library )
+      {
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined(__Fuchsia__)
+        dlclose( m_library );
+#  elif defined( _WIN32 )
+        ::FreeLibrary( m_library );
+#  else
+#    error unsupported platform
+#  endif
+      }
+    }
+
+    template <typename T>
+    T getProcAddress( const char* function ) const VULKAN_HPP_NOEXCEPT
+    {
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined(__Fuchsia__)
+      return (T)dlsym( m_library, function );
+#  elif defined( _WIN32 )
+      return (T)::GetProcAddress( m_library, function );
+#  else
+#    error unsupported platform
+#  endif
+    }
+
+    bool success() const VULKAN_HPP_NOEXCEPT { return m_library != nullptr; }
+
+  private:
+#  if defined( __unix__ ) || defined( __APPLE__ ) || defined( __QNXNTO__ ) || defined(__Fuchsia__)
+    void * m_library;
+#  elif defined( _WIN32 )
+    ::HINSTANCE m_library;
+#  else
+#    error unsupported platform
+#  endif
+  };
+#endif
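
DynamicLoader is a thin RAII wrapper over dlopen/LoadLibraryA that lets the Vulkan loader library be resolved at run time instead of being linked against. A minimal usage sketch, assuming exceptions are enabled and the resulting vkGetInstanceProcAddr is fed into the DispatchLoaderDynamic defined below; the instance-creation parameters are placeholders:

    #include <vulkan/vulkan.hpp>

    int main()
    {
      // Opens libvulkan.so.1 / libvulkan.dylib / vulkan-1.dll; throws std::runtime_error on failure.
      vk::DynamicLoader loader;
      auto vkGetInstanceProcAddr = loader.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );

      // Populate the dispatch table with the global entry points first...
      vk::DispatchLoaderDynamic dispatcher( vkGetInstanceProcAddr );
      vk::Instance instance = vk::createInstance( vk::InstanceCreateInfo{}, nullptr, dispatcher );

      // ...then re-initialize with the instance so instance- and device-level functions resolve too.
      dispatcher.init( instance );

      instance.destroy( nullptr, dispatcher );
      return 0;
    }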
+
+
+  using PFN_dummy = void ( * )();
+
+  class DispatchLoaderDynamic : public DispatchLoaderBase
+  {
+  public:
+
+  //=== VK_VERSION_1_0 ===
+    PFN_vkCreateInstance vkCreateInstance = 0;
+    PFN_vkDestroyInstance vkDestroyInstance = 0;
+    PFN_vkEnumeratePhysicalDevices vkEnumeratePhysicalDevices = 0;
+    PFN_vkGetPhysicalDeviceFeatures vkGetPhysicalDeviceFeatures = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties vkGetPhysicalDeviceFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties vkGetPhysicalDeviceImageFormatProperties = 0;
+    PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties vkGetPhysicalDeviceQueueFamilyProperties = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties = 0;
+    PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = 0;
+    PFN_vkGetDeviceProcAddr vkGetDeviceProcAddr = 0;
+    PFN_vkCreateDevice vkCreateDevice = 0;
+    PFN_vkDestroyDevice vkDestroyDevice = 0;
+    PFN_vkEnumerateInstanceExtensionProperties vkEnumerateInstanceExtensionProperties = 0;
+    PFN_vkEnumerateDeviceExtensionProperties vkEnumerateDeviceExtensionProperties = 0;
+    PFN_vkEnumerateInstanceLayerProperties vkEnumerateInstanceLayerProperties = 0;
+    PFN_vkEnumerateDeviceLayerProperties vkEnumerateDeviceLayerProperties = 0;
+    PFN_vkGetDeviceQueue vkGetDeviceQueue = 0;
+    PFN_vkQueueSubmit vkQueueSubmit = 0;
+    PFN_vkQueueWaitIdle vkQueueWaitIdle = 0;
+    PFN_vkDeviceWaitIdle vkDeviceWaitIdle = 0;
+    PFN_vkAllocateMemory vkAllocateMemory = 0;
+    PFN_vkFreeMemory vkFreeMemory = 0;
+    PFN_vkMapMemory vkMapMemory = 0;
+    PFN_vkUnmapMemory vkUnmapMemory = 0;
+    PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges = 0;
+    PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges = 0;
+    PFN_vkGetDeviceMemoryCommitment vkGetDeviceMemoryCommitment = 0;
+    PFN_vkBindBufferMemory vkBindBufferMemory = 0;
+    PFN_vkBindImageMemory vkBindImageMemory = 0;
+    PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements = 0;
+    PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements = 0;
+    PFN_vkGetImageSparseMemoryRequirements vkGetImageSparseMemoryRequirements = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties vkGetPhysicalDeviceSparseImageFormatProperties = 0;
+    PFN_vkQueueBindSparse vkQueueBindSparse = 0;
+    PFN_vkCreateFence vkCreateFence = 0;
+    PFN_vkDestroyFence vkDestroyFence = 0;
+    PFN_vkResetFences vkResetFences = 0;
+    PFN_vkGetFenceStatus vkGetFenceStatus = 0;
+    PFN_vkWaitForFences vkWaitForFences = 0;
+    PFN_vkCreateSemaphore vkCreateSemaphore = 0;
+    PFN_vkDestroySemaphore vkDestroySemaphore = 0;
+    PFN_vkCreateEvent vkCreateEvent = 0;
+    PFN_vkDestroyEvent vkDestroyEvent = 0;
+    PFN_vkGetEventStatus vkGetEventStatus = 0;
+    PFN_vkSetEvent vkSetEvent = 0;
+    PFN_vkResetEvent vkResetEvent = 0;
+    PFN_vkCreateQueryPool vkCreateQueryPool = 0;
+    PFN_vkDestroyQueryPool vkDestroyQueryPool = 0;
+    PFN_vkGetQueryPoolResults vkGetQueryPoolResults = 0;
+    PFN_vkCreateBuffer vkCreateBuffer = 0;
+    PFN_vkDestroyBuffer vkDestroyBuffer = 0;
+    PFN_vkCreateBufferView vkCreateBufferView = 0;
+    PFN_vkDestroyBufferView vkDestroyBufferView = 0;
+    PFN_vkCreateImage vkCreateImage = 0;
+    PFN_vkDestroyImage vkDestroyImage = 0;
+    PFN_vkGetImageSubresourceLayout vkGetImageSubresourceLayout = 0;
+    PFN_vkCreateImageView vkCreateImageView = 0;
+    PFN_vkDestroyImageView vkDestroyImageView = 0;
+    PFN_vkCreateShaderModule vkCreateShaderModule = 0;
+    PFN_vkDestroyShaderModule vkDestroyShaderModule = 0;
+    PFN_vkCreatePipelineCache vkCreatePipelineCache = 0;
+    PFN_vkDestroyPipelineCache vkDestroyPipelineCache = 0;
+    PFN_vkGetPipelineCacheData vkGetPipelineCacheData = 0;
+    PFN_vkMergePipelineCaches vkMergePipelineCaches = 0;
+    PFN_vkCreateGraphicsPipelines vkCreateGraphicsPipelines = 0;
+    PFN_vkCreateComputePipelines vkCreateComputePipelines = 0;
+    PFN_vkDestroyPipeline vkDestroyPipeline = 0;
+    PFN_vkCreatePipelineLayout vkCreatePipelineLayout = 0;
+    PFN_vkDestroyPipelineLayout vkDestroyPipelineLayout = 0;
+    PFN_vkCreateSampler vkCreateSampler = 0;
+    PFN_vkDestroySampler vkDestroySampler = 0;
+    PFN_vkCreateDescriptorSetLayout vkCreateDescriptorSetLayout = 0;
+    PFN_vkDestroyDescriptorSetLayout vkDestroyDescriptorSetLayout = 0;
+    PFN_vkCreateDescriptorPool vkCreateDescriptorPool = 0;
+    PFN_vkDestroyDescriptorPool vkDestroyDescriptorPool = 0;
+    PFN_vkResetDescriptorPool vkResetDescriptorPool = 0;
+    PFN_vkAllocateDescriptorSets vkAllocateDescriptorSets = 0;
+    PFN_vkFreeDescriptorSets vkFreeDescriptorSets = 0;
+    PFN_vkUpdateDescriptorSets vkUpdateDescriptorSets = 0;
+    PFN_vkCreateFramebuffer vkCreateFramebuffer = 0;
+    PFN_vkDestroyFramebuffer vkDestroyFramebuffer = 0;
+    PFN_vkCreateRenderPass vkCreateRenderPass = 0;
+    PFN_vkDestroyRenderPass vkDestroyRenderPass = 0;
+    PFN_vkGetRenderAreaGranularity vkGetRenderAreaGranularity = 0;
+    PFN_vkCreateCommandPool vkCreateCommandPool = 0;
+    PFN_vkDestroyCommandPool vkDestroyCommandPool = 0;
+    PFN_vkResetCommandPool vkResetCommandPool = 0;
+    PFN_vkAllocateCommandBuffers vkAllocateCommandBuffers = 0;
+    PFN_vkFreeCommandBuffers vkFreeCommandBuffers = 0;
+    PFN_vkBeginCommandBuffer vkBeginCommandBuffer = 0;
+    PFN_vkEndCommandBuffer vkEndCommandBuffer = 0;
+    PFN_vkResetCommandBuffer vkResetCommandBuffer = 0;
+    PFN_vkCmdBindPipeline vkCmdBindPipeline = 0;
+    PFN_vkCmdSetViewport vkCmdSetViewport = 0;
+    PFN_vkCmdSetScissor vkCmdSetScissor = 0;
+    PFN_vkCmdSetLineWidth vkCmdSetLineWidth = 0;
+    PFN_vkCmdSetDepthBias vkCmdSetDepthBias = 0;
+    PFN_vkCmdSetBlendConstants vkCmdSetBlendConstants = 0;
+    PFN_vkCmdSetDepthBounds vkCmdSetDepthBounds = 0;
+    PFN_vkCmdSetStencilCompareMask vkCmdSetStencilCompareMask = 0;
+    PFN_vkCmdSetStencilWriteMask vkCmdSetStencilWriteMask = 0;
+    PFN_vkCmdSetStencilReference vkCmdSetStencilReference = 0;
+    PFN_vkCmdBindDescriptorSets vkCmdBindDescriptorSets = 0;
+    PFN_vkCmdBindIndexBuffer vkCmdBindIndexBuffer = 0;
+    PFN_vkCmdBindVertexBuffers vkCmdBindVertexBuffers = 0;
+    PFN_vkCmdDraw vkCmdDraw = 0;
+    PFN_vkCmdDrawIndexed vkCmdDrawIndexed = 0;
+    PFN_vkCmdDrawIndirect vkCmdDrawIndirect = 0;
+    PFN_vkCmdDrawIndexedIndirect vkCmdDrawIndexedIndirect = 0;
+    PFN_vkCmdDispatch vkCmdDispatch = 0;
+    PFN_vkCmdDispatchIndirect vkCmdDispatchIndirect = 0;
+    PFN_vkCmdCopyBuffer vkCmdCopyBuffer = 0;
+    PFN_vkCmdCopyImage vkCmdCopyImage = 0;
+    PFN_vkCmdBlitImage vkCmdBlitImage = 0;
+    PFN_vkCmdCopyBufferToImage vkCmdCopyBufferToImage = 0;
+    PFN_vkCmdCopyImageToBuffer vkCmdCopyImageToBuffer = 0;
+    PFN_vkCmdUpdateBuffer vkCmdUpdateBuffer = 0;
+    PFN_vkCmdFillBuffer vkCmdFillBuffer = 0;
+    PFN_vkCmdClearColorImage vkCmdClearColorImage = 0;
+    PFN_vkCmdClearDepthStencilImage vkCmdClearDepthStencilImage = 0;
+    PFN_vkCmdClearAttachments vkCmdClearAttachments = 0;
+    PFN_vkCmdResolveImage vkCmdResolveImage = 0;
+    PFN_vkCmdSetEvent vkCmdSetEvent = 0;
+    PFN_vkCmdResetEvent vkCmdResetEvent = 0;
+    PFN_vkCmdWaitEvents vkCmdWaitEvents = 0;
+    PFN_vkCmdPipelineBarrier vkCmdPipelineBarrier = 0;
+    PFN_vkCmdBeginQuery vkCmdBeginQuery = 0;
+    PFN_vkCmdEndQuery vkCmdEndQuery = 0;
+    PFN_vkCmdResetQueryPool vkCmdResetQueryPool = 0;
+    PFN_vkCmdWriteTimestamp vkCmdWriteTimestamp = 0;
+    PFN_vkCmdCopyQueryPoolResults vkCmdCopyQueryPoolResults = 0;
+    PFN_vkCmdPushConstants vkCmdPushConstants = 0;
+    PFN_vkCmdBeginRenderPass vkCmdBeginRenderPass = 0;
+    PFN_vkCmdNextSubpass vkCmdNextSubpass = 0;
+    PFN_vkCmdEndRenderPass vkCmdEndRenderPass = 0;
+    PFN_vkCmdExecuteCommands vkCmdExecuteCommands = 0;
+
+  //=== VK_VERSION_1_1 ===
+    PFN_vkEnumerateInstanceVersion vkEnumerateInstanceVersion = 0;
+    PFN_vkBindBufferMemory2 vkBindBufferMemory2 = 0;
+    PFN_vkBindImageMemory2 vkBindImageMemory2 = 0;
+    PFN_vkGetDeviceGroupPeerMemoryFeatures vkGetDeviceGroupPeerMemoryFeatures = 0;
+    PFN_vkCmdSetDeviceMask vkCmdSetDeviceMask = 0;
+    PFN_vkCmdDispatchBase vkCmdDispatchBase = 0;
+    PFN_vkEnumeratePhysicalDeviceGroups vkEnumeratePhysicalDeviceGroups = 0;
+    PFN_vkGetImageMemoryRequirements2 vkGetImageMemoryRequirements2 = 0;
+    PFN_vkGetBufferMemoryRequirements2 vkGetBufferMemoryRequirements2 = 0;
+    PFN_vkGetImageSparseMemoryRequirements2 vkGetImageSparseMemoryRequirements2 = 0;
+    PFN_vkGetPhysicalDeviceFeatures2 vkGetPhysicalDeviceFeatures2 = 0;
+    PFN_vkGetPhysicalDeviceProperties2 vkGetPhysicalDeviceProperties2 = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties2 vkGetPhysicalDeviceFormatProperties2 = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2 vkGetPhysicalDeviceImageFormatProperties2 = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2 vkGetPhysicalDeviceQueueFamilyProperties2 = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties2 vkGetPhysicalDeviceMemoryProperties2 = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 vkGetPhysicalDeviceSparseImageFormatProperties2 = 0;
+    PFN_vkTrimCommandPool vkTrimCommandPool = 0;
+    PFN_vkGetDeviceQueue2 vkGetDeviceQueue2 = 0;
+    PFN_vkCreateSamplerYcbcrConversion vkCreateSamplerYcbcrConversion = 0;
+    PFN_vkDestroySamplerYcbcrConversion vkDestroySamplerYcbcrConversion = 0;
+    PFN_vkCreateDescriptorUpdateTemplate vkCreateDescriptorUpdateTemplate = 0;
+    PFN_vkDestroyDescriptorUpdateTemplate vkDestroyDescriptorUpdateTemplate = 0;
+    PFN_vkUpdateDescriptorSetWithTemplate vkUpdateDescriptorSetWithTemplate = 0;
+    PFN_vkGetPhysicalDeviceExternalBufferProperties vkGetPhysicalDeviceExternalBufferProperties = 0;
+    PFN_vkGetPhysicalDeviceExternalFenceProperties vkGetPhysicalDeviceExternalFenceProperties = 0;
+    PFN_vkGetPhysicalDeviceExternalSemaphoreProperties vkGetPhysicalDeviceExternalSemaphoreProperties = 0;
+    PFN_vkGetDescriptorSetLayoutSupport vkGetDescriptorSetLayoutSupport = 0;
+
+  //=== VK_VERSION_1_2 ===
+    PFN_vkCmdDrawIndirectCount vkCmdDrawIndirectCount = 0;
+    PFN_vkCmdDrawIndexedIndirectCount vkCmdDrawIndexedIndirectCount = 0;
+    PFN_vkCreateRenderPass2 vkCreateRenderPass2 = 0;
+    PFN_vkCmdBeginRenderPass2 vkCmdBeginRenderPass2 = 0;
+    PFN_vkCmdNextSubpass2 vkCmdNextSubpass2 = 0;
+    PFN_vkCmdEndRenderPass2 vkCmdEndRenderPass2 = 0;
+    PFN_vkResetQueryPool vkResetQueryPool = 0;
+    PFN_vkGetSemaphoreCounterValue vkGetSemaphoreCounterValue = 0;
+    PFN_vkWaitSemaphores vkWaitSemaphores = 0;
+    PFN_vkSignalSemaphore vkSignalSemaphore = 0;
+    PFN_vkGetBufferDeviceAddress vkGetBufferDeviceAddress = 0;
+    PFN_vkGetBufferOpaqueCaptureAddress vkGetBufferOpaqueCaptureAddress = 0;
+    PFN_vkGetDeviceMemoryOpaqueCaptureAddress vkGetDeviceMemoryOpaqueCaptureAddress = 0;
+
+  //=== VK_VERSION_1_3 ===
+    PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties = 0;
+    PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot = 0;
+    PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot = 0;
+    PFN_vkSetPrivateData vkSetPrivateData = 0;
+    PFN_vkGetPrivateData vkGetPrivateData = 0;
+    PFN_vkCmdSetEvent2 vkCmdSetEvent2 = 0;
+    PFN_vkCmdResetEvent2 vkCmdResetEvent2 = 0;
+    PFN_vkCmdWaitEvents2 vkCmdWaitEvents2 = 0;
+    PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2 = 0;
+    PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2 = 0;
+    PFN_vkQueueSubmit2 vkQueueSubmit2 = 0;
+    PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2 = 0;
+    PFN_vkCmdCopyImage2 vkCmdCopyImage2 = 0;
+    PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2 = 0;
+    PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2 = 0;
+    PFN_vkCmdBlitImage2 vkCmdBlitImage2 = 0;
+    PFN_vkCmdResolveImage2 vkCmdResolveImage2 = 0;
+    PFN_vkCmdBeginRendering vkCmdBeginRendering = 0;
+    PFN_vkCmdEndRendering vkCmdEndRendering = 0;
+    PFN_vkCmdSetCullMode vkCmdSetCullMode = 0;
+    PFN_vkCmdSetFrontFace vkCmdSetFrontFace = 0;
+    PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology = 0;
+    PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount = 0;
+    PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount = 0;
+    PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2 = 0;
+    PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable = 0;
+    PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable = 0;
+    PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp = 0;
+    PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable = 0;
+    PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable = 0;
+    PFN_vkCmdSetStencilOp vkCmdSetStencilOp = 0;
+    PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable = 0;
+    PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable = 0;
+    PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable = 0;
+    PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements = 0;
+    PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements = 0;
+    PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements = 0;
+
+  //=== VK_KHR_surface ===
+    PFN_vkDestroySurfaceKHR vkDestroySurfaceKHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = 0;
+    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = 0;
+
+  //=== VK_KHR_swapchain ===
+    PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = 0;
+    PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = 0;
+    PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = 0;
+    PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = 0;
+    PFN_vkQueuePresentKHR vkQueuePresentKHR = 0;
+    PFN_vkGetDeviceGroupPresentCapabilitiesKHR vkGetDeviceGroupPresentCapabilitiesKHR = 0;
+    PFN_vkGetDeviceGroupSurfacePresentModesKHR vkGetDeviceGroupSurfacePresentModesKHR = 0;
+    PFN_vkGetPhysicalDevicePresentRectanglesKHR vkGetPhysicalDevicePresentRectanglesKHR = 0;
+    PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR = 0;
+
+  //=== VK_KHR_display ===
+    PFN_vkGetPhysicalDeviceDisplayPropertiesKHR vkGetPhysicalDeviceDisplayPropertiesKHR = 0;
+    PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR vkGetPhysicalDeviceDisplayPlanePropertiesKHR = 0;
+    PFN_vkGetDisplayPlaneSupportedDisplaysKHR vkGetDisplayPlaneSupportedDisplaysKHR = 0;
+    PFN_vkGetDisplayModePropertiesKHR vkGetDisplayModePropertiesKHR = 0;
+    PFN_vkCreateDisplayModeKHR vkCreateDisplayModeKHR = 0;
+    PFN_vkGetDisplayPlaneCapabilitiesKHR vkGetDisplayPlaneCapabilitiesKHR = 0;
+    PFN_vkCreateDisplayPlaneSurfaceKHR vkCreateDisplayPlaneSurfaceKHR = 0;
+
+  //=== VK_KHR_display_swapchain ===
+    PFN_vkCreateSharedSwapchainsKHR vkCreateSharedSwapchainsKHR = 0;
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+    PFN_vkCreateXlibSurfaceKHR vkCreateXlibSurfaceKHR = 0;
+    PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR vkGetPhysicalDeviceXlibPresentationSupportKHR = 0;
+#else
+    PFN_dummy vkCreateXlibSurfaceKHR_placeholder = 0;
+    PFN_dummy vkGetPhysicalDeviceXlibPresentationSupportKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+    PFN_vkCreateXcbSurfaceKHR vkCreateXcbSurfaceKHR = 0;
+    PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR vkGetPhysicalDeviceXcbPresentationSupportKHR = 0;
+#else
+    PFN_dummy vkCreateXcbSurfaceKHR_placeholder = 0;
+    PFN_dummy vkGetPhysicalDeviceXcbPresentationSupportKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+    PFN_vkCreateWaylandSurfaceKHR vkCreateWaylandSurfaceKHR = 0;
+    PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR vkGetPhysicalDeviceWaylandPresentationSupportKHR = 0;
+#else
+    PFN_dummy vkCreateWaylandSurfaceKHR_placeholder = 0;
+    PFN_dummy vkGetPhysicalDeviceWaylandPresentationSupportKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+    PFN_vkCreateAndroidSurfaceKHR vkCreateAndroidSurfaceKHR = 0;
+#else
+    PFN_dummy vkCreateAndroidSurfaceKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+    PFN_vkCreateWin32SurfaceKHR vkCreateWin32SurfaceKHR = 0;
+    PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR vkGetPhysicalDeviceWin32PresentationSupportKHR = 0;
+#else
+    PFN_dummy vkCreateWin32SurfaceKHR_placeholder = 0;
+    PFN_dummy vkGetPhysicalDeviceWin32PresentationSupportKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+    PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = 0;
+    PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = 0;
+    PFN_vkDebugReportMessageEXT vkDebugReportMessageEXT = 0;
+
+  //=== VK_EXT_debug_marker ===
+    PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT = 0;
+    PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT = 0;
+    PFN_vkCmdDebugMarkerBeginEXT vkCmdDebugMarkerBeginEXT = 0;
+    PFN_vkCmdDebugMarkerEndEXT vkCmdDebugMarkerEndEXT = 0;
+    PFN_vkCmdDebugMarkerInsertEXT vkCmdDebugMarkerInsertEXT = 0;
+
+  //=== VK_KHR_video_queue ===
+    PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR vkGetPhysicalDeviceVideoCapabilitiesKHR = 0;
+    PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR vkGetPhysicalDeviceVideoFormatPropertiesKHR = 0;
+    PFN_vkCreateVideoSessionKHR vkCreateVideoSessionKHR = 0;
+    PFN_vkDestroyVideoSessionKHR vkDestroyVideoSessionKHR = 0;
+    PFN_vkGetVideoSessionMemoryRequirementsKHR vkGetVideoSessionMemoryRequirementsKHR = 0;
+    PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR = 0;
+    PFN_vkCreateVideoSessionParametersKHR vkCreateVideoSessionParametersKHR = 0;
+    PFN_vkUpdateVideoSessionParametersKHR vkUpdateVideoSessionParametersKHR = 0;
+    PFN_vkDestroyVideoSessionParametersKHR vkDestroyVideoSessionParametersKHR = 0;
+    PFN_vkCmdBeginVideoCodingKHR vkCmdBeginVideoCodingKHR = 0;
+    PFN_vkCmdEndVideoCodingKHR vkCmdEndVideoCodingKHR = 0;
+    PFN_vkCmdControlVideoCodingKHR vkCmdControlVideoCodingKHR = 0;
+
+  //=== VK_KHR_video_decode_queue ===
+    PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR = 0;
+
+  //=== VK_EXT_transform_feedback ===
+    PFN_vkCmdBindTransformFeedbackBuffersEXT vkCmdBindTransformFeedbackBuffersEXT = 0;
+    PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT = 0;
+    PFN_vkCmdEndTransformFeedbackEXT vkCmdEndTransformFeedbackEXT = 0;
+    PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT = 0;
+    PFN_vkCmdEndQueryIndexedEXT vkCmdEndQueryIndexedEXT = 0;
+    PFN_vkCmdDrawIndirectByteCountEXT vkCmdDrawIndirectByteCountEXT = 0;
+
+  //=== VK_NVX_binary_import ===
+    PFN_vkCreateCuModuleNVX vkCreateCuModuleNVX = 0;
+    PFN_vkCreateCuFunctionNVX vkCreateCuFunctionNVX = 0;
+    PFN_vkDestroyCuModuleNVX vkDestroyCuModuleNVX = 0;
+    PFN_vkDestroyCuFunctionNVX vkDestroyCuFunctionNVX = 0;
+    PFN_vkCmdCuLaunchKernelNVX vkCmdCuLaunchKernelNVX = 0;
+
+  //=== VK_NVX_image_view_handle ===
+    PFN_vkGetImageViewHandleNVX vkGetImageViewHandleNVX = 0;
+    PFN_vkGetImageViewAddressNVX vkGetImageViewAddressNVX = 0;
+
+  //=== VK_AMD_draw_indirect_count ===
+    PFN_vkCmdDrawIndirectCountAMD vkCmdDrawIndirectCountAMD = 0;
+    PFN_vkCmdDrawIndexedIndirectCountAMD vkCmdDrawIndexedIndirectCountAMD = 0;
+
+  //=== VK_AMD_shader_info ===
+    PFN_vkGetShaderInfoAMD vkGetShaderInfoAMD = 0;
+
+  //=== VK_KHR_dynamic_rendering ===
+    PFN_vkCmdBeginRenderingKHR vkCmdBeginRenderingKHR = 0;
+    PFN_vkCmdEndRenderingKHR vkCmdEndRenderingKHR = 0;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+    PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP = 0;
+#else
+    PFN_dummy vkCreateStreamDescriptorSurfaceGGP_placeholder = 0;
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+    PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+    PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV = 0;
+#else
+    PFN_dummy vkGetMemoryWin32HandleNV_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+    PFN_vkGetPhysicalDeviceFeatures2KHR vkGetPhysicalDeviceFeatures2KHR = 0;
+    PFN_vkGetPhysicalDeviceProperties2KHR vkGetPhysicalDeviceProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceFormatProperties2KHR vkGetPhysicalDeviceFormatProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceImageFormatProperties2KHR vkGetPhysicalDeviceImageFormatProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR vkGetPhysicalDeviceQueueFamilyProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceMemoryProperties2KHR vkGetPhysicalDeviceMemoryProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR vkGetPhysicalDeviceSparseImageFormatProperties2KHR = 0;
+
+  //=== VK_KHR_device_group ===
+    PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR vkGetDeviceGroupPeerMemoryFeaturesKHR = 0;
+    PFN_vkCmdSetDeviceMaskKHR vkCmdSetDeviceMaskKHR = 0;
+    PFN_vkCmdDispatchBaseKHR vkCmdDispatchBaseKHR = 0;
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+    PFN_vkCreateViSurfaceNN vkCreateViSurfaceNN = 0;
+#else
+    PFN_dummy vkCreateViSurfaceNN_placeholder = 0;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_KHR_maintenance1 ===
+    PFN_vkTrimCommandPoolKHR vkTrimCommandPoolKHR = 0;
+
+  //=== VK_KHR_device_group_creation ===
+    PFN_vkEnumeratePhysicalDeviceGroupsKHR vkEnumeratePhysicalDeviceGroupsKHR = 0;
+
+  //=== VK_KHR_external_memory_capabilities ===
+    PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR vkGetPhysicalDeviceExternalBufferPropertiesKHR = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+    PFN_vkGetMemoryWin32HandleKHR vkGetMemoryWin32HandleKHR = 0;
+    PFN_vkGetMemoryWin32HandlePropertiesKHR vkGetMemoryWin32HandlePropertiesKHR = 0;
+#else
+    PFN_dummy vkGetMemoryWin32HandleKHR_placeholder = 0;
+    PFN_dummy vkGetMemoryWin32HandlePropertiesKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+    PFN_vkGetMemoryFdKHR vkGetMemoryFdKHR = 0;
+    PFN_vkGetMemoryFdPropertiesKHR vkGetMemoryFdPropertiesKHR = 0;
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+    PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+    PFN_vkImportSemaphoreWin32HandleKHR vkImportSemaphoreWin32HandleKHR = 0;
+    PFN_vkGetSemaphoreWin32HandleKHR vkGetSemaphoreWin32HandleKHR = 0;
+#else
+    PFN_dummy vkImportSemaphoreWin32HandleKHR_placeholder = 0;
+    PFN_dummy vkGetSemaphoreWin32HandleKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+    PFN_vkImportSemaphoreFdKHR vkImportSemaphoreFdKHR = 0;
+    PFN_vkGetSemaphoreFdKHR vkGetSemaphoreFdKHR = 0;
+
+  //=== VK_KHR_push_descriptor ===
+    PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR = 0;
+    PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR = 0;
+
+  //=== VK_EXT_conditional_rendering ===
+    PFN_vkCmdBeginConditionalRenderingEXT vkCmdBeginConditionalRenderingEXT = 0;
+    PFN_vkCmdEndConditionalRenderingEXT vkCmdEndConditionalRenderingEXT = 0;
+
+  //=== VK_KHR_descriptor_update_template ===
+    PFN_vkCreateDescriptorUpdateTemplateKHR vkCreateDescriptorUpdateTemplateKHR = 0;
+    PFN_vkDestroyDescriptorUpdateTemplateKHR vkDestroyDescriptorUpdateTemplateKHR = 0;
+    PFN_vkUpdateDescriptorSetWithTemplateKHR vkUpdateDescriptorSetWithTemplateKHR = 0;
+
+  //=== VK_NV_clip_space_w_scaling ===
+    PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV = 0;
+
+  //=== VK_EXT_direct_mode_display ===
+    PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT = 0;
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+    PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT = 0;
+    PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT = 0;
+#else
+    PFN_dummy vkAcquireXlibDisplayEXT_placeholder = 0;
+    PFN_dummy vkGetRandROutputDisplayEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  //=== VK_EXT_display_surface_counter ===
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT = 0;
+
+  //=== VK_EXT_display_control ===
+    PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT = 0;
+    PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT = 0;
+    PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT = 0;
+    PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT = 0;
+
+  //=== VK_GOOGLE_display_timing ===
+    PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE = 0;
+    PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE = 0;
+
+  //=== VK_EXT_discard_rectangles ===
+    PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT = 0;
+
+  //=== VK_EXT_hdr_metadata ===
+    PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT = 0;
+
+  //=== VK_KHR_create_renderpass2 ===
+    PFN_vkCreateRenderPass2KHR vkCreateRenderPass2KHR = 0;
+    PFN_vkCmdBeginRenderPass2KHR vkCmdBeginRenderPass2KHR = 0;
+    PFN_vkCmdNextSubpass2KHR vkCmdNextSubpass2KHR = 0;
+    PFN_vkCmdEndRenderPass2KHR vkCmdEndRenderPass2KHR = 0;
+
+  //=== VK_KHR_shared_presentable_image ===
+    PFN_vkGetSwapchainStatusKHR vkGetSwapchainStatusKHR = 0;
+
+  //=== VK_KHR_external_fence_capabilities ===
+    PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR vkGetPhysicalDeviceExternalFencePropertiesKHR = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+    PFN_vkImportFenceWin32HandleKHR vkImportFenceWin32HandleKHR = 0;
+    PFN_vkGetFenceWin32HandleKHR vkGetFenceWin32HandleKHR = 0;
+#else
+    PFN_dummy vkImportFenceWin32HandleKHR_placeholder = 0;
+    PFN_dummy vkGetFenceWin32HandleKHR_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+    PFN_vkImportFenceFdKHR vkImportFenceFdKHR = 0;
+    PFN_vkGetFenceFdKHR vkGetFenceFdKHR = 0;
+
+  //=== VK_KHR_performance_query ===
+    PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = 0;
+    PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = 0;
+    PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR = 0;
+    PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR = 0;
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+    PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR vkGetPhysicalDeviceSurfaceCapabilities2KHR = 0;
+    PFN_vkGetPhysicalDeviceSurfaceFormats2KHR vkGetPhysicalDeviceSurfaceFormats2KHR = 0;
+
+  //=== VK_KHR_get_display_properties2 ===
+    PFN_vkGetPhysicalDeviceDisplayProperties2KHR vkGetPhysicalDeviceDisplayProperties2KHR = 0;
+    PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR vkGetPhysicalDeviceDisplayPlaneProperties2KHR = 0;
+    PFN_vkGetDisplayModeProperties2KHR vkGetDisplayModeProperties2KHR = 0;
+    PFN_vkGetDisplayPlaneCapabilities2KHR vkGetDisplayPlaneCapabilities2KHR = 0;
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+    PFN_vkCreateIOSSurfaceMVK vkCreateIOSSurfaceMVK = 0;
+#else
+    PFN_dummy vkCreateIOSSurfaceMVK_placeholder = 0;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+    PFN_vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK = 0;
+#else
+    PFN_dummy vkCreateMacOSSurfaceMVK_placeholder = 0;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+    PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT = 0;
+    PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT = 0;
+    PFN_vkQueueBeginDebugUtilsLabelEXT vkQueueBeginDebugUtilsLabelEXT = 0;
+    PFN_vkQueueEndDebugUtilsLabelEXT vkQueueEndDebugUtilsLabelEXT = 0;
+    PFN_vkQueueInsertDebugUtilsLabelEXT vkQueueInsertDebugUtilsLabelEXT = 0;
+    PFN_vkCmdBeginDebugUtilsLabelEXT vkCmdBeginDebugUtilsLabelEXT = 0;
+    PFN_vkCmdEndDebugUtilsLabelEXT vkCmdEndDebugUtilsLabelEXT = 0;
+    PFN_vkCmdInsertDebugUtilsLabelEXT vkCmdInsertDebugUtilsLabelEXT = 0;
+    PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = 0;
+    PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = 0;
+    PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT = 0;
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+    PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID = 0;
+    PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID = 0;
+#else
+    PFN_dummy vkGetAndroidHardwareBufferPropertiesANDROID_placeholder = 0;
+    PFN_dummy vkGetMemoryAndroidHardwareBufferANDROID_placeholder = 0;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+    PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT = 0;
+    PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT = 0;
+
+  //=== VK_KHR_get_memory_requirements2 ===
+    PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR = 0;
+    PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR = 0;
+    PFN_vkGetImageSparseMemoryRequirements2KHR vkGetImageSparseMemoryRequirements2KHR = 0;
+
+  //=== VK_KHR_acceleration_structure ===
+    PFN_vkCreateAccelerationStructureKHR vkCreateAccelerationStructureKHR = 0;
+    PFN_vkDestroyAccelerationStructureKHR vkDestroyAccelerationStructureKHR = 0;
+    PFN_vkCmdBuildAccelerationStructuresKHR vkCmdBuildAccelerationStructuresKHR = 0;
+    PFN_vkCmdBuildAccelerationStructuresIndirectKHR vkCmdBuildAccelerationStructuresIndirectKHR = 0;
+    PFN_vkBuildAccelerationStructuresKHR vkBuildAccelerationStructuresKHR = 0;
+    PFN_vkCopyAccelerationStructureKHR vkCopyAccelerationStructureKHR = 0;
+    PFN_vkCopyAccelerationStructureToMemoryKHR vkCopyAccelerationStructureToMemoryKHR = 0;
+    PFN_vkCopyMemoryToAccelerationStructureKHR vkCopyMemoryToAccelerationStructureKHR = 0;
+    PFN_vkWriteAccelerationStructuresPropertiesKHR vkWriteAccelerationStructuresPropertiesKHR = 0;
+    PFN_vkCmdCopyAccelerationStructureKHR vkCmdCopyAccelerationStructureKHR = 0;
+    PFN_vkCmdCopyAccelerationStructureToMemoryKHR vkCmdCopyAccelerationStructureToMemoryKHR = 0;
+    PFN_vkCmdCopyMemoryToAccelerationStructureKHR vkCmdCopyMemoryToAccelerationStructureKHR = 0;
+    PFN_vkGetAccelerationStructureDeviceAddressKHR vkGetAccelerationStructureDeviceAddressKHR = 0;
+    PFN_vkCmdWriteAccelerationStructuresPropertiesKHR vkCmdWriteAccelerationStructuresPropertiesKHR = 0;
+    PFN_vkGetDeviceAccelerationStructureCompatibilityKHR vkGetDeviceAccelerationStructureCompatibilityKHR = 0;
+    PFN_vkGetAccelerationStructureBuildSizesKHR vkGetAccelerationStructureBuildSizesKHR = 0;
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+    PFN_vkCreateSamplerYcbcrConversionKHR vkCreateSamplerYcbcrConversionKHR = 0;
+    PFN_vkDestroySamplerYcbcrConversionKHR vkDestroySamplerYcbcrConversionKHR = 0;
+
+  //=== VK_KHR_bind_memory2 ===
+    PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR = 0;
+    PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR = 0;
+
+  //=== VK_EXT_image_drm_format_modifier ===
+    PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierPropertiesEXT = 0;
+
+  //=== VK_EXT_validation_cache ===
+    PFN_vkCreateValidationCacheEXT vkCreateValidationCacheEXT = 0;
+    PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT = 0;
+    PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT = 0;
+    PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT = 0;
+
+  //=== VK_NV_shading_rate_image ===
+    PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV = 0;
+    PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV = 0;
+    PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV = 0;
+
+  //=== VK_NV_ray_tracing ===
+    PFN_vkCreateAccelerationStructureNV vkCreateAccelerationStructureNV = 0;
+    PFN_vkDestroyAccelerationStructureNV vkDestroyAccelerationStructureNV = 0;
+    PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV = 0;
+    PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV = 0;
+    PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV = 0;
+    PFN_vkCmdCopyAccelerationStructureNV vkCmdCopyAccelerationStructureNV = 0;
+    PFN_vkCmdTraceRaysNV vkCmdTraceRaysNV = 0;
+    PFN_vkCreateRayTracingPipelinesNV vkCreateRayTracingPipelinesNV = 0;
+    PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV = 0;
+    PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV = 0;
+    PFN_vkCmdWriteAccelerationStructuresPropertiesNV vkCmdWriteAccelerationStructuresPropertiesNV = 0;
+    PFN_vkCompileDeferredNV vkCompileDeferredNV = 0;
+
+  //=== VK_KHR_maintenance3 ===
+    PFN_vkGetDescriptorSetLayoutSupportKHR vkGetDescriptorSetLayoutSupportKHR = 0;
+
+  //=== VK_KHR_draw_indirect_count ===
+    PFN_vkCmdDrawIndirectCountKHR vkCmdDrawIndirectCountKHR = 0;
+    PFN_vkCmdDrawIndexedIndirectCountKHR vkCmdDrawIndexedIndirectCountKHR = 0;
+
+  //=== VK_EXT_external_memory_host ===
+    PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT = 0;
+
+  //=== VK_AMD_buffer_marker ===
+    PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD = 0;
+
+  //=== VK_EXT_calibrated_timestamps ===
+    PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = 0;
+    PFN_vkGetCalibratedTimestampsEXT vkGetCalibratedTimestampsEXT = 0;
+
+  //=== VK_NV_mesh_shader ===
+    PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV = 0;
+    PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV = 0;
+    PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV = 0;
+
+  //=== VK_NV_scissor_exclusive ===
+    PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV = 0;
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+    PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV = 0;
+    PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV = 0;
+
+  //=== VK_KHR_timeline_semaphore ===
+    PFN_vkGetSemaphoreCounterValueKHR vkGetSemaphoreCounterValueKHR = 0;
+    PFN_vkWaitSemaphoresKHR vkWaitSemaphoresKHR = 0;
+    PFN_vkSignalSemaphoreKHR vkSignalSemaphoreKHR = 0;
+
+  //=== VK_INTEL_performance_query ===
+    PFN_vkInitializePerformanceApiINTEL vkInitializePerformanceApiINTEL = 0;
+    PFN_vkUninitializePerformanceApiINTEL vkUninitializePerformanceApiINTEL = 0;
+    PFN_vkCmdSetPerformanceMarkerINTEL vkCmdSetPerformanceMarkerINTEL = 0;
+    PFN_vkCmdSetPerformanceStreamMarkerINTEL vkCmdSetPerformanceStreamMarkerINTEL = 0;
+    PFN_vkCmdSetPerformanceOverrideINTEL vkCmdSetPerformanceOverrideINTEL = 0;
+    PFN_vkAcquirePerformanceConfigurationINTEL vkAcquirePerformanceConfigurationINTEL = 0;
+    PFN_vkReleasePerformanceConfigurationINTEL vkReleasePerformanceConfigurationINTEL = 0;
+    PFN_vkQueueSetPerformanceConfigurationINTEL vkQueueSetPerformanceConfigurationINTEL = 0;
+    PFN_vkGetPerformanceParameterINTEL vkGetPerformanceParameterINTEL = 0;
+
+  //=== VK_AMD_display_native_hdr ===
+    PFN_vkSetLocalDimmingAMD vkSetLocalDimmingAMD = 0;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+    PFN_vkCreateImagePipeSurfaceFUCHSIA vkCreateImagePipeSurfaceFUCHSIA = 0;
+#else
+    PFN_dummy vkCreateImagePipeSurfaceFUCHSIA_placeholder = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+    PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT = 0;
+#else
+    PFN_dummy vkCreateMetalSurfaceEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+    PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR vkGetPhysicalDeviceFragmentShadingRatesKHR = 0;
+    PFN_vkCmdSetFragmentShadingRateKHR vkCmdSetFragmentShadingRateKHR = 0;
+
+  //=== VK_EXT_buffer_device_address ===
+    PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT = 0;
+
+  //=== VK_EXT_tooling_info ===
+    PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT = 0;
+
+  //=== VK_KHR_present_wait ===
+    PFN_vkWaitForPresentKHR vkWaitForPresentKHR = 0;
+
+  //=== VK_NV_cooperative_matrix ===
+    PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = 0;
+
+  //=== VK_NV_coverage_reduction_mode ===
+    PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+    PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT vkGetPhysicalDeviceSurfacePresentModes2EXT = 0;
+    PFN_vkAcquireFullScreenExclusiveModeEXT vkAcquireFullScreenExclusiveModeEXT = 0;
+    PFN_vkReleaseFullScreenExclusiveModeEXT vkReleaseFullScreenExclusiveModeEXT = 0;
+    PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT = 0;
+#else
+    PFN_dummy vkGetPhysicalDeviceSurfacePresentModes2EXT_placeholder = 0;
+    PFN_dummy vkAcquireFullScreenExclusiveModeEXT_placeholder = 0;
+    PFN_dummy vkReleaseFullScreenExclusiveModeEXT_placeholder = 0;
+    PFN_dummy vkGetDeviceGroupSurfacePresentModes2EXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+    PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT = 0;
+
+  //=== VK_KHR_buffer_device_address ===
+    PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR = 0;
+    PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR = 0;
+    PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR = 0;
+
+  //=== VK_EXT_line_rasterization ===
+    PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT = 0;
+
+  //=== VK_EXT_host_query_reset ===
+    PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT = 0;
+
+  //=== VK_EXT_extended_dynamic_state ===
+    PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT = 0;
+    PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT = 0;
+    PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT = 0;
+    PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT = 0;
+    PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT = 0;
+    PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT = 0;
+    PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT = 0;
+    PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT = 0;
+    PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT = 0;
+    PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT = 0;
+    PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT = 0;
+    PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT = 0;
+
+  //=== VK_KHR_deferred_host_operations ===
+    PFN_vkCreateDeferredOperationKHR vkCreateDeferredOperationKHR = 0;
+    PFN_vkDestroyDeferredOperationKHR vkDestroyDeferredOperationKHR = 0;
+    PFN_vkGetDeferredOperationMaxConcurrencyKHR vkGetDeferredOperationMaxConcurrencyKHR = 0;
+    PFN_vkGetDeferredOperationResultKHR vkGetDeferredOperationResultKHR = 0;
+    PFN_vkDeferredOperationJoinKHR vkDeferredOperationJoinKHR = 0;
+
+  //=== VK_KHR_pipeline_executable_properties ===
+    PFN_vkGetPipelineExecutablePropertiesKHR vkGetPipelineExecutablePropertiesKHR = 0;
+    PFN_vkGetPipelineExecutableStatisticsKHR vkGetPipelineExecutableStatisticsKHR = 0;
+    PFN_vkGetPipelineExecutableInternalRepresentationsKHR vkGetPipelineExecutableInternalRepresentationsKHR = 0;
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+    PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT = 0;
+
+  //=== VK_NV_device_generated_commands ===
+    PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV = 0;
+    PFN_vkCmdPreprocessGeneratedCommandsNV vkCmdPreprocessGeneratedCommandsNV = 0;
+    PFN_vkCmdExecuteGeneratedCommandsNV vkCmdExecuteGeneratedCommandsNV = 0;
+    PFN_vkCmdBindPipelineShaderGroupNV vkCmdBindPipelineShaderGroupNV = 0;
+    PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV = 0;
+    PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV = 0;
+
+  //=== VK_EXT_acquire_drm_display ===
+    PFN_vkAcquireDrmDisplayEXT vkAcquireDrmDisplayEXT = 0;
+    PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT = 0;
+
+  //=== VK_EXT_private_data ===
+    PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT = 0;
+    PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT = 0;
+    PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT = 0;
+    PFN_vkGetPrivateDataEXT vkGetPrivateDataEXT = 0;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+    PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR = 0;
+#else
+    PFN_dummy vkCmdEncodeVideoKHR_placeholder = 0;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+    PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT = 0;
+#else
+    PFN_dummy vkExportMetalObjectsEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+    PFN_vkCmdSetEvent2KHR vkCmdSetEvent2KHR = 0;
+    PFN_vkCmdResetEvent2KHR vkCmdResetEvent2KHR = 0;
+    PFN_vkCmdWaitEvents2KHR vkCmdWaitEvents2KHR = 0;
+    PFN_vkCmdPipelineBarrier2KHR vkCmdPipelineBarrier2KHR = 0;
+    PFN_vkCmdWriteTimestamp2KHR vkCmdWriteTimestamp2KHR = 0;
+    PFN_vkQueueSubmit2KHR vkQueueSubmit2KHR = 0;
+    PFN_vkCmdWriteBufferMarker2AMD vkCmdWriteBufferMarker2AMD = 0;
+    PFN_vkGetQueueCheckpointData2NV vkGetQueueCheckpointData2NV = 0;
+
+  //=== VK_EXT_descriptor_buffer ===
+    PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT = 0;
+    PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT = 0;
+    PFN_vkGetDescriptorEXT vkGetDescriptorEXT = 0;
+    PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT = 0;
+    PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT = 0;
+    PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT = 0;
+    PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT = 0;
+    PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT = 0;
+    PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT = 0;
+    PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT = 0;
+    PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = 0;
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+    PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV = 0;
+
+  //=== VK_EXT_mesh_shader ===
+    PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT = 0;
+    PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT = 0;
+    PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT = 0;
+
+  //=== VK_KHR_copy_commands2 ===
+    PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR = 0;
+    PFN_vkCmdCopyImage2KHR vkCmdCopyImage2KHR = 0;
+    PFN_vkCmdCopyBufferToImage2KHR vkCmdCopyBufferToImage2KHR = 0;
+    PFN_vkCmdCopyImageToBuffer2KHR vkCmdCopyImageToBuffer2KHR = 0;
+    PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR = 0;
+    PFN_vkCmdResolveImage2KHR vkCmdResolveImage2KHR = 0;
+
+  //=== VK_EXT_image_compression_control ===
+    PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT = 0;
+
+  //=== VK_EXT_device_fault ===
+    PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT = 0;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+    PFN_vkAcquireWinrtDisplayNV vkAcquireWinrtDisplayNV = 0;
+    PFN_vkGetWinrtDisplayNV vkGetWinrtDisplayNV = 0;
+#else
+    PFN_dummy vkAcquireWinrtDisplayNV_placeholder = 0;
+    PFN_dummy vkGetWinrtDisplayNV_placeholder = 0;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+    PFN_vkCreateDirectFBSurfaceEXT vkCreateDirectFBSurfaceEXT = 0;
+    PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDeviceDirectFBPresentationSupportEXT = 0;
+#else
+    PFN_dummy vkCreateDirectFBSurfaceEXT_placeholder = 0;
+    PFN_dummy vkGetPhysicalDeviceDirectFBPresentationSupportEXT_placeholder = 0;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+    PFN_vkCmdTraceRaysKHR vkCmdTraceRaysKHR = 0;
+    PFN_vkCreateRayTracingPipelinesKHR vkCreateRayTracingPipelinesKHR = 0;
+    PFN_vkGetRayTracingShaderGroupHandlesKHR vkGetRayTracingShaderGroupHandlesKHR = 0;
+    PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = 0;
+    PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR = 0;
+    PFN_vkGetRayTracingShaderGroupStackSizeKHR vkGetRayTracingShaderGroupStackSizeKHR = 0;
+    PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR = 0;
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+    PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT = 0;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+    PFN_vkGetMemoryZirconHandleFUCHSIA vkGetMemoryZirconHandleFUCHSIA = 0;
+    PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA vkGetMemoryZirconHandlePropertiesFUCHSIA = 0;
+#else
+    PFN_dummy vkGetMemoryZirconHandleFUCHSIA_placeholder = 0;
+    PFN_dummy vkGetMemoryZirconHandlePropertiesFUCHSIA_placeholder = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+    PFN_vkImportSemaphoreZirconHandleFUCHSIA vkImportSemaphoreZirconHandleFUCHSIA = 0;
+    PFN_vkGetSemaphoreZirconHandleFUCHSIA vkGetSemaphoreZirconHandleFUCHSIA = 0;
+#else
+    PFN_dummy vkImportSemaphoreZirconHandleFUCHSIA_placeholder = 0;
+    PFN_dummy vkGetSemaphoreZirconHandleFUCHSIA_placeholder = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+    PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA = 0;
+    PFN_vkSetBufferCollectionImageConstraintsFUCHSIA vkSetBufferCollectionImageConstraintsFUCHSIA = 0;
+    PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA vkSetBufferCollectionBufferConstraintsFUCHSIA = 0;
+    PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA = 0;
+    PFN_vkGetBufferCollectionPropertiesFUCHSIA vkGetBufferCollectionPropertiesFUCHSIA = 0;
+#else
+    PFN_dummy vkCreateBufferCollectionFUCHSIA_placeholder = 0;
+    PFN_dummy vkSetBufferCollectionImageConstraintsFUCHSIA_placeholder = 0;
+    PFN_dummy vkSetBufferCollectionBufferConstraintsFUCHSIA_placeholder = 0;
+    PFN_dummy vkDestroyBufferCollectionFUCHSIA_placeholder = 0;
+    PFN_dummy vkGetBufferCollectionPropertiesFUCHSIA_placeholder = 0;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+    PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = 0;
+    PFN_vkCmdSubpassShadingHUAWEI vkCmdSubpassShadingHUAWEI = 0;
+
+  //=== VK_HUAWEI_invocation_mask ===
+    PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI = 0;
+
+  //=== VK_NV_external_memory_rdma ===
+    PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV = 0;
+
+  //=== VK_EXT_pipeline_properties ===
+    PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT = 0;
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+    PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT = 0;
+    PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT = 0;
+    PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT = 0;
+    PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT = 0;
+    PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT = 0;
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+    PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX = 0;
+    PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX = 0;
+#else
+    PFN_dummy vkCreateScreenSurfaceQNX_placeholder = 0;
+    PFN_dummy vkGetPhysicalDeviceScreenPresentationSupportQNX_placeholder = 0;
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+    PFN_vkCmdSetColorWriteEnableEXT vkCmdSetColorWriteEnableEXT = 0;
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+    PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR = 0;
+
+  //=== VK_EXT_multi_draw ===
+    PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT = 0;
+    PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT = 0;
+
+  //=== VK_EXT_opacity_micromap ===
+    PFN_vkCreateMicromapEXT vkCreateMicromapEXT = 0;
+    PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT = 0;
+    PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT = 0;
+    PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT = 0;
+    PFN_vkCopyMicromapEXT vkCopyMicromapEXT = 0;
+    PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT = 0;
+    PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT = 0;
+    PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT = 0;
+    PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT = 0;
+    PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT = 0;
+    PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT = 0;
+    PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT = 0;
+    PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT = 0;
+    PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT = 0;
+
+  //=== VK_EXT_pageable_device_local_memory ===
+    PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT = 0;
+
+  //=== VK_KHR_maintenance4 ===
+    PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequirementsKHR = 0;
+    PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR = 0;
+    PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR = 0;
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+    PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE = 0;
+    PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE = 0;
+
+  //=== VK_NV_copy_memory_indirect ===
+    PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV = 0;
+    PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV = 0;
+
+  //=== VK_NV_memory_decompression ===
+    PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV = 0;
+    PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV = 0;
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+    PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT = 0;
+    PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT = 0;
+    PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT = 0;
+    PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT = 0;
+    PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT = 0;
+    PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT = 0;
+    PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT = 0;
+    PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT = 0;
+    PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT = 0;
+    PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT = 0;
+    PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT = 0;
+    PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT = 0;
+    PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT = 0;
+    PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT = 0;
+    PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT = 0;
+    PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT = 0;
+    PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT = 0;
+    PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT = 0;
+    PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT = 0;
+    PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT = 0;
+    PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT = 0;
+    PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV = 0;
+    PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV = 0;
+    PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV = 0;
+    PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV = 0;
+    PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV = 0;
+    PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV = 0;
+    PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV = 0;
+    PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV = 0;
+    PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV = 0;
+    PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV = 0;
+
+  //=== VK_EXT_shader_module_identifier ===
+    PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT = 0;
+    PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT = 0;
+
+  //=== VK_NV_optical_flow ===
+    PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV = 0;
+    PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV = 0;
+    PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV = 0;
+    PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV = 0;
+    PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV = 0;
+
+  //=== VK_QCOM_tile_properties ===
+    PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM = 0;
+    PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM = 0;
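+
+    // All of the function-pointer members above default to 0 and are filled in by the init()
+    // overloads below; pointers for commands that the instance or device does not expose stay null.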
+
+
+  public:
+    DispatchLoaderDynamic() VULKAN_HPP_NOEXCEPT = default;
+    DispatchLoaderDynamic( DispatchLoaderDynamic const & rhs ) VULKAN_HPP_NOEXCEPT = default;
+
+    DispatchLoaderDynamic( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
+    {
+      init( getInstanceProcAddr );
+    }
+
+    void init( PFN_vkGetInstanceProcAddr getInstanceProcAddr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( getInstanceProcAddr );
+
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+
+
+  //=== VK_VERSION_1_0 ===
+      vkCreateInstance = PFN_vkCreateInstance( vkGetInstanceProcAddr( NULL, "vkCreateInstance" ) );
+      vkEnumerateInstanceExtensionProperties = PFN_vkEnumerateInstanceExtensionProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceExtensionProperties" ) );
+      vkEnumerateInstanceLayerProperties = PFN_vkEnumerateInstanceLayerProperties( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceLayerProperties" ) );
+
+  //=== VK_VERSION_1_1 ===
+      vkEnumerateInstanceVersion = PFN_vkEnumerateInstanceVersion( vkGetInstanceProcAddr( NULL, "vkEnumerateInstanceVersion" ) );
+
+    }
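+
+    // Note: the overload above only resolves the global-level commands that may be queried with a
+    // null instance (vkCreateInstance, the instance extension/layer enumerations and
+    // vkEnumerateInstanceVersion); instance- and device-level commands are resolved by the
+    // init( Instance ) / init( Device ) overloads further below.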
+
+    // This interface does not require a linked Vulkan library.
+    DispatchLoaderDynamic( VkInstance                instance,
+                           PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+                           VkDevice                  device            = {},
+                           PFN_vkGetDeviceProcAddr   getDeviceProcAddr = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      init( instance, getInstanceProcAddr, device, getDeviceProcAddr );
+    }
+
+    // This interface does not require a linked Vulkan library.
+    void init( VkInstance                instance,
+               PFN_vkGetInstanceProcAddr getInstanceProcAddr,
+               VkDevice                  device              = {},
+               PFN_vkGetDeviceProcAddr /*getDeviceProcAddr*/ = nullptr ) VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( instance && getInstanceProcAddr );
+      vkGetInstanceProcAddr = getInstanceProcAddr;
+      init( VULKAN_HPP_NAMESPACE::Instance( instance ) );
+      if ( device )
+      {
+        init( VULKAN_HPP_NAMESPACE::Device( device ) );
+      }
+    }
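+
+    // Note: the getDeviceProcAddr parameter above is deliberately ignored; device-level commands
+    // are instead resolved through the vkGetDeviceProcAddr member that init( Instance ) fetches
+    // via vkGetInstanceProcAddr.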
+
+    void init( VULKAN_HPP_NAMESPACE::Instance instanceCpp ) VULKAN_HPP_NOEXCEPT
+    {
+      VkInstance instance = static_cast<VkInstance>(instanceCpp);
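+      // Each entry point below is fetched through vkGetInstanceProcAddr; for core versions or
+      // extensions the instance does not support, the returned pointer is null and the member
+      // is simply left unset.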
+
+
+  //=== VK_VERSION_1_0 ===
+      vkDestroyInstance = PFN_vkDestroyInstance( vkGetInstanceProcAddr( instance, "vkDestroyInstance" ) );
+      vkEnumeratePhysicalDevices = PFN_vkEnumeratePhysicalDevices( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDevices" ) );
+      vkGetPhysicalDeviceFeatures = PFN_vkGetPhysicalDeviceFeatures( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures" ) );
+      vkGetPhysicalDeviceFormatProperties = PFN_vkGetPhysicalDeviceFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties" ) );
+      vkGetPhysicalDeviceImageFormatProperties = PFN_vkGetPhysicalDeviceImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties" ) );
+      vkGetPhysicalDeviceProperties = PFN_vkGetPhysicalDeviceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties = PFN_vkGetPhysicalDeviceQueueFamilyProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties" ) );
+      vkGetPhysicalDeviceMemoryProperties = PFN_vkGetPhysicalDeviceMemoryProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties" ) );
+      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetInstanceProcAddr( instance, "vkGetDeviceProcAddr" ) );
+      vkCreateDevice = PFN_vkCreateDevice( vkGetInstanceProcAddr( instance, "vkCreateDevice" ) );
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetInstanceProcAddr( instance, "vkDestroyDevice" ) );
+      vkEnumerateDeviceExtensionProperties = PFN_vkEnumerateDeviceExtensionProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceExtensionProperties" ) );
+      vkEnumerateDeviceLayerProperties = PFN_vkEnumerateDeviceLayerProperties( vkGetInstanceProcAddr( instance, "vkEnumerateDeviceLayerProperties" ) );
+      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue" ) );
+      vkQueueSubmit = PFN_vkQueueSubmit( vkGetInstanceProcAddr( instance, "vkQueueSubmit" ) );
+      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetInstanceProcAddr( instance, "vkQueueWaitIdle" ) );
+      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetInstanceProcAddr( instance, "vkDeviceWaitIdle" ) );
+      vkAllocateMemory = PFN_vkAllocateMemory( vkGetInstanceProcAddr( instance, "vkAllocateMemory" ) );
+      vkFreeMemory = PFN_vkFreeMemory( vkGetInstanceProcAddr( instance, "vkFreeMemory" ) );
+      vkMapMemory = PFN_vkMapMemory( vkGetInstanceProcAddr( instance, "vkMapMemory" ) );
+      vkUnmapMemory = PFN_vkUnmapMemory( vkGetInstanceProcAddr( instance, "vkUnmapMemory" ) );
+      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkFlushMappedMemoryRanges" ) );
+      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetInstanceProcAddr( instance, "vkInvalidateMappedMemoryRanges" ) );
+      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryCommitment" ) );
+      vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetInstanceProcAddr( instance, "vkBindBufferMemory" ) );
+      vkBindImageMemory = PFN_vkBindImageMemory( vkGetInstanceProcAddr( instance, "vkBindImageMemory" ) );
+      vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements" ) );
+      vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements" ) );
+      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements" ) );
+      vkGetPhysicalDeviceSparseImageFormatProperties = PFN_vkGetPhysicalDeviceSparseImageFormatProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties" ) );
+      vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetInstanceProcAddr( instance, "vkQueueBindSparse" ) );
+      vkCreateFence = PFN_vkCreateFence( vkGetInstanceProcAddr( instance, "vkCreateFence" ) );
+      vkDestroyFence = PFN_vkDestroyFence( vkGetInstanceProcAddr( instance, "vkDestroyFence" ) );
+      vkResetFences = PFN_vkResetFences( vkGetInstanceProcAddr( instance, "vkResetFences" ) );
+      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetInstanceProcAddr( instance, "vkGetFenceStatus" ) );
+      vkWaitForFences = PFN_vkWaitForFences( vkGetInstanceProcAddr( instance, "vkWaitForFences" ) );
+      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetInstanceProcAddr( instance, "vkCreateSemaphore" ) );
+      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetInstanceProcAddr( instance, "vkDestroySemaphore" ) );
+      vkCreateEvent = PFN_vkCreateEvent( vkGetInstanceProcAddr( instance, "vkCreateEvent" ) );
+      vkDestroyEvent = PFN_vkDestroyEvent( vkGetInstanceProcAddr( instance, "vkDestroyEvent" ) );
+      vkGetEventStatus = PFN_vkGetEventStatus( vkGetInstanceProcAddr( instance, "vkGetEventStatus" ) );
+      vkSetEvent = PFN_vkSetEvent( vkGetInstanceProcAddr( instance, "vkSetEvent" ) );
+      vkResetEvent = PFN_vkResetEvent( vkGetInstanceProcAddr( instance, "vkResetEvent" ) );
+      vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetInstanceProcAddr( instance, "vkCreateQueryPool" ) );
+      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetInstanceProcAddr( instance, "vkDestroyQueryPool" ) );
+      vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetInstanceProcAddr( instance, "vkGetQueryPoolResults" ) );
+      vkCreateBuffer = PFN_vkCreateBuffer( vkGetInstanceProcAddr( instance, "vkCreateBuffer" ) );
+      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetInstanceProcAddr( instance, "vkDestroyBuffer" ) );
+      vkCreateBufferView = PFN_vkCreateBufferView( vkGetInstanceProcAddr( instance, "vkCreateBufferView" ) );
+      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetInstanceProcAddr( instance, "vkDestroyBufferView" ) );
+      vkCreateImage = PFN_vkCreateImage( vkGetInstanceProcAddr( instance, "vkCreateImage" ) );
+      vkDestroyImage = PFN_vkDestroyImage( vkGetInstanceProcAddr( instance, "vkDestroyImage" ) );
+      vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout" ) );
+      vkCreateImageView = PFN_vkCreateImageView( vkGetInstanceProcAddr( instance, "vkCreateImageView" ) );
+      vkDestroyImageView = PFN_vkDestroyImageView( vkGetInstanceProcAddr( instance, "vkDestroyImageView" ) );
+      vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetInstanceProcAddr( instance, "vkCreateShaderModule" ) );
+      vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetInstanceProcAddr( instance, "vkDestroyShaderModule" ) );
+      vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetInstanceProcAddr( instance, "vkCreatePipelineCache" ) );
+      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetInstanceProcAddr( instance, "vkDestroyPipelineCache" ) );
+      vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetInstanceProcAddr( instance, "vkGetPipelineCacheData" ) );
+      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetInstanceProcAddr( instance, "vkMergePipelineCaches" ) );
+      vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetInstanceProcAddr( instance, "vkCreateGraphicsPipelines" ) );
+      vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetInstanceProcAddr( instance, "vkCreateComputePipelines" ) );
+      vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetInstanceProcAddr( instance, "vkDestroyPipeline" ) );
+      vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetInstanceProcAddr( instance, "vkCreatePipelineLayout" ) );
+      vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetInstanceProcAddr( instance, "vkDestroyPipelineLayout" ) );
+      vkCreateSampler = PFN_vkCreateSampler( vkGetInstanceProcAddr( instance, "vkCreateSampler" ) );
+      vkDestroySampler = PFN_vkDestroySampler( vkGetInstanceProcAddr( instance, "vkDestroySampler" ) );
+      vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkCreateDescriptorSetLayout" ) );
+      vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorSetLayout" ) );
+      vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetInstanceProcAddr( instance, "vkCreateDescriptorPool" ) );
+      vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorPool" ) );
+      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetInstanceProcAddr( instance, "vkResetDescriptorPool" ) );
+      vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetInstanceProcAddr( instance, "vkAllocateDescriptorSets" ) );
+      vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetInstanceProcAddr( instance, "vkFreeDescriptorSets" ) );
+      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSets" ) );
+      vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetInstanceProcAddr( instance, "vkCreateFramebuffer" ) );
+      vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetInstanceProcAddr( instance, "vkDestroyFramebuffer" ) );
+      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetInstanceProcAddr( instance, "vkCreateRenderPass" ) );
+      vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetInstanceProcAddr( instance, "vkDestroyRenderPass" ) );
+      vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetInstanceProcAddr( instance, "vkGetRenderAreaGranularity" ) );
+      vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetInstanceProcAddr( instance, "vkCreateCommandPool" ) );
+      vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetInstanceProcAddr( instance, "vkDestroyCommandPool" ) );
+      vkResetCommandPool = PFN_vkResetCommandPool( vkGetInstanceProcAddr( instance, "vkResetCommandPool" ) );
+      vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetInstanceProcAddr( instance, "vkAllocateCommandBuffers" ) );
+      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetInstanceProcAddr( instance, "vkFreeCommandBuffers" ) );
+      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetInstanceProcAddr( instance, "vkBeginCommandBuffer" ) );
+      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetInstanceProcAddr( instance, "vkEndCommandBuffer" ) );
+      vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetInstanceProcAddr( instance, "vkResetCommandBuffer" ) );
+      vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetInstanceProcAddr( instance, "vkCmdBindPipeline" ) );
+      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetInstanceProcAddr( instance, "vkCmdSetViewport" ) );
+      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetInstanceProcAddr( instance, "vkCmdSetScissor" ) );
+      vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetInstanceProcAddr( instance, "vkCmdSetLineWidth" ) );
+      vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBias" ) );
+      vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetInstanceProcAddr( instance, "vkCmdSetBlendConstants" ) );
+      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBounds" ) );
+      vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilCompareMask" ) );
+      vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetInstanceProcAddr( instance, "vkCmdSetStencilWriteMask" ) );
+      vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetInstanceProcAddr( instance, "vkCmdSetStencilReference" ) );
+      vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorSets" ) );
+      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetInstanceProcAddr( instance, "vkCmdBindIndexBuffer" ) );
+      vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers" ) );
+      vkCmdDraw = PFN_vkCmdDraw( vkGetInstanceProcAddr( instance, "vkCmdDraw" ) );
+      vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexed" ) );
+      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirect" ) );
+      vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirect" ) );
+      vkCmdDispatch = PFN_vkCmdDispatch( vkGetInstanceProcAddr( instance, "vkCmdDispatch" ) );
+      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetInstanceProcAddr( instance, "vkCmdDispatchIndirect" ) );
+      vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer" ) );
+      vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetInstanceProcAddr( instance, "vkCmdCopyImage" ) );
+      vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetInstanceProcAddr( instance, "vkCmdBlitImage" ) );
+      vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage" ) );
+      vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer" ) );
+      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetInstanceProcAddr( instance, "vkCmdUpdateBuffer" ) );
+      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetInstanceProcAddr( instance, "vkCmdFillBuffer" ) );
+      vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetInstanceProcAddr( instance, "vkCmdClearColorImage" ) );
+      vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetInstanceProcAddr( instance, "vkCmdClearDepthStencilImage" ) );
+      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetInstanceProcAddr( instance, "vkCmdClearAttachments" ) );
+      vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetInstanceProcAddr( instance, "vkCmdResolveImage" ) );
+      vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetInstanceProcAddr( instance, "vkCmdSetEvent" ) );
+      vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetInstanceProcAddr( instance, "vkCmdResetEvent" ) );
+      vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents" ) );
+      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier" ) );
+      vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetInstanceProcAddr( instance, "vkCmdBeginQuery" ) );
+      vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetInstanceProcAddr( instance, "vkCmdEndQuery" ) );
+      vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetInstanceProcAddr( instance, "vkCmdResetQueryPool" ) );
+      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp" ) );
+      vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetInstanceProcAddr( instance, "vkCmdCopyQueryPoolResults" ) );
+      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetInstanceProcAddr( instance, "vkCmdPushConstants" ) );
+      vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass" ) );
+      vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass" ) );
+      vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass" ) );
+      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetInstanceProcAddr( instance, "vkCmdExecuteCommands" ) );
+
+  //=== VK_VERSION_1_1 ===
+      vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2" ) );
+      vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetInstanceProcAddr( instance, "vkBindImageMemory2" ) );
+      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+      vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMask" ) );
+      vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetInstanceProcAddr( instance, "vkCmdDispatchBase" ) );
+      vkEnumeratePhysicalDeviceGroups = PFN_vkEnumeratePhysicalDeviceGroups( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroups" ) );
+      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2" ) );
+      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2" ) );
+      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2" ) );
+      vkGetPhysicalDeviceFeatures2 = PFN_vkGetPhysicalDeviceFeatures2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2" ) );
+      vkGetPhysicalDeviceProperties2 = PFN_vkGetPhysicalDeviceProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2" ) );
+      vkGetPhysicalDeviceFormatProperties2 = PFN_vkGetPhysicalDeviceFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2" ) );
+      vkGetPhysicalDeviceImageFormatProperties2 = PFN_vkGetPhysicalDeviceImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2" ) );
+      vkGetPhysicalDeviceQueueFamilyProperties2 = PFN_vkGetPhysicalDeviceQueueFamilyProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2" ) );
+      vkGetPhysicalDeviceMemoryProperties2 = PFN_vkGetPhysicalDeviceMemoryProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2" ) );
+      vkGetPhysicalDeviceSparseImageFormatProperties2 = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2" ) );
+      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetInstanceProcAddr( instance, "vkTrimCommandPool" ) );
+      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetInstanceProcAddr( instance, "vkGetDeviceQueue2" ) );
+      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversion" ) );
+      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversion" ) );
+      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplate" ) );
+      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplate" ) );
+      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplate" ) );
+      vkGetPhysicalDeviceExternalBufferProperties = PFN_vkGetPhysicalDeviceExternalBufferProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferProperties" ) );
+      vkGetPhysicalDeviceExternalFenceProperties = PFN_vkGetPhysicalDeviceExternalFenceProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFenceProperties" ) );
+      vkGetPhysicalDeviceExternalSemaphoreProperties = PFN_vkGetPhysicalDeviceExternalSemaphoreProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphoreProperties" ) );
+      vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupport" ) );
+
+  //=== VK_VERSION_1_2 ===
+      vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCount" ) );
+      vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCount" ) );
+      vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2" ) );
+      vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2" ) );
+      vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2" ) );
+      vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2" ) );
+      vkResetQueryPool = PFN_vkResetQueryPool( vkGetInstanceProcAddr( instance, "vkResetQueryPool" ) );
+      vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValue" ) );
+      vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetInstanceProcAddr( instance, "vkWaitSemaphores" ) );
+      vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetInstanceProcAddr( instance, "vkSignalSemaphore" ) );
+      vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddress" ) );
+      vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddress" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+
+  //=== VK_VERSION_1_3 ===
+      vkGetPhysicalDeviceToolProperties = PFN_vkGetPhysicalDeviceToolProperties( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolProperties" ) );
+      vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlot" ) );
+      vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlot" ) );
+      vkSetPrivateData = PFN_vkSetPrivateData( vkGetInstanceProcAddr( instance, "vkSetPrivateData" ) );
+      vkGetPrivateData = PFN_vkGetPrivateData( vkGetInstanceProcAddr( instance, "vkGetPrivateData" ) );
+      vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2" ) );
+      vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2" ) );
+      vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2" ) );
+      vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2" ) );
+      vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2" ) );
+      vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetInstanceProcAddr( instance, "vkQueueSubmit2" ) );
+      vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2" ) );
+      vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2" ) );
+      vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2" ) );
+      vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2" ) );
+      vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2" ) );
+      vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2" ) );
+      vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetInstanceProcAddr( instance, "vkCmdBeginRendering" ) );
+      vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetInstanceProcAddr( instance, "vkCmdEndRendering" ) );
+      vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetInstanceProcAddr( instance, "vkCmdSetCullMode" ) );
+      vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFace" ) );
+      vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopology" ) );
+      vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCount" ) );
+      vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCount" ) );
+      vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2" ) );
+      vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnable" ) );
+      vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnable" ) );
+      vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOp" ) );
+      vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnable" ) );
+      vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnable" ) );
+      vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOp" ) );
+      vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnable" ) );
+      vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnable" ) );
+      vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnable" ) );
+      vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirements" ) );
+      vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirements" ) );
+      vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirements" ) );
+
+  //=== VK_KHR_surface ===
+      vkDestroySurfaceKHR = PFN_vkDestroySurfaceKHR( vkGetInstanceProcAddr( instance, "vkDestroySurfaceKHR" ) );
+      vkGetPhysicalDeviceSurfaceSupportKHR = PFN_vkGetPhysicalDeviceSurfaceSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceSupportKHR" ) );
+      vkGetPhysicalDeviceSurfaceCapabilitiesKHR = PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR" ) );
+      vkGetPhysicalDeviceSurfaceFormatsKHR = PFN_vkGetPhysicalDeviceSurfaceFormatsKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormatsKHR" ) );
+      vkGetPhysicalDeviceSurfacePresentModesKHR = PFN_vkGetPhysicalDeviceSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModesKHR" ) );
+
+  //=== VK_KHR_swapchain ===
+      vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetInstanceProcAddr( instance, "vkCreateSwapchainKHR" ) );
+      vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetInstanceProcAddr( instance, "vkDestroySwapchainKHR" ) );
+      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainImagesKHR" ) );
+      vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImageKHR" ) );
+      vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetInstanceProcAddr( instance, "vkQueuePresentKHR" ) );
+      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+      vkGetPhysicalDevicePresentRectanglesKHR = PFN_vkGetPhysicalDevicePresentRectanglesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDevicePresentRectanglesKHR" ) );
+      vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetInstanceProcAddr( instance, "vkAcquireNextImage2KHR" ) );
+
+  //=== VK_KHR_display ===
+      vkGetPhysicalDeviceDisplayPropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPropertiesKHR" ) );
+      vkGetPhysicalDeviceDisplayPlanePropertiesKHR = PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlanePropertiesKHR" ) );
+      vkGetDisplayPlaneSupportedDisplaysKHR = PFN_vkGetDisplayPlaneSupportedDisplaysKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneSupportedDisplaysKHR" ) );
+      vkGetDisplayModePropertiesKHR = PFN_vkGetDisplayModePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModePropertiesKHR" ) );
+      vkCreateDisplayModeKHR = PFN_vkCreateDisplayModeKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayModeKHR" ) );
+      vkGetDisplayPlaneCapabilitiesKHR = PFN_vkGetDisplayPlaneCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilitiesKHR" ) );
+      vkCreateDisplayPlaneSurfaceKHR = PFN_vkCreateDisplayPlaneSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateDisplayPlaneSurfaceKHR" ) );
+
+  //=== VK_KHR_display_swapchain ===
+      vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetInstanceProcAddr( instance, "vkCreateSharedSwapchainsKHR" ) );
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+      vkCreateXlibSurfaceKHR = PFN_vkCreateXlibSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXlibSurfaceKHR" ) );
+      vkGetPhysicalDeviceXlibPresentationSupportKHR = PFN_vkGetPhysicalDeviceXlibPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXlibPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+      vkCreateXcbSurfaceKHR = PFN_vkCreateXcbSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateXcbSurfaceKHR" ) );
+      vkGetPhysicalDeviceXcbPresentationSupportKHR = PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+      vkCreateWaylandSurfaceKHR = PFN_vkCreateWaylandSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWaylandSurfaceKHR" ) );
+      vkGetPhysicalDeviceWaylandPresentationSupportKHR = PFN_vkGetPhysicalDeviceWaylandPresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWaylandPresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+      vkCreateAndroidSurfaceKHR = PFN_vkCreateAndroidSurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateAndroidSurfaceKHR" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+      vkCreateWin32SurfaceKHR = PFN_vkCreateWin32SurfaceKHR( vkGetInstanceProcAddr( instance, "vkCreateWin32SurfaceKHR" ) );
+      vkGetPhysicalDeviceWin32PresentationSupportKHR = PFN_vkGetPhysicalDeviceWin32PresentationSupportKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceWin32PresentationSupportKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+      vkCreateDebugReportCallbackEXT = PFN_vkCreateDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugReportCallbackEXT" ) );
+      vkDestroyDebugReportCallbackEXT = PFN_vkDestroyDebugReportCallbackEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugReportCallbackEXT" ) );
+      vkDebugReportMessageEXT = PFN_vkDebugReportMessageEXT( vkGetInstanceProcAddr( instance, "vkDebugReportMessageEXT" ) );
+
+  //=== VK_EXT_debug_marker ===
+      vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectTagEXT" ) );
+      vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetInstanceProcAddr( instance, "vkDebugMarkerSetObjectNameEXT" ) );
+      vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerBeginEXT" ) );
+      vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerEndEXT" ) );
+      vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetInstanceProcAddr( instance, "vkCmdDebugMarkerInsertEXT" ) );
+
+  //=== VK_KHR_video_queue ===
+      vkGetPhysicalDeviceVideoCapabilitiesKHR = PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoCapabilitiesKHR" ) );
+      vkGetPhysicalDeviceVideoFormatPropertiesKHR = PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceVideoFormatPropertiesKHR" ) );
+      vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionKHR" ) );
+      vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionKHR" ) );
+      vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetVideoSessionMemoryRequirementsKHR" ) );
+      vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetInstanceProcAddr( instance, "vkBindVideoSessionMemoryKHR" ) );
+      vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkCreateVideoSessionParametersKHR" ) );
+      vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkUpdateVideoSessionParametersKHR" ) );
+      vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetInstanceProcAddr( instance, "vkDestroyVideoSessionParametersKHR" ) );
+      vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginVideoCodingKHR" ) );
+      vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndVideoCodingKHR" ) );
+      vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetInstanceProcAddr( instance, "vkCmdControlVideoCodingKHR" ) );
+
+  //=== VK_KHR_video_decode_queue ===
+      vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdDecodeVideoKHR" ) );
+
+  //=== VK_EXT_transform_feedback ===
+      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+      vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginTransformFeedbackEXT" ) );
+      vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetInstanceProcAddr( instance, "vkCmdEndTransformFeedbackEXT" ) );
+      vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginQueryIndexedEXT" ) );
+      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdEndQueryIndexedEXT" ) );
+      vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectByteCountEXT" ) );
+
+  //=== VK_NVX_binary_import ===
+      vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetInstanceProcAddr( instance, "vkCreateCuModuleNVX" ) );
+      vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkCreateCuFunctionNVX" ) );
+      vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuModuleNVX" ) );
+      vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetInstanceProcAddr( instance, "vkDestroyCuFunctionNVX" ) );
+      vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetInstanceProcAddr( instance, "vkCmdCuLaunchKernelNVX" ) );
+
+  //=== VK_NVX_image_view_handle ===
+      vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewHandleNVX" ) );
+      vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetInstanceProcAddr( instance, "vkGetImageViewAddressNVX" ) );
+
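+  // The `if ( !core ) core = extension;` assignments below back-fill a core function pointer with
+  // its promoted-extension equivalent (e.g. vkCmdDrawIndirectCountAMD for vkCmdDrawIndirectCount)
+  // when the instance does not expose the core entry point itself.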
+  //=== VK_AMD_draw_indirect_count ===
+      vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountAMD" ) );
+      if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
+      vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountAMD" ) );
+      if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
+
+  //=== VK_AMD_shader_info ===
+      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetInstanceProcAddr( instance, "vkGetShaderInfoAMD" ) );
+
+  //=== VK_KHR_dynamic_rendering ===
+      vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderingKHR" ) );
+      if ( !vkCmdBeginRendering ) vkCmdBeginRendering = vkCmdBeginRenderingKHR;
+      vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderingKHR" ) );
+      if ( !vkCmdEndRendering ) vkCmdEndRendering = vkCmdEndRenderingKHR;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+      vkCreateStreamDescriptorSurfaceGGP = PFN_vkCreateStreamDescriptorSurfaceGGP( vkGetInstanceProcAddr( instance, "vkCreateStreamDescriptorSurfaceGGP" ) );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+      vkGetPhysicalDeviceExternalImageFormatPropertiesNV = PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalImageFormatPropertiesNV" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_get_physical_device_properties2 ===
+      vkGetPhysicalDeviceFeatures2KHR = PFN_vkGetPhysicalDeviceFeatures2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFeatures2KHR" ) );
+      if ( !vkGetPhysicalDeviceFeatures2 ) vkGetPhysicalDeviceFeatures2 = vkGetPhysicalDeviceFeatures2KHR;
+      vkGetPhysicalDeviceProperties2KHR = PFN_vkGetPhysicalDeviceProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceProperties2KHR" ) );
+      if ( !vkGetPhysicalDeviceProperties2 ) vkGetPhysicalDeviceProperties2 = vkGetPhysicalDeviceProperties2KHR;
+      vkGetPhysicalDeviceFormatProperties2KHR = PFN_vkGetPhysicalDeviceFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFormatProperties2KHR" ) );
+      if ( !vkGetPhysicalDeviceFormatProperties2 ) vkGetPhysicalDeviceFormatProperties2 = vkGetPhysicalDeviceFormatProperties2KHR;
+      vkGetPhysicalDeviceImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceImageFormatProperties2KHR" ) );
+      if ( !vkGetPhysicalDeviceImageFormatProperties2 ) vkGetPhysicalDeviceImageFormatProperties2 = vkGetPhysicalDeviceImageFormatProperties2KHR;
+      vkGetPhysicalDeviceQueueFamilyProperties2KHR = PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyProperties2KHR" ) );
+      if ( !vkGetPhysicalDeviceQueueFamilyProperties2 ) vkGetPhysicalDeviceQueueFamilyProperties2 = vkGetPhysicalDeviceQueueFamilyProperties2KHR;
+      vkGetPhysicalDeviceMemoryProperties2KHR = PFN_vkGetPhysicalDeviceMemoryProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMemoryProperties2KHR" ) );
+      if ( !vkGetPhysicalDeviceMemoryProperties2 ) vkGetPhysicalDeviceMemoryProperties2 = vkGetPhysicalDeviceMemoryProperties2KHR;
+      vkGetPhysicalDeviceSparseImageFormatProperties2KHR = PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSparseImageFormatProperties2KHR" ) );
+      if ( !vkGetPhysicalDeviceSparseImageFormatProperties2 ) vkGetPhysicalDeviceSparseImageFormatProperties2 = vkGetPhysicalDeviceSparseImageFormatProperties2KHR;
+
+  //=== VK_KHR_device_group ===
+      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+      if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
+      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetInstanceProcAddr( instance, "vkCmdSetDeviceMaskKHR" ) );
+      if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
+      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetInstanceProcAddr( instance, "vkCmdDispatchBaseKHR" ) );
+      if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR;
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+      vkCreateViSurfaceNN = PFN_vkCreateViSurfaceNN( vkGetInstanceProcAddr( instance, "vkCreateViSurfaceNN" ) );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_KHR_maintenance1 ===
+      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetInstanceProcAddr( instance, "vkTrimCommandPoolKHR" ) );
+      if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR;
+
+  //=== VK_KHR_device_group_creation ===
+      vkEnumeratePhysicalDeviceGroupsKHR = PFN_vkEnumeratePhysicalDeviceGroupsKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceGroupsKHR" ) );
+      if ( !vkEnumeratePhysicalDeviceGroups ) vkEnumeratePhysicalDeviceGroups = vkEnumeratePhysicalDeviceGroupsKHR;
+
+  //=== VK_KHR_external_memory_capabilities ===
+      vkGetPhysicalDeviceExternalBufferPropertiesKHR = PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalBufferPropertiesKHR" ) );
+      if ( !vkGetPhysicalDeviceExternalBufferProperties ) vkGetPhysicalDeviceExternalBufferProperties = vkGetPhysicalDeviceExternalBufferPropertiesKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+      vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandleKHR" ) );
+      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+      vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdKHR" ) );
+      vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetMemoryFdPropertiesKHR" ) );
+
+  //=== VK_KHR_external_semaphore_capabilities ===
+      vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalSemaphorePropertiesKHR" ) );
+      if ( !vkGetPhysicalDeviceExternalSemaphoreProperties ) vkGetPhysicalDeviceExternalSemaphoreProperties = vkGetPhysicalDeviceExternalSemaphorePropertiesKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreWin32HandleKHR" ) );
+      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkImportSemaphoreFdKHR" ) );
+      vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreFdKHR" ) );
+
+  //=== VK_KHR_push_descriptor ===
+      vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetKHR" ) );
+      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+
+  //=== VK_EXT_conditional_rendering ===
+      vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginConditionalRenderingEXT" ) );
+      vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetInstanceProcAddr( instance, "vkCmdEndConditionalRenderingEXT" ) );
+
+  //=== VK_KHR_descriptor_update_template ===
+      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkCreateDescriptorUpdateTemplateKHR" ) );
+      if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
+      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetInstanceProcAddr( instance, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+      if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
+      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetInstanceProcAddr( instance, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+      if ( !vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
+
+  //=== VK_NV_clip_space_w_scaling ===
+      vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingNV" ) );
+
+  //=== VK_EXT_direct_mode_display ===
+      vkReleaseDisplayEXT = PFN_vkReleaseDisplayEXT( vkGetInstanceProcAddr( instance, "vkReleaseDisplayEXT" ) );
+
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+  //=== VK_EXT_acquire_xlib_display ===
+      vkAcquireXlibDisplayEXT = PFN_vkAcquireXlibDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireXlibDisplayEXT" ) );
+      vkGetRandROutputDisplayEXT = PFN_vkGetRandROutputDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetRandROutputDisplayEXT" ) );
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+
+  //=== VK_EXT_display_surface_counter ===
+      vkGetPhysicalDeviceSurfaceCapabilities2EXT = PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2EXT" ) );
+
+  //=== VK_EXT_display_control ===
+      vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetInstanceProcAddr( instance, "vkDisplayPowerControlEXT" ) );
+      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDeviceEventEXT" ) );
+      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetInstanceProcAddr( instance, "vkRegisterDisplayEventEXT" ) );
+      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetInstanceProcAddr( instance, "vkGetSwapchainCounterEXT" ) );
+
+  //=== VK_GOOGLE_display_timing ===
+      vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetInstanceProcAddr( instance, "vkGetRefreshCycleDurationGOOGLE" ) );
+      vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetInstanceProcAddr( instance, "vkGetPastPresentationTimingGOOGLE" ) );
+
+  //=== VK_EXT_discard_rectangles ===
+      vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDiscardRectangleEXT" ) );
+
+  //=== VK_EXT_hdr_metadata ===
+      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetInstanceProcAddr( instance, "vkSetHdrMetadataEXT" ) );
+
+  //=== VK_KHR_create_renderpass2 ===
+      vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCreateRenderPass2KHR" ) );
+      if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR;
+      vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdBeginRenderPass2KHR" ) );
+      if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
+      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetInstanceProcAddr( instance, "vkCmdNextSubpass2KHR" ) );
+      if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
+      vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetInstanceProcAddr( instance, "vkCmdEndRenderPass2KHR" ) );
+      if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
+
+  //=== VK_KHR_shared_presentable_image ===
+      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetInstanceProcAddr( instance, "vkGetSwapchainStatusKHR" ) );
+
+  //=== VK_KHR_external_fence_capabilities ===
+      vkGetPhysicalDeviceExternalFencePropertiesKHR = PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceExternalFencePropertiesKHR" ) );
+      if ( !vkGetPhysicalDeviceExternalFenceProperties ) vkGetPhysicalDeviceExternalFenceProperties = vkGetPhysicalDeviceExternalFencePropertiesKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkImportFenceWin32HandleKHR" ) );
+      vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetInstanceProcAddr( instance, "vkGetFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetInstanceProcAddr( instance, "vkImportFenceFdKHR" ) );
+      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetInstanceProcAddr( instance, "vkGetFenceFdKHR" ) );
+
+  //=== VK_KHR_performance_query ===
+      vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( vkGetInstanceProcAddr( instance, "vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR" ) );
+      vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR" ) );
+      vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkAcquireProfilingLockKHR" ) );
+      vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetInstanceProcAddr( instance, "vkReleaseProfilingLockKHR" ) );
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+      vkGetPhysicalDeviceSurfaceCapabilities2KHR = PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceCapabilities2KHR" ) );
+      vkGetPhysicalDeviceSurfaceFormats2KHR = PFN_vkGetPhysicalDeviceSurfaceFormats2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfaceFormats2KHR" ) );
+
+  //=== VK_KHR_get_display_properties2 ===
+      vkGetPhysicalDeviceDisplayProperties2KHR = PFN_vkGetPhysicalDeviceDisplayProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayProperties2KHR" ) );
+      vkGetPhysicalDeviceDisplayPlaneProperties2KHR = PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDisplayPlaneProperties2KHR" ) );
+      vkGetDisplayModeProperties2KHR = PFN_vkGetDisplayModeProperties2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayModeProperties2KHR" ) );
+      vkGetDisplayPlaneCapabilities2KHR = PFN_vkGetDisplayPlaneCapabilities2KHR( vkGetInstanceProcAddr( instance, "vkGetDisplayPlaneCapabilities2KHR" ) );
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+      vkCreateIOSSurfaceMVK = PFN_vkCreateIOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateIOSSurfaceMVK" ) );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+      vkCreateMacOSSurfaceMVK = PFN_vkCreateMacOSSurfaceMVK( vkGetInstanceProcAddr( instance, "vkCreateMacOSSurfaceMVK" ) );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectNameEXT" ) );
+      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetInstanceProcAddr( instance, "vkSetDebugUtilsObjectTagEXT" ) );
+      vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueBeginDebugUtilsLabelEXT" ) );
+      vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueEndDebugUtilsLabelEXT" ) );
+      vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkQueueInsertDebugUtilsLabelEXT" ) );
+      vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdBeginDebugUtilsLabelEXT" ) );
+      vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdEndDebugUtilsLabelEXT" ) );
+      vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetInstanceProcAddr( instance, "vkCmdInsertDebugUtilsLabelEXT" ) );
+      vkCreateDebugUtilsMessengerEXT = PFN_vkCreateDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkCreateDebugUtilsMessengerEXT" ) );
+      vkDestroyDebugUtilsMessengerEXT = PFN_vkDestroyDebugUtilsMessengerEXT( vkGetInstanceProcAddr( instance, "vkDestroyDebugUtilsMessengerEXT" ) );
+      vkSubmitDebugUtilsMessageEXT = PFN_vkSubmitDebugUtilsMessageEXT( vkGetInstanceProcAddr( instance, "vkSubmitDebugUtilsMessageEXT" ) );
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetInstanceProcAddr( instance, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetInstanceProcAddr( instance, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+      vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEXT" ) );
+      vkGetPhysicalDeviceMultisamplePropertiesEXT = PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceMultisamplePropertiesEXT" ) );
+
+  //=== VK_KHR_get_memory_requirements2 ===
+      vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageMemoryRequirements2KHR" ) );
+      if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
+      vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetBufferMemoryRequirements2KHR" ) );
+      if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
+      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetInstanceProcAddr( instance, "vkGetImageSparseMemoryRequirements2KHR" ) );
+      if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
+
+  //=== VK_KHR_acceleration_structure ===
+      vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureKHR" ) );
+      vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureKHR" ) );
+      vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresKHR" ) );
+      vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
+      vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetInstanceProcAddr( instance, "vkBuildAccelerationStructuresKHR" ) );
+      vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureKHR" ) );
+      vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCopyAccelerationStructureToMemoryKHR" ) );
+      vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCopyMemoryToAccelerationStructureKHR" ) );
+      vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+      vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureKHR" ) );
+      vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+      vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+      vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+      vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+      vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+      vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureBuildSizesKHR" ) );
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+      vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkCreateSamplerYcbcrConversionKHR" ) );
+      if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
+      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetInstanceProcAddr( instance, "vkDestroySamplerYcbcrConversionKHR" ) );
+      if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
+
+  //=== VK_KHR_bind_memory2 ===
+      vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindBufferMemory2KHR" ) );
+      if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR;
+      vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetInstanceProcAddr( instance, "vkBindImageMemory2KHR" ) );
+      if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR;
+
+  //=== VK_EXT_image_drm_format_modifier ===
+      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+
+  //=== VK_EXT_validation_cache ===
+      vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkCreateValidationCacheEXT" ) );
+      vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetInstanceProcAddr( instance, "vkDestroyValidationCacheEXT" ) );
+      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetInstanceProcAddr( instance, "vkMergeValidationCachesEXT" ) );
+      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetInstanceProcAddr( instance, "vkGetValidationCacheDataEXT" ) );
+
+  //=== VK_NV_shading_rate_image ===
+      vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetInstanceProcAddr( instance, "vkCmdBindShadingRateImageNV" ) );
+      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportShadingRatePaletteNV" ) );
+      vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoarseSampleOrderNV" ) );
+
+  //=== VK_NV_ray_tracing ===
+      vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCreateAccelerationStructureNV" ) );
+      vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkDestroyAccelerationStructureNV" ) );
+      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+      vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetInstanceProcAddr( instance, "vkBindAccelerationStructureMemoryNV" ) );
+      vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdBuildAccelerationStructureNV" ) );
+      vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetInstanceProcAddr( instance, "vkCmdCopyAccelerationStructureNV" ) );
+      vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysNV" ) );
+      vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesNV" ) );
+      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesNV" ) );
+      if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
+      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureHandleNV" ) );
+      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetInstanceProcAddr( instance, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+      vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetInstanceProcAddr( instance, "vkCompileDeferredNV" ) );
+
+  //=== VK_KHR_maintenance3 ===
+      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSupportKHR" ) );
+      if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
+
+  //=== VK_KHR_draw_indirect_count ===
+      vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndirectCountKHR" ) );
+      if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
+      vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetInstanceProcAddr( instance, "vkCmdDrawIndexedIndirectCountKHR" ) );
+      if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
+
+  //=== VK_EXT_external_memory_host ===
+      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetMemoryHostPointerPropertiesEXT" ) );
+
+  //=== VK_AMD_buffer_marker ===
+      vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarkerAMD" ) );
+
+  //=== VK_EXT_calibrated_timestamps ===
+      vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCalibrateableTimeDomainsEXT" ) );
+      vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetInstanceProcAddr( instance, "vkGetCalibratedTimestampsEXT" ) );
+
+  //=== VK_NV_mesh_shader ===
+      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksNV" ) );
+      vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectNV" ) );
+      vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+
+  //=== VK_NV_scissor_exclusive ===
+      vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetInstanceProcAddr( instance, "vkCmdSetExclusiveScissorNV" ) );
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+      vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetInstanceProcAddr( instance, "vkCmdSetCheckpointNV" ) );
+      vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointDataNV" ) );
+
+  //=== VK_KHR_timeline_semaphore ===
+      vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetInstanceProcAddr( instance, "vkGetSemaphoreCounterValueKHR" ) );
+      if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
+      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetInstanceProcAddr( instance, "vkWaitSemaphoresKHR" ) );
+      if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR;
+      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetInstanceProcAddr( instance, "vkSignalSemaphoreKHR" ) );
+      if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR;
+
+  //=== VK_INTEL_performance_query ===
+      vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkInitializePerformanceApiINTEL" ) );
+      vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetInstanceProcAddr( instance, "vkUninitializePerformanceApiINTEL" ) );
+      vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceMarkerINTEL" ) );
+      vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+      vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetInstanceProcAddr( instance, "vkCmdSetPerformanceOverrideINTEL" ) );
+      vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkAcquirePerformanceConfigurationINTEL" ) );
+      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkReleasePerformanceConfigurationINTEL" ) );
+      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetInstanceProcAddr( instance, "vkQueueSetPerformanceConfigurationINTEL" ) );
+      vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetInstanceProcAddr( instance, "vkGetPerformanceParameterINTEL" ) );
+
+  //=== VK_AMD_display_native_hdr ===
+      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetInstanceProcAddr( instance, "vkSetLocalDimmingAMD" ) );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+      vkCreateImagePipeSurfaceFUCHSIA = PFN_vkCreateImagePipeSurfaceFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateImagePipeSurfaceFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+      vkCreateMetalSurfaceEXT = PFN_vkCreateMetalSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateMetalSurfaceEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+      vkGetPhysicalDeviceFragmentShadingRatesKHR = PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceFragmentShadingRatesKHR" ) );
+      vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateKHR" ) );
+
+  //=== VK_EXT_buffer_device_address ===
+      vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressEXT" ) );
+      if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
+
+  //=== VK_EXT_tooling_info ===
+      vkGetPhysicalDeviceToolPropertiesEXT = PFN_vkGetPhysicalDeviceToolPropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceToolPropertiesEXT" ) );
+      if ( !vkGetPhysicalDeviceToolProperties ) vkGetPhysicalDeviceToolProperties = vkGetPhysicalDeviceToolPropertiesEXT;
+
+  //=== VK_KHR_present_wait ===
+      vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetInstanceProcAddr( instance, "vkWaitForPresentKHR" ) );
+
+  //=== VK_NV_cooperative_matrix ===
+      vkGetPhysicalDeviceCooperativeMatrixPropertiesNV = PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceCooperativeMatrixPropertiesNV" ) );
+
+  //=== VK_NV_coverage_reduction_mode ===
+      vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV = PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+      vkGetPhysicalDeviceSurfacePresentModes2EXT = PFN_vkGetPhysicalDeviceSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceSurfacePresentModes2EXT" ) );
+      vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkAcquireFullScreenExclusiveModeEXT" ) );
+      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetInstanceProcAddr( instance, "vkReleaseFullScreenExclusiveModeEXT" ) );
+      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetInstanceProcAddr( instance, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+      vkCreateHeadlessSurfaceEXT = PFN_vkCreateHeadlessSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateHeadlessSurfaceEXT" ) );
+
+  //=== VK_KHR_buffer_device_address ===
+      vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferDeviceAddressKHR" ) );
+      if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
+      vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+      if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
+      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+      if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
+
+  //=== VK_EXT_line_rasterization ===
+      vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEXT" ) );
+
+  //=== VK_EXT_host_query_reset ===
+      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetInstanceProcAddr( instance, "vkResetQueryPoolEXT" ) );
+      if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT;
+
+  //=== VK_EXT_extended_dynamic_state ===
+      vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetCullModeEXT" ) );
+      if ( !vkCmdSetCullMode ) vkCmdSetCullMode = vkCmdSetCullModeEXT;
+      vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetInstanceProcAddr( instance, "vkCmdSetFrontFaceEXT" ) );
+      if ( !vkCmdSetFrontFace ) vkCmdSetFrontFace = vkCmdSetFrontFaceEXT;
+      vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveTopologyEXT" ) );
+      if ( !vkCmdSetPrimitiveTopology ) vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT;
+      vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWithCountEXT" ) );
+      if ( !vkCmdSetViewportWithCount ) vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT;
+      vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetInstanceProcAddr( instance, "vkCmdSetScissorWithCountEXT" ) );
+      if ( !vkCmdSetScissorWithCount ) vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT;
+      vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetInstanceProcAddr( instance, "vkCmdBindVertexBuffers2EXT" ) );
+      if ( !vkCmdBindVertexBuffers2 ) vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT;
+      vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthTestEnableEXT" ) );
+      if ( !vkCmdSetDepthTestEnable ) vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT;
+      vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthWriteEnableEXT" ) );
+      if ( !vkCmdSetDepthWriteEnable ) vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT;
+      vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthCompareOpEXT" ) );
+      if ( !vkCmdSetDepthCompareOp ) vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT;
+      vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+      if ( !vkCmdSetDepthBoundsTestEnable ) vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT;
+      vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilTestEnableEXT" ) );
+      if ( !vkCmdSetStencilTestEnable ) vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT;
+      vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetStencilOpEXT" ) );
+      if ( !vkCmdSetStencilOp ) vkCmdSetStencilOp = vkCmdSetStencilOpEXT;
+
+  //=== VK_KHR_deferred_host_operations ===
+      vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkCreateDeferredOperationKHR" ) );
+      vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetInstanceProcAddr( instance, "vkDestroyDeferredOperationKHR" ) );
+      vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+      vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetInstanceProcAddr( instance, "vkGetDeferredOperationResultKHR" ) );
+      vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetInstanceProcAddr( instance, "vkDeferredOperationJoinKHR" ) );
+
+  //=== VK_KHR_pipeline_executable_properties ===
+      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutablePropertiesKHR" ) );
+      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableStatisticsKHR" ) );
+      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetInstanceProcAddr( instance, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+      vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetInstanceProcAddr( instance, "vkReleaseSwapchainImagesEXT" ) );
+
+  //=== VK_NV_device_generated_commands ===
+      vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetInstanceProcAddr( instance, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
+      vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdPreprocessGeneratedCommandsNV" ) );
+      vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetInstanceProcAddr( instance, "vkCmdExecuteGeneratedCommandsNV" ) );
+      vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetInstanceProcAddr( instance, "vkCmdBindPipelineShaderGroupNV" ) );
+      vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkCreateIndirectCommandsLayoutNV" ) );
+      vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetInstanceProcAddr( instance, "vkDestroyIndirectCommandsLayoutNV" ) );
+
+  //=== VK_EXT_acquire_drm_display ===
+      vkAcquireDrmDisplayEXT = PFN_vkAcquireDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkAcquireDrmDisplayEXT" ) );
+      vkGetDrmDisplayEXT = PFN_vkGetDrmDisplayEXT( vkGetInstanceProcAddr( instance, "vkGetDrmDisplayEXT" ) );
+
+  //=== VK_EXT_private_data ===
+      vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkCreatePrivateDataSlotEXT" ) );
+      if ( !vkCreatePrivateDataSlot ) vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT;
+      vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetInstanceProcAddr( instance, "vkDestroyPrivateDataSlotEXT" ) );
+      if ( !vkDestroyPrivateDataSlot ) vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT;
+      vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkSetPrivateDataEXT" ) );
+      if ( !vkSetPrivateData ) vkSetPrivateData = vkSetPrivateDataEXT;
+      vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetInstanceProcAddr( instance, "vkGetPrivateDataEXT" ) );
+      if ( !vkGetPrivateData ) vkGetPrivateData = vkGetPrivateDataEXT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+      vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetInstanceProcAddr( instance, "vkCmdEncodeVideoKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+      vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetInstanceProcAddr( instance, "vkExportMetalObjectsEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+      vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdSetEvent2KHR" ) );
+      if ( !vkCmdSetEvent2 ) vkCmdSetEvent2 = vkCmdSetEvent2KHR;
+      vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetInstanceProcAddr( instance, "vkCmdResetEvent2KHR" ) );
+      if ( !vkCmdResetEvent2 ) vkCmdResetEvent2 = vkCmdResetEvent2KHR;
+      vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetInstanceProcAddr( instance, "vkCmdWaitEvents2KHR" ) );
+      if ( !vkCmdWaitEvents2 ) vkCmdWaitEvents2 = vkCmdWaitEvents2KHR;
+      vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetInstanceProcAddr( instance, "vkCmdPipelineBarrier2KHR" ) );
+      if ( !vkCmdPipelineBarrier2 ) vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR;
+      vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetInstanceProcAddr( instance, "vkCmdWriteTimestamp2KHR" ) );
+      if ( !vkCmdWriteTimestamp2 ) vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR;
+      vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetInstanceProcAddr( instance, "vkQueueSubmit2KHR" ) );
+      if ( !vkQueueSubmit2 ) vkQueueSubmit2 = vkQueueSubmit2KHR;
+      vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetInstanceProcAddr( instance, "vkCmdWriteBufferMarker2AMD" ) );
+      vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetInstanceProcAddr( instance, "vkGetQueueCheckpointData2NV" ) );
+
+  //=== VK_EXT_descriptor_buffer ===
+      vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutSizeEXT" ) );
+      vkGetDescriptorSetLayoutBindingOffsetEXT = PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) );
+      vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetInstanceProcAddr( instance, "vkGetDescriptorEXT" ) );
+      vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBuffersEXT" ) );
+      vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDescriptorBufferOffsetsEXT" ) );
+      vkCmdBindDescriptorBufferEmbeddedSamplersEXT = PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetInstanceProcAddr( instance, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) );
+      vkGetBufferOpaqueCaptureDescriptorDataEXT = PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetImageOpaqueCaptureDescriptorDataEXT = PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetImageViewOpaqueCaptureDescriptorDataEXT = PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetSamplerOpaqueCaptureDescriptorDataEXT = PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( vkGetInstanceProcAddr( instance, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) );
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+      vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetInstanceProcAddr( instance, "vkCmdSetFragmentShadingRateEnumNV" ) );
+
+  //=== VK_EXT_mesh_shader ===
+      vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksEXT" ) );
+      vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectEXT" ) );
+      vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMeshTasksIndirectCountEXT" ) );
+
+  //=== VK_KHR_copy_commands2 ===
+      vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBuffer2KHR" ) );
+      if ( !vkCmdCopyBuffer2 ) vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR;
+      vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImage2KHR" ) );
+      if ( !vkCmdCopyImage2 ) vkCmdCopyImage2 = vkCmdCopyImage2KHR;
+      vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyBufferToImage2KHR" ) );
+      if ( !vkCmdCopyBufferToImage2 ) vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR;
+      vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetInstanceProcAddr( instance, "vkCmdCopyImageToBuffer2KHR" ) );
+      if ( !vkCmdCopyImageToBuffer2 ) vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR;
+      vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdBlitImage2KHR" ) );
+      if ( !vkCmdBlitImage2 ) vkCmdBlitImage2 = vkCmdBlitImage2KHR;
+      vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetInstanceProcAddr( instance, "vkCmdResolveImage2KHR" ) );
+      if ( !vkCmdResolveImage2 ) vkCmdResolveImage2 = vkCmdResolveImage2KHR;
+
+  //=== VK_EXT_image_compression_control ===
+      vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetInstanceProcAddr( instance, "vkGetImageSubresourceLayout2EXT" ) );
+
+  //=== VK_EXT_device_fault ===
+      vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceFaultInfoEXT" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_acquire_winrt_display ===
+      vkAcquireWinrtDisplayNV = PFN_vkAcquireWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkAcquireWinrtDisplayNV" ) );
+      vkGetWinrtDisplayNV = PFN_vkGetWinrtDisplayNV( vkGetInstanceProcAddr( instance, "vkGetWinrtDisplayNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+      vkCreateDirectFBSurfaceEXT = PFN_vkCreateDirectFBSurfaceEXT( vkGetInstanceProcAddr( instance, "vkCreateDirectFBSurfaceEXT" ) );
+      vkGetPhysicalDeviceDirectFBPresentationSupportEXT = PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceDirectFBPresentationSupportEXT" ) );
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+      vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysKHR" ) );
+      vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetInstanceProcAddr( instance, "vkCreateRayTracingPipelinesKHR" ) );
+      vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+      vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+      vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirectKHR" ) );
+      vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetInstanceProcAddr( instance, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
+      vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetInstanceProcAddr( instance, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+      vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetInstanceProcAddr( instance, "vkCmdSetVertexInputEXT" ) );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+      vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandleFUCHSIA" ) );
+      vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+      vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
+      vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+      vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkCreateBufferCollectionFUCHSIA" ) );
+      vkSetBufferCollectionImageConstraintsFUCHSIA = PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
+      vkSetBufferCollectionBufferConstraintsFUCHSIA = PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetInstanceProcAddr( instance, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
+      vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetInstanceProcAddr( instance, "vkDestroyBufferCollectionFUCHSIA" ) );
+      vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetInstanceProcAddr( instance, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+      vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetInstanceProcAddr( instance, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
+      vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdSubpassShadingHUAWEI" ) );
+
+  //=== VK_HUAWEI_invocation_mask ===
+      vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetInstanceProcAddr( instance, "vkCmdBindInvocationMaskHUAWEI" ) );
+
+  //=== VK_NV_external_memory_rdma ===
+      vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetInstanceProcAddr( instance, "vkGetMemoryRemoteAddressNV" ) );
+
+  //=== VK_EXT_pipeline_properties ===
+      vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetInstanceProcAddr( instance, "vkGetPipelinePropertiesEXT" ) );
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+      vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPatchControlPointsEXT" ) );
+      vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizerDiscardEnableEXT" ) );
+      if ( !vkCmdSetRasterizerDiscardEnable ) vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT;
+      vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthBiasEnableEXT" ) );
+      if ( !vkCmdSetDepthBiasEnable ) vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT;
+      vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEXT" ) );
+      vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPrimitiveRestartEnableEXT" ) );
+      if ( !vkCmdSetPrimitiveRestartEnable ) vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+      vkCreateScreenSurfaceQNX = PFN_vkCreateScreenSurfaceQNX( vkGetInstanceProcAddr( instance, "vkCreateScreenSurfaceQNX" ) );
+      vkGetPhysicalDeviceScreenPresentationSupportQNX = PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceScreenPresentationSupportQNX" ) );
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+      vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteEnableEXT" ) );
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+      vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetInstanceProcAddr( instance, "vkCmdTraceRaysIndirect2KHR" ) );
+
+  //=== VK_EXT_multi_draw ===
+      vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiEXT" ) );
+      vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetInstanceProcAddr( instance, "vkCmdDrawMultiIndexedEXT" ) );
+
+  //=== VK_EXT_opacity_micromap ===
+      vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetInstanceProcAddr( instance, "vkCreateMicromapEXT" ) );
+      vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetInstanceProcAddr( instance, "vkDestroyMicromapEXT" ) );
+      vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkCmdBuildMicromapsEXT" ) );
+      vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetInstanceProcAddr( instance, "vkBuildMicromapsEXT" ) );
+      vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapEXT" ) );
+      vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCopyMicromapToMemoryEXT" ) );
+      vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCopyMemoryToMicromapEXT" ) );
+      vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkWriteMicromapsPropertiesEXT" ) );
+      vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapEXT" ) );
+      vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMicromapToMemoryEXT" ) );
+      vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToMicromapEXT" ) );
+      vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetInstanceProcAddr( instance, "vkCmdWriteMicromapsPropertiesEXT" ) );
+      vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetInstanceProcAddr( instance, "vkGetDeviceMicromapCompatibilityEXT" ) );
+      vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetInstanceProcAddr( instance, "vkGetMicromapBuildSizesEXT" ) );
+
+  //=== VK_EXT_pageable_device_local_memory ===
+      vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetInstanceProcAddr( instance, "vkSetDeviceMemoryPriorityEXT" ) );
+
+  //=== VK_KHR_maintenance4 ===
+      vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceBufferMemoryRequirements ) vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
+      vkGetDeviceImageMemoryRequirementsKHR = PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceImageMemoryRequirements ) vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
+      vkGetDeviceImageSparseMemoryRequirementsKHR = PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetInstanceProcAddr( instance, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceImageSparseMemoryRequirements ) vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+      vkGetDescriptorSetLayoutHostMappingInfoVALVE = PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
+      vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetInstanceProcAddr( instance, "vkGetDescriptorSetHostMappingVALVE" ) );
+
+  //=== VK_NV_copy_memory_indirect ===
+      vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryIndirectNV" ) );
+      vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetInstanceProcAddr( instance, "vkCmdCopyMemoryToImageIndirectNV" ) );
+
+  //=== VK_NV_memory_decompression ===
+      vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryNV" ) );
+      vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetInstanceProcAddr( instance, "vkCmdDecompressMemoryIndirectCountNV" ) );
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+      vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetInstanceProcAddr( instance, "vkCmdSetTessellationDomainOriginEXT" ) );
+      vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClampEnableEXT" ) );
+      vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetPolygonModeEXT" ) );
+      vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationSamplesEXT" ) );
+      vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleMaskEXT" ) );
+      vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToCoverageEnableEXT" ) );
+      vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetAlphaToOneEnableEXT" ) );
+      vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLogicOpEnableEXT" ) );
+      vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEnableEXT" ) );
+      vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendEquationEXT" ) );
+      vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorWriteMaskEXT" ) );
+      vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetInstanceProcAddr( instance, "vkCmdSetRasterizationStreamEXT" ) );
+      vkCmdSetConservativeRasterizationModeEXT = PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetConservativeRasterizationModeEXT" ) );
+      vkCmdSetExtraPrimitiveOverestimationSizeEXT = PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
+      vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipEnableEXT" ) );
+      vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetSampleLocationsEnableEXT" ) );
+      vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetInstanceProcAddr( instance, "vkCmdSetColorBlendAdvancedEXT" ) );
+      vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetProvokingVertexModeEXT" ) );
+      vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineRasterizationModeEXT" ) );
+      vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetInstanceProcAddr( instance, "vkCmdSetLineStippleEnableEXT" ) );
+      vkCmdSetDepthClipNegativeOneToOneEXT = PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetInstanceProcAddr( instance, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
+      vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportWScalingEnableNV" ) );
+      vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetInstanceProcAddr( instance, "vkCmdSetViewportSwizzleNV" ) );
+      vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorEnableNV" ) );
+      vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageToColorLocationNV" ) );
+      vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationModeNV" ) );
+      vkCmdSetCoverageModulationTableEnableNV = PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableEnableNV" ) );
+      vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageModulationTableNV" ) );
+      vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetShadingRateImageEnableNV" ) );
+      vkCmdSetRepresentativeFragmentTestEnableNV = PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetInstanceProcAddr( instance, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
+      vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetInstanceProcAddr( instance, "vkCmdSetCoverageReductionModeNV" ) );
+
+  //=== VK_EXT_shader_module_identifier ===
+      vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleIdentifierEXT" ) );
+      vkGetShaderModuleCreateInfoIdentifierEXT = PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetInstanceProcAddr( instance, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
+
+  //=== VK_NV_optical_flow ===
+      vkGetPhysicalDeviceOpticalFlowImageFormatsNV = PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV( vkGetInstanceProcAddr( instance, "vkGetPhysicalDeviceOpticalFlowImageFormatsNV" ) );
+      vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkCreateOpticalFlowSessionNV" ) );
+      vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetInstanceProcAddr( instance, "vkDestroyOpticalFlowSessionNV" ) );
+      vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetInstanceProcAddr( instance, "vkBindOpticalFlowSessionImageNV" ) );
+      vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetInstanceProcAddr( instance, "vkCmdOpticalFlowExecuteNV" ) );
+
+  //=== VK_QCOM_tile_properties ===
+      vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetFramebufferTilePropertiesQCOM" ) );
+      vkGetDynamicRenderingTilePropertiesQCOM = PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetInstanceProcAddr( instance, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
+
+    }
+
+    void init( VULKAN_HPP_NAMESPACE::Device deviceCpp ) VULKAN_HPP_NOEXCEPT
+    {
+      VkDevice device = static_cast<VkDevice>(deviceCpp);
+
+
+  //=== VK_VERSION_1_0 ===
+      vkGetDeviceProcAddr = PFN_vkGetDeviceProcAddr( vkGetDeviceProcAddr( device, "vkGetDeviceProcAddr" ) );
+      vkDestroyDevice = PFN_vkDestroyDevice( vkGetDeviceProcAddr( device, "vkDestroyDevice" ) );
+      vkGetDeviceQueue = PFN_vkGetDeviceQueue( vkGetDeviceProcAddr( device, "vkGetDeviceQueue" ) );
+      vkQueueSubmit = PFN_vkQueueSubmit( vkGetDeviceProcAddr( device, "vkQueueSubmit" ) );
+      vkQueueWaitIdle = PFN_vkQueueWaitIdle( vkGetDeviceProcAddr( device, "vkQueueWaitIdle" ) );
+      vkDeviceWaitIdle = PFN_vkDeviceWaitIdle( vkGetDeviceProcAddr( device, "vkDeviceWaitIdle" ) );
+      vkAllocateMemory = PFN_vkAllocateMemory( vkGetDeviceProcAddr( device, "vkAllocateMemory" ) );
+      vkFreeMemory = PFN_vkFreeMemory( vkGetDeviceProcAddr( device, "vkFreeMemory" ) );
+      vkMapMemory = PFN_vkMapMemory( vkGetDeviceProcAddr( device, "vkMapMemory" ) );
+      vkUnmapMemory = PFN_vkUnmapMemory( vkGetDeviceProcAddr( device, "vkUnmapMemory" ) );
+      vkFlushMappedMemoryRanges = PFN_vkFlushMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkFlushMappedMemoryRanges" ) );
+      vkInvalidateMappedMemoryRanges = PFN_vkInvalidateMappedMemoryRanges( vkGetDeviceProcAddr( device, "vkInvalidateMappedMemoryRanges" ) );
+      vkGetDeviceMemoryCommitment = PFN_vkGetDeviceMemoryCommitment( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryCommitment" ) );
+      vkBindBufferMemory = PFN_vkBindBufferMemory( vkGetDeviceProcAddr( device, "vkBindBufferMemory" ) );
+      vkBindImageMemory = PFN_vkBindImageMemory( vkGetDeviceProcAddr( device, "vkBindImageMemory" ) );
+      vkGetBufferMemoryRequirements = PFN_vkGetBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements" ) );
+      vkGetImageMemoryRequirements = PFN_vkGetImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements" ) );
+      vkGetImageSparseMemoryRequirements = PFN_vkGetImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements" ) );
+      vkQueueBindSparse = PFN_vkQueueBindSparse( vkGetDeviceProcAddr( device, "vkQueueBindSparse" ) );
+      vkCreateFence = PFN_vkCreateFence( vkGetDeviceProcAddr( device, "vkCreateFence" ) );
+      vkDestroyFence = PFN_vkDestroyFence( vkGetDeviceProcAddr( device, "vkDestroyFence" ) );
+      vkResetFences = PFN_vkResetFences( vkGetDeviceProcAddr( device, "vkResetFences" ) );
+      vkGetFenceStatus = PFN_vkGetFenceStatus( vkGetDeviceProcAddr( device, "vkGetFenceStatus" ) );
+      vkWaitForFences = PFN_vkWaitForFences( vkGetDeviceProcAddr( device, "vkWaitForFences" ) );
+      vkCreateSemaphore = PFN_vkCreateSemaphore( vkGetDeviceProcAddr( device, "vkCreateSemaphore" ) );
+      vkDestroySemaphore = PFN_vkDestroySemaphore( vkGetDeviceProcAddr( device, "vkDestroySemaphore" ) );
+      vkCreateEvent = PFN_vkCreateEvent( vkGetDeviceProcAddr( device, "vkCreateEvent" ) );
+      vkDestroyEvent = PFN_vkDestroyEvent( vkGetDeviceProcAddr( device, "vkDestroyEvent" ) );
+      vkGetEventStatus = PFN_vkGetEventStatus( vkGetDeviceProcAddr( device, "vkGetEventStatus" ) );
+      vkSetEvent = PFN_vkSetEvent( vkGetDeviceProcAddr( device, "vkSetEvent" ) );
+      vkResetEvent = PFN_vkResetEvent( vkGetDeviceProcAddr( device, "vkResetEvent" ) );
+      vkCreateQueryPool = PFN_vkCreateQueryPool( vkGetDeviceProcAddr( device, "vkCreateQueryPool" ) );
+      vkDestroyQueryPool = PFN_vkDestroyQueryPool( vkGetDeviceProcAddr( device, "vkDestroyQueryPool" ) );
+      vkGetQueryPoolResults = PFN_vkGetQueryPoolResults( vkGetDeviceProcAddr( device, "vkGetQueryPoolResults" ) );
+      vkCreateBuffer = PFN_vkCreateBuffer( vkGetDeviceProcAddr( device, "vkCreateBuffer" ) );
+      vkDestroyBuffer = PFN_vkDestroyBuffer( vkGetDeviceProcAddr( device, "vkDestroyBuffer" ) );
+      vkCreateBufferView = PFN_vkCreateBufferView( vkGetDeviceProcAddr( device, "vkCreateBufferView" ) );
+      vkDestroyBufferView = PFN_vkDestroyBufferView( vkGetDeviceProcAddr( device, "vkDestroyBufferView" ) );
+      vkCreateImage = PFN_vkCreateImage( vkGetDeviceProcAddr( device, "vkCreateImage" ) );
+      vkDestroyImage = PFN_vkDestroyImage( vkGetDeviceProcAddr( device, "vkDestroyImage" ) );
+      vkGetImageSubresourceLayout = PFN_vkGetImageSubresourceLayout( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout" ) );
+      vkCreateImageView = PFN_vkCreateImageView( vkGetDeviceProcAddr( device, "vkCreateImageView" ) );
+      vkDestroyImageView = PFN_vkDestroyImageView( vkGetDeviceProcAddr( device, "vkDestroyImageView" ) );
+      vkCreateShaderModule = PFN_vkCreateShaderModule( vkGetDeviceProcAddr( device, "vkCreateShaderModule" ) );
+      vkDestroyShaderModule = PFN_vkDestroyShaderModule( vkGetDeviceProcAddr( device, "vkDestroyShaderModule" ) );
+      vkCreatePipelineCache = PFN_vkCreatePipelineCache( vkGetDeviceProcAddr( device, "vkCreatePipelineCache" ) );
+      vkDestroyPipelineCache = PFN_vkDestroyPipelineCache( vkGetDeviceProcAddr( device, "vkDestroyPipelineCache" ) );
+      vkGetPipelineCacheData = PFN_vkGetPipelineCacheData( vkGetDeviceProcAddr( device, "vkGetPipelineCacheData" ) );
+      vkMergePipelineCaches = PFN_vkMergePipelineCaches( vkGetDeviceProcAddr( device, "vkMergePipelineCaches" ) );
+      vkCreateGraphicsPipelines = PFN_vkCreateGraphicsPipelines( vkGetDeviceProcAddr( device, "vkCreateGraphicsPipelines" ) );
+      vkCreateComputePipelines = PFN_vkCreateComputePipelines( vkGetDeviceProcAddr( device, "vkCreateComputePipelines" ) );
+      vkDestroyPipeline = PFN_vkDestroyPipeline( vkGetDeviceProcAddr( device, "vkDestroyPipeline" ) );
+      vkCreatePipelineLayout = PFN_vkCreatePipelineLayout( vkGetDeviceProcAddr( device, "vkCreatePipelineLayout" ) );
+      vkDestroyPipelineLayout = PFN_vkDestroyPipelineLayout( vkGetDeviceProcAddr( device, "vkDestroyPipelineLayout" ) );
+      vkCreateSampler = PFN_vkCreateSampler( vkGetDeviceProcAddr( device, "vkCreateSampler" ) );
+      vkDestroySampler = PFN_vkDestroySampler( vkGetDeviceProcAddr( device, "vkDestroySampler" ) );
+      vkCreateDescriptorSetLayout = PFN_vkCreateDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkCreateDescriptorSetLayout" ) );
+      vkDestroyDescriptorSetLayout = PFN_vkDestroyDescriptorSetLayout( vkGetDeviceProcAddr( device, "vkDestroyDescriptorSetLayout" ) );
+      vkCreateDescriptorPool = PFN_vkCreateDescriptorPool( vkGetDeviceProcAddr( device, "vkCreateDescriptorPool" ) );
+      vkDestroyDescriptorPool = PFN_vkDestroyDescriptorPool( vkGetDeviceProcAddr( device, "vkDestroyDescriptorPool" ) );
+      vkResetDescriptorPool = PFN_vkResetDescriptorPool( vkGetDeviceProcAddr( device, "vkResetDescriptorPool" ) );
+      vkAllocateDescriptorSets = PFN_vkAllocateDescriptorSets( vkGetDeviceProcAddr( device, "vkAllocateDescriptorSets" ) );
+      vkFreeDescriptorSets = PFN_vkFreeDescriptorSets( vkGetDeviceProcAddr( device, "vkFreeDescriptorSets" ) );
+      vkUpdateDescriptorSets = PFN_vkUpdateDescriptorSets( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSets" ) );
+      vkCreateFramebuffer = PFN_vkCreateFramebuffer( vkGetDeviceProcAddr( device, "vkCreateFramebuffer" ) );
+      vkDestroyFramebuffer = PFN_vkDestroyFramebuffer( vkGetDeviceProcAddr( device, "vkDestroyFramebuffer" ) );
+      vkCreateRenderPass = PFN_vkCreateRenderPass( vkGetDeviceProcAddr( device, "vkCreateRenderPass" ) );
+      vkDestroyRenderPass = PFN_vkDestroyRenderPass( vkGetDeviceProcAddr( device, "vkDestroyRenderPass" ) );
+      vkGetRenderAreaGranularity = PFN_vkGetRenderAreaGranularity( vkGetDeviceProcAddr( device, "vkGetRenderAreaGranularity" ) );
+      vkCreateCommandPool = PFN_vkCreateCommandPool( vkGetDeviceProcAddr( device, "vkCreateCommandPool" ) );
+      vkDestroyCommandPool = PFN_vkDestroyCommandPool( vkGetDeviceProcAddr( device, "vkDestroyCommandPool" ) );
+      vkResetCommandPool = PFN_vkResetCommandPool( vkGetDeviceProcAddr( device, "vkResetCommandPool" ) );
+      vkAllocateCommandBuffers = PFN_vkAllocateCommandBuffers( vkGetDeviceProcAddr( device, "vkAllocateCommandBuffers" ) );
+      vkFreeCommandBuffers = PFN_vkFreeCommandBuffers( vkGetDeviceProcAddr( device, "vkFreeCommandBuffers" ) );
+      vkBeginCommandBuffer = PFN_vkBeginCommandBuffer( vkGetDeviceProcAddr( device, "vkBeginCommandBuffer" ) );
+      vkEndCommandBuffer = PFN_vkEndCommandBuffer( vkGetDeviceProcAddr( device, "vkEndCommandBuffer" ) );
+      vkResetCommandBuffer = PFN_vkResetCommandBuffer( vkGetDeviceProcAddr( device, "vkResetCommandBuffer" ) );
+      vkCmdBindPipeline = PFN_vkCmdBindPipeline( vkGetDeviceProcAddr( device, "vkCmdBindPipeline" ) );
+      vkCmdSetViewport = PFN_vkCmdSetViewport( vkGetDeviceProcAddr( device, "vkCmdSetViewport" ) );
+      vkCmdSetScissor = PFN_vkCmdSetScissor( vkGetDeviceProcAddr( device, "vkCmdSetScissor" ) );
+      vkCmdSetLineWidth = PFN_vkCmdSetLineWidth( vkGetDeviceProcAddr( device, "vkCmdSetLineWidth" ) );
+      vkCmdSetDepthBias = PFN_vkCmdSetDepthBias( vkGetDeviceProcAddr( device, "vkCmdSetDepthBias" ) );
+      vkCmdSetBlendConstants = PFN_vkCmdSetBlendConstants( vkGetDeviceProcAddr( device, "vkCmdSetBlendConstants" ) );
+      vkCmdSetDepthBounds = PFN_vkCmdSetDepthBounds( vkGetDeviceProcAddr( device, "vkCmdSetDepthBounds" ) );
+      vkCmdSetStencilCompareMask = PFN_vkCmdSetStencilCompareMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilCompareMask" ) );
+      vkCmdSetStencilWriteMask = PFN_vkCmdSetStencilWriteMask( vkGetDeviceProcAddr( device, "vkCmdSetStencilWriteMask" ) );
+      vkCmdSetStencilReference = PFN_vkCmdSetStencilReference( vkGetDeviceProcAddr( device, "vkCmdSetStencilReference" ) );
+      vkCmdBindDescriptorSets = PFN_vkCmdBindDescriptorSets( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorSets" ) );
+      vkCmdBindIndexBuffer = PFN_vkCmdBindIndexBuffer( vkGetDeviceProcAddr( device, "vkCmdBindIndexBuffer" ) );
+      vkCmdBindVertexBuffers = PFN_vkCmdBindVertexBuffers( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers" ) );
+      vkCmdDraw = PFN_vkCmdDraw( vkGetDeviceProcAddr( device, "vkCmdDraw" ) );
+      vkCmdDrawIndexed = PFN_vkCmdDrawIndexed( vkGetDeviceProcAddr( device, "vkCmdDrawIndexed" ) );
+      vkCmdDrawIndirect = PFN_vkCmdDrawIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndirect" ) );
+      vkCmdDrawIndexedIndirect = PFN_vkCmdDrawIndexedIndirect( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirect" ) );
+      vkCmdDispatch = PFN_vkCmdDispatch( vkGetDeviceProcAddr( device, "vkCmdDispatch" ) );
+      vkCmdDispatchIndirect = PFN_vkCmdDispatchIndirect( vkGetDeviceProcAddr( device, "vkCmdDispatchIndirect" ) );
+      vkCmdCopyBuffer = PFN_vkCmdCopyBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer" ) );
+      vkCmdCopyImage = PFN_vkCmdCopyImage( vkGetDeviceProcAddr( device, "vkCmdCopyImage" ) );
+      vkCmdBlitImage = PFN_vkCmdBlitImage( vkGetDeviceProcAddr( device, "vkCmdBlitImage" ) );
+      vkCmdCopyBufferToImage = PFN_vkCmdCopyBufferToImage( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage" ) );
+      vkCmdCopyImageToBuffer = PFN_vkCmdCopyImageToBuffer( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer" ) );
+      vkCmdUpdateBuffer = PFN_vkCmdUpdateBuffer( vkGetDeviceProcAddr( device, "vkCmdUpdateBuffer" ) );
+      vkCmdFillBuffer = PFN_vkCmdFillBuffer( vkGetDeviceProcAddr( device, "vkCmdFillBuffer" ) );
+      vkCmdClearColorImage = PFN_vkCmdClearColorImage( vkGetDeviceProcAddr( device, "vkCmdClearColorImage" ) );
+      vkCmdClearDepthStencilImage = PFN_vkCmdClearDepthStencilImage( vkGetDeviceProcAddr( device, "vkCmdClearDepthStencilImage" ) );
+      vkCmdClearAttachments = PFN_vkCmdClearAttachments( vkGetDeviceProcAddr( device, "vkCmdClearAttachments" ) );
+      vkCmdResolveImage = PFN_vkCmdResolveImage( vkGetDeviceProcAddr( device, "vkCmdResolveImage" ) );
+      vkCmdSetEvent = PFN_vkCmdSetEvent( vkGetDeviceProcAddr( device, "vkCmdSetEvent" ) );
+      vkCmdResetEvent = PFN_vkCmdResetEvent( vkGetDeviceProcAddr( device, "vkCmdResetEvent" ) );
+      vkCmdWaitEvents = PFN_vkCmdWaitEvents( vkGetDeviceProcAddr( device, "vkCmdWaitEvents" ) );
+      vkCmdPipelineBarrier = PFN_vkCmdPipelineBarrier( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier" ) );
+      vkCmdBeginQuery = PFN_vkCmdBeginQuery( vkGetDeviceProcAddr( device, "vkCmdBeginQuery" ) );
+      vkCmdEndQuery = PFN_vkCmdEndQuery( vkGetDeviceProcAddr( device, "vkCmdEndQuery" ) );
+      vkCmdResetQueryPool = PFN_vkCmdResetQueryPool( vkGetDeviceProcAddr( device, "vkCmdResetQueryPool" ) );
+      vkCmdWriteTimestamp = PFN_vkCmdWriteTimestamp( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp" ) );
+      vkCmdCopyQueryPoolResults = PFN_vkCmdCopyQueryPoolResults( vkGetDeviceProcAddr( device, "vkCmdCopyQueryPoolResults" ) );
+      vkCmdPushConstants = PFN_vkCmdPushConstants( vkGetDeviceProcAddr( device, "vkCmdPushConstants" ) );
+      vkCmdBeginRenderPass = PFN_vkCmdBeginRenderPass( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass" ) );
+      vkCmdNextSubpass = PFN_vkCmdNextSubpass( vkGetDeviceProcAddr( device, "vkCmdNextSubpass" ) );
+      vkCmdEndRenderPass = PFN_vkCmdEndRenderPass( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass" ) );
+      vkCmdExecuteCommands = PFN_vkCmdExecuteCommands( vkGetDeviceProcAddr( device, "vkCmdExecuteCommands" ) );
+
+  //=== VK_VERSION_1_1 ===
+      vkBindBufferMemory2 = PFN_vkBindBufferMemory2( vkGetDeviceProcAddr( device, "vkBindBufferMemory2" ) );
+      vkBindImageMemory2 = PFN_vkBindImageMemory2( vkGetDeviceProcAddr( device, "vkBindImageMemory2" ) );
+      vkGetDeviceGroupPeerMemoryFeatures = PFN_vkGetDeviceGroupPeerMemoryFeatures( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeatures" ) );
+      vkCmdSetDeviceMask = PFN_vkCmdSetDeviceMask( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMask" ) );
+      vkCmdDispatchBase = PFN_vkCmdDispatchBase( vkGetDeviceProcAddr( device, "vkCmdDispatchBase" ) );
+      vkGetImageMemoryRequirements2 = PFN_vkGetImageMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2" ) );
+      vkGetBufferMemoryRequirements2 = PFN_vkGetBufferMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2" ) );
+      vkGetImageSparseMemoryRequirements2 = PFN_vkGetImageSparseMemoryRequirements2( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2" ) );
+      vkTrimCommandPool = PFN_vkTrimCommandPool( vkGetDeviceProcAddr( device, "vkTrimCommandPool" ) );
+      vkGetDeviceQueue2 = PFN_vkGetDeviceQueue2( vkGetDeviceProcAddr( device, "vkGetDeviceQueue2" ) );
+      vkCreateSamplerYcbcrConversion = PFN_vkCreateSamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversion" ) );
+      vkDestroySamplerYcbcrConversion = PFN_vkDestroySamplerYcbcrConversion( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversion" ) );
+      vkCreateDescriptorUpdateTemplate = PFN_vkCreateDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplate" ) );
+      vkDestroyDescriptorUpdateTemplate = PFN_vkDestroyDescriptorUpdateTemplate( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplate" ) );
+      vkUpdateDescriptorSetWithTemplate = PFN_vkUpdateDescriptorSetWithTemplate( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplate" ) );
+      vkGetDescriptorSetLayoutSupport = PFN_vkGetDescriptorSetLayoutSupport( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupport" ) );
+
+  //=== VK_VERSION_1_2 ===
+      vkCmdDrawIndirectCount = PFN_vkCmdDrawIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCount" ) );
+      vkCmdDrawIndexedIndirectCount = PFN_vkCmdDrawIndexedIndirectCount( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCount" ) );
+      vkCreateRenderPass2 = PFN_vkCreateRenderPass2( vkGetDeviceProcAddr( device, "vkCreateRenderPass2" ) );
+      vkCmdBeginRenderPass2 = PFN_vkCmdBeginRenderPass2( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2" ) );
+      vkCmdNextSubpass2 = PFN_vkCmdNextSubpass2( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2" ) );
+      vkCmdEndRenderPass2 = PFN_vkCmdEndRenderPass2( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2" ) );
+      vkResetQueryPool = PFN_vkResetQueryPool( vkGetDeviceProcAddr( device, "vkResetQueryPool" ) );
+      vkGetSemaphoreCounterValue = PFN_vkGetSemaphoreCounterValue( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValue" ) );
+      vkWaitSemaphores = PFN_vkWaitSemaphores( vkGetDeviceProcAddr( device, "vkWaitSemaphores" ) );
+      vkSignalSemaphore = PFN_vkSignalSemaphore( vkGetDeviceProcAddr( device, "vkSignalSemaphore" ) );
+      vkGetBufferDeviceAddress = PFN_vkGetBufferDeviceAddress( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddress" ) );
+      vkGetBufferOpaqueCaptureAddress = PFN_vkGetBufferOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddress" ) );
+      vkGetDeviceMemoryOpaqueCaptureAddress = PFN_vkGetDeviceMemoryOpaqueCaptureAddress( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddress" ) );
+
+  //=== VK_VERSION_1_3 ===
+      vkCreatePrivateDataSlot = PFN_vkCreatePrivateDataSlot( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlot" ) );
+      vkDestroyPrivateDataSlot = PFN_vkDestroyPrivateDataSlot( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlot" ) );
+      vkSetPrivateData = PFN_vkSetPrivateData( vkGetDeviceProcAddr( device, "vkSetPrivateData" ) );
+      vkGetPrivateData = PFN_vkGetPrivateData( vkGetDeviceProcAddr( device, "vkGetPrivateData" ) );
+      vkCmdSetEvent2 = PFN_vkCmdSetEvent2( vkGetDeviceProcAddr( device, "vkCmdSetEvent2" ) );
+      vkCmdResetEvent2 = PFN_vkCmdResetEvent2( vkGetDeviceProcAddr( device, "vkCmdResetEvent2" ) );
+      vkCmdWaitEvents2 = PFN_vkCmdWaitEvents2( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2" ) );
+      vkCmdPipelineBarrier2 = PFN_vkCmdPipelineBarrier2( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2" ) );
+      vkCmdWriteTimestamp2 = PFN_vkCmdWriteTimestamp2( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2" ) );
+      vkQueueSubmit2 = PFN_vkQueueSubmit2( vkGetDeviceProcAddr( device, "vkQueueSubmit2" ) );
+      vkCmdCopyBuffer2 = PFN_vkCmdCopyBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2" ) );
+      vkCmdCopyImage2 = PFN_vkCmdCopyImage2( vkGetDeviceProcAddr( device, "vkCmdCopyImage2" ) );
+      vkCmdCopyBufferToImage2 = PFN_vkCmdCopyBufferToImage2( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2" ) );
+      vkCmdCopyImageToBuffer2 = PFN_vkCmdCopyImageToBuffer2( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2" ) );
+      vkCmdBlitImage2 = PFN_vkCmdBlitImage2( vkGetDeviceProcAddr( device, "vkCmdBlitImage2" ) );
+      vkCmdResolveImage2 = PFN_vkCmdResolveImage2( vkGetDeviceProcAddr( device, "vkCmdResolveImage2" ) );
+      vkCmdBeginRendering = PFN_vkCmdBeginRendering( vkGetDeviceProcAddr( device, "vkCmdBeginRendering" ) );
+      vkCmdEndRendering = PFN_vkCmdEndRendering( vkGetDeviceProcAddr( device, "vkCmdEndRendering" ) );
+      vkCmdSetCullMode = PFN_vkCmdSetCullMode( vkGetDeviceProcAddr( device, "vkCmdSetCullMode" ) );
+      vkCmdSetFrontFace = PFN_vkCmdSetFrontFace( vkGetDeviceProcAddr( device, "vkCmdSetFrontFace" ) );
+      vkCmdSetPrimitiveTopology = PFN_vkCmdSetPrimitiveTopology( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopology" ) );
+      vkCmdSetViewportWithCount = PFN_vkCmdSetViewportWithCount( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCount" ) );
+      vkCmdSetScissorWithCount = PFN_vkCmdSetScissorWithCount( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCount" ) );
+      vkCmdBindVertexBuffers2 = PFN_vkCmdBindVertexBuffers2( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2" ) );
+      vkCmdSetDepthTestEnable = PFN_vkCmdSetDepthTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnable" ) );
+      vkCmdSetDepthWriteEnable = PFN_vkCmdSetDepthWriteEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnable" ) );
+      vkCmdSetDepthCompareOp = PFN_vkCmdSetDepthCompareOp( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOp" ) );
+      vkCmdSetDepthBoundsTestEnable = PFN_vkCmdSetDepthBoundsTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnable" ) );
+      vkCmdSetStencilTestEnable = PFN_vkCmdSetStencilTestEnable( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnable" ) );
+      vkCmdSetStencilOp = PFN_vkCmdSetStencilOp( vkGetDeviceProcAddr( device, "vkCmdSetStencilOp" ) );
+      vkCmdSetRasterizerDiscardEnable = PFN_vkCmdSetRasterizerDiscardEnable( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnable" ) );
+      vkCmdSetDepthBiasEnable = PFN_vkCmdSetDepthBiasEnable( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnable" ) );
+      vkCmdSetPrimitiveRestartEnable = PFN_vkCmdSetPrimitiveRestartEnable( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnable" ) );
+      vkGetDeviceBufferMemoryRequirements = PFN_vkGetDeviceBufferMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirements" ) );
+      vkGetDeviceImageMemoryRequirements = PFN_vkGetDeviceImageMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirements" ) );
+      vkGetDeviceImageSparseMemoryRequirements = PFN_vkGetDeviceImageSparseMemoryRequirements( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirements" ) );
+
+  //=== VK_KHR_swapchain ===
+      vkCreateSwapchainKHR = PFN_vkCreateSwapchainKHR( vkGetDeviceProcAddr( device, "vkCreateSwapchainKHR" ) );
+      vkDestroySwapchainKHR = PFN_vkDestroySwapchainKHR( vkGetDeviceProcAddr( device, "vkDestroySwapchainKHR" ) );
+      vkGetSwapchainImagesKHR = PFN_vkGetSwapchainImagesKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainImagesKHR" ) );
+      vkAcquireNextImageKHR = PFN_vkAcquireNextImageKHR( vkGetDeviceProcAddr( device, "vkAcquireNextImageKHR" ) );
+      vkQueuePresentKHR = PFN_vkQueuePresentKHR( vkGetDeviceProcAddr( device, "vkQueuePresentKHR" ) );
+      vkGetDeviceGroupPresentCapabilitiesKHR = PFN_vkGetDeviceGroupPresentCapabilitiesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPresentCapabilitiesKHR" ) );
+      vkGetDeviceGroupSurfacePresentModesKHR = PFN_vkGetDeviceGroupSurfacePresentModesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModesKHR" ) );
+      vkAcquireNextImage2KHR = PFN_vkAcquireNextImage2KHR( vkGetDeviceProcAddr( device, "vkAcquireNextImage2KHR" ) );
+
+  //=== VK_KHR_display_swapchain ===
+      vkCreateSharedSwapchainsKHR = PFN_vkCreateSharedSwapchainsKHR( vkGetDeviceProcAddr( device, "vkCreateSharedSwapchainsKHR" ) );
+
+  //=== VK_EXT_debug_marker ===
+      vkDebugMarkerSetObjectTagEXT = PFN_vkDebugMarkerSetObjectTagEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectTagEXT" ) );
+      vkDebugMarkerSetObjectNameEXT = PFN_vkDebugMarkerSetObjectNameEXT( vkGetDeviceProcAddr( device, "vkDebugMarkerSetObjectNameEXT" ) );
+      vkCmdDebugMarkerBeginEXT = PFN_vkCmdDebugMarkerBeginEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerBeginEXT" ) );
+      vkCmdDebugMarkerEndEXT = PFN_vkCmdDebugMarkerEndEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerEndEXT" ) );
+      vkCmdDebugMarkerInsertEXT = PFN_vkCmdDebugMarkerInsertEXT( vkGetDeviceProcAddr( device, "vkCmdDebugMarkerInsertEXT" ) );
+
+  //=== VK_KHR_video_queue ===
+      vkCreateVideoSessionKHR = PFN_vkCreateVideoSessionKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionKHR" ) );
+      vkDestroyVideoSessionKHR = PFN_vkDestroyVideoSessionKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionKHR" ) );
+      vkGetVideoSessionMemoryRequirementsKHR = PFN_vkGetVideoSessionMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetVideoSessionMemoryRequirementsKHR" ) );
+      vkBindVideoSessionMemoryKHR = PFN_vkBindVideoSessionMemoryKHR( vkGetDeviceProcAddr( device, "vkBindVideoSessionMemoryKHR" ) );
+      vkCreateVideoSessionParametersKHR = PFN_vkCreateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkCreateVideoSessionParametersKHR" ) );
+      vkUpdateVideoSessionParametersKHR = PFN_vkUpdateVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkUpdateVideoSessionParametersKHR" ) );
+      vkDestroyVideoSessionParametersKHR = PFN_vkDestroyVideoSessionParametersKHR( vkGetDeviceProcAddr( device, "vkDestroyVideoSessionParametersKHR" ) );
+      vkCmdBeginVideoCodingKHR = PFN_vkCmdBeginVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginVideoCodingKHR" ) );
+      vkCmdEndVideoCodingKHR = PFN_vkCmdEndVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdEndVideoCodingKHR" ) );
+      vkCmdControlVideoCodingKHR = PFN_vkCmdControlVideoCodingKHR( vkGetDeviceProcAddr( device, "vkCmdControlVideoCodingKHR" ) );
+
+  //=== VK_KHR_video_decode_queue ===
+      vkCmdDecodeVideoKHR = PFN_vkCmdDecodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdDecodeVideoKHR" ) );
+
+  //=== VK_EXT_transform_feedback ===
+      vkCmdBindTransformFeedbackBuffersEXT = PFN_vkCmdBindTransformFeedbackBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindTransformFeedbackBuffersEXT" ) );
+      vkCmdBeginTransformFeedbackEXT = PFN_vkCmdBeginTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdBeginTransformFeedbackEXT" ) );
+      vkCmdEndTransformFeedbackEXT = PFN_vkCmdEndTransformFeedbackEXT( vkGetDeviceProcAddr( device, "vkCmdEndTransformFeedbackEXT" ) );
+      vkCmdBeginQueryIndexedEXT = PFN_vkCmdBeginQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdBeginQueryIndexedEXT" ) );
+      vkCmdEndQueryIndexedEXT = PFN_vkCmdEndQueryIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdEndQueryIndexedEXT" ) );
+      vkCmdDrawIndirectByteCountEXT = PFN_vkCmdDrawIndirectByteCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectByteCountEXT" ) );
+
+  //=== VK_NVX_binary_import ===
+      vkCreateCuModuleNVX = PFN_vkCreateCuModuleNVX( vkGetDeviceProcAddr( device, "vkCreateCuModuleNVX" ) );
+      vkCreateCuFunctionNVX = PFN_vkCreateCuFunctionNVX( vkGetDeviceProcAddr( device, "vkCreateCuFunctionNVX" ) );
+      vkDestroyCuModuleNVX = PFN_vkDestroyCuModuleNVX( vkGetDeviceProcAddr( device, "vkDestroyCuModuleNVX" ) );
+      vkDestroyCuFunctionNVX = PFN_vkDestroyCuFunctionNVX( vkGetDeviceProcAddr( device, "vkDestroyCuFunctionNVX" ) );
+      vkCmdCuLaunchKernelNVX = PFN_vkCmdCuLaunchKernelNVX( vkGetDeviceProcAddr( device, "vkCmdCuLaunchKernelNVX" ) );
+
+  //=== VK_NVX_image_view_handle ===
+      vkGetImageViewHandleNVX = PFN_vkGetImageViewHandleNVX( vkGetDeviceProcAddr( device, "vkGetImageViewHandleNVX" ) );
+      vkGetImageViewAddressNVX = PFN_vkGetImageViewAddressNVX( vkGetDeviceProcAddr( device, "vkGetImageViewAddressNVX" ) );
+
+  //=== VK_AMD_draw_indirect_count ===
+      vkCmdDrawIndirectCountAMD = PFN_vkCmdDrawIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountAMD" ) );
+      if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountAMD;
+      vkCmdDrawIndexedIndirectCountAMD = PFN_vkCmdDrawIndexedIndirectCountAMD( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountAMD" ) );
+      if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountAMD;
+
+  //=== VK_AMD_shader_info ===
+      vkGetShaderInfoAMD = PFN_vkGetShaderInfoAMD( vkGetDeviceProcAddr( device, "vkGetShaderInfoAMD" ) );
+
+  //=== VK_KHR_dynamic_rendering ===
+      vkCmdBeginRenderingKHR = PFN_vkCmdBeginRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderingKHR" ) );
+      if ( !vkCmdBeginRendering ) vkCmdBeginRendering = vkCmdBeginRenderingKHR;
+      vkCmdEndRenderingKHR = PFN_vkCmdEndRenderingKHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderingKHR" ) );
+      if ( !vkCmdEndRendering ) vkCmdEndRendering = vkCmdEndRenderingKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+      vkGetMemoryWin32HandleNV = PFN_vkGetMemoryWin32HandleNV( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleNV" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_device_group ===
+      vkGetDeviceGroupPeerMemoryFeaturesKHR = PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR( vkGetDeviceProcAddr( device, "vkGetDeviceGroupPeerMemoryFeaturesKHR" ) );
+      if ( !vkGetDeviceGroupPeerMemoryFeatures ) vkGetDeviceGroupPeerMemoryFeatures = vkGetDeviceGroupPeerMemoryFeaturesKHR;
+      vkCmdSetDeviceMaskKHR = PFN_vkCmdSetDeviceMaskKHR( vkGetDeviceProcAddr( device, "vkCmdSetDeviceMaskKHR" ) );
+      if ( !vkCmdSetDeviceMask ) vkCmdSetDeviceMask = vkCmdSetDeviceMaskKHR;
+      vkCmdDispatchBaseKHR = PFN_vkCmdDispatchBaseKHR( vkGetDeviceProcAddr( device, "vkCmdDispatchBaseKHR" ) );
+      if ( !vkCmdDispatchBase ) vkCmdDispatchBase = vkCmdDispatchBaseKHR;
+
+  //=== VK_KHR_maintenance1 ===
+      vkTrimCommandPoolKHR = PFN_vkTrimCommandPoolKHR( vkGetDeviceProcAddr( device, "vkTrimCommandPoolKHR" ) );
+      if ( !vkTrimCommandPool ) vkTrimCommandPool = vkTrimCommandPoolKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+      vkGetMemoryWin32HandleKHR = PFN_vkGetMemoryWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandleKHR" ) );
+      vkGetMemoryWin32HandlePropertiesKHR = PFN_vkGetMemoryWin32HandlePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryWin32HandlePropertiesKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+      vkGetMemoryFdKHR = PFN_vkGetMemoryFdKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdKHR" ) );
+      vkGetMemoryFdPropertiesKHR = PFN_vkGetMemoryFdPropertiesKHR( vkGetDeviceProcAddr( device, "vkGetMemoryFdPropertiesKHR" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+      vkImportSemaphoreWin32HandleKHR = PFN_vkImportSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreWin32HandleKHR" ) );
+      vkGetSemaphoreWin32HandleKHR = PFN_vkGetSemaphoreWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+      vkImportSemaphoreFdKHR = PFN_vkImportSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkImportSemaphoreFdKHR" ) );
+      vkGetSemaphoreFdKHR = PFN_vkGetSemaphoreFdKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreFdKHR" ) );
+
+  //=== VK_KHR_push_descriptor ===
+      vkCmdPushDescriptorSetKHR = PFN_vkCmdPushDescriptorSetKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetKHR" ) );
+      vkCmdPushDescriptorSetWithTemplateKHR = PFN_vkCmdPushDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkCmdPushDescriptorSetWithTemplateKHR" ) );
+
+  //=== VK_EXT_conditional_rendering ===
+      vkCmdBeginConditionalRenderingEXT = PFN_vkCmdBeginConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdBeginConditionalRenderingEXT" ) );
+      vkCmdEndConditionalRenderingEXT = PFN_vkCmdEndConditionalRenderingEXT( vkGetDeviceProcAddr( device, "vkCmdEndConditionalRenderingEXT" ) );
+
+  //=== VK_KHR_descriptor_update_template ===
+      vkCreateDescriptorUpdateTemplateKHR = PFN_vkCreateDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkCreateDescriptorUpdateTemplateKHR" ) );
+      if ( !vkCreateDescriptorUpdateTemplate ) vkCreateDescriptorUpdateTemplate = vkCreateDescriptorUpdateTemplateKHR;
+      vkDestroyDescriptorUpdateTemplateKHR = PFN_vkDestroyDescriptorUpdateTemplateKHR( vkGetDeviceProcAddr( device, "vkDestroyDescriptorUpdateTemplateKHR" ) );
+      if ( !vkDestroyDescriptorUpdateTemplate ) vkDestroyDescriptorUpdateTemplate = vkDestroyDescriptorUpdateTemplateKHR;
+      vkUpdateDescriptorSetWithTemplateKHR = PFN_vkUpdateDescriptorSetWithTemplateKHR( vkGetDeviceProcAddr( device, "vkUpdateDescriptorSetWithTemplateKHR" ) );
+      if ( !vkUpdateDescriptorSetWithTemplate ) vkUpdateDescriptorSetWithTemplate = vkUpdateDescriptorSetWithTemplateKHR;
+
+  //=== VK_NV_clip_space_w_scaling ===
+      vkCmdSetViewportWScalingNV = PFN_vkCmdSetViewportWScalingNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingNV" ) );
+
+  //=== VK_EXT_display_control ===
+      vkDisplayPowerControlEXT = PFN_vkDisplayPowerControlEXT( vkGetDeviceProcAddr( device, "vkDisplayPowerControlEXT" ) );
+      vkRegisterDeviceEventEXT = PFN_vkRegisterDeviceEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDeviceEventEXT" ) );
+      vkRegisterDisplayEventEXT = PFN_vkRegisterDisplayEventEXT( vkGetDeviceProcAddr( device, "vkRegisterDisplayEventEXT" ) );
+      vkGetSwapchainCounterEXT = PFN_vkGetSwapchainCounterEXT( vkGetDeviceProcAddr( device, "vkGetSwapchainCounterEXT" ) );
+
+  //=== VK_GOOGLE_display_timing ===
+      vkGetRefreshCycleDurationGOOGLE = PFN_vkGetRefreshCycleDurationGOOGLE( vkGetDeviceProcAddr( device, "vkGetRefreshCycleDurationGOOGLE" ) );
+      vkGetPastPresentationTimingGOOGLE = PFN_vkGetPastPresentationTimingGOOGLE( vkGetDeviceProcAddr( device, "vkGetPastPresentationTimingGOOGLE" ) );
+
+  //=== VK_EXT_discard_rectangles ===
+      vkCmdSetDiscardRectangleEXT = PFN_vkCmdSetDiscardRectangleEXT( vkGetDeviceProcAddr( device, "vkCmdSetDiscardRectangleEXT" ) );
+
+  //=== VK_EXT_hdr_metadata ===
+      vkSetHdrMetadataEXT = PFN_vkSetHdrMetadataEXT( vkGetDeviceProcAddr( device, "vkSetHdrMetadataEXT" ) );
+
+  //=== VK_KHR_create_renderpass2 ===
+      vkCreateRenderPass2KHR = PFN_vkCreateRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCreateRenderPass2KHR" ) );
+      if ( !vkCreateRenderPass2 ) vkCreateRenderPass2 = vkCreateRenderPass2KHR;
+      vkCmdBeginRenderPass2KHR = PFN_vkCmdBeginRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdBeginRenderPass2KHR" ) );
+      if ( !vkCmdBeginRenderPass2 ) vkCmdBeginRenderPass2 = vkCmdBeginRenderPass2KHR;
+      vkCmdNextSubpass2KHR = PFN_vkCmdNextSubpass2KHR( vkGetDeviceProcAddr( device, "vkCmdNextSubpass2KHR" ) );
+      if ( !vkCmdNextSubpass2 ) vkCmdNextSubpass2 = vkCmdNextSubpass2KHR;
+      vkCmdEndRenderPass2KHR = PFN_vkCmdEndRenderPass2KHR( vkGetDeviceProcAddr( device, "vkCmdEndRenderPass2KHR" ) );
+      if ( !vkCmdEndRenderPass2 ) vkCmdEndRenderPass2 = vkCmdEndRenderPass2KHR;
+
+  //=== VK_KHR_shared_presentable_image ===
+      vkGetSwapchainStatusKHR = PFN_vkGetSwapchainStatusKHR( vkGetDeviceProcAddr( device, "vkGetSwapchainStatusKHR" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+      vkImportFenceWin32HandleKHR = PFN_vkImportFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkImportFenceWin32HandleKHR" ) );
+      vkGetFenceWin32HandleKHR = PFN_vkGetFenceWin32HandleKHR( vkGetDeviceProcAddr( device, "vkGetFenceWin32HandleKHR" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+      vkImportFenceFdKHR = PFN_vkImportFenceFdKHR( vkGetDeviceProcAddr( device, "vkImportFenceFdKHR" ) );
+      vkGetFenceFdKHR = PFN_vkGetFenceFdKHR( vkGetDeviceProcAddr( device, "vkGetFenceFdKHR" ) );
+
+  //=== VK_KHR_performance_query ===
+      vkAcquireProfilingLockKHR = PFN_vkAcquireProfilingLockKHR( vkGetDeviceProcAddr( device, "vkAcquireProfilingLockKHR" ) );
+      vkReleaseProfilingLockKHR = PFN_vkReleaseProfilingLockKHR( vkGetDeviceProcAddr( device, "vkReleaseProfilingLockKHR" ) );
+
+  //=== VK_EXT_debug_utils ===
+      vkSetDebugUtilsObjectNameEXT = PFN_vkSetDebugUtilsObjectNameEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectNameEXT" ) );
+      vkSetDebugUtilsObjectTagEXT = PFN_vkSetDebugUtilsObjectTagEXT( vkGetDeviceProcAddr( device, "vkSetDebugUtilsObjectTagEXT" ) );
+      vkQueueBeginDebugUtilsLabelEXT = PFN_vkQueueBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueBeginDebugUtilsLabelEXT" ) );
+      vkQueueEndDebugUtilsLabelEXT = PFN_vkQueueEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueEndDebugUtilsLabelEXT" ) );
+      vkQueueInsertDebugUtilsLabelEXT = PFN_vkQueueInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkQueueInsertDebugUtilsLabelEXT" ) );
+      vkCmdBeginDebugUtilsLabelEXT = PFN_vkCmdBeginDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdBeginDebugUtilsLabelEXT" ) );
+      vkCmdEndDebugUtilsLabelEXT = PFN_vkCmdEndDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdEndDebugUtilsLabelEXT" ) );
+      vkCmdInsertDebugUtilsLabelEXT = PFN_vkCmdInsertDebugUtilsLabelEXT( vkGetDeviceProcAddr( device, "vkCmdInsertDebugUtilsLabelEXT" ) );
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+      vkGetAndroidHardwareBufferPropertiesANDROID = PFN_vkGetAndroidHardwareBufferPropertiesANDROID( vkGetDeviceProcAddr( device, "vkGetAndroidHardwareBufferPropertiesANDROID" ) );
+      vkGetMemoryAndroidHardwareBufferANDROID = PFN_vkGetMemoryAndroidHardwareBufferANDROID( vkGetDeviceProcAddr( device, "vkGetMemoryAndroidHardwareBufferANDROID" ) );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_EXT_sample_locations ===
+      vkCmdSetSampleLocationsEXT = PFN_vkCmdSetSampleLocationsEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEXT" ) );
+
+  //=== VK_KHR_get_memory_requirements2 ===
+      vkGetImageMemoryRequirements2KHR = PFN_vkGetImageMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageMemoryRequirements2KHR" ) );
+      if ( !vkGetImageMemoryRequirements2 ) vkGetImageMemoryRequirements2 = vkGetImageMemoryRequirements2KHR;
+      vkGetBufferMemoryRequirements2KHR = PFN_vkGetBufferMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetBufferMemoryRequirements2KHR" ) );
+      if ( !vkGetBufferMemoryRequirements2 ) vkGetBufferMemoryRequirements2 = vkGetBufferMemoryRequirements2KHR;
+      vkGetImageSparseMemoryRequirements2KHR = PFN_vkGetImageSparseMemoryRequirements2KHR( vkGetDeviceProcAddr( device, "vkGetImageSparseMemoryRequirements2KHR" ) );
+      if ( !vkGetImageSparseMemoryRequirements2 ) vkGetImageSparseMemoryRequirements2 = vkGetImageSparseMemoryRequirements2KHR;
+
+  //=== VK_KHR_acceleration_structure ===
+      vkCreateAccelerationStructureKHR = PFN_vkCreateAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureKHR" ) );
+      vkDestroyAccelerationStructureKHR = PFN_vkDestroyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureKHR" ) );
+      vkCmdBuildAccelerationStructuresKHR = PFN_vkCmdBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresKHR" ) );
+      vkCmdBuildAccelerationStructuresIndirectKHR = PFN_vkCmdBuildAccelerationStructuresIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructuresIndirectKHR" ) );
+      vkBuildAccelerationStructuresKHR = PFN_vkBuildAccelerationStructuresKHR( vkGetDeviceProcAddr( device, "vkBuildAccelerationStructuresKHR" ) );
+      vkCopyAccelerationStructureKHR = PFN_vkCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureKHR" ) );
+      vkCopyAccelerationStructureToMemoryKHR = PFN_vkCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCopyAccelerationStructureToMemoryKHR" ) );
+      vkCopyMemoryToAccelerationStructureKHR = PFN_vkCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCopyMemoryToAccelerationStructureKHR" ) );
+      vkWriteAccelerationStructuresPropertiesKHR = PFN_vkWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkWriteAccelerationStructuresPropertiesKHR" ) );
+      vkCmdCopyAccelerationStructureKHR = PFN_vkCmdCopyAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureKHR" ) );
+      vkCmdCopyAccelerationStructureToMemoryKHR = PFN_vkCmdCopyAccelerationStructureToMemoryKHR( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureToMemoryKHR" ) );
+      vkCmdCopyMemoryToAccelerationStructureKHR = PFN_vkCmdCopyMemoryToAccelerationStructureKHR( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToAccelerationStructureKHR" ) );
+      vkGetAccelerationStructureDeviceAddressKHR = PFN_vkGetAccelerationStructureDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureDeviceAddressKHR" ) );
+      vkCmdWriteAccelerationStructuresPropertiesKHR = PFN_vkCmdWriteAccelerationStructuresPropertiesKHR( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesKHR" ) );
+      vkGetDeviceAccelerationStructureCompatibilityKHR = PFN_vkGetDeviceAccelerationStructureCompatibilityKHR( vkGetDeviceProcAddr( device, "vkGetDeviceAccelerationStructureCompatibilityKHR" ) );
+      vkGetAccelerationStructureBuildSizesKHR = PFN_vkGetAccelerationStructureBuildSizesKHR( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureBuildSizesKHR" ) );
+
+  //=== VK_KHR_sampler_ycbcr_conversion ===
+      vkCreateSamplerYcbcrConversionKHR = PFN_vkCreateSamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkCreateSamplerYcbcrConversionKHR" ) );
+      if ( !vkCreateSamplerYcbcrConversion ) vkCreateSamplerYcbcrConversion = vkCreateSamplerYcbcrConversionKHR;
+      vkDestroySamplerYcbcrConversionKHR = PFN_vkDestroySamplerYcbcrConversionKHR( vkGetDeviceProcAddr( device, "vkDestroySamplerYcbcrConversionKHR" ) );
+      if ( !vkDestroySamplerYcbcrConversion ) vkDestroySamplerYcbcrConversion = vkDestroySamplerYcbcrConversionKHR;
+
+  //=== VK_KHR_bind_memory2 ===
+      vkBindBufferMemory2KHR = PFN_vkBindBufferMemory2KHR( vkGetDeviceProcAddr( device, "vkBindBufferMemory2KHR" ) );
+      if ( !vkBindBufferMemory2 ) vkBindBufferMemory2 = vkBindBufferMemory2KHR;
+      vkBindImageMemory2KHR = PFN_vkBindImageMemory2KHR( vkGetDeviceProcAddr( device, "vkBindImageMemory2KHR" ) );
+      if ( !vkBindImageMemory2 ) vkBindImageMemory2 = vkBindImageMemory2KHR;
+
+  //=== VK_EXT_image_drm_format_modifier ===
+      vkGetImageDrmFormatModifierPropertiesEXT = PFN_vkGetImageDrmFormatModifierPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetImageDrmFormatModifierPropertiesEXT" ) );
+
+  //=== VK_EXT_validation_cache ===
+      vkCreateValidationCacheEXT = PFN_vkCreateValidationCacheEXT( vkGetDeviceProcAddr( device, "vkCreateValidationCacheEXT" ) );
+      vkDestroyValidationCacheEXT = PFN_vkDestroyValidationCacheEXT( vkGetDeviceProcAddr( device, "vkDestroyValidationCacheEXT" ) );
+      vkMergeValidationCachesEXT = PFN_vkMergeValidationCachesEXT( vkGetDeviceProcAddr( device, "vkMergeValidationCachesEXT" ) );
+      vkGetValidationCacheDataEXT = PFN_vkGetValidationCacheDataEXT( vkGetDeviceProcAddr( device, "vkGetValidationCacheDataEXT" ) );
+
+  //=== VK_NV_shading_rate_image ===
+      vkCmdBindShadingRateImageNV = PFN_vkCmdBindShadingRateImageNV( vkGetDeviceProcAddr( device, "vkCmdBindShadingRateImageNV" ) );
+      vkCmdSetViewportShadingRatePaletteNV = PFN_vkCmdSetViewportShadingRatePaletteNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportShadingRatePaletteNV" ) );
+      vkCmdSetCoarseSampleOrderNV = PFN_vkCmdSetCoarseSampleOrderNV( vkGetDeviceProcAddr( device, "vkCmdSetCoarseSampleOrderNV" ) );
+
+  //=== VK_NV_ray_tracing ===
+      vkCreateAccelerationStructureNV = PFN_vkCreateAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCreateAccelerationStructureNV" ) );
+      vkDestroyAccelerationStructureNV = PFN_vkDestroyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkDestroyAccelerationStructureNV" ) );
+      vkGetAccelerationStructureMemoryRequirementsNV = PFN_vkGetAccelerationStructureMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureMemoryRequirementsNV" ) );
+      vkBindAccelerationStructureMemoryNV = PFN_vkBindAccelerationStructureMemoryNV( vkGetDeviceProcAddr( device, "vkBindAccelerationStructureMemoryNV" ) );
+      vkCmdBuildAccelerationStructureNV = PFN_vkCmdBuildAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdBuildAccelerationStructureNV" ) );
+      vkCmdCopyAccelerationStructureNV = PFN_vkCmdCopyAccelerationStructureNV( vkGetDeviceProcAddr( device, "vkCmdCopyAccelerationStructureNV" ) );
+      vkCmdTraceRaysNV = PFN_vkCmdTraceRaysNV( vkGetDeviceProcAddr( device, "vkCmdTraceRaysNV" ) );
+      vkCreateRayTracingPipelinesNV = PFN_vkCreateRayTracingPipelinesNV( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesNV" ) );
+      vkGetRayTracingShaderGroupHandlesNV = PFN_vkGetRayTracingShaderGroupHandlesNV( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesNV" ) );
+      if ( !vkGetRayTracingShaderGroupHandlesKHR ) vkGetRayTracingShaderGroupHandlesKHR = vkGetRayTracingShaderGroupHandlesNV;
+      vkGetAccelerationStructureHandleNV = PFN_vkGetAccelerationStructureHandleNV( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureHandleNV" ) );
+      vkCmdWriteAccelerationStructuresPropertiesNV = PFN_vkCmdWriteAccelerationStructuresPropertiesNV( vkGetDeviceProcAddr( device, "vkCmdWriteAccelerationStructuresPropertiesNV" ) );
+      vkCompileDeferredNV = PFN_vkCompileDeferredNV( vkGetDeviceProcAddr( device, "vkCompileDeferredNV" ) );
+
+  //=== VK_KHR_maintenance3 ===
+      vkGetDescriptorSetLayoutSupportKHR = PFN_vkGetDescriptorSetLayoutSupportKHR( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSupportKHR" ) );
+      if ( !vkGetDescriptorSetLayoutSupport ) vkGetDescriptorSetLayoutSupport = vkGetDescriptorSetLayoutSupportKHR;
+
+  //=== VK_KHR_draw_indirect_count ===
+      vkCmdDrawIndirectCountKHR = PFN_vkCmdDrawIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndirectCountKHR" ) );
+      if ( !vkCmdDrawIndirectCount ) vkCmdDrawIndirectCount = vkCmdDrawIndirectCountKHR;
+      vkCmdDrawIndexedIndirectCountKHR = PFN_vkCmdDrawIndexedIndirectCountKHR( vkGetDeviceProcAddr( device, "vkCmdDrawIndexedIndirectCountKHR" ) );
+      if ( !vkCmdDrawIndexedIndirectCount ) vkCmdDrawIndexedIndirectCount = vkCmdDrawIndexedIndirectCountKHR;
+
+  //=== VK_EXT_external_memory_host ===
+      vkGetMemoryHostPointerPropertiesEXT = PFN_vkGetMemoryHostPointerPropertiesEXT( vkGetDeviceProcAddr( device, "vkGetMemoryHostPointerPropertiesEXT" ) );
+
+  //=== VK_AMD_buffer_marker ===
+      vkCmdWriteBufferMarkerAMD = PFN_vkCmdWriteBufferMarkerAMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarkerAMD" ) );
+
+  //=== VK_EXT_calibrated_timestamps ===
+      vkGetCalibratedTimestampsEXT = PFN_vkGetCalibratedTimestampsEXT( vkGetDeviceProcAddr( device, "vkGetCalibratedTimestampsEXT" ) );
+
+  //=== VK_NV_mesh_shader ===
+      vkCmdDrawMeshTasksNV = PFN_vkCmdDrawMeshTasksNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksNV" ) );
+      vkCmdDrawMeshTasksIndirectNV = PFN_vkCmdDrawMeshTasksIndirectNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectNV" ) );
+      vkCmdDrawMeshTasksIndirectCountNV = PFN_vkCmdDrawMeshTasksIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountNV" ) );
+
+  //=== VK_NV_scissor_exclusive ===
+      vkCmdSetExclusiveScissorNV = PFN_vkCmdSetExclusiveScissorNV( vkGetDeviceProcAddr( device, "vkCmdSetExclusiveScissorNV" ) );
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+      vkCmdSetCheckpointNV = PFN_vkCmdSetCheckpointNV( vkGetDeviceProcAddr( device, "vkCmdSetCheckpointNV" ) );
+      vkGetQueueCheckpointDataNV = PFN_vkGetQueueCheckpointDataNV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointDataNV" ) );
+
+  //=== VK_KHR_timeline_semaphore ===
+      vkGetSemaphoreCounterValueKHR = PFN_vkGetSemaphoreCounterValueKHR( vkGetDeviceProcAddr( device, "vkGetSemaphoreCounterValueKHR" ) );
+      if ( !vkGetSemaphoreCounterValue ) vkGetSemaphoreCounterValue = vkGetSemaphoreCounterValueKHR;
+      vkWaitSemaphoresKHR = PFN_vkWaitSemaphoresKHR( vkGetDeviceProcAddr( device, "vkWaitSemaphoresKHR" ) );
+      if ( !vkWaitSemaphores ) vkWaitSemaphores = vkWaitSemaphoresKHR;
+      vkSignalSemaphoreKHR = PFN_vkSignalSemaphoreKHR( vkGetDeviceProcAddr( device, "vkSignalSemaphoreKHR" ) );
+      if ( !vkSignalSemaphore ) vkSignalSemaphore = vkSignalSemaphoreKHR;
+
+  //=== VK_INTEL_performance_query ===
+      vkInitializePerformanceApiINTEL = PFN_vkInitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkInitializePerformanceApiINTEL" ) );
+      vkUninitializePerformanceApiINTEL = PFN_vkUninitializePerformanceApiINTEL( vkGetDeviceProcAddr( device, "vkUninitializePerformanceApiINTEL" ) );
+      vkCmdSetPerformanceMarkerINTEL = PFN_vkCmdSetPerformanceMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceMarkerINTEL" ) );
+      vkCmdSetPerformanceStreamMarkerINTEL = PFN_vkCmdSetPerformanceStreamMarkerINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceStreamMarkerINTEL" ) );
+      vkCmdSetPerformanceOverrideINTEL = PFN_vkCmdSetPerformanceOverrideINTEL( vkGetDeviceProcAddr( device, "vkCmdSetPerformanceOverrideINTEL" ) );
+      vkAcquirePerformanceConfigurationINTEL = PFN_vkAcquirePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkAcquirePerformanceConfigurationINTEL" ) );
+      vkReleasePerformanceConfigurationINTEL = PFN_vkReleasePerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkReleasePerformanceConfigurationINTEL" ) );
+      vkQueueSetPerformanceConfigurationINTEL = PFN_vkQueueSetPerformanceConfigurationINTEL( vkGetDeviceProcAddr( device, "vkQueueSetPerformanceConfigurationINTEL" ) );
+      vkGetPerformanceParameterINTEL = PFN_vkGetPerformanceParameterINTEL( vkGetDeviceProcAddr( device, "vkGetPerformanceParameterINTEL" ) );
+
+  //=== VK_AMD_display_native_hdr ===
+      vkSetLocalDimmingAMD = PFN_vkSetLocalDimmingAMD( vkGetDeviceProcAddr( device, "vkSetLocalDimmingAMD" ) );
+
+  //=== VK_KHR_fragment_shading_rate ===
+      vkCmdSetFragmentShadingRateKHR = PFN_vkCmdSetFragmentShadingRateKHR( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateKHR" ) );
+
+  //=== VK_EXT_buffer_device_address ===
+      vkGetBufferDeviceAddressEXT = PFN_vkGetBufferDeviceAddressEXT( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressEXT" ) );
+      if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressEXT;
+
+  //=== VK_KHR_present_wait ===
+      vkWaitForPresentKHR = PFN_vkWaitForPresentKHR( vkGetDeviceProcAddr( device, "vkWaitForPresentKHR" ) );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+      vkAcquireFullScreenExclusiveModeEXT = PFN_vkAcquireFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkAcquireFullScreenExclusiveModeEXT" ) );
+      vkReleaseFullScreenExclusiveModeEXT = PFN_vkReleaseFullScreenExclusiveModeEXT( vkGetDeviceProcAddr( device, "vkReleaseFullScreenExclusiveModeEXT" ) );
+      vkGetDeviceGroupSurfacePresentModes2EXT = PFN_vkGetDeviceGroupSurfacePresentModes2EXT( vkGetDeviceProcAddr( device, "vkGetDeviceGroupSurfacePresentModes2EXT" ) );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_buffer_device_address ===
+      vkGetBufferDeviceAddressKHR = PFN_vkGetBufferDeviceAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferDeviceAddressKHR" ) );
+      if ( !vkGetBufferDeviceAddress ) vkGetBufferDeviceAddress = vkGetBufferDeviceAddressKHR;
+      vkGetBufferOpaqueCaptureAddressKHR = PFN_vkGetBufferOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureAddressKHR" ) );
+      if ( !vkGetBufferOpaqueCaptureAddress ) vkGetBufferOpaqueCaptureAddress = vkGetBufferOpaqueCaptureAddressKHR;
+      vkGetDeviceMemoryOpaqueCaptureAddressKHR = PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR( vkGetDeviceProcAddr( device, "vkGetDeviceMemoryOpaqueCaptureAddressKHR" ) );
+      if ( !vkGetDeviceMemoryOpaqueCaptureAddress ) vkGetDeviceMemoryOpaqueCaptureAddress = vkGetDeviceMemoryOpaqueCaptureAddressKHR;
+
+  //=== VK_EXT_line_rasterization ===
+      vkCmdSetLineStippleEXT = PFN_vkCmdSetLineStippleEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEXT" ) );
+
+  //=== VK_EXT_host_query_reset ===
+      vkResetQueryPoolEXT = PFN_vkResetQueryPoolEXT( vkGetDeviceProcAddr( device, "vkResetQueryPoolEXT" ) );
+      if ( !vkResetQueryPool ) vkResetQueryPool = vkResetQueryPoolEXT;
+
+  //=== VK_EXT_extended_dynamic_state ===
+      vkCmdSetCullModeEXT = PFN_vkCmdSetCullModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetCullModeEXT" ) );
+      if ( !vkCmdSetCullMode ) vkCmdSetCullMode = vkCmdSetCullModeEXT;
+      vkCmdSetFrontFaceEXT = PFN_vkCmdSetFrontFaceEXT( vkGetDeviceProcAddr( device, "vkCmdSetFrontFaceEXT" ) );
+      if ( !vkCmdSetFrontFace ) vkCmdSetFrontFace = vkCmdSetFrontFaceEXT;
+      vkCmdSetPrimitiveTopologyEXT = PFN_vkCmdSetPrimitiveTopologyEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveTopologyEXT" ) );
+      if ( !vkCmdSetPrimitiveTopology ) vkCmdSetPrimitiveTopology = vkCmdSetPrimitiveTopologyEXT;
+      vkCmdSetViewportWithCountEXT = PFN_vkCmdSetViewportWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetViewportWithCountEXT" ) );
+      if ( !vkCmdSetViewportWithCount ) vkCmdSetViewportWithCount = vkCmdSetViewportWithCountEXT;
+      vkCmdSetScissorWithCountEXT = PFN_vkCmdSetScissorWithCountEXT( vkGetDeviceProcAddr( device, "vkCmdSetScissorWithCountEXT" ) );
+      if ( !vkCmdSetScissorWithCount ) vkCmdSetScissorWithCount = vkCmdSetScissorWithCountEXT;
+      vkCmdBindVertexBuffers2EXT = PFN_vkCmdBindVertexBuffers2EXT( vkGetDeviceProcAddr( device, "vkCmdBindVertexBuffers2EXT" ) );
+      if ( !vkCmdBindVertexBuffers2 ) vkCmdBindVertexBuffers2 = vkCmdBindVertexBuffers2EXT;
+      vkCmdSetDepthTestEnableEXT = PFN_vkCmdSetDepthTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthTestEnableEXT" ) );
+      if ( !vkCmdSetDepthTestEnable ) vkCmdSetDepthTestEnable = vkCmdSetDepthTestEnableEXT;
+      vkCmdSetDepthWriteEnableEXT = PFN_vkCmdSetDepthWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthWriteEnableEXT" ) );
+      if ( !vkCmdSetDepthWriteEnable ) vkCmdSetDepthWriteEnable = vkCmdSetDepthWriteEnableEXT;
+      vkCmdSetDepthCompareOpEXT = PFN_vkCmdSetDepthCompareOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthCompareOpEXT" ) );
+      if ( !vkCmdSetDepthCompareOp ) vkCmdSetDepthCompareOp = vkCmdSetDepthCompareOpEXT;
+      vkCmdSetDepthBoundsTestEnableEXT = PFN_vkCmdSetDepthBoundsTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBoundsTestEnableEXT" ) );
+      if ( !vkCmdSetDepthBoundsTestEnable ) vkCmdSetDepthBoundsTestEnable = vkCmdSetDepthBoundsTestEnableEXT;
+      vkCmdSetStencilTestEnableEXT = PFN_vkCmdSetStencilTestEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilTestEnableEXT" ) );
+      if ( !vkCmdSetStencilTestEnable ) vkCmdSetStencilTestEnable = vkCmdSetStencilTestEnableEXT;
+      vkCmdSetStencilOpEXT = PFN_vkCmdSetStencilOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetStencilOpEXT" ) );
+      if ( !vkCmdSetStencilOp ) vkCmdSetStencilOp = vkCmdSetStencilOpEXT;
+
+  //=== VK_KHR_deferred_host_operations ===
+      vkCreateDeferredOperationKHR = PFN_vkCreateDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkCreateDeferredOperationKHR" ) );
+      vkDestroyDeferredOperationKHR = PFN_vkDestroyDeferredOperationKHR( vkGetDeviceProcAddr( device, "vkDestroyDeferredOperationKHR" ) );
+      vkGetDeferredOperationMaxConcurrencyKHR = PFN_vkGetDeferredOperationMaxConcurrencyKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationMaxConcurrencyKHR" ) );
+      vkGetDeferredOperationResultKHR = PFN_vkGetDeferredOperationResultKHR( vkGetDeviceProcAddr( device, "vkGetDeferredOperationResultKHR" ) );
+      vkDeferredOperationJoinKHR = PFN_vkDeferredOperationJoinKHR( vkGetDeviceProcAddr( device, "vkDeferredOperationJoinKHR" ) );
+
+  //=== VK_KHR_pipeline_executable_properties ===
+      vkGetPipelineExecutablePropertiesKHR = PFN_vkGetPipelineExecutablePropertiesKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutablePropertiesKHR" ) );
+      vkGetPipelineExecutableStatisticsKHR = PFN_vkGetPipelineExecutableStatisticsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableStatisticsKHR" ) );
+      vkGetPipelineExecutableInternalRepresentationsKHR = PFN_vkGetPipelineExecutableInternalRepresentationsKHR( vkGetDeviceProcAddr( device, "vkGetPipelineExecutableInternalRepresentationsKHR" ) );
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+      vkReleaseSwapchainImagesEXT = PFN_vkReleaseSwapchainImagesEXT( vkGetDeviceProcAddr( device, "vkReleaseSwapchainImagesEXT" ) );
+
+  //=== VK_NV_device_generated_commands ===
+      vkGetGeneratedCommandsMemoryRequirementsNV = PFN_vkGetGeneratedCommandsMemoryRequirementsNV( vkGetDeviceProcAddr( device, "vkGetGeneratedCommandsMemoryRequirementsNV" ) );
+      vkCmdPreprocessGeneratedCommandsNV = PFN_vkCmdPreprocessGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdPreprocessGeneratedCommandsNV" ) );
+      vkCmdExecuteGeneratedCommandsNV = PFN_vkCmdExecuteGeneratedCommandsNV( vkGetDeviceProcAddr( device, "vkCmdExecuteGeneratedCommandsNV" ) );
+      vkCmdBindPipelineShaderGroupNV = PFN_vkCmdBindPipelineShaderGroupNV( vkGetDeviceProcAddr( device, "vkCmdBindPipelineShaderGroupNV" ) );
+      vkCreateIndirectCommandsLayoutNV = PFN_vkCreateIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkCreateIndirectCommandsLayoutNV" ) );
+      vkDestroyIndirectCommandsLayoutNV = PFN_vkDestroyIndirectCommandsLayoutNV( vkGetDeviceProcAddr( device, "vkDestroyIndirectCommandsLayoutNV" ) );
+
+  //=== VK_EXT_private_data ===
+      vkCreatePrivateDataSlotEXT = PFN_vkCreatePrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkCreatePrivateDataSlotEXT" ) );
+      if ( !vkCreatePrivateDataSlot ) vkCreatePrivateDataSlot = vkCreatePrivateDataSlotEXT;
+      vkDestroyPrivateDataSlotEXT = PFN_vkDestroyPrivateDataSlotEXT( vkGetDeviceProcAddr( device, "vkDestroyPrivateDataSlotEXT" ) );
+      if ( !vkDestroyPrivateDataSlot ) vkDestroyPrivateDataSlot = vkDestroyPrivateDataSlotEXT;
+      vkSetPrivateDataEXT = PFN_vkSetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkSetPrivateDataEXT" ) );
+      if ( !vkSetPrivateData ) vkSetPrivateData = vkSetPrivateDataEXT;
+      vkGetPrivateDataEXT = PFN_vkGetPrivateDataEXT( vkGetDeviceProcAddr( device, "vkGetPrivateDataEXT" ) );
+      if ( !vkGetPrivateData ) vkGetPrivateData = vkGetPrivateDataEXT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+      vkCmdEncodeVideoKHR = PFN_vkCmdEncodeVideoKHR( vkGetDeviceProcAddr( device, "vkCmdEncodeVideoKHR" ) );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+      vkExportMetalObjectsEXT = PFN_vkExportMetalObjectsEXT( vkGetDeviceProcAddr( device, "vkExportMetalObjectsEXT" ) );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+      vkCmdSetEvent2KHR = PFN_vkCmdSetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdSetEvent2KHR" ) );
+      if ( !vkCmdSetEvent2 ) vkCmdSetEvent2 = vkCmdSetEvent2KHR;
+      vkCmdResetEvent2KHR = PFN_vkCmdResetEvent2KHR( vkGetDeviceProcAddr( device, "vkCmdResetEvent2KHR" ) );
+      if ( !vkCmdResetEvent2 ) vkCmdResetEvent2 = vkCmdResetEvent2KHR;
+      vkCmdWaitEvents2KHR = PFN_vkCmdWaitEvents2KHR( vkGetDeviceProcAddr( device, "vkCmdWaitEvents2KHR" ) );
+      if ( !vkCmdWaitEvents2 ) vkCmdWaitEvents2 = vkCmdWaitEvents2KHR;
+      vkCmdPipelineBarrier2KHR = PFN_vkCmdPipelineBarrier2KHR( vkGetDeviceProcAddr( device, "vkCmdPipelineBarrier2KHR" ) );
+      if ( !vkCmdPipelineBarrier2 ) vkCmdPipelineBarrier2 = vkCmdPipelineBarrier2KHR;
+      vkCmdWriteTimestamp2KHR = PFN_vkCmdWriteTimestamp2KHR( vkGetDeviceProcAddr( device, "vkCmdWriteTimestamp2KHR" ) );
+      if ( !vkCmdWriteTimestamp2 ) vkCmdWriteTimestamp2 = vkCmdWriteTimestamp2KHR;
+      vkQueueSubmit2KHR = PFN_vkQueueSubmit2KHR( vkGetDeviceProcAddr( device, "vkQueueSubmit2KHR" ) );
+      if ( !vkQueueSubmit2 ) vkQueueSubmit2 = vkQueueSubmit2KHR;
+      vkCmdWriteBufferMarker2AMD = PFN_vkCmdWriteBufferMarker2AMD( vkGetDeviceProcAddr( device, "vkCmdWriteBufferMarker2AMD" ) );
+      vkGetQueueCheckpointData2NV = PFN_vkGetQueueCheckpointData2NV( vkGetDeviceProcAddr( device, "vkGetQueueCheckpointData2NV" ) );
+
+  //=== VK_EXT_descriptor_buffer ===
+      vkGetDescriptorSetLayoutSizeEXT = PFN_vkGetDescriptorSetLayoutSizeEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutSizeEXT" ) );
+      vkGetDescriptorSetLayoutBindingOffsetEXT = PFN_vkGetDescriptorSetLayoutBindingOffsetEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutBindingOffsetEXT" ) );
+      vkGetDescriptorEXT = PFN_vkGetDescriptorEXT( vkGetDeviceProcAddr( device, "vkGetDescriptorEXT" ) );
+      vkCmdBindDescriptorBuffersEXT = PFN_vkCmdBindDescriptorBuffersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBuffersEXT" ) );
+      vkCmdSetDescriptorBufferOffsetsEXT = PFN_vkCmdSetDescriptorBufferOffsetsEXT( vkGetDeviceProcAddr( device, "vkCmdSetDescriptorBufferOffsetsEXT" ) );
+      vkCmdBindDescriptorBufferEmbeddedSamplersEXT = PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( vkGetDeviceProcAddr( device, "vkCmdBindDescriptorBufferEmbeddedSamplersEXT" ) );
+      vkGetBufferOpaqueCaptureDescriptorDataEXT = PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetBufferOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetImageOpaqueCaptureDescriptorDataEXT = PFN_vkGetImageOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetImageViewOpaqueCaptureDescriptorDataEXT = PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetImageViewOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetSamplerOpaqueCaptureDescriptorDataEXT = PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetSamplerOpaqueCaptureDescriptorDataEXT" ) );
+      vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( vkGetDeviceProcAddr( device, "vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT" ) );
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+      vkCmdSetFragmentShadingRateEnumNV = PFN_vkCmdSetFragmentShadingRateEnumNV( vkGetDeviceProcAddr( device, "vkCmdSetFragmentShadingRateEnumNV" ) );
+
+  //=== VK_EXT_mesh_shader ===
+      vkCmdDrawMeshTasksEXT = PFN_vkCmdDrawMeshTasksEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksEXT" ) );
+      vkCmdDrawMeshTasksIndirectEXT = PFN_vkCmdDrawMeshTasksIndirectEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectEXT" ) );
+      vkCmdDrawMeshTasksIndirectCountEXT = PFN_vkCmdDrawMeshTasksIndirectCountEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMeshTasksIndirectCountEXT" ) );
+
+  //=== VK_KHR_copy_commands2 ===
+      vkCmdCopyBuffer2KHR = PFN_vkCmdCopyBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBuffer2KHR" ) );
+      if ( !vkCmdCopyBuffer2 ) vkCmdCopyBuffer2 = vkCmdCopyBuffer2KHR;
+      vkCmdCopyImage2KHR = PFN_vkCmdCopyImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImage2KHR" ) );
+      if ( !vkCmdCopyImage2 ) vkCmdCopyImage2 = vkCmdCopyImage2KHR;
+      vkCmdCopyBufferToImage2KHR = PFN_vkCmdCopyBufferToImage2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyBufferToImage2KHR" ) );
+      if ( !vkCmdCopyBufferToImage2 ) vkCmdCopyBufferToImage2 = vkCmdCopyBufferToImage2KHR;
+      vkCmdCopyImageToBuffer2KHR = PFN_vkCmdCopyImageToBuffer2KHR( vkGetDeviceProcAddr( device, "vkCmdCopyImageToBuffer2KHR" ) );
+      if ( !vkCmdCopyImageToBuffer2 ) vkCmdCopyImageToBuffer2 = vkCmdCopyImageToBuffer2KHR;
+      vkCmdBlitImage2KHR = PFN_vkCmdBlitImage2KHR( vkGetDeviceProcAddr( device, "vkCmdBlitImage2KHR" ) );
+      if ( !vkCmdBlitImage2 ) vkCmdBlitImage2 = vkCmdBlitImage2KHR;
+      vkCmdResolveImage2KHR = PFN_vkCmdResolveImage2KHR( vkGetDeviceProcAddr( device, "vkCmdResolveImage2KHR" ) );
+      if ( !vkCmdResolveImage2 ) vkCmdResolveImage2 = vkCmdResolveImage2KHR;
+
+  //=== VK_EXT_image_compression_control ===
+      vkGetImageSubresourceLayout2EXT = PFN_vkGetImageSubresourceLayout2EXT( vkGetDeviceProcAddr( device, "vkGetImageSubresourceLayout2EXT" ) );
+
+  //=== VK_EXT_device_fault ===
+      vkGetDeviceFaultInfoEXT = PFN_vkGetDeviceFaultInfoEXT( vkGetDeviceProcAddr( device, "vkGetDeviceFaultInfoEXT" ) );
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+      vkCmdTraceRaysKHR = PFN_vkCmdTraceRaysKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysKHR" ) );
+      vkCreateRayTracingPipelinesKHR = PFN_vkCreateRayTracingPipelinesKHR( vkGetDeviceProcAddr( device, "vkCreateRayTracingPipelinesKHR" ) );
+      vkGetRayTracingShaderGroupHandlesKHR = PFN_vkGetRayTracingShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupHandlesKHR" ) );
+      vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingCaptureReplayShaderGroupHandlesKHR" ) );
+      vkCmdTraceRaysIndirectKHR = PFN_vkCmdTraceRaysIndirectKHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirectKHR" ) );
+      vkGetRayTracingShaderGroupStackSizeKHR = PFN_vkGetRayTracingShaderGroupStackSizeKHR( vkGetDeviceProcAddr( device, "vkGetRayTracingShaderGroupStackSizeKHR" ) );
+      vkCmdSetRayTracingPipelineStackSizeKHR = PFN_vkCmdSetRayTracingPipelineStackSizeKHR( vkGetDeviceProcAddr( device, "vkCmdSetRayTracingPipelineStackSizeKHR" ) );
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+      vkCmdSetVertexInputEXT = PFN_vkCmdSetVertexInputEXT( vkGetDeviceProcAddr( device, "vkCmdSetVertexInputEXT" ) );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+      vkGetMemoryZirconHandleFUCHSIA = PFN_vkGetMemoryZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandleFUCHSIA" ) );
+      vkGetMemoryZirconHandlePropertiesFUCHSIA = PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetMemoryZirconHandlePropertiesFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+      vkImportSemaphoreZirconHandleFUCHSIA = PFN_vkImportSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkImportSemaphoreZirconHandleFUCHSIA" ) );
+      vkGetSemaphoreZirconHandleFUCHSIA = PFN_vkGetSemaphoreZirconHandleFUCHSIA( vkGetDeviceProcAddr( device, "vkGetSemaphoreZirconHandleFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+      vkCreateBufferCollectionFUCHSIA = PFN_vkCreateBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkCreateBufferCollectionFUCHSIA" ) );
+      vkSetBufferCollectionImageConstraintsFUCHSIA = PFN_vkSetBufferCollectionImageConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionImageConstraintsFUCHSIA" ) );
+      vkSetBufferCollectionBufferConstraintsFUCHSIA = PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA( vkGetDeviceProcAddr( device, "vkSetBufferCollectionBufferConstraintsFUCHSIA" ) );
+      vkDestroyBufferCollectionFUCHSIA = PFN_vkDestroyBufferCollectionFUCHSIA( vkGetDeviceProcAddr( device, "vkDestroyBufferCollectionFUCHSIA" ) );
+      vkGetBufferCollectionPropertiesFUCHSIA = PFN_vkGetBufferCollectionPropertiesFUCHSIA( vkGetDeviceProcAddr( device, "vkGetBufferCollectionPropertiesFUCHSIA" ) );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+      vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI = PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI( vkGetDeviceProcAddr( device, "vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI" ) );
+      vkCmdSubpassShadingHUAWEI = PFN_vkCmdSubpassShadingHUAWEI( vkGetDeviceProcAddr( device, "vkCmdSubpassShadingHUAWEI" ) );
+
+  //=== VK_HUAWEI_invocation_mask ===
+      vkCmdBindInvocationMaskHUAWEI = PFN_vkCmdBindInvocationMaskHUAWEI( vkGetDeviceProcAddr( device, "vkCmdBindInvocationMaskHUAWEI" ) );
+
+  //=== VK_NV_external_memory_rdma ===
+      vkGetMemoryRemoteAddressNV = PFN_vkGetMemoryRemoteAddressNV( vkGetDeviceProcAddr( device, "vkGetMemoryRemoteAddressNV" ) );
+
+  //=== VK_EXT_pipeline_properties ===
+      vkGetPipelinePropertiesEXT = PFN_vkGetPipelinePropertiesEXT( vkGetDeviceProcAddr( device, "vkGetPipelinePropertiesEXT" ) );
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+      vkCmdSetPatchControlPointsEXT = PFN_vkCmdSetPatchControlPointsEXT( vkGetDeviceProcAddr( device, "vkCmdSetPatchControlPointsEXT" ) );
+      vkCmdSetRasterizerDiscardEnableEXT = PFN_vkCmdSetRasterizerDiscardEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizerDiscardEnableEXT" ) );
+      if ( !vkCmdSetRasterizerDiscardEnable ) vkCmdSetRasterizerDiscardEnable = vkCmdSetRasterizerDiscardEnableEXT;
+      vkCmdSetDepthBiasEnableEXT = PFN_vkCmdSetDepthBiasEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthBiasEnableEXT" ) );
+      if ( !vkCmdSetDepthBiasEnable ) vkCmdSetDepthBiasEnable = vkCmdSetDepthBiasEnableEXT;
+      vkCmdSetLogicOpEXT = PFN_vkCmdSetLogicOpEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEXT" ) );
+      vkCmdSetPrimitiveRestartEnableEXT = PFN_vkCmdSetPrimitiveRestartEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetPrimitiveRestartEnableEXT" ) );
+      if ( !vkCmdSetPrimitiveRestartEnable ) vkCmdSetPrimitiveRestartEnable = vkCmdSetPrimitiveRestartEnableEXT;
+
+  //=== VK_EXT_color_write_enable ===
+      vkCmdSetColorWriteEnableEXT = PFN_vkCmdSetColorWriteEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteEnableEXT" ) );
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+      vkCmdTraceRaysIndirect2KHR = PFN_vkCmdTraceRaysIndirect2KHR( vkGetDeviceProcAddr( device, "vkCmdTraceRaysIndirect2KHR" ) );
+
+  //=== VK_EXT_multi_draw ===
+      vkCmdDrawMultiEXT = PFN_vkCmdDrawMultiEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiEXT" ) );
+      vkCmdDrawMultiIndexedEXT = PFN_vkCmdDrawMultiIndexedEXT( vkGetDeviceProcAddr( device, "vkCmdDrawMultiIndexedEXT" ) );
+
+  //=== VK_EXT_opacity_micromap ===
+      vkCreateMicromapEXT = PFN_vkCreateMicromapEXT( vkGetDeviceProcAddr( device, "vkCreateMicromapEXT" ) );
+      vkDestroyMicromapEXT = PFN_vkDestroyMicromapEXT( vkGetDeviceProcAddr( device, "vkDestroyMicromapEXT" ) );
+      vkCmdBuildMicromapsEXT = PFN_vkCmdBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkCmdBuildMicromapsEXT" ) );
+      vkBuildMicromapsEXT = PFN_vkBuildMicromapsEXT( vkGetDeviceProcAddr( device, "vkBuildMicromapsEXT" ) );
+      vkCopyMicromapEXT = PFN_vkCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapEXT" ) );
+      vkCopyMicromapToMemoryEXT = PFN_vkCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCopyMicromapToMemoryEXT" ) );
+      vkCopyMemoryToMicromapEXT = PFN_vkCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCopyMemoryToMicromapEXT" ) );
+      vkWriteMicromapsPropertiesEXT = PFN_vkWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkWriteMicromapsPropertiesEXT" ) );
+      vkCmdCopyMicromapEXT = PFN_vkCmdCopyMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapEXT" ) );
+      vkCmdCopyMicromapToMemoryEXT = PFN_vkCmdCopyMicromapToMemoryEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMicromapToMemoryEXT" ) );
+      vkCmdCopyMemoryToMicromapEXT = PFN_vkCmdCopyMemoryToMicromapEXT( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToMicromapEXT" ) );
+      vkCmdWriteMicromapsPropertiesEXT = PFN_vkCmdWriteMicromapsPropertiesEXT( vkGetDeviceProcAddr( device, "vkCmdWriteMicromapsPropertiesEXT" ) );
+      vkGetDeviceMicromapCompatibilityEXT = PFN_vkGetDeviceMicromapCompatibilityEXT( vkGetDeviceProcAddr( device, "vkGetDeviceMicromapCompatibilityEXT" ) );
+      vkGetMicromapBuildSizesEXT = PFN_vkGetMicromapBuildSizesEXT( vkGetDeviceProcAddr( device, "vkGetMicromapBuildSizesEXT" ) );
+
+  //=== VK_EXT_pageable_device_local_memory ===
+      vkSetDeviceMemoryPriorityEXT = PFN_vkSetDeviceMemoryPriorityEXT( vkGetDeviceProcAddr( device, "vkSetDeviceMemoryPriorityEXT" ) );
+
+  //=== VK_KHR_maintenance4 ===
+      vkGetDeviceBufferMemoryRequirementsKHR = PFN_vkGetDeviceBufferMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceBufferMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceBufferMemoryRequirements ) vkGetDeviceBufferMemoryRequirements = vkGetDeviceBufferMemoryRequirementsKHR;
+      vkGetDeviceImageMemoryRequirementsKHR = PFN_vkGetDeviceImageMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceImageMemoryRequirements ) vkGetDeviceImageMemoryRequirements = vkGetDeviceImageMemoryRequirementsKHR;
+      vkGetDeviceImageSparseMemoryRequirementsKHR = PFN_vkGetDeviceImageSparseMemoryRequirementsKHR( vkGetDeviceProcAddr( device, "vkGetDeviceImageSparseMemoryRequirementsKHR" ) );
+      if ( !vkGetDeviceImageSparseMemoryRequirements ) vkGetDeviceImageSparseMemoryRequirements = vkGetDeviceImageSparseMemoryRequirementsKHR;
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+      vkGetDescriptorSetLayoutHostMappingInfoVALVE = PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetLayoutHostMappingInfoVALVE" ) );
+      vkGetDescriptorSetHostMappingVALVE = PFN_vkGetDescriptorSetHostMappingVALVE( vkGetDeviceProcAddr( device, "vkGetDescriptorSetHostMappingVALVE" ) );
+
+  //=== VK_NV_copy_memory_indirect ===
+      vkCmdCopyMemoryIndirectNV = PFN_vkCmdCopyMemoryIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryIndirectNV" ) );
+      vkCmdCopyMemoryToImageIndirectNV = PFN_vkCmdCopyMemoryToImageIndirectNV( vkGetDeviceProcAddr( device, "vkCmdCopyMemoryToImageIndirectNV" ) );
+
+  //=== VK_NV_memory_decompression ===
+      vkCmdDecompressMemoryNV = PFN_vkCmdDecompressMemoryNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryNV" ) );
+      vkCmdDecompressMemoryIndirectCountNV = PFN_vkCmdDecompressMemoryIndirectCountNV( vkGetDeviceProcAddr( device, "vkCmdDecompressMemoryIndirectCountNV" ) );
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+      vkCmdSetTessellationDomainOriginEXT = PFN_vkCmdSetTessellationDomainOriginEXT( vkGetDeviceProcAddr( device, "vkCmdSetTessellationDomainOriginEXT" ) );
+      vkCmdSetDepthClampEnableEXT = PFN_vkCmdSetDepthClampEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClampEnableEXT" ) );
+      vkCmdSetPolygonModeEXT = PFN_vkCmdSetPolygonModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetPolygonModeEXT" ) );
+      vkCmdSetRasterizationSamplesEXT = PFN_vkCmdSetRasterizationSamplesEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationSamplesEXT" ) );
+      vkCmdSetSampleMaskEXT = PFN_vkCmdSetSampleMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleMaskEXT" ) );
+      vkCmdSetAlphaToCoverageEnableEXT = PFN_vkCmdSetAlphaToCoverageEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToCoverageEnableEXT" ) );
+      vkCmdSetAlphaToOneEnableEXT = PFN_vkCmdSetAlphaToOneEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetAlphaToOneEnableEXT" ) );
+      vkCmdSetLogicOpEnableEXT = PFN_vkCmdSetLogicOpEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLogicOpEnableEXT" ) );
+      vkCmdSetColorBlendEnableEXT = PFN_vkCmdSetColorBlendEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEnableEXT" ) );
+      vkCmdSetColorBlendEquationEXT = PFN_vkCmdSetColorBlendEquationEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendEquationEXT" ) );
+      vkCmdSetColorWriteMaskEXT = PFN_vkCmdSetColorWriteMaskEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorWriteMaskEXT" ) );
+      vkCmdSetRasterizationStreamEXT = PFN_vkCmdSetRasterizationStreamEXT( vkGetDeviceProcAddr( device, "vkCmdSetRasterizationStreamEXT" ) );
+      vkCmdSetConservativeRasterizationModeEXT = PFN_vkCmdSetConservativeRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetConservativeRasterizationModeEXT" ) );
+      vkCmdSetExtraPrimitiveOverestimationSizeEXT = PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT( vkGetDeviceProcAddr( device, "vkCmdSetExtraPrimitiveOverestimationSizeEXT" ) );
+      vkCmdSetDepthClipEnableEXT = PFN_vkCmdSetDepthClipEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipEnableEXT" ) );
+      vkCmdSetSampleLocationsEnableEXT = PFN_vkCmdSetSampleLocationsEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetSampleLocationsEnableEXT" ) );
+      vkCmdSetColorBlendAdvancedEXT = PFN_vkCmdSetColorBlendAdvancedEXT( vkGetDeviceProcAddr( device, "vkCmdSetColorBlendAdvancedEXT" ) );
+      vkCmdSetProvokingVertexModeEXT = PFN_vkCmdSetProvokingVertexModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetProvokingVertexModeEXT" ) );
+      vkCmdSetLineRasterizationModeEXT = PFN_vkCmdSetLineRasterizationModeEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineRasterizationModeEXT" ) );
+      vkCmdSetLineStippleEnableEXT = PFN_vkCmdSetLineStippleEnableEXT( vkGetDeviceProcAddr( device, "vkCmdSetLineStippleEnableEXT" ) );
+      vkCmdSetDepthClipNegativeOneToOneEXT = PFN_vkCmdSetDepthClipNegativeOneToOneEXT( vkGetDeviceProcAddr( device, "vkCmdSetDepthClipNegativeOneToOneEXT" ) );
+      vkCmdSetViewportWScalingEnableNV = PFN_vkCmdSetViewportWScalingEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportWScalingEnableNV" ) );
+      vkCmdSetViewportSwizzleNV = PFN_vkCmdSetViewportSwizzleNV( vkGetDeviceProcAddr( device, "vkCmdSetViewportSwizzleNV" ) );
+      vkCmdSetCoverageToColorEnableNV = PFN_vkCmdSetCoverageToColorEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorEnableNV" ) );
+      vkCmdSetCoverageToColorLocationNV = PFN_vkCmdSetCoverageToColorLocationNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageToColorLocationNV" ) );
+      vkCmdSetCoverageModulationModeNV = PFN_vkCmdSetCoverageModulationModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationModeNV" ) );
+      vkCmdSetCoverageModulationTableEnableNV = PFN_vkCmdSetCoverageModulationTableEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableEnableNV" ) );
+      vkCmdSetCoverageModulationTableNV = PFN_vkCmdSetCoverageModulationTableNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageModulationTableNV" ) );
+      vkCmdSetShadingRateImageEnableNV = PFN_vkCmdSetShadingRateImageEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetShadingRateImageEnableNV" ) );
+      vkCmdSetRepresentativeFragmentTestEnableNV = PFN_vkCmdSetRepresentativeFragmentTestEnableNV( vkGetDeviceProcAddr( device, "vkCmdSetRepresentativeFragmentTestEnableNV" ) );
+      vkCmdSetCoverageReductionModeNV = PFN_vkCmdSetCoverageReductionModeNV( vkGetDeviceProcAddr( device, "vkCmdSetCoverageReductionModeNV" ) );
+
+  //=== VK_EXT_shader_module_identifier ===
+      vkGetShaderModuleIdentifierEXT = PFN_vkGetShaderModuleIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleIdentifierEXT" ) );
+      vkGetShaderModuleCreateInfoIdentifierEXT = PFN_vkGetShaderModuleCreateInfoIdentifierEXT( vkGetDeviceProcAddr( device, "vkGetShaderModuleCreateInfoIdentifierEXT" ) );
+
+  //=== VK_NV_optical_flow ===
+      vkCreateOpticalFlowSessionNV = PFN_vkCreateOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkCreateOpticalFlowSessionNV" ) );
+      vkDestroyOpticalFlowSessionNV = PFN_vkDestroyOpticalFlowSessionNV( vkGetDeviceProcAddr( device, "vkDestroyOpticalFlowSessionNV" ) );
+      vkBindOpticalFlowSessionImageNV = PFN_vkBindOpticalFlowSessionImageNV( vkGetDeviceProcAddr( device, "vkBindOpticalFlowSessionImageNV" ) );
+      vkCmdOpticalFlowExecuteNV = PFN_vkCmdOpticalFlowExecuteNV( vkGetDeviceProcAddr( device, "vkCmdOpticalFlowExecuteNV" ) );
+
+  //=== VK_QCOM_tile_properties ===
+      vkGetFramebufferTilePropertiesQCOM = PFN_vkGetFramebufferTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetFramebufferTilePropertiesQCOM" ) );
+      vkGetDynamicRenderingTilePropertiesQCOM = PFN_vkGetDynamicRenderingTilePropertiesQCOM( vkGetDeviceProcAddr( device, "vkGetDynamicRenderingTilePropertiesQCOM" ) );
+
+    }
+
+    template <typename DynamicLoader>
+    void init(VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device, DynamicLoader const & dl) VULKAN_HPP_NOEXCEPT
+    {
+      PFN_vkGetInstanceProcAddr getInstanceProcAddr = dl.template getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr");
+      PFN_vkGetDeviceProcAddr getDeviceProcAddr = dl.template getProcAddress<PFN_vkGetDeviceProcAddr>("vkGetDeviceProcAddr");
+      init(static_cast<VkInstance>(instance), getInstanceProcAddr, static_cast<VkDevice>(device), device ? getDeviceProcAddr : nullptr);
+    }
+
+    template <typename DynamicLoader
+#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL
+      = VULKAN_HPP_NAMESPACE::DynamicLoader
+#endif
+    >
+    void init(VULKAN_HPP_NAMESPACE::Instance const & instance, VULKAN_HPP_NAMESPACE::Device const & device) VULKAN_HPP_NOEXCEPT
+    {
+      static DynamicLoader dl;
+      init(instance, device, dl);
+    }
+  };
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
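A minimal usage sketch for the init() overloads shown above (editorial illustration, not part of the generated vulkan.hpp or of this diff): it assumes vulkan.hpp is included with the default `vk` alias for VULKAN_HPP_NAMESPACE, that VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL is enabled so vk::DynamicLoader is available, and that the instance and device were created elsewhere.

    #include <vulkan/vulkan.hpp>

    void initDispatcher(vk::Instance instance, vk::Device device)
    {
        // DynamicLoader opens the Vulkan loader library and exposes its
        // entry points through getProcAddress<>().
        static vk::DynamicLoader dl;

        // The three-argument overload above fetches vkGetInstanceProcAddr and
        // vkGetDeviceProcAddr from the loader, then resolves every function
        // pointer, preferring device-level addresses when a device is given.
        vk::DispatchLoaderDynamic dispatcher;
        dispatcher.init(instance, device, dl);
    }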
diff --git a/include/vulkan/vulkan_android.h b/include/vulkan/vulkan_android.h
new file mode 100644
index 0000000..11f5397
--- /dev/null
+++ b/include/vulkan/vulkan_android.h
@@ -0,0 +1,125 @@
+#ifndef VULKAN_ANDROID_H_
+#define VULKAN_ANDROID_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_android_surface 1
+struct ANativeWindow;
+#define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6
+#define VK_KHR_ANDROID_SURFACE_EXTENSION_NAME "VK_KHR_android_surface"
+typedef VkFlags VkAndroidSurfaceCreateFlagsKHR;
+typedef struct VkAndroidSurfaceCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkAndroidSurfaceCreateFlagsKHR    flags;
+    struct ANativeWindow*             window;
+} VkAndroidSurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateAndroidSurfaceKHR)(VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
+    VkInstance                                  instance,
+    const VkAndroidSurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+#define VK_ANDROID_external_memory_android_hardware_buffer 1
+struct AHardwareBuffer;
+#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 5
+#define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_EXTENSION_NAME "VK_ANDROID_external_memory_android_hardware_buffer"
+typedef struct VkAndroidHardwareBufferUsageANDROID {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           androidHardwareBufferUsage;
+} VkAndroidHardwareBufferUsageANDROID;
+
+typedef struct VkAndroidHardwareBufferPropertiesANDROID {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       allocationSize;
+    uint32_t           memoryTypeBits;
+} VkAndroidHardwareBufferPropertiesANDROID;
+
+typedef struct VkAndroidHardwareBufferFormatPropertiesANDROID {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkFormat                         format;
+    uint64_t                         externalFormat;
+    VkFormatFeatureFlags             formatFeatures;
+    VkComponentMapping               samplerYcbcrConversionComponents;
+    VkSamplerYcbcrModelConversion    suggestedYcbcrModel;
+    VkSamplerYcbcrRange              suggestedYcbcrRange;
+    VkChromaLocation                 suggestedXChromaOffset;
+    VkChromaLocation                 suggestedYChromaOffset;
+} VkAndroidHardwareBufferFormatPropertiesANDROID;
+
+typedef struct VkImportAndroidHardwareBufferInfoANDROID {
+    VkStructureType            sType;
+    const void*                pNext;
+    struct AHardwareBuffer*    buffer;
+} VkImportAndroidHardwareBufferInfoANDROID;
+
+typedef struct VkMemoryGetAndroidHardwareBufferInfoANDROID {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceMemory     memory;
+} VkMemoryGetAndroidHardwareBufferInfoANDROID;
+
+typedef struct VkExternalFormatANDROID {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           externalFormat;
+} VkExternalFormatANDROID;
+
+typedef struct VkAndroidHardwareBufferFormatProperties2ANDROID {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkFormat                         format;
+    uint64_t                         externalFormat;
+    VkFormatFeatureFlags2            formatFeatures;
+    VkComponentMapping               samplerYcbcrConversionComponents;
+    VkSamplerYcbcrModelConversion    suggestedYcbcrModel;
+    VkSamplerYcbcrRange              suggestedYcbcrRange;
+    VkChromaLocation                 suggestedXChromaOffset;
+    VkChromaLocation                 suggestedYChromaOffset;
+} VkAndroidHardwareBufferFormatProperties2ANDROID;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetAndroidHardwareBufferPropertiesANDROID)(VkDevice device, const struct AHardwareBuffer* buffer, VkAndroidHardwareBufferPropertiesANDROID* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryAndroidHardwareBufferANDROID)(VkDevice device, const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo, struct AHardwareBuffer** pBuffer);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetAndroidHardwareBufferPropertiesANDROID(
+    VkDevice                                    device,
+    const struct AHardwareBuffer*               buffer,
+    VkAndroidHardwareBufferPropertiesANDROID*   pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(
+    VkDevice                                    device,
+    const VkMemoryGetAndroidHardwareBufferInfoANDROID* pInfo,
+    struct AHardwareBuffer**                    pBuffer);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
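A usage sketch for the VK_KHR_android_surface declarations above (editorial illustration, not part of the header): it assumes prototypes are enabled (VK_NO_PROTOTYPES is not defined), an already-created VkInstance, a valid ANativeWindow pointer, and the VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR value provided by vulkan_core.h.

    VkSurfaceKHR createAndroidSurface(VkInstance instance, struct ANativeWindow* window)
    {
        VkAndroidSurfaceCreateInfoKHR createInfo = {};
        createInfo.sType  = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
        createInfo.pNext  = NULL;
        createInfo.flags  = 0;      /* VkAndroidSurfaceCreateFlagsKHR is reserved for future use */
        createInfo.window = window; /* the ANativeWindow backing the presentable surface */

        VkSurfaceKHR surface = VK_NULL_HANDLE;
        if (vkCreateAndroidSurfaceKHR(instance, &createInfo, NULL, &surface) != VK_SUCCESS)
            return VK_NULL_HANDLE;
        return surface;
    }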
diff --git a/include/vulkan/vulkan_beta.h b/include/vulkan/vulkan_beta.h
new file mode 100644
index 0000000..cfeda0e
--- /dev/null
+++ b/include/vulkan/vulkan_beta.h
@@ -0,0 +1,553 @@
+#ifndef VULKAN_BETA_H_
+#define VULKAN_BETA_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_KHR_portability_subset 1
+#define VK_KHR_PORTABILITY_SUBSET_SPEC_VERSION 1
+#define VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME "VK_KHR_portability_subset"
+typedef struct VkPhysicalDevicePortabilitySubsetFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           constantAlphaColorBlendFactors;
+    VkBool32           events;
+    VkBool32           imageViewFormatReinterpretation;
+    VkBool32           imageViewFormatSwizzle;
+    VkBool32           imageView2DOn3DImage;
+    VkBool32           multisampleArrayImage;
+    VkBool32           mutableComparisonSamplers;
+    VkBool32           pointPolygons;
+    VkBool32           samplerMipLodBias;
+    VkBool32           separateStencilMaskRef;
+    VkBool32           shaderSampleRateInterpolationFunctions;
+    VkBool32           tessellationIsolines;
+    VkBool32           tessellationPointMode;
+    VkBool32           triangleFans;
+    VkBool32           vertexAttributeAccessBeyondStride;
+} VkPhysicalDevicePortabilitySubsetFeaturesKHR;
+
+typedef struct VkPhysicalDevicePortabilitySubsetPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           minVertexInputBindingStrideAlignment;
+} VkPhysicalDevicePortabilitySubsetPropertiesKHR;
+
+
+
+#define VK_KHR_video_encode_queue 1
+#define VK_KHR_VIDEO_ENCODE_QUEUE_SPEC_VERSION 7
+#define VK_KHR_VIDEO_ENCODE_QUEUE_EXTENSION_NAME "VK_KHR_video_encode_queue"
+
+typedef enum VkVideoEncodeTuningModeKHR {
+    VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR = 0,
+    VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR = 1,
+    VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR = 2,
+    VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR = 3,
+    VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR = 4,
+    VK_VIDEO_ENCODE_TUNING_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeTuningModeKHR;
+typedef VkFlags VkVideoEncodeFlagsKHR;
+
+typedef enum VkVideoEncodeCapabilityFlagBitsKHR {
+    VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR = 0x00000001,
+    VK_VIDEO_ENCODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeCapabilityFlagBitsKHR;
+typedef VkFlags VkVideoEncodeCapabilityFlagsKHR;
+
+typedef enum VkVideoEncodeRateControlModeFlagBitsKHR {
+    VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR = 0,
+    VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR = 1,
+    VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR = 2,
+    VK_VIDEO_ENCODE_RATE_CONTROL_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeRateControlModeFlagBitsKHR;
+typedef VkFlags VkVideoEncodeRateControlModeFlagsKHR;
+
+typedef enum VkVideoEncodeUsageFlagBitsKHR {
+    VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR = 0,
+    VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001,
+    VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR = 0x00000002,
+    VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR = 0x00000004,
+    VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR = 0x00000008,
+    VK_VIDEO_ENCODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeUsageFlagBitsKHR;
+typedef VkFlags VkVideoEncodeUsageFlagsKHR;
+
+typedef enum VkVideoEncodeContentFlagBitsKHR {
+    VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR = 0,
+    VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR = 0x00000001,
+    VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR = 0x00000002,
+    VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR = 0x00000004,
+    VK_VIDEO_ENCODE_CONTENT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoEncodeContentFlagBitsKHR;
+typedef VkFlags VkVideoEncodeContentFlagsKHR;
+typedef VkFlags VkVideoEncodeRateControlFlagsKHR;
+typedef struct VkVideoEncodeInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkVideoEncodeFlagsKHR                 flags;
+    uint32_t                              qualityLevel;
+    VkBuffer                              dstBitstreamBuffer;
+    VkDeviceSize                          dstBitstreamBufferOffset;
+    VkDeviceSize                          dstBitstreamBufferMaxRange;
+    VkVideoPictureResourceInfoKHR         srcPictureResource;
+    const VkVideoReferenceSlotInfoKHR*    pSetupReferenceSlot;
+    uint32_t                              referenceSlotCount;
+    const VkVideoReferenceSlotInfoKHR*    pReferenceSlots;
+    uint32_t                              precedingExternallyEncodedBytes;
+} VkVideoEncodeInfoKHR;
+
+typedef struct VkVideoEncodeCapabilitiesKHR {
+    VkStructureType                         sType;
+    void*                                   pNext;
+    VkVideoEncodeCapabilityFlagsKHR         flags;
+    VkVideoEncodeRateControlModeFlagsKHR    rateControlModes;
+    uint8_t                                 rateControlLayerCount;
+    uint8_t                                 qualityLevelCount;
+    VkExtent2D                              inputImageDataFillAlignment;
+} VkVideoEncodeCapabilitiesKHR;
+
+typedef struct VkVideoEncodeUsageInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkVideoEncodeUsageFlagsKHR      videoUsageHints;
+    VkVideoEncodeContentFlagsKHR    videoContentHints;
+    VkVideoEncodeTuningModeKHR      tuningMode;
+} VkVideoEncodeUsageInfoKHR;
+
+typedef struct VkVideoEncodeRateControlLayerInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           averageBitrate;
+    uint32_t           maxBitrate;
+    uint32_t           frameRateNumerator;
+    uint32_t           frameRateDenominator;
+    uint32_t           virtualBufferSizeInMs;
+    uint32_t           initialVirtualBufferSizeInMs;
+} VkVideoEncodeRateControlLayerInfoKHR;
+
+typedef struct VkVideoEncodeRateControlInfoKHR {
+    VkStructureType                                sType;
+    const void*                                    pNext;
+    VkVideoEncodeRateControlFlagsKHR               flags;
+    VkVideoEncodeRateControlModeFlagBitsKHR        rateControlMode;
+    uint8_t                                        layerCount;
+    const VkVideoEncodeRateControlLayerInfoKHR*    pLayerConfigs;
+} VkVideoEncodeRateControlInfoKHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdEncodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoEncodeInfoKHR* pEncodeInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdEncodeVideoKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkVideoEncodeInfoKHR*                 pEncodeInfo);
+#endif
+
+
+#define VK_EXT_video_encode_h264 1
+#include "vk_video/vulkan_video_codec_h264std.h"
+#include "vk_video/vulkan_video_codec_h264std_encode.h"
+#define VK_EXT_VIDEO_ENCODE_H264_SPEC_VERSION 9
+#define VK_EXT_VIDEO_ENCODE_H264_EXTENSION_NAME "VK_EXT_video_encode_h264"
+
+typedef enum VkVideoEncodeH264RateControlStructureEXT {
+    VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT = 0,
+    VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_EXT = 1,
+    VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_EXT = 2,
+    VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH264RateControlStructureEXT;
+
+typedef enum VkVideoEncodeH264CapabilityFlagBitsEXT {
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_ENABLED_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_DISABLED_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_BIT_EXT = 0x00000008,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_SCALING_LISTS_BIT_EXT = 0x00000010,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_EXT = 0x00000020,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT = 0x00000040,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT = 0x00000080,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_PIC_INIT_QP_MINUS26_BIT_EXT = 0x00000100,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_BIT_EXT = 0x00000200,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_EXPLICIT_BIT_EXT = 0x00000400,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_IMPLICIT_BIT_EXT = 0x00000800,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT = 0x00001000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT = 0x00002000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT = 0x00004000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT = 0x00008000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT = 0x00010000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT = 0x00020000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT = 0x00040000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DISABLE_DIRECT_SPATIAL_MV_PRED_BIT_EXT = 0x00080000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT = 0x00100000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_SLICE_MB_COUNT_BIT_EXT = 0x00200000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_EXT = 0x00400000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT = 0x00800000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT = 0x01000000,
+    VK_VIDEO_ENCODE_H264_CAPABILITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH264CapabilityFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH264CapabilityFlagsEXT;
+
+typedef enum VkVideoEncodeH264InputModeFlagBitsEXT {
+    VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H264_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH264InputModeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH264InputModeFlagsEXT;
+
+typedef enum VkVideoEncodeH264OutputModeFlagBitsEXT {
+    VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH264OutputModeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH264OutputModeFlagsEXT;
+typedef struct VkVideoEncodeH264CapabilitiesEXT {
+    VkStructureType                        sType;
+    void*                                  pNext;
+    VkVideoEncodeH264CapabilityFlagsEXT    flags;
+    VkVideoEncodeH264InputModeFlagsEXT     inputModeFlags;
+    VkVideoEncodeH264OutputModeFlagsEXT    outputModeFlags;
+    uint8_t                                maxPPictureL0ReferenceCount;
+    uint8_t                                maxBPictureL0ReferenceCount;
+    uint8_t                                maxL1ReferenceCount;
+    VkBool32                               motionVectorsOverPicBoundariesFlag;
+    uint32_t                               maxBytesPerPicDenom;
+    uint32_t                               maxBitsPerMbDenom;
+    uint32_t                               log2MaxMvLengthHorizontal;
+    uint32_t                               log2MaxMvLengthVertical;
+} VkVideoEncodeH264CapabilitiesEXT;
+
+typedef struct VkVideoEncodeH264SessionParametersAddInfoEXT {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   stdSPSCount;
+    const StdVideoH264SequenceParameterSet*    pStdSPSs;
+    uint32_t                                   stdPPSCount;
+    const StdVideoH264PictureParameterSet*     pStdPPSs;
+} VkVideoEncodeH264SessionParametersAddInfoEXT;
+
+typedef struct VkVideoEncodeH264SessionParametersCreateInfoEXT {
+    VkStructureType                                        sType;
+    const void*                                            pNext;
+    uint32_t                                               maxStdSPSCount;
+    uint32_t                                               maxStdPPSCount;
+    const VkVideoEncodeH264SessionParametersAddInfoEXT*    pParametersAddInfo;
+} VkVideoEncodeH264SessionParametersCreateInfoEXT;
+
+typedef struct VkVideoEncodeH264DpbSlotInfoEXT {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    int8_t                                    slotIndex;
+    const StdVideoEncodeH264ReferenceInfo*    pStdReferenceInfo;
+} VkVideoEncodeH264DpbSlotInfoEXT;
+
+typedef struct VkVideoEncodeH264ReferenceListsInfoEXT {
+    VkStructureType                                      sType;
+    const void*                                          pNext;
+    uint8_t                                              referenceList0EntryCount;
+    const VkVideoEncodeH264DpbSlotInfoEXT*               pReferenceList0Entries;
+    uint8_t                                              referenceList1EntryCount;
+    const VkVideoEncodeH264DpbSlotInfoEXT*               pReferenceList1Entries;
+    const StdVideoEncodeH264RefMemMgmtCtrlOperations*    pMemMgmtCtrlOperations;
+} VkVideoEncodeH264ReferenceListsInfoEXT;
+
+typedef struct VkVideoEncodeH264NaluSliceInfoEXT {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    uint32_t                                         mbCount;
+    const VkVideoEncodeH264ReferenceListsInfoEXT*    pReferenceFinalLists;
+    const StdVideoEncodeH264SliceHeader*             pSliceHeaderStd;
+} VkVideoEncodeH264NaluSliceInfoEXT;
+
+typedef struct VkVideoEncodeH264VclFrameInfoEXT {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    const VkVideoEncodeH264ReferenceListsInfoEXT*    pReferenceFinalLists;
+    uint32_t                                         naluSliceEntryCount;
+    const VkVideoEncodeH264NaluSliceInfoEXT*         pNaluSliceEntries;
+    const StdVideoEncodeH264PictureInfo*             pCurrentPictureInfo;
+} VkVideoEncodeH264VclFrameInfoEXT;
+
+typedef struct VkVideoEncodeH264EmitPictureParametersInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint8_t            spsId;
+    VkBool32           emitSpsEnable;
+    uint32_t           ppsIdEntryCount;
+    const uint8_t*     ppsIdEntries;
+} VkVideoEncodeH264EmitPictureParametersInfoEXT;
+
+typedef struct VkVideoEncodeH264ProfileInfoEXT {
+    VkStructureType           sType;
+    const void*               pNext;
+    StdVideoH264ProfileIdc    stdProfileIdc;
+} VkVideoEncodeH264ProfileInfoEXT;
+
+typedef struct VkVideoEncodeH264RateControlInfoEXT {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    gopFrameCount;
+    uint32_t                                    idrPeriod;
+    uint32_t                                    consecutiveBFrameCount;
+    VkVideoEncodeH264RateControlStructureEXT    rateControlStructure;
+    uint8_t                                     temporalLayerCount;
+} VkVideoEncodeH264RateControlInfoEXT;
+
+typedef struct VkVideoEncodeH264QpEXT {
+    int32_t    qpI;
+    int32_t    qpP;
+    int32_t    qpB;
+} VkVideoEncodeH264QpEXT;
+
+typedef struct VkVideoEncodeH264FrameSizeEXT {
+    uint32_t    frameISize;
+    uint32_t    framePSize;
+    uint32_t    frameBSize;
+} VkVideoEncodeH264FrameSizeEXT;
+
+typedef struct VkVideoEncodeH264RateControlLayerInfoEXT {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    uint8_t                          temporalLayerId;
+    VkBool32                         useInitialRcQp;
+    VkVideoEncodeH264QpEXT           initialRcQp;
+    VkBool32                         useMinQp;
+    VkVideoEncodeH264QpEXT           minQp;
+    VkBool32                         useMaxQp;
+    VkVideoEncodeH264QpEXT           maxQp;
+    VkBool32                         useMaxFrameSize;
+    VkVideoEncodeH264FrameSizeEXT    maxFrameSize;
+} VkVideoEncodeH264RateControlLayerInfoEXT;
+
+
+
+#define VK_EXT_video_encode_h265 1
+#include "vk_video/vulkan_video_codec_h265std.h"
+#include "vk_video/vulkan_video_codec_h265std_encode.h"
+#define VK_EXT_VIDEO_ENCODE_H265_SPEC_VERSION 9
+#define VK_EXT_VIDEO_ENCODE_H265_EXTENSION_NAME "VK_EXT_video_encode_h265"
+
+typedef enum VkVideoEncodeH265RateControlStructureEXT {
+    VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT = 0,
+    VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_EXT = 1,
+    VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_EXT = 2,
+    VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265RateControlStructureEXT;
+
+typedef enum VkVideoEncodeH265CapabilityFlagBitsEXT {
+    VK_VIDEO_ENCODE_H265_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_SCALING_LISTS_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_SAMPLE_ADAPTIVE_OFFSET_ENABLED_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_PCM_ENABLE_BIT_EXT = 0x00000008,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_SPS_TEMPORAL_MVP_ENABLED_BIT_EXT = 0x00000010,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_EXT = 0x00000020,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_INIT_QP_MINUS26_BIT_EXT = 0x00000040,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_EXT = 0x00000080,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_SIGN_DATA_HIDING_ENABLED_BIT_EXT = 0x00000100,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_ENABLED_BIT_EXT = 0x00000200,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_DISABLED_BIT_EXT = 0x00000400,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_BIT_EXT = 0x00000800,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_BIT_EXT = 0x00001000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_BIPRED_BIT_EXT = 0x00002000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT = 0x00004000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSQUANT_BYPASS_ENABLED_BIT_EXT = 0x00008000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_ENTROPY_CODING_SYNC_ENABLED_BIT_EXT = 0x00010000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_DEBLOCKING_FILTER_OVERRIDE_ENABLED_BIT_EXT = 0x00020000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_FRAME_BIT_EXT = 0x00040000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_PER_TILE_BIT_EXT = 0x00080000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_SLICE_BIT_EXT = 0x00100000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_SLICE_SEGMENT_CTB_COUNT_BIT_EXT = 0x00200000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_EXT = 0x00400000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_DEPENDENT_SLICE_SEGMENT_BIT_EXT = 0x00800000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT = 0x01000000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT = 0x02000000,
+    VK_VIDEO_ENCODE_H265_CAPABILITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265CapabilityFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH265CapabilityFlagsEXT;
+
+typedef enum VkVideoEncodeH265InputModeFlagBitsEXT {
+    VK_VIDEO_ENCODE_H265_INPUT_MODE_FRAME_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H265_INPUT_MODE_SLICE_SEGMENT_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H265_INPUT_MODE_NON_VCL_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H265_INPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265InputModeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH265InputModeFlagsEXT;
+
+typedef enum VkVideoEncodeH265OutputModeFlagBitsEXT {
+    VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FRAME_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H265_OUTPUT_MODE_SLICE_SEGMENT_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H265_OUTPUT_MODE_NON_VCL_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265OutputModeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH265OutputModeFlagsEXT;
+
+typedef enum VkVideoEncodeH265CtbSizeFlagBitsEXT {
+    VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H265_CTB_SIZE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265CtbSizeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH265CtbSizeFlagsEXT;
+
+typedef enum VkVideoEncodeH265TransformBlockSizeFlagBitsEXT {
+    VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_EXT = 0x00000001,
+    VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_EXT = 0x00000002,
+    VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_EXT = 0x00000004,
+    VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_EXT = 0x00000008,
+    VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkVideoEncodeH265TransformBlockSizeFlagBitsEXT;
+typedef VkFlags VkVideoEncodeH265TransformBlockSizeFlagsEXT;
+typedef struct VkVideoEncodeH265CapabilitiesEXT {
+    VkStructureType                                sType;
+    void*                                          pNext;
+    VkVideoEncodeH265CapabilityFlagsEXT            flags;
+    VkVideoEncodeH265InputModeFlagsEXT             inputModeFlags;
+    VkVideoEncodeH265OutputModeFlagsEXT            outputModeFlags;
+    VkVideoEncodeH265CtbSizeFlagsEXT               ctbSizes;
+    VkVideoEncodeH265TransformBlockSizeFlagsEXT    transformBlockSizes;
+    uint8_t                                        maxPPictureL0ReferenceCount;
+    uint8_t                                        maxBPictureL0ReferenceCount;
+    uint8_t                                        maxL1ReferenceCount;
+    uint8_t                                        maxSubLayersCount;
+    uint8_t                                        minLog2MinLumaCodingBlockSizeMinus3;
+    uint8_t                                        maxLog2MinLumaCodingBlockSizeMinus3;
+    uint8_t                                        minLog2MinLumaTransformBlockSizeMinus2;
+    uint8_t                                        maxLog2MinLumaTransformBlockSizeMinus2;
+    uint8_t                                        minMaxTransformHierarchyDepthInter;
+    uint8_t                                        maxMaxTransformHierarchyDepthInter;
+    uint8_t                                        minMaxTransformHierarchyDepthIntra;
+    uint8_t                                        maxMaxTransformHierarchyDepthIntra;
+    uint8_t                                        maxDiffCuQpDeltaDepth;
+    uint8_t                                        minMaxNumMergeCand;
+    uint8_t                                        maxMaxNumMergeCand;
+} VkVideoEncodeH265CapabilitiesEXT;
+
+typedef struct VkVideoEncodeH265SessionParametersAddInfoEXT {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   stdVPSCount;
+    const StdVideoH265VideoParameterSet*       pStdVPSs;
+    uint32_t                                   stdSPSCount;
+    const StdVideoH265SequenceParameterSet*    pStdSPSs;
+    uint32_t                                   stdPPSCount;
+    const StdVideoH265PictureParameterSet*     pStdPPSs;
+} VkVideoEncodeH265SessionParametersAddInfoEXT;
+
+typedef struct VkVideoEncodeH265SessionParametersCreateInfoEXT {
+    VkStructureType                                        sType;
+    const void*                                            pNext;
+    uint32_t                                               maxStdVPSCount;
+    uint32_t                                               maxStdSPSCount;
+    uint32_t                                               maxStdPPSCount;
+    const VkVideoEncodeH265SessionParametersAddInfoEXT*    pParametersAddInfo;
+} VkVideoEncodeH265SessionParametersCreateInfoEXT;
+
+typedef struct VkVideoEncodeH265DpbSlotInfoEXT {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    int8_t                                    slotIndex;
+    const StdVideoEncodeH265ReferenceInfo*    pStdReferenceInfo;
+} VkVideoEncodeH265DpbSlotInfoEXT;
+
+typedef struct VkVideoEncodeH265ReferenceListsInfoEXT {
+    VkStructureType                                    sType;
+    const void*                                        pNext;
+    uint8_t                                            referenceList0EntryCount;
+    const VkVideoEncodeH265DpbSlotInfoEXT*             pReferenceList0Entries;
+    uint8_t                                            referenceList1EntryCount;
+    const VkVideoEncodeH265DpbSlotInfoEXT*             pReferenceList1Entries;
+    const StdVideoEncodeH265ReferenceModifications*    pReferenceModifications;
+} VkVideoEncodeH265ReferenceListsInfoEXT;
+
+typedef struct VkVideoEncodeH265NaluSliceSegmentInfoEXT {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    uint32_t                                         ctbCount;
+    const VkVideoEncodeH265ReferenceListsInfoEXT*    pReferenceFinalLists;
+    const StdVideoEncodeH265SliceSegmentHeader*      pSliceSegmentHeaderStd;
+} VkVideoEncodeH265NaluSliceSegmentInfoEXT;
+
+typedef struct VkVideoEncodeH265VclFrameInfoEXT {
+    VkStructureType                                    sType;
+    const void*                                        pNext;
+    const VkVideoEncodeH265ReferenceListsInfoEXT*      pReferenceFinalLists;
+    uint32_t                                           naluSliceSegmentEntryCount;
+    const VkVideoEncodeH265NaluSliceSegmentInfoEXT*    pNaluSliceSegmentEntries;
+    const StdVideoEncodeH265PictureInfo*               pCurrentPictureInfo;
+} VkVideoEncodeH265VclFrameInfoEXT;
+
+typedef struct VkVideoEncodeH265EmitPictureParametersInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint8_t            vpsId;
+    uint8_t            spsId;
+    VkBool32           emitVpsEnable;
+    VkBool32           emitSpsEnable;
+    uint32_t           ppsIdEntryCount;
+    const uint8_t*     ppsIdEntries;
+} VkVideoEncodeH265EmitPictureParametersInfoEXT;
+
+typedef struct VkVideoEncodeH265ProfileInfoEXT {
+    VkStructureType           sType;
+    const void*               pNext;
+    StdVideoH265ProfileIdc    stdProfileIdc;
+} VkVideoEncodeH265ProfileInfoEXT;
+
+typedef struct VkVideoEncodeH265RateControlInfoEXT {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    gopFrameCount;
+    uint32_t                                    idrPeriod;
+    uint32_t                                    consecutiveBFrameCount;
+    VkVideoEncodeH265RateControlStructureEXT    rateControlStructure;
+    uint8_t                                     subLayerCount;
+} VkVideoEncodeH265RateControlInfoEXT;
+
+typedef struct VkVideoEncodeH265QpEXT {
+    int32_t    qpI;
+    int32_t    qpP;
+    int32_t    qpB;
+} VkVideoEncodeH265QpEXT;
+
+typedef struct VkVideoEncodeH265FrameSizeEXT {
+    uint32_t    frameISize;
+    uint32_t    framePSize;
+    uint32_t    frameBSize;
+} VkVideoEncodeH265FrameSizeEXT;
+
+typedef struct VkVideoEncodeH265RateControlLayerInfoEXT {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    uint8_t                          temporalId;
+    VkBool32                         useInitialRcQp;
+    VkVideoEncodeH265QpEXT           initialRcQp;
+    VkBool32                         useMinQp;
+    VkVideoEncodeH265QpEXT           minQp;
+    VkBool32                         useMaxQp;
+    VkVideoEncodeH265QpEXT           maxQp;
+    VkBool32                         useMaxFrameSize;
+    VkVideoEncodeH265FrameSizeEXT    maxFrameSize;
+} VkVideoEncodeH265RateControlLayerInfoEXT;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/vulkan/vulkan_core.h b/include/vulkan/vulkan_core.h
new file mode 100644
index 0000000..a6c5b4c
--- /dev/null
+++ b/include/vulkan/vulkan_core.h
@@ -0,0 +1,16870 @@
+#ifndef VULKAN_CORE_H_
+#define VULKAN_CORE_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_VERSION_1_0 1
+#include "vk_platform.h"
+
+#define VK_DEFINE_HANDLE(object) typedef struct object##_T* object;
+
+
+#ifndef VK_USE_64_BIT_PTR_DEFINES
+    #if defined(__LP64__) || defined(_WIN64) || (defined(__x86_64__) && !defined(__ILP32__) ) || defined(_M_X64) || defined(__ia64) || defined (_M_IA64) || defined(__aarch64__) || defined(__powerpc64__)
+        #define VK_USE_64_BIT_PTR_DEFINES 1
+    #else
+        #define VK_USE_64_BIT_PTR_DEFINES 0
+    #endif
+#endif
+
+
+#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE
+    #if (VK_USE_64_BIT_PTR_DEFINES==1)
+        #if (defined(__cplusplus) && (__cplusplus >= 201103L)) || (defined(_MSVC_LANG) && (_MSVC_LANG >= 201103L))
+            #define VK_NULL_HANDLE nullptr
+        #else
+            #define VK_NULL_HANDLE ((void*)0)
+        #endif
+    #else
+        #define VK_NULL_HANDLE 0ULL
+    #endif
+#endif
+#ifndef VK_NULL_HANDLE
+    #define VK_NULL_HANDLE 0
+#endif
+
+
+#ifndef VK_DEFINE_NON_DISPATCHABLE_HANDLE
+    #if (VK_USE_64_BIT_PTR_DEFINES==1)
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef struct object##_T *object;
+    #else
+        #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) typedef uint64_t object;
+    #endif
+#endif
+
+// DEPRECATED: This define is deprecated. VK_MAKE_API_VERSION should be used instead.
+#define VK_MAKE_VERSION(major, minor, patch) \
+    ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))
+
+// DEPRECATED: This define has been removed. Specific version defines (e.g. VK_API_VERSION_1_0), or the VK_MAKE_VERSION macro, should be used instead.
+//#define VK_API_VERSION VK_MAKE_VERSION(1, 0, 0) // Patch version should always be set to 0
+
+#define VK_MAKE_API_VERSION(variant, major, minor, patch) \
+    ((((uint32_t)(variant)) << 29) | (((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))
+
+// Vulkan 1.0 version number
+#define VK_API_VERSION_1_0 VK_MAKE_API_VERSION(0, 1, 0, 0)// Patch version should always be set to 0
+
+// Version of this file
+#define VK_HEADER_VERSION 238
+
+// Complete version of this file
+#define VK_HEADER_VERSION_COMPLETE VK_MAKE_API_VERSION(0, 1, 3, VK_HEADER_VERSION)
+
+// DEPRECATED: This define is deprecated. VK_API_VERSION_MAJOR should be used instead.
+#define VK_VERSION_MAJOR(version) ((uint32_t)(version) >> 22)
+
+// DEPRECATED: This define is deprecated. VK_API_VERSION_MINOR should be used instead.
+#define VK_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU)
+
+// DEPRECATED: This define is deprecated. VK_API_VERSION_PATCH should be used instead.
+#define VK_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU)
+
+#define VK_API_VERSION_VARIANT(version) ((uint32_t)(version) >> 29)
+#define VK_API_VERSION_MAJOR(version) (((uint32_t)(version) >> 22) & 0x7FU)
+#define VK_API_VERSION_MINOR(version) (((uint32_t)(version) >> 12) & 0x3FFU)
+#define VK_API_VERSION_PATCH(version) ((uint32_t)(version) & 0xFFFU)
+typedef uint32_t VkBool32;
+typedef uint64_t VkDeviceAddress;
+typedef uint64_t VkDeviceSize;
+typedef uint32_t VkFlags;
+typedef uint32_t VkSampleMask;
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImage)
+VK_DEFINE_HANDLE(VkInstance)
+VK_DEFINE_HANDLE(VkPhysicalDevice)
+VK_DEFINE_HANDLE(VkDevice)
+VK_DEFINE_HANDLE(VkQueue)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSemaphore)
+VK_DEFINE_HANDLE(VkCommandBuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFence)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeviceMemory)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkEvent)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkQueryPool)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferView)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkImageView)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkShaderModule)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineCache)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipelineLayout)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPipeline)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkRenderPass)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSetLayout)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSampler)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorSet)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorPool)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkFramebuffer)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCommandPool)
+#define VK_ATTACHMENT_UNUSED              (~0U)
+#define VK_FALSE                          0U
+#define VK_LOD_CLAMP_NONE                 1000.0F
+#define VK_QUEUE_FAMILY_IGNORED           (~0U)
+#define VK_REMAINING_ARRAY_LAYERS         (~0U)
+#define VK_REMAINING_MIP_LEVELS           (~0U)
+#define VK_SUBPASS_EXTERNAL               (~0U)
+#define VK_TRUE                           1U
+#define VK_WHOLE_SIZE                     (~0ULL)
+#define VK_MAX_MEMORY_TYPES               32U
+#define VK_MAX_PHYSICAL_DEVICE_NAME_SIZE  256U
+#define VK_UUID_SIZE                      16U
+#define VK_MAX_EXTENSION_NAME_SIZE        256U
+#define VK_MAX_DESCRIPTION_SIZE           256U
+#define VK_MAX_MEMORY_HEAPS               16U
+
+typedef enum VkResult {
+    VK_SUCCESS = 0,
+    VK_NOT_READY = 1,
+    VK_TIMEOUT = 2,
+    VK_EVENT_SET = 3,
+    VK_EVENT_RESET = 4,
+    VK_INCOMPLETE = 5,
+    VK_ERROR_OUT_OF_HOST_MEMORY = -1,
+    VK_ERROR_OUT_OF_DEVICE_MEMORY = -2,
+    VK_ERROR_INITIALIZATION_FAILED = -3,
+    VK_ERROR_DEVICE_LOST = -4,
+    VK_ERROR_MEMORY_MAP_FAILED = -5,
+    VK_ERROR_LAYER_NOT_PRESENT = -6,
+    VK_ERROR_EXTENSION_NOT_PRESENT = -7,
+    VK_ERROR_FEATURE_NOT_PRESENT = -8,
+    VK_ERROR_INCOMPATIBLE_DRIVER = -9,
+    VK_ERROR_TOO_MANY_OBJECTS = -10,
+    VK_ERROR_FORMAT_NOT_SUPPORTED = -11,
+    VK_ERROR_FRAGMENTED_POOL = -12,
+    VK_ERROR_UNKNOWN = -13,
+    VK_ERROR_OUT_OF_POOL_MEMORY = -1000069000,
+    VK_ERROR_INVALID_EXTERNAL_HANDLE = -1000072003,
+    VK_ERROR_FRAGMENTATION = -1000161000,
+    VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS = -1000257000,
+    VK_PIPELINE_COMPILE_REQUIRED = 1000297000,
+    VK_ERROR_SURFACE_LOST_KHR = -1000000000,
+    VK_ERROR_NATIVE_WINDOW_IN_USE_KHR = -1000000001,
+    VK_SUBOPTIMAL_KHR = 1000001003,
+    VK_ERROR_OUT_OF_DATE_KHR = -1000001004,
+    VK_ERROR_INCOMPATIBLE_DISPLAY_KHR = -1000003001,
+    VK_ERROR_VALIDATION_FAILED_EXT = -1000011001,
+    VK_ERROR_INVALID_SHADER_NV = -1000012000,
+    VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR = -1000023000,
+    VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR = -1000023001,
+    VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR = -1000023002,
+    VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR = -1000023003,
+    VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR = -1000023004,
+    VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR = -1000023005,
+    VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT = -1000158000,
+    VK_ERROR_NOT_PERMITTED_KHR = -1000174001,
+    VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT = -1000255000,
+    VK_THREAD_IDLE_KHR = 1000268000,
+    VK_THREAD_DONE_KHR = 1000268001,
+    VK_OPERATION_DEFERRED_KHR = 1000268002,
+    VK_OPERATION_NOT_DEFERRED_KHR = 1000268003,
+    VK_ERROR_COMPRESSION_EXHAUSTED_EXT = -1000338000,
+    VK_ERROR_OUT_OF_POOL_MEMORY_KHR = VK_ERROR_OUT_OF_POOL_MEMORY,
+    VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+    VK_ERROR_FRAGMENTATION_EXT = VK_ERROR_FRAGMENTATION,
+    VK_ERROR_NOT_PERMITTED_EXT = VK_ERROR_NOT_PERMITTED_KHR,
+    VK_ERROR_INVALID_DEVICE_ADDRESS_EXT = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
+    VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
+    VK_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED,
+    VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT = VK_PIPELINE_COMPILE_REQUIRED,
+    VK_RESULT_MAX_ENUM = 0x7FFFFFFF
+} VkResult;
+
+typedef enum VkStructureType {
+    VK_STRUCTURE_TYPE_APPLICATION_INFO = 0,
+    VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO = 1,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO = 2,
+    VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO = 3,
+    VK_STRUCTURE_TYPE_SUBMIT_INFO = 4,
+    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO = 5,
+    VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE = 6,
+    VK_STRUCTURE_TYPE_BIND_SPARSE_INFO = 7,
+    VK_STRUCTURE_TYPE_FENCE_CREATE_INFO = 8,
+    VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO = 9,
+    VK_STRUCTURE_TYPE_EVENT_CREATE_INFO = 10,
+    VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO = 11,
+    VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO = 12,
+    VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO = 13,
+    VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO = 14,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO = 15,
+    VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO = 16,
+    VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO = 17,
+    VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO = 18,
+    VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO = 19,
+    VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO = 20,
+    VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO = 21,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO = 22,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO = 23,
+    VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO = 24,
+    VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO = 25,
+    VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO = 26,
+    VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO = 27,
+    VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO = 28,
+    VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO = 29,
+    VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO = 30,
+    VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO = 31,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO = 32,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO = 33,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO = 34,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET = 35,
+    VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET = 36,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO = 37,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO = 38,
+    VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO = 39,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO = 40,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO = 41,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO = 42,
+    VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO = 43,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER = 44,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER = 45,
+    VK_STRUCTURE_TYPE_MEMORY_BARRIER = 46,
+    VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO = 47,
+    VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO = 48,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES = 1000094000,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO = 1000157000,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO = 1000157001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES = 1000083000,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS = 1000127000,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO = 1000127001,
+    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO = 1000060000,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO = 1000060003,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO = 1000060004,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO = 1000060005,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO = 1000060006,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO = 1000060013,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO = 1000060014,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES = 1000070000,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO = 1000070001,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2 = 1000146000,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2 = 1000146001,
+    VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2 = 1000146002,
+    VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2 = 1000146003,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2 = 1000146004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 = 1000059000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2 = 1000059001,
+    VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2 = 1000059002,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2 = 1000059003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2 = 1000059004,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 = 1000059005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2 = 1000059006,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2 = 1000059007,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2 = 1000059008,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES = 1000117000,
+    VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO = 1000117001,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO = 1000117002,
+    VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO = 1000117003,
+    VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO = 1000053000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES = 1000053001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES = 1000053002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES = 1000120000,
+    VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO = 1000145000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES = 1000145001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES = 1000145002,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2 = 1000145003,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO = 1000156000,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO = 1000156001,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO = 1000156002,
+    VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO = 1000156003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES = 1000156004,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES = 1000156005,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO = 1000085000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO = 1000071000,
+    VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES = 1000071001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO = 1000071002,
+    VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES = 1000071003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES = 1000071004,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO = 1000072000,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO = 1000072001,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO = 1000072002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO = 1000112000,
+    VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES = 1000112001,
+    VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO = 1000113000,
+    VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO = 1000077000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO = 1000076000,
+    VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES = 1000076001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES = 1000168000,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT = 1000168001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES = 1000063000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES = 49,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES = 50,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES = 51,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES = 52,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO = 1000147000,
+    VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2 = 1000109000,
+    VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2 = 1000109001,
+    VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2 = 1000109002,
+    VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2 = 1000109003,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2 = 1000109004,
+    VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO = 1000109005,
+    VK_STRUCTURE_TYPE_SUBPASS_END_INFO = 1000109006,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES = 1000177000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES = 1000196000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES = 1000180000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES = 1000082000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES = 1000197000,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO = 1000161000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES = 1000161001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES = 1000161002,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO = 1000161003,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT = 1000161004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES = 1000199000,
+    VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE = 1000199001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES = 1000221000,
+    VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO = 1000246000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES = 1000130000,
+    VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO = 1000130001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES = 1000211000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES = 1000108000,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO = 1000108001,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO = 1000108002,
+    VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO = 1000108003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES = 1000253000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES = 1000175000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES = 1000241000,
+    VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT = 1000241001,
+    VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT = 1000241002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES = 1000261000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES = 1000207000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES = 1000207001,
+    VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO = 1000207002,
+    VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO = 1000207003,
+    VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO = 1000207004,
+    VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO = 1000207005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES = 1000257000,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO = 1000244001,
+    VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO = 1000257002,
+    VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO = 1000257003,
+    VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO = 1000257004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES = 53,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES = 54,
+    VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO = 1000192000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES = 1000215000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES = 1000245000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES = 1000276000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES = 1000295000,
+    VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO = 1000295001,
+    VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO = 1000295002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES = 1000297000,
+    VK_STRUCTURE_TYPE_MEMORY_BARRIER_2 = 1000314000,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2 = 1000314001,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2 = 1000314002,
+    VK_STRUCTURE_TYPE_DEPENDENCY_INFO = 1000314003,
+    VK_STRUCTURE_TYPE_SUBMIT_INFO_2 = 1000314004,
+    VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO = 1000314005,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO = 1000314006,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES = 1000314007,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES = 1000325000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES = 1000335000,
+    VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2 = 1000337000,
+    VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2 = 1000337001,
+    VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2 = 1000337002,
+    VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2 = 1000337003,
+    VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2 = 1000337004,
+    VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2 = 1000337005,
+    VK_STRUCTURE_TYPE_BUFFER_COPY_2 = 1000337006,
+    VK_STRUCTURE_TYPE_IMAGE_COPY_2 = 1000337007,
+    VK_STRUCTURE_TYPE_IMAGE_BLIT_2 = 1000337008,
+    VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2 = 1000337009,
+    VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2 = 1000337010,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES = 1000225000,
+    VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO = 1000225001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES = 1000225002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES = 1000138000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES = 1000138001,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK = 1000138002,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO = 1000138003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES = 1000066000,
+    VK_STRUCTURE_TYPE_RENDERING_INFO = 1000044000,
+    VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO = 1000044001,
+    VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO = 1000044002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES = 1000044003,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO = 1000044004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES = 1000280000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES = 1000280001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES = 1000281001,
+    VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3 = 1000360000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES = 1000413000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES = 1000413001,
+    VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS = 1000413002,
+    VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS = 1000413003,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR = 1000001000,
+    VK_STRUCTURE_TYPE_PRESENT_INFO_KHR = 1000001001,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR = 1000060007,
+    VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR = 1000060008,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR = 1000060009,
+    VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR = 1000060010,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR = 1000060011,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR = 1000060012,
+    VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR = 1000002000,
+    VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR = 1000002001,
+    VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR = 1000003000,
+    VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR = 1000004000,
+    VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR = 1000005000,
+    VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR = 1000006000,
+    VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR = 1000008000,
+    VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR = 1000009000,
+    VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT = 1000011000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD = 1000018000,
+    VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT = 1000022000,
+    VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT = 1000022001,
+    VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT = 1000022002,
+    VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR = 1000023000,
+    VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR = 1000023001,
+    VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR = 1000023002,
+    VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR = 1000023003,
+    VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR = 1000023004,
+    VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR = 1000023005,
+    VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000023006,
+    VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR = 1000023007,
+    VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR = 1000023008,
+    VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR = 1000023009,
+    VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR = 1000023010,
+    VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR = 1000023011,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR = 1000023012,
+    VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR = 1000023013,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR = 1000023014,
+    VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR = 1000023015,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR = 1000023016,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR = 1000024000,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR = 1000024001,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR = 1000024002,
+    VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV = 1000026000,
+    VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV = 1000026001,
+    VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV = 1000026002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT = 1000028000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT = 1000028001,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT = 1000028002,
+    VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX = 1000029000,
+    VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX = 1000029001,
+    VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX = 1000029002,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX = 1000030000,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX = 1000030001,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT = 1000038000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000038001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT = 1000038002,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT = 1000038003,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT = 1000038004,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT = 1000038005,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT = 1000038006,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_EXT = 1000038007,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT = 1000038008,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT = 1000038009,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT = 1000038010,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_EXT = 1000039000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT = 1000039001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT = 1000039002,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT = 1000039003,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT = 1000039004,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT = 1000039005,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT = 1000039006,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_EXT = 1000039007,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT = 1000039008,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT = 1000039009,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT = 1000039010,
+#endif
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR = 1000040000,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR = 1000040001,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR = 1000040003,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000040004,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR = 1000040005,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR = 1000040006,
+    VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD = 1000041000,
+    VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000044006,
+    VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT = 1000044007,
+    VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD = 1000044008,
+    VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX = 1000044009,
+    VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP = 1000049000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV = 1000050000,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV = 1000056000,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV = 1000056001,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057000,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV = 1000057001,
+    VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV = 1000058000,
+    VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT = 1000061000,
+    VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN = 1000062000,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT = 1000067000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT = 1000067001,
+    VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT = 1000068000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT = 1000068001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT = 1000068002,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073000,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR = 1000073001,
+    VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR = 1000073002,
+    VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR = 1000073003,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR = 1000074000,
+    VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR = 1000074001,
+    VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR = 1000074002,
+    VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR = 1000075000,
+    VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078000,
+    VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR = 1000078001,
+    VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR = 1000078002,
+    VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR = 1000078003,
+    VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR = 1000079000,
+    VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR = 1000079001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR = 1000080000,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT = 1000081000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT = 1000081001,
+    VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT = 1000081002,
+    VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR = 1000084000,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV = 1000087000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT = 1000090000,
+    VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT = 1000091000,
+    VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT = 1000091001,
+    VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT = 1000091002,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT = 1000091003,
+    VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE = 1000092000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX = 1000097000,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV = 1000098000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT = 1000099000,
+    VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT = 1000099001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT = 1000101000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT = 1000101001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT = 1000102000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT = 1000102001,
+    VK_STRUCTURE_TYPE_HDR_METADATA_EXT = 1000105000,
+    VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR = 1000111000,
+    VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114000,
+    VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR = 1000114001,
+    VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR = 1000114002,
+    VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR = 1000115000,
+    VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR = 1000115001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR = 1000116000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR = 1000116001,
+    VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR = 1000116002,
+    VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR = 1000116003,
+    VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR = 1000116004,
+    VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR = 1000116005,
+    VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR = 1000116006,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR = 1000119000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR = 1000119001,
+    VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR = 1000119002,
+    VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR = 1000121000,
+    VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR = 1000121001,
+    VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR = 1000121002,
+    VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR = 1000121003,
+    VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR = 1000121004,
+    VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK = 1000122000,
+    VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK = 1000123000,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT = 1000128000,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT = 1000128001,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT = 1000128002,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT = 1000128003,
+    VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT = 1000128004,
+    VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID = 1000129000,
+    VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID = 1000129001,
+    VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID = 1000129002,
+    VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129003,
+    VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID = 1000129004,
+    VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID = 1000129005,
+    VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID = 1000129006,
+    VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT = 1000143000,
+    VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT = 1000143001,
+    VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT = 1000143002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT = 1000143003,
+    VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT = 1000143004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT = 1000148000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT = 1000148001,
+    VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT = 1000148002,
+    VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV = 1000149000,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR = 1000150007,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR = 1000150000,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR = 1000150002,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR = 1000150003,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR = 1000150004,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR = 1000150005,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR = 1000150006,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR = 1000150009,
+    VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR = 1000150010,
+    VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR = 1000150011,
+    VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR = 1000150012,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR = 1000150013,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR = 1000150014,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR = 1000150017,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR = 1000150020,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR = 1000347000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR = 1000347001,
+    VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR = 1000150015,
+    VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR = 1000150016,
+    VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR = 1000150018,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR = 1000348013,
+    VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV = 1000152000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV = 1000154000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV = 1000154001,
+    VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT = 1000158000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT = 1000158002,
+    VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT = 1000158003,
+    VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT = 1000158004,
+    VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT = 1000158005,
+    VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT = 1000158006,
+    VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160000,
+    VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT = 1000160001,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR = 1000163000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR = 1000163001,
+#endif
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV = 1000164000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV = 1000164001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV = 1000164002,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV = 1000164005,
+    VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV = 1000165000,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV = 1000165001,
+    VK_STRUCTURE_TYPE_GEOMETRY_NV = 1000165003,
+    VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV = 1000165004,
+    VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV = 1000165005,
+    VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV = 1000165006,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV = 1000165007,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV = 1000165008,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV = 1000165009,
+    VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV = 1000165011,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV = 1000165012,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV = 1000166000,
+    VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV = 1000166001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT = 1000170000,
+    VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT = 1000170001,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT = 1000178000,
+    VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT = 1000178001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT = 1000178002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR = 1000181000,
+    VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD = 1000183000,
+    VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT = 1000184000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD = 1000185000,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR = 1000187000,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR = 1000187001,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR = 1000187002,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR = 1000187003,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR = 1000187004,
+    VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR = 1000187005,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR = 1000174000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR = 1000388000,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR = 1000388001,
+    VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD = 1000189000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT = 1000190000,
+    VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT = 1000190001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT = 1000190002,
+    VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP = 1000191000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV = 1000201000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV = 1000202000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV = 1000202001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV = 1000204000,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV = 1000205000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV = 1000205002,
+    VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV = 1000206000,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV = 1000206001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL = 1000209000,
+    VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL = 1000210000,
+    VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL = 1000210001,
+    VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL = 1000210002,
+    VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL = 1000210003,
+    VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL = 1000210004,
+    VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL = 1000210005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT = 1000212000,
+    VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD = 1000213000,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD = 1000213001,
+    VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA = 1000214000,
+    VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT = 1000217000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT = 1000218000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT = 1000218001,
+    VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT = 1000218002,
+    VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR = 1000226000,
+    VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR = 1000226001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR = 1000226002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR = 1000226003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR = 1000226004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD = 1000227000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD = 1000229000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT = 1000234000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT = 1000237000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT = 1000238000,
+    VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT = 1000238001,
+    VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR = 1000239000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV = 1000240000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT = 1000244000,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT = 1000244002,
+    VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT = 1000247000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR = 1000248000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV = 1000249000,
+    VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV = 1000249002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV = 1000250000,
+    VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV = 1000250001,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV = 1000250002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT = 1000251000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT = 1000252000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT = 1000254000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT = 1000254001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT = 1000254002,
+    VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT = 1000255000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT = 1000255002,
+    VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT = 1000255001,
+    VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT = 1000256000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT = 1000259000,
+    VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT = 1000259001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT = 1000259002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT = 1000260000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT = 1000265000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT = 1000267000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR = 1000269000,
+    VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR = 1000269001,
+    VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR = 1000269002,
+    VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR = 1000269003,
+    VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR = 1000269004,
+    VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR = 1000269005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT = 1000273000,
+    VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT = 1000274000,
+    VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT = 1000274001,
+    VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT = 1000274002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT = 1000275000,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT = 1000275001,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT = 1000275002,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT = 1000275003,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT = 1000275004,
+    VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT = 1000275005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV = 1000277000,
+    VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV = 1000277001,
+    VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV = 1000277002,
+    VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV = 1000277003,
+    VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV = 1000277004,
+    VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV = 1000277005,
+    VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV = 1000277006,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV = 1000277007,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV = 1000278000,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV = 1000278001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT = 1000281000,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM = 1000282000,
+    VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM = 1000282001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT = 1000284000,
+    VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT = 1000284001,
+    VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT = 1000284002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT = 1000286000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT = 1000286001,
+    VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT = 1000287000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT = 1000287001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT = 1000287002,
+    VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR = 1000290000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV = 1000292000,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV = 1000292001,
+    VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV = 1000292002,
+    VK_STRUCTURE_TYPE_PRESENT_ID_KHR = 1000294000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR = 1000294001,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR = 1000299000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR = 1000299001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR = 1000299002,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR = 1000299003,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR = 1000299004,
+#endif
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV = 1000300000,
+    VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV = 1000300001,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT = 1000311000,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT = 1000311001,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT = 1000311002,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT = 1000311003,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT = 1000311004,
+    VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT = 1000311005,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT = 1000311006,
+    VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT = 1000311007,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT = 1000311008,
+    VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT = 1000311009,
+    VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311010,
+    VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT = 1000311011,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV = 1000314008,
+    VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV = 1000314009,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT = 1000316000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT = 1000316001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT = 1000316002,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT = 1000316003,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT = 1000316004,
+    VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316005,
+    VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316006,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316007,
+    VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316008,
+    VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT = 1000316010,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT = 1000316011,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT = 1000316012,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT = 1000316009,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT = 1000320000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT = 1000320001,
+    VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT = 1000320002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD = 1000321000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR = 1000203000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR = 1000322000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR = 1000323000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV = 1000326000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV = 1000326001,
+    VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV = 1000326002,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV = 1000327000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV = 1000327001,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV = 1000327002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT = 1000328000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT = 1000328001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT = 1000330000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT = 1000332000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT = 1000332001,
+    VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM = 1000333000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR = 1000336000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT = 1000338000,
+    VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT = 1000338001,
+    VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT = 1000338002,
+    VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT = 1000338003,
+    VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT = 1000338004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT = 1000339000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT = 1000340000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT = 1000341000,
+    VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT = 1000341001,
+    VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT = 1000341002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT = 1000344000,
+    VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT = 1000346000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT = 1000352000,
+    VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT = 1000352001,
+    VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT = 1000352002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT = 1000353000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT = 1000354000,
+    VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT = 1000354001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT = 1000355000,
+    VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT = 1000355001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT = 1000356000,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364000,
+    VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA = 1000364001,
+    VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000364002,
+    VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365000,
+    VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA = 1000365001,
+    VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA = 1000366000,
+    VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA = 1000366001,
+    VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA = 1000366002,
+    VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA = 1000366003,
+    VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA = 1000366004,
+    VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA = 1000366005,
+    VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA = 1000366006,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA = 1000366007,
+    VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA = 1000366008,
+    VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA = 1000366009,
+    VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI = 1000369000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI = 1000369001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI = 1000369002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI = 1000370000,
+    VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV = 1000371000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV = 1000371001,
+    VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT = 1000372000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT = 1000372001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT = 1000376000,
+    VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT = 1000376001,
+    VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT = 1000376002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT = 1000377000,
+    VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX = 1000378000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT = 1000381000,
+    VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT = 1000381001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT = 1000382000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR = 1000386000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT = 1000391000,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT = 1000391001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT = 1000392000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT = 1000392001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT = 1000393000,
+    VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT = 1000396000,
+    VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT = 1000396001,
+    VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT = 1000396002,
+    VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT = 1000396003,
+    VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT = 1000396004,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT = 1000396005,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT = 1000396006,
+    VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT = 1000396007,
+    VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT = 1000396008,
+    VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT = 1000396009,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT = 1000411000,
+    VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT = 1000411001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT = 1000412000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE = 1000420000,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE = 1000420001,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE = 1000420002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT = 1000421000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT = 1000422000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM = 1000425000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM = 1000425001,
+    VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM = 1000425002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV = 1000426000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV = 1000426001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV = 1000427000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV = 1000427001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV = 1000430000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT = 1000437000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM = 1000440000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM = 1000440001,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM = 1000440002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT = 1000455000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT = 1000455001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT = 1000458000,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT = 1000458001,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT = 1000458002,
+    VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT = 1000458003,
+    VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG = 1000459000,
+    VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG = 1000459001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT = 1000462000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT = 1000462001,
+    VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT = 1000462002,
+    VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT = 1000462003,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT = 1000342000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV = 1000464000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV = 1000464001,
+    VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV = 1000464002,
+    VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV = 1000464003,
+    VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV = 1000464004,
+    VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV = 1000464005,
+    VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV = 1000464010,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT = 1000465000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT = 1000466000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM = 1000484000,
+    VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM = 1000484001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC = 1000485000,
+    VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC = 1000485001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM = 1000488000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV = 1000490000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV = 1000490001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT = 1000351000,
+    VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT = 1000351002,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM = 1000497000,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM = 1000497001,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
+    VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
+    VK_STRUCTURE_TYPE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_INFO,
+    VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
+    VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO,
+    VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD,
+    VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
+    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
+    VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
+    VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES2_EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
+    VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
+    VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO,
+    VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2,
+    VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2,
+    VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2,
+    VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
+    VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2,
+    VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
+    VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
+    VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
+    VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
+    VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
+    VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+    VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
+    VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES,
+    VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+    VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
+    VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+    VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
+    VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
+    VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
+    VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
+    VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
+    VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
+    VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
+    VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
+    VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
+    VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
+    VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
+    VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
+    VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
+    VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES,
+    VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
+    VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
+    VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES,
+    VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
+    VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+    VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
+    VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
+    VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES,
+    VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES,
+    VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2,
+    VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2,
+    VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
+    VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
+    VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
+    VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
+    VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES,
+    VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2,
+    VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2,
+    VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2,
+    VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2,
+    VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2,
+    VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2,
+    VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2,
+    VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2,
+    VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2,
+    VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2,
+    VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT,
+    VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT,
+    VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3,
+    VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR,
+    VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES,
+    VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
+    VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS,
+    VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS,
+    VK_STRUCTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkStructureType;
+
+typedef enum VkPipelineCacheHeaderVersion {
+    VK_PIPELINE_CACHE_HEADER_VERSION_ONE = 1,
+    VK_PIPELINE_CACHE_HEADER_VERSION_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCacheHeaderVersion;
+
+typedef enum VkImageLayout {
+    VK_IMAGE_LAYOUT_UNDEFINED = 0,
+    VK_IMAGE_LAYOUT_GENERAL = 1,
+    VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL = 2,
+    VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL = 3,
+    VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL = 4,
+    VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL = 5,
+    VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL = 6,
+    VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL = 7,
+    VK_IMAGE_LAYOUT_PREINITIALIZED = 8,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL = 1000117000,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL = 1000117001,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL = 1000241000,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL = 1000241001,
+    VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL = 1000241002,
+    VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL = 1000241003,
+    VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL = 1000314000,
+    VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL = 1000314001,
+    VK_IMAGE_LAYOUT_PRESENT_SRC_KHR = 1000001002,
+    VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR = 1000024000,
+    VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR = 1000024001,
+    VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR = 1000024002,
+    VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR = 1000111000,
+    VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT = 1000218000,
+    VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR = 1000164003,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR = 1000299000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR = 1000299001,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR = 1000299002,
+#endif
+    VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT = 1000339000,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR,
+    VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
+    VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
+    VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL,
+    VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL,
+    VK_IMAGE_LAYOUT_MAX_ENUM = 0x7FFFFFFF
+} VkImageLayout;
+
+typedef enum VkObjectType {
+    VK_OBJECT_TYPE_UNKNOWN = 0,
+    VK_OBJECT_TYPE_INSTANCE = 1,
+    VK_OBJECT_TYPE_PHYSICAL_DEVICE = 2,
+    VK_OBJECT_TYPE_DEVICE = 3,
+    VK_OBJECT_TYPE_QUEUE = 4,
+    VK_OBJECT_TYPE_SEMAPHORE = 5,
+    VK_OBJECT_TYPE_COMMAND_BUFFER = 6,
+    VK_OBJECT_TYPE_FENCE = 7,
+    VK_OBJECT_TYPE_DEVICE_MEMORY = 8,
+    VK_OBJECT_TYPE_BUFFER = 9,
+    VK_OBJECT_TYPE_IMAGE = 10,
+    VK_OBJECT_TYPE_EVENT = 11,
+    VK_OBJECT_TYPE_QUERY_POOL = 12,
+    VK_OBJECT_TYPE_BUFFER_VIEW = 13,
+    VK_OBJECT_TYPE_IMAGE_VIEW = 14,
+    VK_OBJECT_TYPE_SHADER_MODULE = 15,
+    VK_OBJECT_TYPE_PIPELINE_CACHE = 16,
+    VK_OBJECT_TYPE_PIPELINE_LAYOUT = 17,
+    VK_OBJECT_TYPE_RENDER_PASS = 18,
+    VK_OBJECT_TYPE_PIPELINE = 19,
+    VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT = 20,
+    VK_OBJECT_TYPE_SAMPLER = 21,
+    VK_OBJECT_TYPE_DESCRIPTOR_POOL = 22,
+    VK_OBJECT_TYPE_DESCRIPTOR_SET = 23,
+    VK_OBJECT_TYPE_FRAMEBUFFER = 24,
+    VK_OBJECT_TYPE_COMMAND_POOL = 25,
+    VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION = 1000156000,
+    VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE = 1000085000,
+    VK_OBJECT_TYPE_PRIVATE_DATA_SLOT = 1000295000,
+    VK_OBJECT_TYPE_SURFACE_KHR = 1000000000,
+    VK_OBJECT_TYPE_SWAPCHAIN_KHR = 1000001000,
+    VK_OBJECT_TYPE_DISPLAY_KHR = 1000002000,
+    VK_OBJECT_TYPE_DISPLAY_MODE_KHR = 1000002001,
+    VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT = 1000011000,
+    VK_OBJECT_TYPE_VIDEO_SESSION_KHR = 1000023000,
+    VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR = 1000023001,
+    VK_OBJECT_TYPE_CU_MODULE_NVX = 1000029000,
+    VK_OBJECT_TYPE_CU_FUNCTION_NVX = 1000029001,
+    VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT = 1000128000,
+    VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000,
+    VK_OBJECT_TYPE_VALIDATION_CACHE_EXT = 1000160000,
+    VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000,
+    VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL = 1000210000,
+    VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR = 1000268000,
+    VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV = 1000277000,
+    VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA = 1000366000,
+    VK_OBJECT_TYPE_MICROMAP_EXT = 1000396000,
+    VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV = 1000464000,
+    VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
+    VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
+    VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT,
+    VK_OBJECT_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkObjectType;
+
+typedef enum VkVendorId {
+    VK_VENDOR_ID_VIV = 0x10001,
+    VK_VENDOR_ID_VSI = 0x10002,
+    VK_VENDOR_ID_KAZAN = 0x10003,
+    VK_VENDOR_ID_CODEPLAY = 0x10004,
+    VK_VENDOR_ID_MESA = 0x10005,
+    VK_VENDOR_ID_POCL = 0x10006,
+    VK_VENDOR_ID_MAX_ENUM = 0x7FFFFFFF
+} VkVendorId;
+
+typedef enum VkSystemAllocationScope {
+    VK_SYSTEM_ALLOCATION_SCOPE_COMMAND = 0,
+    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT = 1,
+    VK_SYSTEM_ALLOCATION_SCOPE_CACHE = 2,
+    VK_SYSTEM_ALLOCATION_SCOPE_DEVICE = 3,
+    VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE = 4,
+    VK_SYSTEM_ALLOCATION_SCOPE_MAX_ENUM = 0x7FFFFFFF
+} VkSystemAllocationScope;
+
+typedef enum VkInternalAllocationType {
+    VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE = 0,
+    VK_INTERNAL_ALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkInternalAllocationType;
+
+typedef enum VkFormat {
+    VK_FORMAT_UNDEFINED = 0,
+    VK_FORMAT_R4G4_UNORM_PACK8 = 1,
+    VK_FORMAT_R4G4B4A4_UNORM_PACK16 = 2,
+    VK_FORMAT_B4G4R4A4_UNORM_PACK16 = 3,
+    VK_FORMAT_R5G6B5_UNORM_PACK16 = 4,
+    VK_FORMAT_B5G6R5_UNORM_PACK16 = 5,
+    VK_FORMAT_R5G5B5A1_UNORM_PACK16 = 6,
+    VK_FORMAT_B5G5R5A1_UNORM_PACK16 = 7,
+    VK_FORMAT_A1R5G5B5_UNORM_PACK16 = 8,
+    VK_FORMAT_R8_UNORM = 9,
+    VK_FORMAT_R8_SNORM = 10,
+    VK_FORMAT_R8_USCALED = 11,
+    VK_FORMAT_R8_SSCALED = 12,
+    VK_FORMAT_R8_UINT = 13,
+    VK_FORMAT_R8_SINT = 14,
+    VK_FORMAT_R8_SRGB = 15,
+    VK_FORMAT_R8G8_UNORM = 16,
+    VK_FORMAT_R8G8_SNORM = 17,
+    VK_FORMAT_R8G8_USCALED = 18,
+    VK_FORMAT_R8G8_SSCALED = 19,
+    VK_FORMAT_R8G8_UINT = 20,
+    VK_FORMAT_R8G8_SINT = 21,
+    VK_FORMAT_R8G8_SRGB = 22,
+    VK_FORMAT_R8G8B8_UNORM = 23,
+    VK_FORMAT_R8G8B8_SNORM = 24,
+    VK_FORMAT_R8G8B8_USCALED = 25,
+    VK_FORMAT_R8G8B8_SSCALED = 26,
+    VK_FORMAT_R8G8B8_UINT = 27,
+    VK_FORMAT_R8G8B8_SINT = 28,
+    VK_FORMAT_R8G8B8_SRGB = 29,
+    VK_FORMAT_B8G8R8_UNORM = 30,
+    VK_FORMAT_B8G8R8_SNORM = 31,
+    VK_FORMAT_B8G8R8_USCALED = 32,
+    VK_FORMAT_B8G8R8_SSCALED = 33,
+    VK_FORMAT_B8G8R8_UINT = 34,
+    VK_FORMAT_B8G8R8_SINT = 35,
+    VK_FORMAT_B8G8R8_SRGB = 36,
+    VK_FORMAT_R8G8B8A8_UNORM = 37,
+    VK_FORMAT_R8G8B8A8_SNORM = 38,
+    VK_FORMAT_R8G8B8A8_USCALED = 39,
+    VK_FORMAT_R8G8B8A8_SSCALED = 40,
+    VK_FORMAT_R8G8B8A8_UINT = 41,
+    VK_FORMAT_R8G8B8A8_SINT = 42,
+    VK_FORMAT_R8G8B8A8_SRGB = 43,
+    VK_FORMAT_B8G8R8A8_UNORM = 44,
+    VK_FORMAT_B8G8R8A8_SNORM = 45,
+    VK_FORMAT_B8G8R8A8_USCALED = 46,
+    VK_FORMAT_B8G8R8A8_SSCALED = 47,
+    VK_FORMAT_B8G8R8A8_UINT = 48,
+    VK_FORMAT_B8G8R8A8_SINT = 49,
+    VK_FORMAT_B8G8R8A8_SRGB = 50,
+    VK_FORMAT_A8B8G8R8_UNORM_PACK32 = 51,
+    VK_FORMAT_A8B8G8R8_SNORM_PACK32 = 52,
+    VK_FORMAT_A8B8G8R8_USCALED_PACK32 = 53,
+    VK_FORMAT_A8B8G8R8_SSCALED_PACK32 = 54,
+    VK_FORMAT_A8B8G8R8_UINT_PACK32 = 55,
+    VK_FORMAT_A8B8G8R8_SINT_PACK32 = 56,
+    VK_FORMAT_A8B8G8R8_SRGB_PACK32 = 57,
+    VK_FORMAT_A2R10G10B10_UNORM_PACK32 = 58,
+    VK_FORMAT_A2R10G10B10_SNORM_PACK32 = 59,
+    VK_FORMAT_A2R10G10B10_USCALED_PACK32 = 60,
+    VK_FORMAT_A2R10G10B10_SSCALED_PACK32 = 61,
+    VK_FORMAT_A2R10G10B10_UINT_PACK32 = 62,
+    VK_FORMAT_A2R10G10B10_SINT_PACK32 = 63,
+    VK_FORMAT_A2B10G10R10_UNORM_PACK32 = 64,
+    VK_FORMAT_A2B10G10R10_SNORM_PACK32 = 65,
+    VK_FORMAT_A2B10G10R10_USCALED_PACK32 = 66,
+    VK_FORMAT_A2B10G10R10_SSCALED_PACK32 = 67,
+    VK_FORMAT_A2B10G10R10_UINT_PACK32 = 68,
+    VK_FORMAT_A2B10G10R10_SINT_PACK32 = 69,
+    VK_FORMAT_R16_UNORM = 70,
+    VK_FORMAT_R16_SNORM = 71,
+    VK_FORMAT_R16_USCALED = 72,
+    VK_FORMAT_R16_SSCALED = 73,
+    VK_FORMAT_R16_UINT = 74,
+    VK_FORMAT_R16_SINT = 75,
+    VK_FORMAT_R16_SFLOAT = 76,
+    VK_FORMAT_R16G16_UNORM = 77,
+    VK_FORMAT_R16G16_SNORM = 78,
+    VK_FORMAT_R16G16_USCALED = 79,
+    VK_FORMAT_R16G16_SSCALED = 80,
+    VK_FORMAT_R16G16_UINT = 81,
+    VK_FORMAT_R16G16_SINT = 82,
+    VK_FORMAT_R16G16_SFLOAT = 83,
+    VK_FORMAT_R16G16B16_UNORM = 84,
+    VK_FORMAT_R16G16B16_SNORM = 85,
+    VK_FORMAT_R16G16B16_USCALED = 86,
+    VK_FORMAT_R16G16B16_SSCALED = 87,
+    VK_FORMAT_R16G16B16_UINT = 88,
+    VK_FORMAT_R16G16B16_SINT = 89,
+    VK_FORMAT_R16G16B16_SFLOAT = 90,
+    VK_FORMAT_R16G16B16A16_UNORM = 91,
+    VK_FORMAT_R16G16B16A16_SNORM = 92,
+    VK_FORMAT_R16G16B16A16_USCALED = 93,
+    VK_FORMAT_R16G16B16A16_SSCALED = 94,
+    VK_FORMAT_R16G16B16A16_UINT = 95,
+    VK_FORMAT_R16G16B16A16_SINT = 96,
+    VK_FORMAT_R16G16B16A16_SFLOAT = 97,
+    VK_FORMAT_R32_UINT = 98,
+    VK_FORMAT_R32_SINT = 99,
+    VK_FORMAT_R32_SFLOAT = 100,
+    VK_FORMAT_R32G32_UINT = 101,
+    VK_FORMAT_R32G32_SINT = 102,
+    VK_FORMAT_R32G32_SFLOAT = 103,
+    VK_FORMAT_R32G32B32_UINT = 104,
+    VK_FORMAT_R32G32B32_SINT = 105,
+    VK_FORMAT_R32G32B32_SFLOAT = 106,
+    VK_FORMAT_R32G32B32A32_UINT = 107,
+    VK_FORMAT_R32G32B32A32_SINT = 108,
+    VK_FORMAT_R32G32B32A32_SFLOAT = 109,
+    VK_FORMAT_R64_UINT = 110,
+    VK_FORMAT_R64_SINT = 111,
+    VK_FORMAT_R64_SFLOAT = 112,
+    VK_FORMAT_R64G64_UINT = 113,
+    VK_FORMAT_R64G64_SINT = 114,
+    VK_FORMAT_R64G64_SFLOAT = 115,
+    VK_FORMAT_R64G64B64_UINT = 116,
+    VK_FORMAT_R64G64B64_SINT = 117,
+    VK_FORMAT_R64G64B64_SFLOAT = 118,
+    VK_FORMAT_R64G64B64A64_UINT = 119,
+    VK_FORMAT_R64G64B64A64_SINT = 120,
+    VK_FORMAT_R64G64B64A64_SFLOAT = 121,
+    VK_FORMAT_B10G11R11_UFLOAT_PACK32 = 122,
+    VK_FORMAT_E5B9G9R9_UFLOAT_PACK32 = 123,
+    VK_FORMAT_D16_UNORM = 124,
+    VK_FORMAT_X8_D24_UNORM_PACK32 = 125,
+    VK_FORMAT_D32_SFLOAT = 126,
+    VK_FORMAT_S8_UINT = 127,
+    VK_FORMAT_D16_UNORM_S8_UINT = 128,
+    VK_FORMAT_D24_UNORM_S8_UINT = 129,
+    VK_FORMAT_D32_SFLOAT_S8_UINT = 130,
+    VK_FORMAT_BC1_RGB_UNORM_BLOCK = 131,
+    VK_FORMAT_BC1_RGB_SRGB_BLOCK = 132,
+    VK_FORMAT_BC1_RGBA_UNORM_BLOCK = 133,
+    VK_FORMAT_BC1_RGBA_SRGB_BLOCK = 134,
+    VK_FORMAT_BC2_UNORM_BLOCK = 135,
+    VK_FORMAT_BC2_SRGB_BLOCK = 136,
+    VK_FORMAT_BC3_UNORM_BLOCK = 137,
+    VK_FORMAT_BC3_SRGB_BLOCK = 138,
+    VK_FORMAT_BC4_UNORM_BLOCK = 139,
+    VK_FORMAT_BC4_SNORM_BLOCK = 140,
+    VK_FORMAT_BC5_UNORM_BLOCK = 141,
+    VK_FORMAT_BC5_SNORM_BLOCK = 142,
+    VK_FORMAT_BC6H_UFLOAT_BLOCK = 143,
+    VK_FORMAT_BC6H_SFLOAT_BLOCK = 144,
+    VK_FORMAT_BC7_UNORM_BLOCK = 145,
+    VK_FORMAT_BC7_SRGB_BLOCK = 146,
+    VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK = 147,
+    VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK = 148,
+    VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK = 149,
+    VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK = 150,
+    VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK = 151,
+    VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK = 152,
+    VK_FORMAT_EAC_R11_UNORM_BLOCK = 153,
+    VK_FORMAT_EAC_R11_SNORM_BLOCK = 154,
+    VK_FORMAT_EAC_R11G11_UNORM_BLOCK = 155,
+    VK_FORMAT_EAC_R11G11_SNORM_BLOCK = 156,
+    VK_FORMAT_ASTC_4x4_UNORM_BLOCK = 157,
+    VK_FORMAT_ASTC_4x4_SRGB_BLOCK = 158,
+    VK_FORMAT_ASTC_5x4_UNORM_BLOCK = 159,
+    VK_FORMAT_ASTC_5x4_SRGB_BLOCK = 160,
+    VK_FORMAT_ASTC_5x5_UNORM_BLOCK = 161,
+    VK_FORMAT_ASTC_5x5_SRGB_BLOCK = 162,
+    VK_FORMAT_ASTC_6x5_UNORM_BLOCK = 163,
+    VK_FORMAT_ASTC_6x5_SRGB_BLOCK = 164,
+    VK_FORMAT_ASTC_6x6_UNORM_BLOCK = 165,
+    VK_FORMAT_ASTC_6x6_SRGB_BLOCK = 166,
+    VK_FORMAT_ASTC_8x5_UNORM_BLOCK = 167,
+    VK_FORMAT_ASTC_8x5_SRGB_BLOCK = 168,
+    VK_FORMAT_ASTC_8x6_UNORM_BLOCK = 169,
+    VK_FORMAT_ASTC_8x6_SRGB_BLOCK = 170,
+    VK_FORMAT_ASTC_8x8_UNORM_BLOCK = 171,
+    VK_FORMAT_ASTC_8x8_SRGB_BLOCK = 172,
+    VK_FORMAT_ASTC_10x5_UNORM_BLOCK = 173,
+    VK_FORMAT_ASTC_10x5_SRGB_BLOCK = 174,
+    VK_FORMAT_ASTC_10x6_UNORM_BLOCK = 175,
+    VK_FORMAT_ASTC_10x6_SRGB_BLOCK = 176,
+    VK_FORMAT_ASTC_10x8_UNORM_BLOCK = 177,
+    VK_FORMAT_ASTC_10x8_SRGB_BLOCK = 178,
+    VK_FORMAT_ASTC_10x10_UNORM_BLOCK = 179,
+    VK_FORMAT_ASTC_10x10_SRGB_BLOCK = 180,
+    VK_FORMAT_ASTC_12x10_UNORM_BLOCK = 181,
+    VK_FORMAT_ASTC_12x10_SRGB_BLOCK = 182,
+    VK_FORMAT_ASTC_12x12_UNORM_BLOCK = 183,
+    VK_FORMAT_ASTC_12x12_SRGB_BLOCK = 184,
+    VK_FORMAT_G8B8G8R8_422_UNORM = 1000156000,
+    VK_FORMAT_B8G8R8G8_422_UNORM = 1000156001,
+    VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM = 1000156002,
+    VK_FORMAT_G8_B8R8_2PLANE_420_UNORM = 1000156003,
+    VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM = 1000156004,
+    VK_FORMAT_G8_B8R8_2PLANE_422_UNORM = 1000156005,
+    VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM = 1000156006,
+    VK_FORMAT_R10X6_UNORM_PACK16 = 1000156007,
+    VK_FORMAT_R10X6G10X6_UNORM_2PACK16 = 1000156008,
+    VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16 = 1000156009,
+    VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16 = 1000156010,
+    VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16 = 1000156011,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16 = 1000156012,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16 = 1000156013,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16 = 1000156014,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16 = 1000156015,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16 = 1000156016,
+    VK_FORMAT_R12X4_UNORM_PACK16 = 1000156017,
+    VK_FORMAT_R12X4G12X4_UNORM_2PACK16 = 1000156018,
+    VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16 = 1000156019,
+    VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16 = 1000156020,
+    VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16 = 1000156021,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16 = 1000156022,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16 = 1000156023,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16 = 1000156024,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16 = 1000156025,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16 = 1000156026,
+    VK_FORMAT_G16B16G16R16_422_UNORM = 1000156027,
+    VK_FORMAT_B16G16R16G16_422_UNORM = 1000156028,
+    VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM = 1000156029,
+    VK_FORMAT_G16_B16R16_2PLANE_420_UNORM = 1000156030,
+    VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM = 1000156031,
+    VK_FORMAT_G16_B16R16_2PLANE_422_UNORM = 1000156032,
+    VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM = 1000156033,
+    VK_FORMAT_G8_B8R8_2PLANE_444_UNORM = 1000330000,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16 = 1000330001,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16 = 1000330002,
+    VK_FORMAT_G16_B16R16_2PLANE_444_UNORM = 1000330003,
+    VK_FORMAT_A4R4G4B4_UNORM_PACK16 = 1000340000,
+    VK_FORMAT_A4B4G4R4_UNORM_PACK16 = 1000340001,
+    VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK = 1000066000,
+    VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK = 1000066001,
+    VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK = 1000066002,
+    VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK = 1000066003,
+    VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK = 1000066004,
+    VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK = 1000066005,
+    VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK = 1000066006,
+    VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK = 1000066007,
+    VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK = 1000066008,
+    VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK = 1000066009,
+    VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK = 1000066010,
+    VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK = 1000066011,
+    VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK = 1000066012,
+    VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK = 1000066013,
+    VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG = 1000054000,
+    VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG = 1000054001,
+    VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG = 1000054002,
+    VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG = 1000054003,
+    VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG = 1000054004,
+    VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG = 1000054005,
+    VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG = 1000054006,
+    VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG = 1000054007,
+    VK_FORMAT_R16G16_S10_5_NV = 1000464000,
+    VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK,
+    VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK,
+    VK_FORMAT_G8B8G8R8_422_UNORM_KHR = VK_FORMAT_G8B8G8R8_422_UNORM,
+    VK_FORMAT_B8G8R8G8_422_UNORM_KHR = VK_FORMAT_B8G8R8G8_422_UNORM,
+    VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
+    VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
+    VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
+    VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
+    VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
+    VK_FORMAT_R10X6_UNORM_PACK16_KHR = VK_FORMAT_R10X6_UNORM_PACK16,
+    VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
+    VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
+    VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
+    VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_R12X4_UNORM_PACK16_KHR = VK_FORMAT_R12X4_UNORM_PACK16,
+    VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
+    VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
+    VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
+    VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_G16B16G16R16_422_UNORM_KHR = VK_FORMAT_G16B16G16R16_422_UNORM,
+    VK_FORMAT_B16G16R16G16_422_UNORM_KHR = VK_FORMAT_B16G16R16G16_422_UNORM,
+    VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
+    VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
+    VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
+    VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
+    VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+    VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM,
+    VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16,
+    VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM,
+    VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16,
+    VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16,
+    VK_FORMAT_MAX_ENUM = 0x7FFFFFFF
+} VkFormat;
+
+typedef enum VkImageTiling {
+    VK_IMAGE_TILING_OPTIMAL = 0,
+    VK_IMAGE_TILING_LINEAR = 1,
+    VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT = 1000158000,
+    VK_IMAGE_TILING_MAX_ENUM = 0x7FFFFFFF
+} VkImageTiling;
+
+typedef enum VkImageType {
+    VK_IMAGE_TYPE_1D = 0,
+    VK_IMAGE_TYPE_2D = 1,
+    VK_IMAGE_TYPE_3D = 2,
+    VK_IMAGE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageType;
+
+typedef enum VkPhysicalDeviceType {
+    VK_PHYSICAL_DEVICE_TYPE_OTHER = 0,
+    VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU = 1,
+    VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU = 2,
+    VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU = 3,
+    VK_PHYSICAL_DEVICE_TYPE_CPU = 4,
+    VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkPhysicalDeviceType;
+
+typedef enum VkQueryType {
+    VK_QUERY_TYPE_OCCLUSION = 0,
+    VK_QUERY_TYPE_PIPELINE_STATISTICS = 1,
+    VK_QUERY_TYPE_TIMESTAMP = 2,
+    VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR = 1000023000,
+    VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT = 1000028004,
+    VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR = 1000116000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR = 1000150000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR = 1000150001,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV = 1000165000,
+    VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL = 1000210000,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR = 1000299000,
+#endif
+    VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT = 1000328000,
+    VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT = 1000382000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR = 1000386000,
+    VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR = 1000386001,
+    VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT = 1000396000,
+    VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT = 1000396001,
+    VK_QUERY_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkQueryType;
+
+typedef enum VkSharingMode {
+    VK_SHARING_MODE_EXCLUSIVE = 0,
+    VK_SHARING_MODE_CONCURRENT = 1,
+    VK_SHARING_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSharingMode;
+
+typedef enum VkComponentSwizzle {
+    VK_COMPONENT_SWIZZLE_IDENTITY = 0,
+    VK_COMPONENT_SWIZZLE_ZERO = 1,
+    VK_COMPONENT_SWIZZLE_ONE = 2,
+    VK_COMPONENT_SWIZZLE_R = 3,
+    VK_COMPONENT_SWIZZLE_G = 4,
+    VK_COMPONENT_SWIZZLE_B = 5,
+    VK_COMPONENT_SWIZZLE_A = 6,
+    VK_COMPONENT_SWIZZLE_MAX_ENUM = 0x7FFFFFFF
+} VkComponentSwizzle;
+
+typedef enum VkImageViewType {
+    VK_IMAGE_VIEW_TYPE_1D = 0,
+    VK_IMAGE_VIEW_TYPE_2D = 1,
+    VK_IMAGE_VIEW_TYPE_3D = 2,
+    VK_IMAGE_VIEW_TYPE_CUBE = 3,
+    VK_IMAGE_VIEW_TYPE_1D_ARRAY = 4,
+    VK_IMAGE_VIEW_TYPE_2D_ARRAY = 5,
+    VK_IMAGE_VIEW_TYPE_CUBE_ARRAY = 6,
+    VK_IMAGE_VIEW_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkImageViewType;
+
+typedef enum VkBlendFactor {
+    VK_BLEND_FACTOR_ZERO = 0,
+    VK_BLEND_FACTOR_ONE = 1,
+    VK_BLEND_FACTOR_SRC_COLOR = 2,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR = 3,
+    VK_BLEND_FACTOR_DST_COLOR = 4,
+    VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR = 5,
+    VK_BLEND_FACTOR_SRC_ALPHA = 6,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA = 7,
+    VK_BLEND_FACTOR_DST_ALPHA = 8,
+    VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA = 9,
+    VK_BLEND_FACTOR_CONSTANT_COLOR = 10,
+    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR = 11,
+    VK_BLEND_FACTOR_CONSTANT_ALPHA = 12,
+    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA = 13,
+    VK_BLEND_FACTOR_SRC_ALPHA_SATURATE = 14,
+    VK_BLEND_FACTOR_SRC1_COLOR = 15,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR = 16,
+    VK_BLEND_FACTOR_SRC1_ALPHA = 17,
+    VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA = 18,
+    VK_BLEND_FACTOR_MAX_ENUM = 0x7FFFFFFF
+} VkBlendFactor;
+
+typedef enum VkBlendOp {
+    VK_BLEND_OP_ADD = 0,
+    VK_BLEND_OP_SUBTRACT = 1,
+    VK_BLEND_OP_REVERSE_SUBTRACT = 2,
+    VK_BLEND_OP_MIN = 3,
+    VK_BLEND_OP_MAX = 4,
+    VK_BLEND_OP_ZERO_EXT = 1000148000,
+    VK_BLEND_OP_SRC_EXT = 1000148001,
+    VK_BLEND_OP_DST_EXT = 1000148002,
+    VK_BLEND_OP_SRC_OVER_EXT = 1000148003,
+    VK_BLEND_OP_DST_OVER_EXT = 1000148004,
+    VK_BLEND_OP_SRC_IN_EXT = 1000148005,
+    VK_BLEND_OP_DST_IN_EXT = 1000148006,
+    VK_BLEND_OP_SRC_OUT_EXT = 1000148007,
+    VK_BLEND_OP_DST_OUT_EXT = 1000148008,
+    VK_BLEND_OP_SRC_ATOP_EXT = 1000148009,
+    VK_BLEND_OP_DST_ATOP_EXT = 1000148010,
+    VK_BLEND_OP_XOR_EXT = 1000148011,
+    VK_BLEND_OP_MULTIPLY_EXT = 1000148012,
+    VK_BLEND_OP_SCREEN_EXT = 1000148013,
+    VK_BLEND_OP_OVERLAY_EXT = 1000148014,
+    VK_BLEND_OP_DARKEN_EXT = 1000148015,
+    VK_BLEND_OP_LIGHTEN_EXT = 1000148016,
+    VK_BLEND_OP_COLORDODGE_EXT = 1000148017,
+    VK_BLEND_OP_COLORBURN_EXT = 1000148018,
+    VK_BLEND_OP_HARDLIGHT_EXT = 1000148019,
+    VK_BLEND_OP_SOFTLIGHT_EXT = 1000148020,
+    VK_BLEND_OP_DIFFERENCE_EXT = 1000148021,
+    VK_BLEND_OP_EXCLUSION_EXT = 1000148022,
+    VK_BLEND_OP_INVERT_EXT = 1000148023,
+    VK_BLEND_OP_INVERT_RGB_EXT = 1000148024,
+    VK_BLEND_OP_LINEARDODGE_EXT = 1000148025,
+    VK_BLEND_OP_LINEARBURN_EXT = 1000148026,
+    VK_BLEND_OP_VIVIDLIGHT_EXT = 1000148027,
+    VK_BLEND_OP_LINEARLIGHT_EXT = 1000148028,
+    VK_BLEND_OP_PINLIGHT_EXT = 1000148029,
+    VK_BLEND_OP_HARDMIX_EXT = 1000148030,
+    VK_BLEND_OP_HSL_HUE_EXT = 1000148031,
+    VK_BLEND_OP_HSL_SATURATION_EXT = 1000148032,
+    VK_BLEND_OP_HSL_COLOR_EXT = 1000148033,
+    VK_BLEND_OP_HSL_LUMINOSITY_EXT = 1000148034,
+    VK_BLEND_OP_PLUS_EXT = 1000148035,
+    VK_BLEND_OP_PLUS_CLAMPED_EXT = 1000148036,
+    VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT = 1000148037,
+    VK_BLEND_OP_PLUS_DARKER_EXT = 1000148038,
+    VK_BLEND_OP_MINUS_EXT = 1000148039,
+    VK_BLEND_OP_MINUS_CLAMPED_EXT = 1000148040,
+    VK_BLEND_OP_CONTRAST_EXT = 1000148041,
+    VK_BLEND_OP_INVERT_OVG_EXT = 1000148042,
+    VK_BLEND_OP_RED_EXT = 1000148043,
+    VK_BLEND_OP_GREEN_EXT = 1000148044,
+    VK_BLEND_OP_BLUE_EXT = 1000148045,
+    VK_BLEND_OP_MAX_ENUM = 0x7FFFFFFF
+} VkBlendOp;
+
+typedef enum VkCompareOp {
+    VK_COMPARE_OP_NEVER = 0,
+    VK_COMPARE_OP_LESS = 1,
+    VK_COMPARE_OP_EQUAL = 2,
+    VK_COMPARE_OP_LESS_OR_EQUAL = 3,
+    VK_COMPARE_OP_GREATER = 4,
+    VK_COMPARE_OP_NOT_EQUAL = 5,
+    VK_COMPARE_OP_GREATER_OR_EQUAL = 6,
+    VK_COMPARE_OP_ALWAYS = 7,
+    VK_COMPARE_OP_MAX_ENUM = 0x7FFFFFFF
+} VkCompareOp;
+
+typedef enum VkDynamicState {
+    VK_DYNAMIC_STATE_VIEWPORT = 0,
+    VK_DYNAMIC_STATE_SCISSOR = 1,
+    VK_DYNAMIC_STATE_LINE_WIDTH = 2,
+    VK_DYNAMIC_STATE_DEPTH_BIAS = 3,
+    VK_DYNAMIC_STATE_BLEND_CONSTANTS = 4,
+    VK_DYNAMIC_STATE_DEPTH_BOUNDS = 5,
+    VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK = 6,
+    VK_DYNAMIC_STATE_STENCIL_WRITE_MASK = 7,
+    VK_DYNAMIC_STATE_STENCIL_REFERENCE = 8,
+    VK_DYNAMIC_STATE_CULL_MODE = 1000267000,
+    VK_DYNAMIC_STATE_FRONT_FACE = 1000267001,
+    VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY = 1000267002,
+    VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT = 1000267003,
+    VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT = 1000267004,
+    VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE = 1000267005,
+    VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE = 1000267006,
+    VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE = 1000267007,
+    VK_DYNAMIC_STATE_DEPTH_COMPARE_OP = 1000267008,
+    VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE = 1000267009,
+    VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE = 1000267010,
+    VK_DYNAMIC_STATE_STENCIL_OP = 1000267011,
+    VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE = 1000377001,
+    VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE = 1000377002,
+    VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE = 1000377004,
+    VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV = 1000087000,
+    VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT = 1000099000,
+    VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT = 1000143000,
+    VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR = 1000347000,
+    VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV = 1000164004,
+    VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV = 1000164006,
+    VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV = 1000205001,
+    VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR = 1000226000,
+    VK_DYNAMIC_STATE_LINE_STIPPLE_EXT = 1000259000,
+    VK_DYNAMIC_STATE_VERTEX_INPUT_EXT = 1000352000,
+    VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT = 1000377000,
+    VK_DYNAMIC_STATE_LOGIC_OP_EXT = 1000377003,
+    VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT = 1000381000,
+    VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT = 1000455002,
+    VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT = 1000455003,
+    VK_DYNAMIC_STATE_POLYGON_MODE_EXT = 1000455004,
+    VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT = 1000455005,
+    VK_DYNAMIC_STATE_SAMPLE_MASK_EXT = 1000455006,
+    VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT = 1000455007,
+    VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT = 1000455008,
+    VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT = 1000455009,
+    VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT = 1000455010,
+    VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT = 1000455011,
+    VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT = 1000455012,
+    VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT = 1000455013,
+    VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT = 1000455014,
+    VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT = 1000455015,
+    VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT = 1000455016,
+    VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT = 1000455017,
+    VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT = 1000455018,
+    VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT = 1000455019,
+    VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT = 1000455020,
+    VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT = 1000455021,
+    VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT = 1000455022,
+    VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV = 1000455023,
+    VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV = 1000455024,
+    VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV = 1000455025,
+    VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV = 1000455026,
+    VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV = 1000455027,
+    VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV = 1000455028,
+    VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV = 1000455029,
+    VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV = 1000455030,
+    VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV = 1000455031,
+    VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV = 1000455032,
+    VK_DYNAMIC_STATE_CULL_MODE_EXT = VK_DYNAMIC_STATE_CULL_MODE,
+    VK_DYNAMIC_STATE_FRONT_FACE_EXT = VK_DYNAMIC_STATE_FRONT_FACE,
+    VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY,
+    VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT,
+    VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT,
+    VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE,
+    VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE,
+    VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE,
+    VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP,
+    VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE,
+    VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE,
+    VK_DYNAMIC_STATE_STENCIL_OP_EXT = VK_DYNAMIC_STATE_STENCIL_OP,
+    VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE,
+    VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE,
+    VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE,
+    VK_DYNAMIC_STATE_MAX_ENUM = 0x7FFFFFFF
+} VkDynamicState;
+
+typedef enum VkFrontFace {
+    VK_FRONT_FACE_COUNTER_CLOCKWISE = 0,
+    VK_FRONT_FACE_CLOCKWISE = 1,
+    VK_FRONT_FACE_MAX_ENUM = 0x7FFFFFFF
+} VkFrontFace;
+
+typedef enum VkVertexInputRate {
+    VK_VERTEX_INPUT_RATE_VERTEX = 0,
+    VK_VERTEX_INPUT_RATE_INSTANCE = 1,
+    VK_VERTEX_INPUT_RATE_MAX_ENUM = 0x7FFFFFFF
+} VkVertexInputRate;
+
+typedef enum VkPrimitiveTopology {
+    VK_PRIMITIVE_TOPOLOGY_POINT_LIST = 0,
+    VK_PRIMITIVE_TOPOLOGY_LINE_LIST = 1,
+    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP = 2,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST = 3,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP = 4,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN = 5,
+    VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY = 6,
+    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY = 7,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY = 8,
+    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY = 9,
+    VK_PRIMITIVE_TOPOLOGY_PATCH_LIST = 10,
+    VK_PRIMITIVE_TOPOLOGY_MAX_ENUM = 0x7FFFFFFF
+} VkPrimitiveTopology;
+
+typedef enum VkPolygonMode {
+    VK_POLYGON_MODE_FILL = 0,
+    VK_POLYGON_MODE_LINE = 1,
+    VK_POLYGON_MODE_POINT = 2,
+    VK_POLYGON_MODE_FILL_RECTANGLE_NV = 1000153000,
+    VK_POLYGON_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkPolygonMode;
+
+typedef enum VkStencilOp {
+    VK_STENCIL_OP_KEEP = 0,
+    VK_STENCIL_OP_ZERO = 1,
+    VK_STENCIL_OP_REPLACE = 2,
+    VK_STENCIL_OP_INCREMENT_AND_CLAMP = 3,
+    VK_STENCIL_OP_DECREMENT_AND_CLAMP = 4,
+    VK_STENCIL_OP_INVERT = 5,
+    VK_STENCIL_OP_INCREMENT_AND_WRAP = 6,
+    VK_STENCIL_OP_DECREMENT_AND_WRAP = 7,
+    VK_STENCIL_OP_MAX_ENUM = 0x7FFFFFFF
+} VkStencilOp;
+
+typedef enum VkLogicOp {
+    VK_LOGIC_OP_CLEAR = 0,
+    VK_LOGIC_OP_AND = 1,
+    VK_LOGIC_OP_AND_REVERSE = 2,
+    VK_LOGIC_OP_COPY = 3,
+    VK_LOGIC_OP_AND_INVERTED = 4,
+    VK_LOGIC_OP_NO_OP = 5,
+    VK_LOGIC_OP_XOR = 6,
+    VK_LOGIC_OP_OR = 7,
+    VK_LOGIC_OP_NOR = 8,
+    VK_LOGIC_OP_EQUIVALENT = 9,
+    VK_LOGIC_OP_INVERT = 10,
+    VK_LOGIC_OP_OR_REVERSE = 11,
+    VK_LOGIC_OP_COPY_INVERTED = 12,
+    VK_LOGIC_OP_OR_INVERTED = 13,
+    VK_LOGIC_OP_NAND = 14,
+    VK_LOGIC_OP_SET = 15,
+    VK_LOGIC_OP_MAX_ENUM = 0x7FFFFFFF
+} VkLogicOp;
+
+typedef enum VkBorderColor {
+    VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK = 0,
+    VK_BORDER_COLOR_INT_TRANSPARENT_BLACK = 1,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK = 2,
+    VK_BORDER_COLOR_INT_OPAQUE_BLACK = 3,
+    VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE = 4,
+    VK_BORDER_COLOR_INT_OPAQUE_WHITE = 5,
+    VK_BORDER_COLOR_FLOAT_CUSTOM_EXT = 1000287003,
+    VK_BORDER_COLOR_INT_CUSTOM_EXT = 1000287004,
+    VK_BORDER_COLOR_MAX_ENUM = 0x7FFFFFFF
+} VkBorderColor;
+
+typedef enum VkFilter {
+    VK_FILTER_NEAREST = 0,
+    VK_FILTER_LINEAR = 1,
+    VK_FILTER_CUBIC_EXT = 1000015000,
+    VK_FILTER_CUBIC_IMG = VK_FILTER_CUBIC_EXT,
+    VK_FILTER_MAX_ENUM = 0x7FFFFFFF
+} VkFilter;
+
+typedef enum VkSamplerAddressMode {
+    VK_SAMPLER_ADDRESS_MODE_REPEAT = 0,
+    VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT = 1,
+    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE = 2,
+    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER = 3,
+    VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE = 4,
+    VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
+    VK_SAMPLER_ADDRESS_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerAddressMode;
+
+typedef enum VkSamplerMipmapMode {
+    VK_SAMPLER_MIPMAP_MODE_NEAREST = 0,
+    VK_SAMPLER_MIPMAP_MODE_LINEAR = 1,
+    VK_SAMPLER_MIPMAP_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerMipmapMode;
+
+typedef enum VkDescriptorType {
+    VK_DESCRIPTOR_TYPE_SAMPLER = 0,
+    VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER = 1,
+    VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE = 2,
+    VK_DESCRIPTOR_TYPE_STORAGE_IMAGE = 3,
+    VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER = 4,
+    VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER = 5,
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER = 6,
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER = 7,
+    VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC = 8,
+    VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC = 9,
+    VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT = 10,
+    VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK = 1000138000,
+    VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR = 1000150000,
+    VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV = 1000165000,
+    VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM = 1000440000,
+    VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM = 1000440001,
+    VK_DESCRIPTOR_TYPE_MUTABLE_EXT = 1000351000,
+    VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK,
+    VK_DESCRIPTOR_TYPE_MUTABLE_VALVE = VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
+    VK_DESCRIPTOR_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorType;
+
+typedef enum VkAttachmentLoadOp {
+    VK_ATTACHMENT_LOAD_OP_LOAD = 0,
+    VK_ATTACHMENT_LOAD_OP_CLEAR = 1,
+    VK_ATTACHMENT_LOAD_OP_DONT_CARE = 2,
+    VK_ATTACHMENT_LOAD_OP_NONE_EXT = 1000400000,
+    VK_ATTACHMENT_LOAD_OP_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentLoadOp;
+
+typedef enum VkAttachmentStoreOp {
+    VK_ATTACHMENT_STORE_OP_STORE = 0,
+    VK_ATTACHMENT_STORE_OP_DONT_CARE = 1,
+    VK_ATTACHMENT_STORE_OP_NONE = 1000301000,
+    VK_ATTACHMENT_STORE_OP_NONE_KHR = VK_ATTACHMENT_STORE_OP_NONE,
+    VK_ATTACHMENT_STORE_OP_NONE_QCOM = VK_ATTACHMENT_STORE_OP_NONE,
+    VK_ATTACHMENT_STORE_OP_NONE_EXT = VK_ATTACHMENT_STORE_OP_NONE,
+    VK_ATTACHMENT_STORE_OP_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentStoreOp;
+
+typedef enum VkPipelineBindPoint {
+    VK_PIPELINE_BIND_POINT_GRAPHICS = 0,
+    VK_PIPELINE_BIND_POINT_COMPUTE = 1,
+    VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR = 1000165000,
+    VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI = 1000369003,
+    VK_PIPELINE_BIND_POINT_RAY_TRACING_NV = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
+    VK_PIPELINE_BIND_POINT_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineBindPoint;
+
+typedef enum VkCommandBufferLevel {
+    VK_COMMAND_BUFFER_LEVEL_PRIMARY = 0,
+    VK_COMMAND_BUFFER_LEVEL_SECONDARY = 1,
+    VK_COMMAND_BUFFER_LEVEL_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferLevel;
+
+typedef enum VkIndexType {
+    VK_INDEX_TYPE_UINT16 = 0,
+    VK_INDEX_TYPE_UINT32 = 1,
+    VK_INDEX_TYPE_NONE_KHR = 1000165000,
+    VK_INDEX_TYPE_UINT8_EXT = 1000265000,
+    VK_INDEX_TYPE_NONE_NV = VK_INDEX_TYPE_NONE_KHR,
+    VK_INDEX_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkIndexType;
+
+typedef enum VkSubpassContents {
+    VK_SUBPASS_CONTENTS_INLINE = 0,
+    VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS = 1,
+    VK_SUBPASS_CONTENTS_MAX_ENUM = 0x7FFFFFFF
+} VkSubpassContents;
+
+typedef enum VkAccessFlagBits {
+    VK_ACCESS_INDIRECT_COMMAND_READ_BIT = 0x00000001,
+    VK_ACCESS_INDEX_READ_BIT = 0x00000002,
+    VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004,
+    VK_ACCESS_UNIFORM_READ_BIT = 0x00000008,
+    VK_ACCESS_INPUT_ATTACHMENT_READ_BIT = 0x00000010,
+    VK_ACCESS_SHADER_READ_BIT = 0x00000020,
+    VK_ACCESS_SHADER_WRITE_BIT = 0x00000040,
+    VK_ACCESS_COLOR_ATTACHMENT_READ_BIT = 0x00000080,
+    VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100,
+    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200,
+    VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400,
+    VK_ACCESS_TRANSFER_READ_BIT = 0x00000800,
+    VK_ACCESS_TRANSFER_WRITE_BIT = 0x00001000,
+    VK_ACCESS_HOST_READ_BIT = 0x00002000,
+    VK_ACCESS_HOST_WRITE_BIT = 0x00004000,
+    VK_ACCESS_MEMORY_READ_BIT = 0x00008000,
+    VK_ACCESS_MEMORY_WRITE_BIT = 0x00010000,
+    VK_ACCESS_NONE = 0,
+    VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000,
+    VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000,
+    VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000,
+    VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000,
+    VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000,
+    VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000,
+    VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000,
+    VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000,
+    VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000,
+    VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000,
+    VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000,
+    VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
+    VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+    VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+    VK_ACCESS_NONE_KHR = VK_ACCESS_NONE,
+    VK_ACCESS_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkAccessFlagBits;
+typedef VkFlags VkAccessFlags;
+
+typedef enum VkImageAspectFlagBits {
+    VK_IMAGE_ASPECT_COLOR_BIT = 0x00000001,
+    VK_IMAGE_ASPECT_DEPTH_BIT = 0x00000002,
+    VK_IMAGE_ASPECT_STENCIL_BIT = 0x00000004,
+    VK_IMAGE_ASPECT_METADATA_BIT = 0x00000008,
+    VK_IMAGE_ASPECT_PLANE_0_BIT = 0x00000010,
+    VK_IMAGE_ASPECT_PLANE_1_BIT = 0x00000020,
+    VK_IMAGE_ASPECT_PLANE_2_BIT = 0x00000040,
+    VK_IMAGE_ASPECT_NONE = 0,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT = 0x00000080,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT = 0x00000100,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT = 0x00000200,
+    VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT = 0x00000400,
+    VK_IMAGE_ASPECT_PLANE_0_BIT_KHR = VK_IMAGE_ASPECT_PLANE_0_BIT,
+    VK_IMAGE_ASPECT_PLANE_1_BIT_KHR = VK_IMAGE_ASPECT_PLANE_1_BIT,
+    VK_IMAGE_ASPECT_PLANE_2_BIT_KHR = VK_IMAGE_ASPECT_PLANE_2_BIT,
+    VK_IMAGE_ASPECT_NONE_KHR = VK_IMAGE_ASPECT_NONE,
+    VK_IMAGE_ASPECT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageAspectFlagBits;
+typedef VkFlags VkImageAspectFlags;
+
+typedef enum VkFormatFeatureFlagBits {
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT = 0x00000001,
+    VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT = 0x00000002,
+    VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004,
+    VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008,
+    VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT = 0x00000010,
+    VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020,
+    VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT = 0x00000040,
+    VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT = 0x00000080,
+    VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100,
+    VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200,
+    VK_FORMAT_FEATURE_BLIT_SRC_BIT = 0x00000400,
+    VK_FORMAT_FEATURE_BLIT_DST_BIT = 0x00000800,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000,
+    VK_FORMAT_FEATURE_TRANSFER_SRC_BIT = 0x00004000,
+    VK_FORMAT_FEATURE_TRANSFER_DST_BIT = 0x00008000,
+    VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000,
+    VK_FORMAT_FEATURE_DISJOINT_BIT = 0x00400000,
+    VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT = 0x00800000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000,
+    VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000,
+    VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000,
+    VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000,
+    VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000,
+    VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000,
+#endif
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
+    VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+    VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+    VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+    VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+    VK_FORMAT_FEATURE_DISJOINT_BIT_KHR = VK_FORMAT_FEATURE_DISJOINT_BIT,
+    VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+    VK_FORMAT_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFormatFeatureFlagBits;
+typedef VkFlags VkFormatFeatureFlags;
+
+typedef enum VkImageCreateFlagBits {
+    VK_IMAGE_CREATE_SPARSE_BINDING_BIT = 0x00000001,
+    VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002,
+    VK_IMAGE_CREATE_SPARSE_ALIASED_BIT = 0x00000004,
+    VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT = 0x00000008,
+    VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT = 0x00000010,
+    VK_IMAGE_CREATE_ALIAS_BIT = 0x00000400,
+    VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT = 0x00000040,
+    VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT = 0x00000020,
+    VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT = 0x00000080,
+    VK_IMAGE_CREATE_EXTENDED_USAGE_BIT = 0x00000100,
+    VK_IMAGE_CREATE_PROTECTED_BIT = 0x00000800,
+    VK_IMAGE_CREATE_DISJOINT_BIT = 0x00000200,
+    VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV = 0x00002000,
+    VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT = 0x00001000,
+    VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT = 0x00004000,
+    VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00010000,
+    VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT = 0x00040000,
+    VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT = 0x00020000,
+    VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM = 0x00008000,
+    VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+    VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+    VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+    VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+    VK_IMAGE_CREATE_DISJOINT_BIT_KHR = VK_IMAGE_CREATE_DISJOINT_BIT,
+    VK_IMAGE_CREATE_ALIAS_BIT_KHR = VK_IMAGE_CREATE_ALIAS_BIT,
+    VK_IMAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageCreateFlagBits;
+typedef VkFlags VkImageCreateFlags;
+
+typedef enum VkSampleCountFlagBits {
+    VK_SAMPLE_COUNT_1_BIT = 0x00000001,
+    VK_SAMPLE_COUNT_2_BIT = 0x00000002,
+    VK_SAMPLE_COUNT_4_BIT = 0x00000004,
+    VK_SAMPLE_COUNT_8_BIT = 0x00000008,
+    VK_SAMPLE_COUNT_16_BIT = 0x00000010,
+    VK_SAMPLE_COUNT_32_BIT = 0x00000020,
+    VK_SAMPLE_COUNT_64_BIT = 0x00000040,
+    VK_SAMPLE_COUNT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSampleCountFlagBits;
+typedef VkFlags VkSampleCountFlags;
+
+typedef enum VkImageUsageFlagBits {
+    VK_IMAGE_USAGE_TRANSFER_SRC_BIT = 0x00000001,
+    VK_IMAGE_USAGE_TRANSFER_DST_BIT = 0x00000002,
+    VK_IMAGE_USAGE_SAMPLED_BIT = 0x00000004,
+    VK_IMAGE_USAGE_STORAGE_BIT = 0x00000008,
+    VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT = 0x00000010,
+    VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000020,
+    VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT = 0x00000040,
+    VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT = 0x00000080,
+    VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00000400,
+    VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00000800,
+    VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR = 0x00001000,
+    VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x00000200,
+    VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00000100,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00002000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00004000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR = 0x00008000,
+#endif
+    VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x00080000,
+    VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI = 0x00040000,
+    VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM = 0x00100000,
+    VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM = 0x00200000,
+    VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    VK_IMAGE_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageUsageFlagBits;
+typedef VkFlags VkImageUsageFlags;
+
+typedef enum VkInstanceCreateFlagBits {
+    VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR = 0x00000001,
+    VK_INSTANCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkInstanceCreateFlagBits;
+typedef VkFlags VkInstanceCreateFlags;
+
+typedef enum VkMemoryHeapFlagBits {
+    VK_MEMORY_HEAP_DEVICE_LOCAL_BIT = 0x00000001,
+    VK_MEMORY_HEAP_MULTI_INSTANCE_BIT = 0x00000002,
+    VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
+    VK_MEMORY_HEAP_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkMemoryHeapFlagBits;
+typedef VkFlags VkMemoryHeapFlags;
+
+typedef enum VkMemoryPropertyFlagBits {
+    VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT = 0x00000001,
+    VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT = 0x00000002,
+    VK_MEMORY_PROPERTY_HOST_COHERENT_BIT = 0x00000004,
+    VK_MEMORY_PROPERTY_HOST_CACHED_BIT = 0x00000008,
+    VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT = 0x00000010,
+    VK_MEMORY_PROPERTY_PROTECTED_BIT = 0x00000020,
+    VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD = 0x00000040,
+    VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD = 0x00000080,
+    VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV = 0x00000100,
+    VK_MEMORY_PROPERTY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkMemoryPropertyFlagBits;
+typedef VkFlags VkMemoryPropertyFlags;
+
+typedef enum VkQueueFlagBits {
+    VK_QUEUE_GRAPHICS_BIT = 0x00000001,
+    VK_QUEUE_COMPUTE_BIT = 0x00000002,
+    VK_QUEUE_TRANSFER_BIT = 0x00000004,
+    VK_QUEUE_SPARSE_BINDING_BIT = 0x00000008,
+    VK_QUEUE_PROTECTED_BIT = 0x00000010,
+    VK_QUEUE_VIDEO_DECODE_BIT_KHR = 0x00000020,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_QUEUE_VIDEO_ENCODE_BIT_KHR = 0x00000040,
+#endif
+    VK_QUEUE_OPTICAL_FLOW_BIT_NV = 0x00000100,
+    VK_QUEUE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueueFlagBits;
+typedef VkFlags VkQueueFlags;
+typedef VkFlags VkDeviceCreateFlags;
+
+typedef enum VkDeviceQueueCreateFlagBits {
+    VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT = 0x00000001,
+    VK_DEVICE_QUEUE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDeviceQueueCreateFlagBits;
+typedef VkFlags VkDeviceQueueCreateFlags;
+
+typedef enum VkPipelineStageFlagBits {
+    VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT = 0x00000001,
+    VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT = 0x00000002,
+    VK_PIPELINE_STAGE_VERTEX_INPUT_BIT = 0x00000004,
+    VK_PIPELINE_STAGE_VERTEX_SHADER_BIT = 0x00000008,
+    VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010,
+    VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020,
+    VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT = 0x00000040,
+    VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT = 0x00000080,
+    VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT = 0x00000100,
+    VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT = 0x00000200,
+    VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400,
+    VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT = 0x00000800,
+    VK_PIPELINE_STAGE_TRANSFER_BIT = 0x00001000,
+    VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT = 0x00002000,
+    VK_PIPELINE_STAGE_HOST_BIT = 0x00004000,
+    VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT = 0x00008000,
+    VK_PIPELINE_STAGE_ALL_COMMANDS_BIT = 0x00010000,
+    VK_PIPELINE_STAGE_NONE = 0,
+    VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000,
+    VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000,
+    VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000,
+    VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR = 0x00200000,
+    VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000,
+    VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000,
+    VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV = 0x00020000,
+    VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT = 0x00080000,
+    VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT = 0x00100000,
+    VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
+    VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+    VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT,
+    VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT,
+    VK_PIPELINE_STAGE_NONE_KHR = VK_PIPELINE_STAGE_NONE,
+    VK_PIPELINE_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineStageFlagBits;
+typedef VkFlags VkPipelineStageFlags;
+typedef VkFlags VkMemoryMapFlags;
+
+typedef enum VkSparseMemoryBindFlagBits {
+    VK_SPARSE_MEMORY_BIND_METADATA_BIT = 0x00000001,
+    VK_SPARSE_MEMORY_BIND_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSparseMemoryBindFlagBits;
+typedef VkFlags VkSparseMemoryBindFlags;
+
+typedef enum VkSparseImageFormatFlagBits {
+    VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT = 0x00000001,
+    VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT = 0x00000002,
+    VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT = 0x00000004,
+    VK_SPARSE_IMAGE_FORMAT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSparseImageFormatFlagBits;
+typedef VkFlags VkSparseImageFormatFlags;
+
+typedef enum VkFenceCreateFlagBits {
+    VK_FENCE_CREATE_SIGNALED_BIT = 0x00000001,
+    VK_FENCE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFenceCreateFlagBits;
+typedef VkFlags VkFenceCreateFlags;
+typedef VkFlags VkSemaphoreCreateFlags;
+
+typedef enum VkEventCreateFlagBits {
+    VK_EVENT_CREATE_DEVICE_ONLY_BIT = 0x00000001,
+    VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT,
+    VK_EVENT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkEventCreateFlagBits;
+typedef VkFlags VkEventCreateFlags;
+
+typedef enum VkQueryPipelineStatisticFlagBits {
+    VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT = 0x00000001,
+    VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT = 0x00000002,
+    VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT = 0x00000004,
+    VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT = 0x00000008,
+    VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT = 0x00000010,
+    VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT = 0x00000020,
+    VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT = 0x00000040,
+    VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT = 0x00000080,
+    VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT = 0x00000100,
+    VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT = 0x00000200,
+    VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT = 0x00000400,
+    VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT = 0x00000800,
+    VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT = 0x00001000,
+    VK_QUERY_PIPELINE_STATISTIC_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueryPipelineStatisticFlagBits;
+typedef VkFlags VkQueryPipelineStatisticFlags;
+typedef VkFlags VkQueryPoolCreateFlags;
+
+typedef enum VkQueryResultFlagBits {
+    VK_QUERY_RESULT_64_BIT = 0x00000001,
+    VK_QUERY_RESULT_WAIT_BIT = 0x00000002,
+    VK_QUERY_RESULT_WITH_AVAILABILITY_BIT = 0x00000004,
+    VK_QUERY_RESULT_PARTIAL_BIT = 0x00000008,
+    VK_QUERY_RESULT_WITH_STATUS_BIT_KHR = 0x00000010,
+    VK_QUERY_RESULT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueryResultFlagBits;
+typedef VkFlags VkQueryResultFlags;
+
+typedef enum VkBufferCreateFlagBits {
+    VK_BUFFER_CREATE_SPARSE_BINDING_BIT = 0x00000001,
+    VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT = 0x00000002,
+    VK_BUFFER_CREATE_SPARSE_ALIASED_BIT = 0x00000004,
+    VK_BUFFER_CREATE_PROTECTED_BIT = 0x00000008,
+    VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000010,
+    VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000020,
+    VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
+    VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
+    VK_BUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkBufferCreateFlagBits;
+typedef VkFlags VkBufferCreateFlags;
+
+typedef enum VkBufferUsageFlagBits {
+    VK_BUFFER_USAGE_TRANSFER_SRC_BIT = 0x00000001,
+    VK_BUFFER_USAGE_TRANSFER_DST_BIT = 0x00000002,
+    VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT = 0x00000004,
+    VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT = 0x00000008,
+    VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT = 0x00000010,
+    VK_BUFFER_USAGE_STORAGE_BUFFER_BIT = 0x00000020,
+    VK_BUFFER_USAGE_INDEX_BUFFER_BIT = 0x00000040,
+    VK_BUFFER_USAGE_VERTEX_BUFFER_BIT = 0x00000080,
+    VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT = 0x00000100,
+    VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT = 0x00020000,
+    VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR = 0x00002000,
+    VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR = 0x00004000,
+    VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT = 0x00000800,
+    VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT = 0x00001000,
+    VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT = 0x00000200,
+    VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR = 0x00080000,
+    VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR = 0x00100000,
+    VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR = 0x00000400,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR = 0x00008000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR = 0x00010000,
+#endif
+    VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT = 0x00200000,
+    VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00400000,
+    VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT = 0x04000000,
+    VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT = 0x00800000,
+    VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT = 0x01000000,
+    VK_BUFFER_USAGE_RAY_TRACING_BIT_NV = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR,
+    VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
+    VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
+    VK_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkBufferUsageFlagBits;
+typedef VkFlags VkBufferUsageFlags;
+typedef VkFlags VkBufferViewCreateFlags;
+
+typedef enum VkImageViewCreateFlagBits {
+    VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT = 0x00000001,
+    VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000004,
+    VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT = 0x00000002,
+    VK_IMAGE_VIEW_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkImageViewCreateFlagBits;
+typedef VkFlags VkImageViewCreateFlags;
+typedef VkFlags VkShaderModuleCreateFlags;
+
+typedef enum VkPipelineCacheCreateFlagBits {
+    VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT = 0x00000001,
+    VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT,
+    VK_PIPELINE_CACHE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCacheCreateFlagBits;
+typedef VkFlags VkPipelineCacheCreateFlags;
+
+typedef enum VkColorComponentFlagBits {
+    VK_COLOR_COMPONENT_R_BIT = 0x00000001,
+    VK_COLOR_COMPONENT_G_BIT = 0x00000002,
+    VK_COLOR_COMPONENT_B_BIT = 0x00000004,
+    VK_COLOR_COMPONENT_A_BIT = 0x00000008,
+    VK_COLOR_COMPONENT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkColorComponentFlagBits;
+typedef VkFlags VkColorComponentFlags;
+
+typedef enum VkPipelineCreateFlagBits {
+    VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT = 0x00000001,
+    VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT = 0x00000002,
+    VK_PIPELINE_CREATE_DERIVATIVE_BIT = 0x00000004,
+    VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT = 0x00000008,
+    VK_PIPELINE_CREATE_DISPATCH_BASE_BIT = 0x00000010,
+    VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT = 0x00000100,
+    VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT = 0x00000200,
+    VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00200000,
+    VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = 0x00400000,
+    VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR = 0x00004000,
+    VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR = 0x00008000,
+    VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR = 0x00010000,
+    VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR = 0x00020000,
+    VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR = 0x00001000,
+    VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR = 0x00002000,
+    VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR = 0x00080000,
+    VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV = 0x00000020,
+    VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR = 0x00000040,
+    VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR = 0x00000080,
+    VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV = 0x00040000,
+    VK_PIPELINE_CREATE_LIBRARY_BIT_KHR = 0x00000800,
+    VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x20000000,
+    VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT = 0x00800000,
+    VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT = 0x00000400,
+    VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV = 0x00100000,
+    VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x02000000,
+    VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT = 0x04000000,
+    VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT = 0x01000000,
+    VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT = 0x08000000,
+    VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT = 0x40000000,
+    VK_PIPELINE_CREATE_DISPATCH_BASE = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
+    VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
+    VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
+    VK_PIPELINE_CREATE_DISPATCH_BASE_KHR = VK_PIPELINE_CREATE_DISPATCH_BASE,
+    VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT,
+    VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT,
+    VK_PIPELINE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCreateFlagBits;
+typedef VkFlags VkPipelineCreateFlags;
+
+typedef enum VkPipelineShaderStageCreateFlagBits {
+    VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT = 0x00000001,
+    VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT = 0x00000002,
+    VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT,
+    VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT,
+    VK_PIPELINE_SHADER_STAGE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineShaderStageCreateFlagBits;
+typedef VkFlags VkPipelineShaderStageCreateFlags;
+
+typedef enum VkShaderStageFlagBits {
+    VK_SHADER_STAGE_VERTEX_BIT = 0x00000001,
+    VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT = 0x00000002,
+    VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT = 0x00000004,
+    VK_SHADER_STAGE_GEOMETRY_BIT = 0x00000008,
+    VK_SHADER_STAGE_FRAGMENT_BIT = 0x00000010,
+    VK_SHADER_STAGE_COMPUTE_BIT = 0x00000020,
+    VK_SHADER_STAGE_ALL_GRAPHICS = 0x0000001F,
+    VK_SHADER_STAGE_ALL = 0x7FFFFFFF,
+    VK_SHADER_STAGE_RAYGEN_BIT_KHR = 0x00000100,
+    VK_SHADER_STAGE_ANY_HIT_BIT_KHR = 0x00000200,
+    VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR = 0x00000400,
+    VK_SHADER_STAGE_MISS_BIT_KHR = 0x00000800,
+    VK_SHADER_STAGE_INTERSECTION_BIT_KHR = 0x00001000,
+    VK_SHADER_STAGE_CALLABLE_BIT_KHR = 0x00002000,
+    VK_SHADER_STAGE_TASK_BIT_EXT = 0x00000040,
+    VK_SHADER_STAGE_MESH_BIT_EXT = 0x00000080,
+    VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI = 0x00004000,
+    VK_SHADER_STAGE_RAYGEN_BIT_NV = VK_SHADER_STAGE_RAYGEN_BIT_KHR,
+    VK_SHADER_STAGE_ANY_HIT_BIT_NV = VK_SHADER_STAGE_ANY_HIT_BIT_KHR,
+    VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR,
+    VK_SHADER_STAGE_MISS_BIT_NV = VK_SHADER_STAGE_MISS_BIT_KHR,
+    VK_SHADER_STAGE_INTERSECTION_BIT_NV = VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
+    VK_SHADER_STAGE_CALLABLE_BIT_NV = VK_SHADER_STAGE_CALLABLE_BIT_KHR,
+    VK_SHADER_STAGE_TASK_BIT_NV = VK_SHADER_STAGE_TASK_BIT_EXT,
+    VK_SHADER_STAGE_MESH_BIT_NV = VK_SHADER_STAGE_MESH_BIT_EXT,
+    VK_SHADER_STAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkShaderStageFlagBits;
+
+typedef enum VkCullModeFlagBits {
+    VK_CULL_MODE_NONE = 0,
+    VK_CULL_MODE_FRONT_BIT = 0x00000001,
+    VK_CULL_MODE_BACK_BIT = 0x00000002,
+    VK_CULL_MODE_FRONT_AND_BACK = 0x00000003,
+    VK_CULL_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCullModeFlagBits;
+typedef VkFlags VkCullModeFlags;
+typedef VkFlags VkPipelineVertexInputStateCreateFlags;
+typedef VkFlags VkPipelineInputAssemblyStateCreateFlags;
+typedef VkFlags VkPipelineTessellationStateCreateFlags;
+typedef VkFlags VkPipelineViewportStateCreateFlags;
+typedef VkFlags VkPipelineRasterizationStateCreateFlags;
+typedef VkFlags VkPipelineMultisampleStateCreateFlags;
+
+typedef enum VkPipelineDepthStencilStateCreateFlagBits {
+    VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT = 0x00000001,
+    VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT = 0x00000002,
+    VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
+    VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
+    VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineDepthStencilStateCreateFlagBits;
+typedef VkFlags VkPipelineDepthStencilStateCreateFlags;
+
+typedef enum VkPipelineColorBlendStateCreateFlagBits {
+    VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT = 0x00000001,
+    VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT,
+    VK_PIPELINE_COLOR_BLEND_STATE_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineColorBlendStateCreateFlagBits;
+typedef VkFlags VkPipelineColorBlendStateCreateFlags;
+typedef VkFlags VkPipelineDynamicStateCreateFlags;
+
+typedef enum VkPipelineLayoutCreateFlagBits {
+    VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT = 0x00000002,
+    VK_PIPELINE_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineLayoutCreateFlagBits;
+typedef VkFlags VkPipelineLayoutCreateFlags;
+typedef VkFlags VkShaderStageFlags;
+
+typedef enum VkSamplerCreateFlagBits {
+    VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT = 0x00000001,
+    VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT = 0x00000002,
+    VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000008,
+    VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT = 0x00000004,
+    VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM = 0x00000010,
+    VK_SAMPLER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerCreateFlagBits;
+typedef VkFlags VkSamplerCreateFlags;
+
+typedef enum VkDescriptorPoolCreateFlagBits {
+    VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT = 0x00000001,
+    VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT = 0x00000002,
+    VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT = 0x00000004,
+    VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
+    VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT,
+    VK_DESCRIPTOR_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorPoolCreateFlagBits;
+typedef VkFlags VkDescriptorPoolCreateFlags;
+typedef VkFlags VkDescriptorPoolResetFlags;
+
+typedef enum VkDescriptorSetLayoutCreateFlagBits {
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT = 0x00000002,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR = 0x00000001,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT = 0x00000010,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT = 0x00000020,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT = 0x00000004,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT,
+    VK_DESCRIPTOR_SET_LAYOUT_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorSetLayoutCreateFlagBits;
+typedef VkFlags VkDescriptorSetLayoutCreateFlags;
+
+typedef enum VkAttachmentDescriptionFlagBits {
+    VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT = 0x00000001,
+    VK_ATTACHMENT_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkAttachmentDescriptionFlagBits;
+typedef VkFlags VkAttachmentDescriptionFlags;
+
+typedef enum VkDependencyFlagBits {
+    VK_DEPENDENCY_BY_REGION_BIT = 0x00000001,
+    VK_DEPENDENCY_DEVICE_GROUP_BIT = 0x00000004,
+    VK_DEPENDENCY_VIEW_LOCAL_BIT = 0x00000002,
+    VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT = 0x00000008,
+    VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR = VK_DEPENDENCY_VIEW_LOCAL_BIT,
+    VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR = VK_DEPENDENCY_DEVICE_GROUP_BIT,
+    VK_DEPENDENCY_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDependencyFlagBits;
+typedef VkFlags VkDependencyFlags;
+
+typedef enum VkFramebufferCreateFlagBits {
+    VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT = 0x00000001,
+    VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
+    VK_FRAMEBUFFER_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFramebufferCreateFlagBits;
+typedef VkFlags VkFramebufferCreateFlags;
+
+typedef enum VkRenderPassCreateFlagBits {
+    VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM = 0x00000002,
+    VK_RENDER_PASS_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkRenderPassCreateFlagBits;
+typedef VkFlags VkRenderPassCreateFlags;
+
+typedef enum VkSubpassDescriptionFlagBits {
+    VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX = 0x00000001,
+    VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX = 0x00000002,
+    VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM = 0x00000004,
+    VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM = 0x00000008,
+    VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT = 0x00000010,
+    VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT = 0x00000020,
+    VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT = 0x00000040,
+    VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000080,
+    VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT,
+    VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
+    VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
+    VK_SUBPASS_DESCRIPTION_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSubpassDescriptionFlagBits;
+typedef VkFlags VkSubpassDescriptionFlags;
+
+typedef enum VkCommandPoolCreateFlagBits {
+    VK_COMMAND_POOL_CREATE_TRANSIENT_BIT = 0x00000001,
+    VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT = 0x00000002,
+    VK_COMMAND_POOL_CREATE_PROTECTED_BIT = 0x00000004,
+    VK_COMMAND_POOL_CREATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandPoolCreateFlagBits;
+typedef VkFlags VkCommandPoolCreateFlags;
+
+typedef enum VkCommandPoolResetFlagBits {
+    VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT = 0x00000001,
+    VK_COMMAND_POOL_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandPoolResetFlagBits;
+typedef VkFlags VkCommandPoolResetFlags;
+
+typedef enum VkCommandBufferUsageFlagBits {
+    VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT = 0x00000001,
+    VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT = 0x00000002,
+    VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT = 0x00000004,
+    VK_COMMAND_BUFFER_USAGE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferUsageFlagBits;
+typedef VkFlags VkCommandBufferUsageFlags;
+
+typedef enum VkQueryControlFlagBits {
+    VK_QUERY_CONTROL_PRECISE_BIT = 0x00000001,
+    VK_QUERY_CONTROL_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkQueryControlFlagBits;
+typedef VkFlags VkQueryControlFlags;
+
+typedef enum VkCommandBufferResetFlagBits {
+    VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT = 0x00000001,
+    VK_COMMAND_BUFFER_RESET_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkCommandBufferResetFlagBits;
+typedef VkFlags VkCommandBufferResetFlags;
+
+typedef enum VkStencilFaceFlagBits {
+    VK_STENCIL_FACE_FRONT_BIT = 0x00000001,
+    VK_STENCIL_FACE_BACK_BIT = 0x00000002,
+    VK_STENCIL_FACE_FRONT_AND_BACK = 0x00000003,
+    VK_STENCIL_FRONT_AND_BACK = VK_STENCIL_FACE_FRONT_AND_BACK,
+    VK_STENCIL_FACE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkStencilFaceFlagBits;
+typedef VkFlags VkStencilFaceFlags;
+typedef struct VkExtent2D {
+    uint32_t    width;
+    uint32_t    height;
+} VkExtent2D;
+
+typedef struct VkExtent3D {
+    uint32_t    width;
+    uint32_t    height;
+    uint32_t    depth;
+} VkExtent3D;
+
+typedef struct VkOffset2D {
+    int32_t    x;
+    int32_t    y;
+} VkOffset2D;
+
+typedef struct VkOffset3D {
+    int32_t    x;
+    int32_t    y;
+    int32_t    z;
+} VkOffset3D;
+
+typedef struct VkRect2D {
+    VkOffset2D    offset;
+    VkExtent2D    extent;
+} VkRect2D;
+
+typedef struct VkBaseInStructure {
+    VkStructureType                    sType;
+    const struct VkBaseInStructure*    pNext;
+} VkBaseInStructure;
+
+typedef struct VkBaseOutStructure {
+    VkStructureType               sType;
+    struct VkBaseOutStructure*    pNext;
+} VkBaseOutStructure;
+
+typedef struct VkBufferMemoryBarrier {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkAccessFlags      srcAccessMask;
+    VkAccessFlags      dstAccessMask;
+    uint32_t           srcQueueFamilyIndex;
+    uint32_t           dstQueueFamilyIndex;
+    VkBuffer           buffer;
+    VkDeviceSize       offset;
+    VkDeviceSize       size;
+} VkBufferMemoryBarrier;
+
+typedef struct VkDispatchIndirectCommand {
+    uint32_t    x;
+    uint32_t    y;
+    uint32_t    z;
+} VkDispatchIndirectCommand;
+
+typedef struct VkDrawIndexedIndirectCommand {
+    uint32_t    indexCount;
+    uint32_t    instanceCount;
+    uint32_t    firstIndex;
+    int32_t     vertexOffset;
+    uint32_t    firstInstance;
+} VkDrawIndexedIndirectCommand;
+
+typedef struct VkDrawIndirectCommand {
+    uint32_t    vertexCount;
+    uint32_t    instanceCount;
+    uint32_t    firstVertex;
+    uint32_t    firstInstance;
+} VkDrawIndirectCommand;
+
+typedef struct VkImageSubresourceRange {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              baseMipLevel;
+    uint32_t              levelCount;
+    uint32_t              baseArrayLayer;
+    uint32_t              layerCount;
+} VkImageSubresourceRange;
+
+typedef struct VkImageMemoryBarrier {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkAccessFlags              srcAccessMask;
+    VkAccessFlags              dstAccessMask;
+    VkImageLayout              oldLayout;
+    VkImageLayout              newLayout;
+    uint32_t                   srcQueueFamilyIndex;
+    uint32_t                   dstQueueFamilyIndex;
+    VkImage                    image;
+    VkImageSubresourceRange    subresourceRange;
+} VkImageMemoryBarrier;
+
+typedef struct VkMemoryBarrier {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkAccessFlags      srcAccessMask;
+    VkAccessFlags      dstAccessMask;
+} VkMemoryBarrier;
+
+typedef struct VkPipelineCacheHeaderVersionOne {
+    uint32_t                        headerSize;
+    VkPipelineCacheHeaderVersion    headerVersion;
+    uint32_t                        vendorID;
+    uint32_t                        deviceID;
+    uint8_t                         pipelineCacheUUID[VK_UUID_SIZE];
+} VkPipelineCacheHeaderVersionOne;
+
+typedef void* (VKAPI_PTR *PFN_vkAllocationFunction)(
+    void*                                       pUserData,
+    size_t                                      size,
+    size_t                                      alignment,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void (VKAPI_PTR *PFN_vkFreeFunction)(
+    void*                                       pUserData,
+    void*                                       pMemory);
+
+typedef void (VKAPI_PTR *PFN_vkInternalAllocationNotification)(
+    void*                                       pUserData,
+    size_t                                      size,
+    VkInternalAllocationType                    allocationType,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void (VKAPI_PTR *PFN_vkInternalFreeNotification)(
+    void*                                       pUserData,
+    size_t                                      size,
+    VkInternalAllocationType                    allocationType,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void* (VKAPI_PTR *PFN_vkReallocationFunction)(
+    void*                                       pUserData,
+    void*                                       pOriginal,
+    size_t                                      size,
+    size_t                                      alignment,
+    VkSystemAllocationScope                     allocationScope);
+
+typedef void (VKAPI_PTR *PFN_vkVoidFunction)(void);
+typedef struct VkAllocationCallbacks {
+    void*                                   pUserData;
+    PFN_vkAllocationFunction                pfnAllocation;
+    PFN_vkReallocationFunction              pfnReallocation;
+    PFN_vkFreeFunction                      pfnFree;
+    PFN_vkInternalAllocationNotification    pfnInternalAllocation;
+    PFN_vkInternalFreeNotification          pfnInternalFree;
+} VkAllocationCallbacks;
+
+typedef struct VkApplicationInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    const char*        pApplicationName;
+    uint32_t           applicationVersion;
+    const char*        pEngineName;
+    uint32_t           engineVersion;
+    uint32_t           apiVersion;
+} VkApplicationInfo;
+
+typedef struct VkFormatProperties {
+    VkFormatFeatureFlags    linearTilingFeatures;
+    VkFormatFeatureFlags    optimalTilingFeatures;
+    VkFormatFeatureFlags    bufferFeatures;
+} VkFormatProperties;
+
+typedef struct VkImageFormatProperties {
+    VkExtent3D            maxExtent;
+    uint32_t              maxMipLevels;
+    uint32_t              maxArrayLayers;
+    VkSampleCountFlags    sampleCounts;
+    VkDeviceSize          maxResourceSize;
+} VkImageFormatProperties;
+
+typedef struct VkInstanceCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkInstanceCreateFlags       flags;
+    const VkApplicationInfo*    pApplicationInfo;
+    uint32_t                    enabledLayerCount;
+    const char* const*          ppEnabledLayerNames;
+    uint32_t                    enabledExtensionCount;
+    const char* const*          ppEnabledExtensionNames;
+} VkInstanceCreateInfo;
+
+typedef struct VkMemoryHeap {
+    VkDeviceSize         size;
+    VkMemoryHeapFlags    flags;
+} VkMemoryHeap;
+
+typedef struct VkMemoryType {
+    VkMemoryPropertyFlags    propertyFlags;
+    uint32_t                 heapIndex;
+} VkMemoryType;
+
+typedef struct VkPhysicalDeviceFeatures {
+    VkBool32    robustBufferAccess;
+    VkBool32    fullDrawIndexUint32;
+    VkBool32    imageCubeArray;
+    VkBool32    independentBlend;
+    VkBool32    geometryShader;
+    VkBool32    tessellationShader;
+    VkBool32    sampleRateShading;
+    VkBool32    dualSrcBlend;
+    VkBool32    logicOp;
+    VkBool32    multiDrawIndirect;
+    VkBool32    drawIndirectFirstInstance;
+    VkBool32    depthClamp;
+    VkBool32    depthBiasClamp;
+    VkBool32    fillModeNonSolid;
+    VkBool32    depthBounds;
+    VkBool32    wideLines;
+    VkBool32    largePoints;
+    VkBool32    alphaToOne;
+    VkBool32    multiViewport;
+    VkBool32    samplerAnisotropy;
+    VkBool32    textureCompressionETC2;
+    VkBool32    textureCompressionASTC_LDR;
+    VkBool32    textureCompressionBC;
+    VkBool32    occlusionQueryPrecise;
+    VkBool32    pipelineStatisticsQuery;
+    VkBool32    vertexPipelineStoresAndAtomics;
+    VkBool32    fragmentStoresAndAtomics;
+    VkBool32    shaderTessellationAndGeometryPointSize;
+    VkBool32    shaderImageGatherExtended;
+    VkBool32    shaderStorageImageExtendedFormats;
+    VkBool32    shaderStorageImageMultisample;
+    VkBool32    shaderStorageImageReadWithoutFormat;
+    VkBool32    shaderStorageImageWriteWithoutFormat;
+    VkBool32    shaderUniformBufferArrayDynamicIndexing;
+    VkBool32    shaderSampledImageArrayDynamicIndexing;
+    VkBool32    shaderStorageBufferArrayDynamicIndexing;
+    VkBool32    shaderStorageImageArrayDynamicIndexing;
+    VkBool32    shaderClipDistance;
+    VkBool32    shaderCullDistance;
+    VkBool32    shaderFloat64;
+    VkBool32    shaderInt64;
+    VkBool32    shaderInt16;
+    VkBool32    shaderResourceResidency;
+    VkBool32    shaderResourceMinLod;
+    VkBool32    sparseBinding;
+    VkBool32    sparseResidencyBuffer;
+    VkBool32    sparseResidencyImage2D;
+    VkBool32    sparseResidencyImage3D;
+    VkBool32    sparseResidency2Samples;
+    VkBool32    sparseResidency4Samples;
+    VkBool32    sparseResidency8Samples;
+    VkBool32    sparseResidency16Samples;
+    VkBool32    sparseResidencyAliased;
+    VkBool32    variableMultisampleRate;
+    VkBool32    inheritedQueries;
+} VkPhysicalDeviceFeatures;
+
+typedef struct VkPhysicalDeviceLimits {
+    uint32_t              maxImageDimension1D;
+    uint32_t              maxImageDimension2D;
+    uint32_t              maxImageDimension3D;
+    uint32_t              maxImageDimensionCube;
+    uint32_t              maxImageArrayLayers;
+    uint32_t              maxTexelBufferElements;
+    uint32_t              maxUniformBufferRange;
+    uint32_t              maxStorageBufferRange;
+    uint32_t              maxPushConstantsSize;
+    uint32_t              maxMemoryAllocationCount;
+    uint32_t              maxSamplerAllocationCount;
+    VkDeviceSize          bufferImageGranularity;
+    VkDeviceSize          sparseAddressSpaceSize;
+    uint32_t              maxBoundDescriptorSets;
+    uint32_t              maxPerStageDescriptorSamplers;
+    uint32_t              maxPerStageDescriptorUniformBuffers;
+    uint32_t              maxPerStageDescriptorStorageBuffers;
+    uint32_t              maxPerStageDescriptorSampledImages;
+    uint32_t              maxPerStageDescriptorStorageImages;
+    uint32_t              maxPerStageDescriptorInputAttachments;
+    uint32_t              maxPerStageResources;
+    uint32_t              maxDescriptorSetSamplers;
+    uint32_t              maxDescriptorSetUniformBuffers;
+    uint32_t              maxDescriptorSetUniformBuffersDynamic;
+    uint32_t              maxDescriptorSetStorageBuffers;
+    uint32_t              maxDescriptorSetStorageBuffersDynamic;
+    uint32_t              maxDescriptorSetSampledImages;
+    uint32_t              maxDescriptorSetStorageImages;
+    uint32_t              maxDescriptorSetInputAttachments;
+    uint32_t              maxVertexInputAttributes;
+    uint32_t              maxVertexInputBindings;
+    uint32_t              maxVertexInputAttributeOffset;
+    uint32_t              maxVertexInputBindingStride;
+    uint32_t              maxVertexOutputComponents;
+    uint32_t              maxTessellationGenerationLevel;
+    uint32_t              maxTessellationPatchSize;
+    uint32_t              maxTessellationControlPerVertexInputComponents;
+    uint32_t              maxTessellationControlPerVertexOutputComponents;
+    uint32_t              maxTessellationControlPerPatchOutputComponents;
+    uint32_t              maxTessellationControlTotalOutputComponents;
+    uint32_t              maxTessellationEvaluationInputComponents;
+    uint32_t              maxTessellationEvaluationOutputComponents;
+    uint32_t              maxGeometryShaderInvocations;
+    uint32_t              maxGeometryInputComponents;
+    uint32_t              maxGeometryOutputComponents;
+    uint32_t              maxGeometryOutputVertices;
+    uint32_t              maxGeometryTotalOutputComponents;
+    uint32_t              maxFragmentInputComponents;
+    uint32_t              maxFragmentOutputAttachments;
+    uint32_t              maxFragmentDualSrcAttachments;
+    uint32_t              maxFragmentCombinedOutputResources;
+    uint32_t              maxComputeSharedMemorySize;
+    uint32_t              maxComputeWorkGroupCount[3];
+    uint32_t              maxComputeWorkGroupInvocations;
+    uint32_t              maxComputeWorkGroupSize[3];
+    uint32_t              subPixelPrecisionBits;
+    uint32_t              subTexelPrecisionBits;
+    uint32_t              mipmapPrecisionBits;
+    uint32_t              maxDrawIndexedIndexValue;
+    uint32_t              maxDrawIndirectCount;
+    float                 maxSamplerLodBias;
+    float                 maxSamplerAnisotropy;
+    uint32_t              maxViewports;
+    uint32_t              maxViewportDimensions[2];
+    float                 viewportBoundsRange[2];
+    uint32_t              viewportSubPixelBits;
+    size_t                minMemoryMapAlignment;
+    VkDeviceSize          minTexelBufferOffsetAlignment;
+    VkDeviceSize          minUniformBufferOffsetAlignment;
+    VkDeviceSize          minStorageBufferOffsetAlignment;
+    int32_t               minTexelOffset;
+    uint32_t              maxTexelOffset;
+    int32_t               minTexelGatherOffset;
+    uint32_t              maxTexelGatherOffset;
+    float                 minInterpolationOffset;
+    float                 maxInterpolationOffset;
+    uint32_t              subPixelInterpolationOffsetBits;
+    uint32_t              maxFramebufferWidth;
+    uint32_t              maxFramebufferHeight;
+    uint32_t              maxFramebufferLayers;
+    VkSampleCountFlags    framebufferColorSampleCounts;
+    VkSampleCountFlags    framebufferDepthSampleCounts;
+    VkSampleCountFlags    framebufferStencilSampleCounts;
+    VkSampleCountFlags    framebufferNoAttachmentsSampleCounts;
+    uint32_t              maxColorAttachments;
+    VkSampleCountFlags    sampledImageColorSampleCounts;
+    VkSampleCountFlags    sampledImageIntegerSampleCounts;
+    VkSampleCountFlags    sampledImageDepthSampleCounts;
+    VkSampleCountFlags    sampledImageStencilSampleCounts;
+    VkSampleCountFlags    storageImageSampleCounts;
+    uint32_t              maxSampleMaskWords;
+    VkBool32              timestampComputeAndGraphics;
+    float                 timestampPeriod;
+    uint32_t              maxClipDistances;
+    uint32_t              maxCullDistances;
+    uint32_t              maxCombinedClipAndCullDistances;
+    uint32_t              discreteQueuePriorities;
+    float                 pointSizeRange[2];
+    float                 lineWidthRange[2];
+    float                 pointSizeGranularity;
+    float                 lineWidthGranularity;
+    VkBool32              strictLines;
+    VkBool32              standardSampleLocations;
+    VkDeviceSize          optimalBufferCopyOffsetAlignment;
+    VkDeviceSize          optimalBufferCopyRowPitchAlignment;
+    VkDeviceSize          nonCoherentAtomSize;
+} VkPhysicalDeviceLimits;
+
+typedef struct VkPhysicalDeviceMemoryProperties {
+    uint32_t        memoryTypeCount;
+    VkMemoryType    memoryTypes[VK_MAX_MEMORY_TYPES];
+    uint32_t        memoryHeapCount;
+    VkMemoryHeap    memoryHeaps[VK_MAX_MEMORY_HEAPS];
+} VkPhysicalDeviceMemoryProperties;
+
+typedef struct VkPhysicalDeviceSparseProperties {
+    VkBool32    residencyStandard2DBlockShape;
+    VkBool32    residencyStandard2DMultisampleBlockShape;
+    VkBool32    residencyStandard3DBlockShape;
+    VkBool32    residencyAlignedMipSize;
+    VkBool32    residencyNonResidentStrict;
+} VkPhysicalDeviceSparseProperties;
+
+typedef struct VkPhysicalDeviceProperties {
+    uint32_t                            apiVersion;
+    uint32_t                            driverVersion;
+    uint32_t                            vendorID;
+    uint32_t                            deviceID;
+    VkPhysicalDeviceType                deviceType;
+    char                                deviceName[VK_MAX_PHYSICAL_DEVICE_NAME_SIZE];
+    uint8_t                             pipelineCacheUUID[VK_UUID_SIZE];
+    VkPhysicalDeviceLimits              limits;
+    VkPhysicalDeviceSparseProperties    sparseProperties;
+} VkPhysicalDeviceProperties;
+
+typedef struct VkQueueFamilyProperties {
+    VkQueueFlags    queueFlags;
+    uint32_t        queueCount;
+    uint32_t        timestampValidBits;
+    VkExtent3D      minImageTransferGranularity;
+} VkQueueFamilyProperties;
+
+typedef struct VkDeviceQueueCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkDeviceQueueCreateFlags    flags;
+    uint32_t                    queueFamilyIndex;
+    uint32_t                    queueCount;
+    const float*                pQueuePriorities;
+} VkDeviceQueueCreateInfo;
+
+typedef struct VkDeviceCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkDeviceCreateFlags                flags;
+    uint32_t                           queueCreateInfoCount;
+    const VkDeviceQueueCreateInfo*     pQueueCreateInfos;
+    uint32_t                           enabledLayerCount;
+    const char* const*                 ppEnabledLayerNames;
+    uint32_t                           enabledExtensionCount;
+    const char* const*                 ppEnabledExtensionNames;
+    const VkPhysicalDeviceFeatures*    pEnabledFeatures;
+} VkDeviceCreateInfo;
+
+typedef struct VkExtensionProperties {
+    char        extensionName[VK_MAX_EXTENSION_NAME_SIZE];
+    uint32_t    specVersion;
+} VkExtensionProperties;
+
+typedef struct VkLayerProperties {
+    char        layerName[VK_MAX_EXTENSION_NAME_SIZE];
+    uint32_t    specVersion;
+    uint32_t    implementationVersion;
+    char        description[VK_MAX_DESCRIPTION_SIZE];
+} VkLayerProperties;
+
+typedef struct VkSubmitInfo {
+    VkStructureType                sType;
+    const void*                    pNext;
+    uint32_t                       waitSemaphoreCount;
+    const VkSemaphore*             pWaitSemaphores;
+    const VkPipelineStageFlags*    pWaitDstStageMask;
+    uint32_t                       commandBufferCount;
+    const VkCommandBuffer*         pCommandBuffers;
+    uint32_t                       signalSemaphoreCount;
+    const VkSemaphore*             pSignalSemaphores;
+} VkSubmitInfo;
+
+typedef struct VkMappedMemoryRange {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceMemory     memory;
+    VkDeviceSize       offset;
+    VkDeviceSize       size;
+} VkMappedMemoryRange;
+
+typedef struct VkMemoryAllocateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceSize       allocationSize;
+    uint32_t           memoryTypeIndex;
+} VkMemoryAllocateInfo;
+
+typedef struct VkMemoryRequirements {
+    VkDeviceSize    size;
+    VkDeviceSize    alignment;
+    uint32_t        memoryTypeBits;
+} VkMemoryRequirements;
+
+typedef struct VkSparseMemoryBind {
+    VkDeviceSize               resourceOffset;
+    VkDeviceSize               size;
+    VkDeviceMemory             memory;
+    VkDeviceSize               memoryOffset;
+    VkSparseMemoryBindFlags    flags;
+} VkSparseMemoryBind;
+
+typedef struct VkSparseBufferMemoryBindInfo {
+    VkBuffer                     buffer;
+    uint32_t                     bindCount;
+    const VkSparseMemoryBind*    pBinds;
+} VkSparseBufferMemoryBindInfo;
+
+typedef struct VkSparseImageOpaqueMemoryBindInfo {
+    VkImage                      image;
+    uint32_t                     bindCount;
+    const VkSparseMemoryBind*    pBinds;
+} VkSparseImageOpaqueMemoryBindInfo;
+
+typedef struct VkImageSubresource {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              mipLevel;
+    uint32_t              arrayLayer;
+} VkImageSubresource;
+
+typedef struct VkSparseImageMemoryBind {
+    VkImageSubresource         subresource;
+    VkOffset3D                 offset;
+    VkExtent3D                 extent;
+    VkDeviceMemory             memory;
+    VkDeviceSize               memoryOffset;
+    VkSparseMemoryBindFlags    flags;
+} VkSparseImageMemoryBind;
+
+typedef struct VkSparseImageMemoryBindInfo {
+    VkImage                           image;
+    uint32_t                          bindCount;
+    const VkSparseImageMemoryBind*    pBinds;
+} VkSparseImageMemoryBindInfo;
+
+typedef struct VkBindSparseInfo {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    waitSemaphoreCount;
+    const VkSemaphore*                          pWaitSemaphores;
+    uint32_t                                    bufferBindCount;
+    const VkSparseBufferMemoryBindInfo*         pBufferBinds;
+    uint32_t                                    imageOpaqueBindCount;
+    const VkSparseImageOpaqueMemoryBindInfo*    pImageOpaqueBinds;
+    uint32_t                                    imageBindCount;
+    const VkSparseImageMemoryBindInfo*          pImageBinds;
+    uint32_t                                    signalSemaphoreCount;
+    const VkSemaphore*                          pSignalSemaphores;
+} VkBindSparseInfo;
+
+typedef struct VkSparseImageFormatProperties {
+    VkImageAspectFlags          aspectMask;
+    VkExtent3D                  imageGranularity;
+    VkSparseImageFormatFlags    flags;
+} VkSparseImageFormatProperties;
+
+typedef struct VkSparseImageMemoryRequirements {
+    VkSparseImageFormatProperties    formatProperties;
+    uint32_t                         imageMipTailFirstLod;
+    VkDeviceSize                     imageMipTailSize;
+    VkDeviceSize                     imageMipTailOffset;
+    VkDeviceSize                     imageMipTailStride;
+} VkSparseImageMemoryRequirements;
+
+typedef struct VkFenceCreateInfo {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkFenceCreateFlags    flags;
+} VkFenceCreateInfo;
+
+typedef struct VkSemaphoreCreateInfo {
+    VkStructureType           sType;
+    const void*               pNext;
+    VkSemaphoreCreateFlags    flags;
+} VkSemaphoreCreateInfo;
+
+typedef struct VkEventCreateInfo {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkEventCreateFlags    flags;
+} VkEventCreateInfo;
+
+typedef struct VkQueryPoolCreateInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkQueryPoolCreateFlags           flags;
+    VkQueryType                      queryType;
+    uint32_t                         queryCount;
+    VkQueryPipelineStatisticFlags    pipelineStatistics;
+} VkQueryPoolCreateInfo;
+
+typedef struct VkBufferCreateInfo {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkBufferCreateFlags    flags;
+    VkDeviceSize           size;
+    VkBufferUsageFlags     usage;
+    VkSharingMode          sharingMode;
+    uint32_t               queueFamilyIndexCount;
+    const uint32_t*        pQueueFamilyIndices;
+} VkBufferCreateInfo;
+
+typedef struct VkBufferViewCreateInfo {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkBufferViewCreateFlags    flags;
+    VkBuffer                   buffer;
+    VkFormat                   format;
+    VkDeviceSize               offset;
+    VkDeviceSize               range;
+} VkBufferViewCreateInfo;
+
+typedef struct VkImageCreateInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageCreateFlags       flags;
+    VkImageType              imageType;
+    VkFormat                 format;
+    VkExtent3D               extent;
+    uint32_t                 mipLevels;
+    uint32_t                 arrayLayers;
+    VkSampleCountFlagBits    samples;
+    VkImageTiling            tiling;
+    VkImageUsageFlags        usage;
+    VkSharingMode            sharingMode;
+    uint32_t                 queueFamilyIndexCount;
+    const uint32_t*          pQueueFamilyIndices;
+    VkImageLayout            initialLayout;
+} VkImageCreateInfo;
+
+typedef struct VkSubresourceLayout {
+    VkDeviceSize    offset;
+    VkDeviceSize    size;
+    VkDeviceSize    rowPitch;
+    VkDeviceSize    arrayPitch;
+    VkDeviceSize    depthPitch;
+} VkSubresourceLayout;
+
+typedef struct VkComponentMapping {
+    VkComponentSwizzle    r;
+    VkComponentSwizzle    g;
+    VkComponentSwizzle    b;
+    VkComponentSwizzle    a;
+} VkComponentMapping;
+
+typedef struct VkImageViewCreateInfo {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkImageViewCreateFlags     flags;
+    VkImage                    image;
+    VkImageViewType            viewType;
+    VkFormat                   format;
+    VkComponentMapping         components;
+    VkImageSubresourceRange    subresourceRange;
+} VkImageViewCreateInfo;
+
+typedef struct VkShaderModuleCreateInfo {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkShaderModuleCreateFlags    flags;
+    size_t                       codeSize;
+    const uint32_t*              pCode;
+} VkShaderModuleCreateInfo;
+
+typedef struct VkPipelineCacheCreateInfo {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkPipelineCacheCreateFlags    flags;
+    size_t                        initialDataSize;
+    const void*                   pInitialData;
+} VkPipelineCacheCreateInfo;
+
+typedef struct VkSpecializationMapEntry {
+    uint32_t    constantID;
+    uint32_t    offset;
+    size_t      size;
+} VkSpecializationMapEntry;
+
+typedef struct VkSpecializationInfo {
+    uint32_t                           mapEntryCount;
+    const VkSpecializationMapEntry*    pMapEntries;
+    size_t                             dataSize;
+    const void*                        pData;
+} VkSpecializationInfo;
+
+typedef struct VkPipelineShaderStageCreateInfo {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkPipelineShaderStageCreateFlags    flags;
+    VkShaderStageFlagBits               stage;
+    VkShaderModule                      module;
+    const char*                         pName;
+    const VkSpecializationInfo*         pSpecializationInfo;
+} VkPipelineShaderStageCreateInfo;
+
+typedef struct VkComputePipelineCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkPipelineCreateFlags              flags;
+    VkPipelineShaderStageCreateInfo    stage;
+    VkPipelineLayout                   layout;
+    VkPipeline                         basePipelineHandle;
+    int32_t                            basePipelineIndex;
+} VkComputePipelineCreateInfo;
+
+typedef struct VkVertexInputBindingDescription {
+    uint32_t             binding;
+    uint32_t             stride;
+    VkVertexInputRate    inputRate;
+} VkVertexInputBindingDescription;
+
+typedef struct VkVertexInputAttributeDescription {
+    uint32_t    location;
+    uint32_t    binding;
+    VkFormat    format;
+    uint32_t    offset;
+} VkVertexInputAttributeDescription;
+
+typedef struct VkPipelineVertexInputStateCreateInfo {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkPipelineVertexInputStateCreateFlags       flags;
+    uint32_t                                    vertexBindingDescriptionCount;
+    const VkVertexInputBindingDescription*      pVertexBindingDescriptions;
+    uint32_t                                    vertexAttributeDescriptionCount;
+    const VkVertexInputAttributeDescription*    pVertexAttributeDescriptions;
+} VkPipelineVertexInputStateCreateInfo;
+
+typedef struct VkPipelineInputAssemblyStateCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkPipelineInputAssemblyStateCreateFlags    flags;
+    VkPrimitiveTopology                        topology;
+    VkBool32                                   primitiveRestartEnable;
+} VkPipelineInputAssemblyStateCreateInfo;
+
+typedef struct VkPipelineTessellationStateCreateInfo {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkPipelineTessellationStateCreateFlags    flags;
+    uint32_t                                  patchControlPoints;
+} VkPipelineTessellationStateCreateInfo;
+
+typedef struct VkViewport {
+    float    x;
+    float    y;
+    float    width;
+    float    height;
+    float    minDepth;
+    float    maxDepth;
+} VkViewport;
+
+typedef struct VkPipelineViewportStateCreateInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkPipelineViewportStateCreateFlags    flags;
+    uint32_t                              viewportCount;
+    const VkViewport*                     pViewports;
+    uint32_t                              scissorCount;
+    const VkRect2D*                       pScissors;
+} VkPipelineViewportStateCreateInfo;
+
+typedef struct VkPipelineRasterizationStateCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    VkPipelineRasterizationStateCreateFlags    flags;
+    VkBool32                                   depthClampEnable;
+    VkBool32                                   rasterizerDiscardEnable;
+    VkPolygonMode                              polygonMode;
+    VkCullModeFlags                            cullMode;
+    VkFrontFace                                frontFace;
+    VkBool32                                   depthBiasEnable;
+    float                                      depthBiasConstantFactor;
+    float                                      depthBiasClamp;
+    float                                      depthBiasSlopeFactor;
+    float                                      lineWidth;
+} VkPipelineRasterizationStateCreateInfo;
+
+typedef struct VkPipelineMultisampleStateCreateInfo {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkPipelineMultisampleStateCreateFlags    flags;
+    VkSampleCountFlagBits                    rasterizationSamples;
+    VkBool32                                 sampleShadingEnable;
+    float                                    minSampleShading;
+    const VkSampleMask*                      pSampleMask;
+    VkBool32                                 alphaToCoverageEnable;
+    VkBool32                                 alphaToOneEnable;
+} VkPipelineMultisampleStateCreateInfo;
+
+typedef struct VkStencilOpState {
+    VkStencilOp    failOp;
+    VkStencilOp    passOp;
+    VkStencilOp    depthFailOp;
+    VkCompareOp    compareOp;
+    uint32_t       compareMask;
+    uint32_t       writeMask;
+    uint32_t       reference;
+} VkStencilOpState;
+
+typedef struct VkPipelineDepthStencilStateCreateInfo {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkPipelineDepthStencilStateCreateFlags    flags;
+    VkBool32                                  depthTestEnable;
+    VkBool32                                  depthWriteEnable;
+    VkCompareOp                               depthCompareOp;
+    VkBool32                                  depthBoundsTestEnable;
+    VkBool32                                  stencilTestEnable;
+    VkStencilOpState                          front;
+    VkStencilOpState                          back;
+    float                                     minDepthBounds;
+    float                                     maxDepthBounds;
+} VkPipelineDepthStencilStateCreateInfo;
+
+typedef struct VkPipelineColorBlendAttachmentState {
+    VkBool32                 blendEnable;
+    VkBlendFactor            srcColorBlendFactor;
+    VkBlendFactor            dstColorBlendFactor;
+    VkBlendOp                colorBlendOp;
+    VkBlendFactor            srcAlphaBlendFactor;
+    VkBlendFactor            dstAlphaBlendFactor;
+    VkBlendOp                alphaBlendOp;
+    VkColorComponentFlags    colorWriteMask;
+} VkPipelineColorBlendAttachmentState;
+
+typedef struct VkPipelineColorBlendStateCreateInfo {
+    VkStructureType                               sType;
+    const void*                                   pNext;
+    VkPipelineColorBlendStateCreateFlags          flags;
+    VkBool32                                      logicOpEnable;
+    VkLogicOp                                     logicOp;
+    uint32_t                                      attachmentCount;
+    const VkPipelineColorBlendAttachmentState*    pAttachments;
+    float                                         blendConstants[4];
+} VkPipelineColorBlendStateCreateInfo;
+
+typedef struct VkPipelineDynamicStateCreateInfo {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkPipelineDynamicStateCreateFlags    flags;
+    uint32_t                             dynamicStateCount;
+    const VkDynamicState*                pDynamicStates;
+} VkPipelineDynamicStateCreateInfo;
+
+typedef struct VkGraphicsPipelineCreateInfo {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    VkPipelineCreateFlags                            flags;
+    uint32_t                                         stageCount;
+    const VkPipelineShaderStageCreateInfo*           pStages;
+    const VkPipelineVertexInputStateCreateInfo*      pVertexInputState;
+    const VkPipelineInputAssemblyStateCreateInfo*    pInputAssemblyState;
+    const VkPipelineTessellationStateCreateInfo*     pTessellationState;
+    const VkPipelineViewportStateCreateInfo*         pViewportState;
+    const VkPipelineRasterizationStateCreateInfo*    pRasterizationState;
+    const VkPipelineMultisampleStateCreateInfo*      pMultisampleState;
+    const VkPipelineDepthStencilStateCreateInfo*     pDepthStencilState;
+    const VkPipelineColorBlendStateCreateInfo*       pColorBlendState;
+    const VkPipelineDynamicStateCreateInfo*          pDynamicState;
+    VkPipelineLayout                                 layout;
+    VkRenderPass                                     renderPass;
+    uint32_t                                         subpass;
+    VkPipeline                                       basePipelineHandle;
+    int32_t                                          basePipelineIndex;
+} VkGraphicsPipelineCreateInfo;
+
+typedef struct VkPushConstantRange {
+    VkShaderStageFlags    stageFlags;
+    uint32_t              offset;
+    uint32_t              size;
+} VkPushConstantRange;
+
+typedef struct VkPipelineLayoutCreateInfo {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkPipelineLayoutCreateFlags     flags;
+    uint32_t                        setLayoutCount;
+    const VkDescriptorSetLayout*    pSetLayouts;
+    uint32_t                        pushConstantRangeCount;
+    const VkPushConstantRange*      pPushConstantRanges;
+} VkPipelineLayoutCreateInfo;
+
+typedef struct VkSamplerCreateInfo {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkSamplerCreateFlags    flags;
+    VkFilter                magFilter;
+    VkFilter                minFilter;
+    VkSamplerMipmapMode     mipmapMode;
+    VkSamplerAddressMode    addressModeU;
+    VkSamplerAddressMode    addressModeV;
+    VkSamplerAddressMode    addressModeW;
+    float                   mipLodBias;
+    VkBool32                anisotropyEnable;
+    float                   maxAnisotropy;
+    VkBool32                compareEnable;
+    VkCompareOp             compareOp;
+    float                   minLod;
+    float                   maxLod;
+    VkBorderColor           borderColor;
+    VkBool32                unnormalizedCoordinates;
+} VkSamplerCreateInfo;
+
+typedef struct VkCopyDescriptorSet {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDescriptorSet    srcSet;
+    uint32_t           srcBinding;
+    uint32_t           srcArrayElement;
+    VkDescriptorSet    dstSet;
+    uint32_t           dstBinding;
+    uint32_t           dstArrayElement;
+    uint32_t           descriptorCount;
+} VkCopyDescriptorSet;
+
+typedef struct VkDescriptorBufferInfo {
+    VkBuffer        buffer;
+    VkDeviceSize    offset;
+    VkDeviceSize    range;
+} VkDescriptorBufferInfo;
+
+typedef struct VkDescriptorImageInfo {
+    VkSampler        sampler;
+    VkImageView      imageView;
+    VkImageLayout    imageLayout;
+} VkDescriptorImageInfo;
+
+typedef struct VkDescriptorPoolSize {
+    VkDescriptorType    type;
+    uint32_t            descriptorCount;
+} VkDescriptorPoolSize;
+
+typedef struct VkDescriptorPoolCreateInfo {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkDescriptorPoolCreateFlags    flags;
+    uint32_t                       maxSets;
+    uint32_t                       poolSizeCount;
+    const VkDescriptorPoolSize*    pPoolSizes;
+} VkDescriptorPoolCreateInfo;
+
+typedef struct VkDescriptorSetAllocateInfo {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkDescriptorPool                descriptorPool;
+    uint32_t                        descriptorSetCount;
+    const VkDescriptorSetLayout*    pSetLayouts;
+} VkDescriptorSetAllocateInfo;
+
+typedef struct VkDescriptorSetLayoutBinding {
+    uint32_t              binding;
+    VkDescriptorType      descriptorType;
+    uint32_t              descriptorCount;
+    VkShaderStageFlags    stageFlags;
+    const VkSampler*      pImmutableSamplers;
+} VkDescriptorSetLayoutBinding;
+
+typedef struct VkDescriptorSetLayoutCreateInfo {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkDescriptorSetLayoutCreateFlags       flags;
+    uint32_t                               bindingCount;
+    const VkDescriptorSetLayoutBinding*    pBindings;
+} VkDescriptorSetLayoutCreateInfo;
+
+typedef struct VkWriteDescriptorSet {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDescriptorSet                  dstSet;
+    uint32_t                         dstBinding;
+    uint32_t                         dstArrayElement;
+    uint32_t                         descriptorCount;
+    VkDescriptorType                 descriptorType;
+    const VkDescriptorImageInfo*     pImageInfo;
+    const VkDescriptorBufferInfo*    pBufferInfo;
+    const VkBufferView*              pTexelBufferView;
+} VkWriteDescriptorSet;
+
+typedef struct VkAttachmentDescription {
+    VkAttachmentDescriptionFlags    flags;
+    VkFormat                        format;
+    VkSampleCountFlagBits           samples;
+    VkAttachmentLoadOp              loadOp;
+    VkAttachmentStoreOp             storeOp;
+    VkAttachmentLoadOp              stencilLoadOp;
+    VkAttachmentStoreOp             stencilStoreOp;
+    VkImageLayout                   initialLayout;
+    VkImageLayout                   finalLayout;
+} VkAttachmentDescription;
+
+typedef struct VkAttachmentReference {
+    uint32_t         attachment;
+    VkImageLayout    layout;
+} VkAttachmentReference;
+
+typedef struct VkFramebufferCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkFramebufferCreateFlags    flags;
+    VkRenderPass                renderPass;
+    uint32_t                    attachmentCount;
+    const VkImageView*          pAttachments;
+    uint32_t                    width;
+    uint32_t                    height;
+    uint32_t                    layers;
+} VkFramebufferCreateInfo;
+
+typedef struct VkSubpassDescription {
+    VkSubpassDescriptionFlags       flags;
+    VkPipelineBindPoint             pipelineBindPoint;
+    uint32_t                        inputAttachmentCount;
+    const VkAttachmentReference*    pInputAttachments;
+    uint32_t                        colorAttachmentCount;
+    const VkAttachmentReference*    pColorAttachments;
+    const VkAttachmentReference*    pResolveAttachments;
+    const VkAttachmentReference*    pDepthStencilAttachment;
+    uint32_t                        preserveAttachmentCount;
+    const uint32_t*                 pPreserveAttachments;
+} VkSubpassDescription;
+
+typedef struct VkSubpassDependency {
+    uint32_t                srcSubpass;
+    uint32_t                dstSubpass;
+    VkPipelineStageFlags    srcStageMask;
+    VkPipelineStageFlags    dstStageMask;
+    VkAccessFlags           srcAccessMask;
+    VkAccessFlags           dstAccessMask;
+    VkDependencyFlags       dependencyFlags;
+} VkSubpassDependency;
+
+typedef struct VkRenderPassCreateInfo {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkRenderPassCreateFlags           flags;
+    uint32_t                          attachmentCount;
+    const VkAttachmentDescription*    pAttachments;
+    uint32_t                          subpassCount;
+    const VkSubpassDescription*       pSubpasses;
+    uint32_t                          dependencyCount;
+    const VkSubpassDependency*        pDependencies;
+} VkRenderPassCreateInfo;
+
+typedef struct VkCommandPoolCreateInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkCommandPoolCreateFlags    flags;
+    uint32_t                    queueFamilyIndex;
+} VkCommandPoolCreateInfo;
+
+typedef struct VkCommandBufferAllocateInfo {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkCommandPool           commandPool;
+    VkCommandBufferLevel    level;
+    uint32_t                commandBufferCount;
+} VkCommandBufferAllocateInfo;
+
+typedef struct VkCommandBufferInheritanceInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkRenderPass                     renderPass;
+    uint32_t                         subpass;
+    VkFramebuffer                    framebuffer;
+    VkBool32                         occlusionQueryEnable;
+    VkQueryControlFlags              queryFlags;
+    VkQueryPipelineStatisticFlags    pipelineStatistics;
+} VkCommandBufferInheritanceInfo;
+
+typedef struct VkCommandBufferBeginInfo {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkCommandBufferUsageFlags                flags;
+    const VkCommandBufferInheritanceInfo*    pInheritanceInfo;
+} VkCommandBufferBeginInfo;
+
+typedef struct VkBufferCopy {
+    VkDeviceSize    srcOffset;
+    VkDeviceSize    dstOffset;
+    VkDeviceSize    size;
+} VkBufferCopy;
+
+typedef struct VkImageSubresourceLayers {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              mipLevel;
+    uint32_t              baseArrayLayer;
+    uint32_t              layerCount;
+} VkImageSubresourceLayers;
+
+typedef struct VkBufferImageCopy {
+    VkDeviceSize                bufferOffset;
+    uint32_t                    bufferRowLength;
+    uint32_t                    bufferImageHeight;
+    VkImageSubresourceLayers    imageSubresource;
+    VkOffset3D                  imageOffset;
+    VkExtent3D                  imageExtent;
+} VkBufferImageCopy;
+
+typedef union VkClearColorValue {
+    float       float32[4];
+    int32_t     int32[4];
+    uint32_t    uint32[4];
+} VkClearColorValue;
+
+typedef struct VkClearDepthStencilValue {
+    float       depth;
+    uint32_t    stencil;
+} VkClearDepthStencilValue;
+
+typedef union VkClearValue {
+    VkClearColorValue           color;
+    VkClearDepthStencilValue    depthStencil;
+} VkClearValue;
+
+typedef struct VkClearAttachment {
+    VkImageAspectFlags    aspectMask;
+    uint32_t              colorAttachment;
+    VkClearValue          clearValue;
+} VkClearAttachment;
+
+typedef struct VkClearRect {
+    VkRect2D    rect;
+    uint32_t    baseArrayLayer;
+    uint32_t    layerCount;
+} VkClearRect;
+
+typedef struct VkImageBlit {
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffsets[2];
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffsets[2];
+} VkImageBlit;
+
+typedef struct VkImageCopy {
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffset;
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffset;
+    VkExtent3D                  extent;
+} VkImageCopy;
+
+typedef struct VkImageResolve {
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffset;
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffset;
+    VkExtent3D                  extent;
+} VkImageResolve;
+
+typedef struct VkRenderPassBeginInfo {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkRenderPass           renderPass;
+    VkFramebuffer          framebuffer;
+    VkRect2D               renderArea;
+    uint32_t               clearValueCount;
+    const VkClearValue*    pClearValues;
+} VkRenderPassBeginInfo;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateInstance)(const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance);
+typedef void (VKAPI_PTR *PFN_vkDestroyInstance)(VkInstance instance, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDevices)(VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddr)(VkInstance instance, const char* pName);
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetDeviceProcAddr)(VkDevice device, const char* pName);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDevice)(VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice);
+typedef void (VKAPI_PTR *PFN_vkDestroyDevice)(VkDevice device, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceExtensionProperties)(const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceExtensionProperties)(VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceLayerProperties)(uint32_t* pPropertyCount, VkLayerProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateDeviceLayerProperties)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue)(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit)(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueWaitIdle)(VkQueue queue);
+typedef VkResult (VKAPI_PTR *PFN_vkDeviceWaitIdle)(VkDevice device);
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateMemory)(VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory);
+typedef void (VKAPI_PTR *PFN_vkFreeMemory)(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkMapMemory)(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData);
+typedef void (VKAPI_PTR *PFN_vkUnmapMemory)(VkDevice device, VkDeviceMemory memory);
+typedef VkResult (VKAPI_PTR *PFN_vkFlushMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
+typedef VkResult (VKAPI_PTR *PFN_vkInvalidateMappedMemoryRanges)(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceMemoryCommitment)(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes);
+typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory)(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset);
+typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory)(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset);
+typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements)(VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements)(VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements)(VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueBindSparse)(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateFence)(VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef void (VKAPI_PTR *PFN_vkDestroyFence)(VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkResetFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences);
+typedef VkResult (VKAPI_PTR *PFN_vkGetFenceStatus)(VkDevice device, VkFence fence);
+typedef VkResult (VKAPI_PTR *PFN_vkWaitForFences)(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSemaphore)(VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore);
+typedef void (VKAPI_PTR *PFN_vkDestroySemaphore)(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateEvent)(VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent);
+typedef void (VKAPI_PTR *PFN_vkDestroyEvent)(VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetEventStatus)(VkDevice device, VkEvent event);
+typedef VkResult (VKAPI_PTR *PFN_vkSetEvent)(VkDevice device, VkEvent event);
+typedef VkResult (VKAPI_PTR *PFN_vkResetEvent)(VkDevice device, VkEvent event);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateQueryPool)(VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool);
+typedef void (VKAPI_PTR *PFN_vkDestroyQueryPool)(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetQueryPoolResults)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateBuffer)(VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer);
+typedef void (VKAPI_PTR *PFN_vkDestroyBuffer)(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferView)(VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView);
+typedef void (VKAPI_PTR *PFN_vkDestroyBufferView)(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateImage)(VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage);
+typedef void (VKAPI_PTR *PFN_vkDestroyImage)(VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout)(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateImageView)(VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView);
+typedef void (VKAPI_PTR *PFN_vkDestroyImageView)(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateShaderModule)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule);
+typedef void (VKAPI_PTR *PFN_vkDestroyShaderModule)(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineCache)(VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache);
+typedef void (VKAPI_PTR *PFN_vkDestroyPipelineCache)(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineCacheData)(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkMergePipelineCaches)(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateGraphicsPipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateComputePipelines)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef void (VKAPI_PTR *PFN_vkDestroyPipeline)(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePipelineLayout)(VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyPipelineLayout)(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSampler)(VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler);
+typedef void (VKAPI_PTR *PFN_vkDestroySampler)(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorSetLayout)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorSetLayout)(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorPool)(VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkResetDescriptorPool)(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags);
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateDescriptorSets)(VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets);
+typedef VkResult (VKAPI_PTR *PFN_vkFreeDescriptorSets)(VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets);
+typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSets)(VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateFramebuffer)(VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer);
+typedef void (VKAPI_PTR *PFN_vkDestroyFramebuffer)(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass)(VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+typedef void (VKAPI_PTR *PFN_vkDestroyRenderPass)(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkGetRenderAreaGranularity)(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateCommandPool)(VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool);
+typedef void (VKAPI_PTR *PFN_vkDestroyCommandPool)(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkResetCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags);
+typedef VkResult (VKAPI_PTR *PFN_vkAllocateCommandBuffers)(VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers);
+typedef void (VKAPI_PTR *PFN_vkFreeCommandBuffers)(VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers);
+typedef VkResult (VKAPI_PTR *PFN_vkBeginCommandBuffer)(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkEndCommandBuffer)(VkCommandBuffer commandBuffer);
+typedef VkResult (VKAPI_PTR *PFN_vkResetCommandBuffer)(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags);
+typedef void (VKAPI_PTR *PFN_vkCmdBindPipeline)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewport)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports);
+typedef void (VKAPI_PTR *PFN_vkCmdSetScissor)(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineWidth)(VkCommandBuffer commandBuffer, float lineWidth);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBias)(VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor);
+typedef void (VKAPI_PTR *PFN_vkCmdSetBlendConstants)(VkCommandBuffer commandBuffer, const float blendConstants[4]);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBounds)(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilCompareMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilWriteMask)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilReference)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference);
+typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorSets)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdBindIndexBuffer)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType);
+typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdDraw)(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexed)(VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatch)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchIndirect)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdBlitImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage)(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdUpdateBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdFillBuffer)(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data);
+typedef void (VKAPI_PTR *PFN_vkCmdClearColorImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges);
+typedef void (VKAPI_PTR *PFN_vkCmdClearDepthStencilImage)(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges);
+typedef void (VKAPI_PTR *PFN_vkCmdClearAttachments)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects);
+typedef void (VKAPI_PTR *PFN_vkCmdResolveImage)(VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdSetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdResetEvent)(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents)(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier)(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags);
+typedef void (VKAPI_PTR *PFN_vkCmdEndQuery)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query);
+typedef void (VKAPI_PTR *PFN_vkCmdResetQueryPool)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyQueryPoolResults)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags);
+typedef void (VKAPI_PTR *PFN_vkCmdPushConstants)(VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents);
+typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass)(VkCommandBuffer commandBuffer, VkSubpassContents contents);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass)(VkCommandBuffer commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdExecuteCommands)(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateInstance(
+    const VkInstanceCreateInfo*                 pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkInstance*                                 pInstance);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyInstance(
+    VkInstance                                  instance,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDevices(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceCount,
+    VkPhysicalDevice*                           pPhysicalDevices);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures*                   pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties*                         pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkImageFormatProperties*                    pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties*                 pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties*                    pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties*           pMemoryProperties);
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(
+    VkInstance                                  instance,
+    const char*                                 pName);
+
+VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(
+    VkDevice                                    device,
+    const char*                                 pName);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDevice(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDeviceCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDevice*                                   pDevice);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDevice(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceExtensionProperties(
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceExtensionProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const char*                                 pLayerName,
+    uint32_t*                                   pPropertyCount,
+    VkExtensionProperties*                      pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkLayerProperties*                          pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue(
+    VkDevice                                    device,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t                                    queueIndex,
+    VkQueue*                                    pQueue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo*                         pSubmits,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueWaitIdle(
+    VkQueue                                     queue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkDeviceWaitIdle(
+    VkDevice                                    device);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateMemory(
+    VkDevice                                    device,
+    const VkMemoryAllocateInfo*                 pAllocateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeviceMemory*                             pMemory);
+
+VKAPI_ATTR void VKAPI_CALL vkFreeMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                offset,
+    VkDeviceSize                                size,
+    VkMemoryMapFlags                            flags,
+    void**                                      ppData);
+
+VKAPI_ATTR void VKAPI_CALL vkUnmapMemory(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkFlushMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkInvalidateMappedMemoryRanges(
+    VkDevice                                    device,
+    uint32_t                                    memoryRangeCount,
+    const VkMappedMemoryRange*                  pMemoryRanges);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceMemoryCommitment(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    VkDeviceSize*                               pCommittedMemoryInBytes);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkDeviceMemory                              memory,
+    VkDeviceSize                                memoryOffset);
+
+VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkMemoryRequirements*                       pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    VkImage                                     image,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements*            pSparseMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkSampleCountFlagBits                       samples,
+    VkImageUsageFlags                           usage,
+    VkImageTiling                               tiling,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties*              pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueBindSparse(
+    VkQueue                                     queue,
+    uint32_t                                    bindInfoCount,
+    const VkBindSparseInfo*                     pBindInfo,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateFence(
+    VkDevice                                    device,
+    const VkFenceCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyFence(
+    VkDevice                                    device,
+    VkFence                                     fence,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceStatus(
+    VkDevice                                    device,
+    VkFence                                     fence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitForFences(
+    VkDevice                                    device,
+    uint32_t                                    fenceCount,
+    const VkFence*                              pFences,
+    VkBool32                                    waitAll,
+    uint64_t                                    timeout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSemaphore*                                pSemaphore);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySemaphore(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateEvent(
+    VkDevice                                    device,
+    const VkEventCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkEvent*                                    pEvent);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyEvent(
+    VkDevice                                    device,
+    VkEvent                                     event,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetEventStatus(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetEvent(
+    VkDevice                                    device,
+    VkEvent                                     event);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateQueryPool(
+    VkDevice                                    device,
+    const VkQueryPoolCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkQueryPool*                                pQueryPool);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetQueryPoolResults(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    size_t                                      dataSize,
+    void*                                       pData,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateBuffer(
+    VkDevice                                    device,
+    const VkBufferCreateInfo*                   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBuffer*                                   pBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyBuffer(
+    VkDevice                                    device,
+    VkBuffer                                    buffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferView(
+    VkDevice                                    device,
+    const VkBufferViewCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferView*                               pView);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyBufferView(
+    VkDevice                                    device,
+    VkBufferView                                bufferView,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImage(
+    VkDevice                                    device,
+    const VkImageCreateInfo*                    pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImage*                                    pImage);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyImage(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource*                   pSubresource,
+    VkSubresourceLayout*                        pLayout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImageView(
+    VkDevice                                    device,
+    const VkImageViewCreateInfo*                pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkImageView*                                pView);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyImageView(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateShaderModule(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkShaderModule*                             pShaderModule);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyShaderModule(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineCache(
+    VkDevice                                    device,
+    const VkPipelineCacheCreateInfo*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineCache*                            pPipelineCache);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineCache(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineCacheData(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMergePipelineCaches(
+    VkDevice                                    device,
+    VkPipelineCache                             dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkPipelineCache*                      pSrcCaches);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateGraphicsPipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkGraphicsPipelineCreateInfo*         pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateComputePipelines(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkComputePipelineCreateInfo*          pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipeline(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePipelineLayout(
+    VkDevice                                    device,
+    const VkPipelineLayoutCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipelineLayout*                           pPipelineLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPipelineLayout(
+    VkDevice                                    device,
+    VkPipelineLayout                            pipelineLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSampler(
+    VkDevice                                    device,
+    const VkSamplerCreateInfo*                  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSampler*                                  pSampler);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySampler(
+    VkDevice                                    device,
+    VkSampler                                   sampler,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorSetLayout(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorSetLayout*                      pSetLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorSetLayout(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       descriptorSetLayout,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorPool(
+    VkDevice                                    device,
+    const VkDescriptorPoolCreateInfo*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorPool*                           pDescriptorPool);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetDescriptorPool(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    VkDescriptorPoolResetFlags                  flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateDescriptorSets(
+    VkDevice                                    device,
+    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
+    VkDescriptorSet*                            pDescriptorSets);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkFreeDescriptorSets(
+    VkDevice                                    device,
+    VkDescriptorPool                            descriptorPool,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets);
+
+VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSets(
+    VkDevice                                    device,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites,
+    uint32_t                                    descriptorCopyCount,
+    const VkCopyDescriptorSet*                  pDescriptorCopies);
+
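+// Illustrative sketch of allocating and updating a descriptor set with the
+// entry points above (not normative; assumes an existing VkDevice `device`,
+// VkDescriptorPool `pool`, VkDescriptorSetLayout `layout` and uniform buffer
+// `ubo`, with error handling omitted):
+//
+//     VkDescriptorSetAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO };
+//     allocInfo.descriptorPool = pool;
+//     allocInfo.descriptorSetCount = 1;
+//     allocInfo.pSetLayouts = &layout;
+//     VkDescriptorSet set = VK_NULL_HANDLE;
+//     vkAllocateDescriptorSets(device, &allocInfo, &set);
+//
+//     VkDescriptorBufferInfo bufferInfo = { ubo, 0, VK_WHOLE_SIZE };
+//     VkWriteDescriptorSet write = { VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET };
+//     write.dstSet = set;
+//     write.dstBinding = 0;
+//     write.descriptorCount = 1;
+//     write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+//     write.pBufferInfo = &bufferInfo;
+//     vkUpdateDescriptorSets(device, 1, &write, 0, NULL);
+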
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateFramebuffer(
+    VkDevice                                    device,
+    const VkFramebufferCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFramebuffer*                              pFramebuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyFramebuffer(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo*               pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyRenderPass(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkGetRenderAreaGranularity(
+    VkDevice                                    device,
+    VkRenderPass                                renderPass,
+    VkExtent2D*                                 pGranularity);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateCommandPool(
+    VkDevice                                    device,
+    const VkCommandPoolCreateInfo*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCommandPool*                              pCommandPool);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolResetFlags                     flags);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAllocateCommandBuffers(
+    VkDevice                                    device,
+    const VkCommandBufferAllocateInfo*          pAllocateInfo,
+    VkCommandBuffer*                            pCommandBuffers);
+
+VKAPI_ATTR void VKAPI_CALL vkFreeCommandBuffers(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBeginCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    const VkCommandBufferBeginInfo*             pBeginInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEndCommandBuffer(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkResetCommandBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkCommandBufferResetFlags                   flags);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindPipeline(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewport(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetScissor(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstScissor,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineWidth(
+    VkCommandBuffer                             commandBuffer,
+    float                                       lineWidth);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBias(
+    VkCommandBuffer                             commandBuffer,
+    float                                       depthBiasConstantFactor,
+    float                                       depthBiasClamp,
+    float                                       depthBiasSlopeFactor);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetBlendConstants(
+    VkCommandBuffer                             commandBuffer,
+    const float                                 blendConstants[4]);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBounds(
+    VkCommandBuffer                             commandBuffer,
+    float                                       minDepthBounds,
+    float                                       maxDepthBounds);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilCompareMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    compareMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilWriteMask(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    writeMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilReference(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    uint32_t                                    reference);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorSets(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    descriptorSetCount,
+    const VkDescriptorSet*                      pDescriptorSets,
+    uint32_t                                    dynamicOffsetCount,
+    const uint32_t*                             pDynamicOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindIndexBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkIndexType                                 indexType);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDraw(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstVertex,
+    uint32_t                                    firstInstance);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexed(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    indexCount,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstIndex,
+    int32_t                                     vertexOffset,
+    uint32_t                                    firstInstance);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatch(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchIndirect(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferCopy*                         pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageCopy*                          pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageBlit*                          pRegions,
+    VkFilter                                    filter);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    srcBuffer,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkBuffer                                    dstBuffer,
+    uint32_t                                    regionCount,
+    const VkBufferImageCopy*                    pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdUpdateBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                dataSize,
+    const void*                                 pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdFillBuffer(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                size,
+    uint32_t                                    data);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearColorImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearColorValue*                    pColor,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearDepthStencilImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     image,
+    VkImageLayout                               imageLayout,
+    const VkClearDepthStencilValue*             pDepthStencil,
+    uint32_t                                    rangeCount,
+    const VkImageSubresourceRange*              pRanges);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdClearAttachments(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkClearAttachment*                    pAttachments,
+    uint32_t                                    rectCount,
+    const VkClearRect*                          pRects);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage(
+    VkCommandBuffer                             commandBuffer,
+    VkImage                                     srcImage,
+    VkImageLayout                               srcImageLayout,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    uint32_t                                    regionCount,
+    const VkImageResolve*                       pRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags                        stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags                        srcStageMask,
+    VkPipelineStageFlags                        dstStageMask,
+    VkDependencyFlags                           dependencyFlags,
+    uint32_t                                    memoryBarrierCount,
+    const VkMemoryBarrier*                      pMemoryBarriers,
+    uint32_t                                    bufferMemoryBarrierCount,
+    const VkBufferMemoryBarrier*                pBufferMemoryBarriers,
+    uint32_t                                    imageMemoryBarrierCount,
+    const VkImageMemoryBarrier*                 pImageMemoryBarriers);
+
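+// Illustrative command-buffer recording sketch using the prototypes above
+// (not normative; assumes an allocated VkCommandBuffer `cmd` and omits error
+// handling):
+//
+//     VkCommandBufferBeginInfo beginInfo = { VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO };
+//     beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
+//     vkBeginCommandBuffer(cmd, &beginInfo);
+//
+//     VkMemoryBarrier barrier = { VK_STRUCTURE_TYPE_MEMORY_BARRIER };
+//     barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
+//     barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
+//     vkCmdPipelineBarrier(cmd, VK_PIPELINE_STAGE_TRANSFER_BIT,
+//                          VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0,
+//                          1, &barrier, 0, NULL, 0, NULL);
+//
+//     vkEndCommandBuffer(cmd);
+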
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndQuery(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetQueryPool(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyQueryPoolResults(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    VkDeviceSize                                stride,
+    VkQueryResultFlags                          flags);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPushConstants(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineLayout                            layout,
+    VkShaderStageFlags                          stageFlags,
+    uint32_t                                    offset,
+    uint32_t                                    size,
+    const void*                                 pValues);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    VkSubpassContents                           contents);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass(
+    VkCommandBuffer                             commandBuffer,
+    VkSubpassContents                           contents);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass(
+    VkCommandBuffer                             commandBuffer);
+
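+// Illustrative render-pass recording sketch with the commands above (not
+// normative; assumes a command buffer `cmd` in the recording state plus an
+// existing render pass, framebuffer, extent and graphics pipeline):
+//
+//     VkRenderPassBeginInfo rpBegin = { VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO };
+//     rpBegin.renderPass = renderPass;
+//     rpBegin.framebuffer = framebuffer;
+//     rpBegin.renderArea.extent = extent;
+//     vkCmdBeginRenderPass(cmd, &rpBegin, VK_SUBPASS_CONTENTS_INLINE);
+//     vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, graphicsPipeline);
+//     vkCmdDraw(cmd, 3, 1, 0, 0);  /* one instance of a three-vertex triangle */
+//     vkCmdEndRenderPass(cmd);
+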
+VKAPI_ATTR void VKAPI_CALL vkCmdExecuteCommands(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    commandBufferCount,
+    const VkCommandBuffer*                      pCommandBuffers);
+#endif
+
+
+#define VK_VERSION_1_1 1
+// Vulkan 1.1 version number
+#define VK_API_VERSION_1_1 VK_MAKE_API_VERSION(0, 1, 1, 0)  // Patch version should always be set to 0
+
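+// Illustrative sketch of requesting Vulkan 1.1 at instance creation (not
+// normative; instance-creation boilerplate beyond VkApplicationInfo omitted):
+//
+//     uint32_t supported = VK_API_VERSION_1_0;
+//     vkEnumerateInstanceVersion(&supported);  /* exposed by Vulkan 1.1 loaders */
+//     VkApplicationInfo appInfo = { VK_STRUCTURE_TYPE_APPLICATION_INFO };
+//     appInfo.apiVersion = VK_API_VERSION_1_1;
+//     /* chain appInfo into VkInstanceCreateInfo::pApplicationInfo, then vkCreateInstance */
+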
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSamplerYcbcrConversion)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDescriptorUpdateTemplate)
+#define VK_MAX_DEVICE_GROUP_SIZE          32U
+#define VK_LUID_SIZE                      8U
+#define VK_QUEUE_FAMILY_EXTERNAL          (~1U)
+
+typedef enum VkPointClippingBehavior {
+    VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES = 0,
+    VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY = 1,
+    VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES_KHR = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
+    VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY_KHR = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY,
+    VK_POINT_CLIPPING_BEHAVIOR_MAX_ENUM = 0x7FFFFFFF
+} VkPointClippingBehavior;
+
+typedef enum VkTessellationDomainOrigin {
+    VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT = 0,
+    VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT = 1,
+    VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
+    VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT_KHR = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT,
+    VK_TESSELLATION_DOMAIN_ORIGIN_MAX_ENUM = 0x7FFFFFFF
+} VkTessellationDomainOrigin;
+
+typedef enum VkSamplerYcbcrModelConversion {
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY = 0,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY = 1,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709 = 2,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601 = 3,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020 = 4,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020_KHR = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020,
+    VK_SAMPLER_YCBCR_MODEL_CONVERSION_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerYcbcrModelConversion;
+
+typedef enum VkSamplerYcbcrRange {
+    VK_SAMPLER_YCBCR_RANGE_ITU_FULL = 0,
+    VK_SAMPLER_YCBCR_RANGE_ITU_NARROW = 1,
+    VK_SAMPLER_YCBCR_RANGE_ITU_FULL_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+    VK_SAMPLER_YCBCR_RANGE_ITU_NARROW_KHR = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW,
+    VK_SAMPLER_YCBCR_RANGE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerYcbcrRange;
+
+typedef enum VkChromaLocation {
+    VK_CHROMA_LOCATION_COSITED_EVEN = 0,
+    VK_CHROMA_LOCATION_MIDPOINT = 1,
+    VK_CHROMA_LOCATION_COSITED_EVEN_KHR = VK_CHROMA_LOCATION_COSITED_EVEN,
+    VK_CHROMA_LOCATION_MIDPOINT_KHR = VK_CHROMA_LOCATION_MIDPOINT,
+    VK_CHROMA_LOCATION_MAX_ENUM = 0x7FFFFFFF
+} VkChromaLocation;
+
+typedef enum VkDescriptorUpdateTemplateType {
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET = 0,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR = 1,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET_KHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
+    VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorUpdateTemplateType;
+
+typedef enum VkSubgroupFeatureFlagBits {
+    VK_SUBGROUP_FEATURE_BASIC_BIT = 0x00000001,
+    VK_SUBGROUP_FEATURE_VOTE_BIT = 0x00000002,
+    VK_SUBGROUP_FEATURE_ARITHMETIC_BIT = 0x00000004,
+    VK_SUBGROUP_FEATURE_BALLOT_BIT = 0x00000008,
+    VK_SUBGROUP_FEATURE_SHUFFLE_BIT = 0x00000010,
+    VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT = 0x00000020,
+    VK_SUBGROUP_FEATURE_CLUSTERED_BIT = 0x00000040,
+    VK_SUBGROUP_FEATURE_QUAD_BIT = 0x00000080,
+    VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV = 0x00000100,
+    VK_SUBGROUP_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSubgroupFeatureFlagBits;
+typedef VkFlags VkSubgroupFeatureFlags;
+
+typedef enum VkPeerMemoryFeatureFlagBits {
+    VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT = 0x00000001,
+    VK_PEER_MEMORY_FEATURE_COPY_DST_BIT = 0x00000002,
+    VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT = 0x00000004,
+    VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT = 0x00000008,
+    VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
+    VK_PEER_MEMORY_FEATURE_COPY_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
+    VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
+    VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT_KHR = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT,
+    VK_PEER_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPeerMemoryFeatureFlagBits;
+typedef VkFlags VkPeerMemoryFeatureFlags;
+
+typedef enum VkMemoryAllocateFlagBits {
+    VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT = 0x00000001,
+    VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT = 0x00000002,
+    VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT = 0x00000004,
+    VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
+    VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,
+    VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
+    VK_MEMORY_ALLOCATE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkMemoryAllocateFlagBits;
+typedef VkFlags VkMemoryAllocateFlags;
+typedef VkFlags VkCommandPoolTrimFlags;
+typedef VkFlags VkDescriptorUpdateTemplateCreateFlags;
+
+typedef enum VkExternalMemoryHandleTypeFlagBits {
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT = 0x00000008,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT = 0x00000010,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT = 0x00000020,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT = 0x00000040,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT = 0x00000200,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID = 0x00000400,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT = 0x00000080,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT = 0x00000100,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA = 0x00000800,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV = 0x00001000,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT_KHR = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalMemoryHandleTypeFlagBits;
+typedef VkFlags VkExternalMemoryHandleTypeFlags;
+
+typedef enum VkExternalMemoryFeatureFlagBits {
+    VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT = 0x00000001,
+    VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT = 0x00000002,
+    VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT = 0x00000004,
+    VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
+    VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
+    VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT,
+    VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalMemoryFeatureFlagBits;
+typedef VkFlags VkExternalMemoryFeatureFlags;
+
+typedef enum VkExternalFenceHandleTypeFlagBits {
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000008,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
+    VK_EXTERNAL_FENCE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalFenceHandleTypeFlagBits;
+typedef VkFlags VkExternalFenceHandleTypeFlags;
+
+typedef enum VkExternalFenceFeatureFlagBits {
+    VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT = 0x00000001,
+    VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT = 0x00000002,
+    VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
+    VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT,
+    VK_EXTERNAL_FENCE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalFenceFeatureFlagBits;
+typedef VkFlags VkExternalFenceFeatureFlags;
+
+typedef enum VkFenceImportFlagBits {
+    VK_FENCE_IMPORT_TEMPORARY_BIT = 0x00000001,
+    VK_FENCE_IMPORT_TEMPORARY_BIT_KHR = VK_FENCE_IMPORT_TEMPORARY_BIT,
+    VK_FENCE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkFenceImportFlagBits;
+typedef VkFlags VkFenceImportFlags;
+
+typedef enum VkSemaphoreImportFlagBits {
+    VK_SEMAPHORE_IMPORT_TEMPORARY_BIT = 0x00000001,
+    VK_SEMAPHORE_IMPORT_TEMPORARY_BIT_KHR = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
+    VK_SEMAPHORE_IMPORT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSemaphoreImportFlagBits;
+typedef VkFlags VkSemaphoreImportFlags;
+
+typedef enum VkExternalSemaphoreHandleTypeFlagBits {
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT = 0x00000001,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT = 0x00000002,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT = 0x00000004,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT = 0x00000008,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT = 0x00000010,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA = 0x00000080,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT_KHR = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
+    VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalSemaphoreHandleTypeFlagBits;
+typedef VkFlags VkExternalSemaphoreHandleTypeFlags;
+
+typedef enum VkExternalSemaphoreFeatureFlagBits {
+    VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT = 0x00000001,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT = 0x00000002,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT_KHR = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT,
+    VK_EXTERNAL_SEMAPHORE_FEATURE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkExternalSemaphoreFeatureFlagBits;
+typedef VkFlags VkExternalSemaphoreFeatureFlags;
+typedef struct VkPhysicalDeviceSubgroupProperties {
+    VkStructureType           sType;
+    void*                     pNext;
+    uint32_t                  subgroupSize;
+    VkShaderStageFlags        supportedStages;
+    VkSubgroupFeatureFlags    supportedOperations;
+    VkBool32                  quadOperationsInAllStages;
+} VkPhysicalDeviceSubgroupProperties;
+
+typedef struct VkBindBufferMemoryInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+    VkDeviceMemory     memory;
+    VkDeviceSize       memoryOffset;
+} VkBindBufferMemoryInfo;
+
+typedef struct VkBindImageMemoryInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    VkDeviceMemory     memory;
+    VkDeviceSize       memoryOffset;
+} VkBindImageMemoryInfo;
+
+typedef struct VkPhysicalDevice16BitStorageFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           storageBuffer16BitAccess;
+    VkBool32           uniformAndStorageBuffer16BitAccess;
+    VkBool32           storagePushConstant16;
+    VkBool32           storageInputOutput16;
+} VkPhysicalDevice16BitStorageFeatures;
+
+typedef struct VkMemoryDedicatedRequirements {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           prefersDedicatedAllocation;
+    VkBool32           requiresDedicatedAllocation;
+} VkMemoryDedicatedRequirements;
+
+typedef struct VkMemoryDedicatedAllocateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    VkBuffer           buffer;
+} VkMemoryDedicatedAllocateInfo;
+
+typedef struct VkMemoryAllocateFlagsInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkMemoryAllocateFlags    flags;
+    uint32_t                 deviceMask;
+} VkMemoryAllocateFlagsInfo;
+
+typedef struct VkDeviceGroupRenderPassBeginInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceMask;
+    uint32_t           deviceRenderAreaCount;
+    const VkRect2D*    pDeviceRenderAreas;
+} VkDeviceGroupRenderPassBeginInfo;
+
+typedef struct VkDeviceGroupCommandBufferBeginInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceMask;
+} VkDeviceGroupCommandBufferBeginInfo;
+
+typedef struct VkDeviceGroupSubmitInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           waitSemaphoreCount;
+    const uint32_t*    pWaitSemaphoreDeviceIndices;
+    uint32_t           commandBufferCount;
+    const uint32_t*    pCommandBufferDeviceMasks;
+    uint32_t           signalSemaphoreCount;
+    const uint32_t*    pSignalSemaphoreDeviceIndices;
+} VkDeviceGroupSubmitInfo;
+
+typedef struct VkDeviceGroupBindSparseInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           resourceDeviceIndex;
+    uint32_t           memoryDeviceIndex;
+} VkDeviceGroupBindSparseInfo;
+
+typedef struct VkBindBufferMemoryDeviceGroupInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceIndexCount;
+    const uint32_t*    pDeviceIndices;
+} VkBindBufferMemoryDeviceGroupInfo;
+
+typedef struct VkBindImageMemoryDeviceGroupInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           deviceIndexCount;
+    const uint32_t*    pDeviceIndices;
+    uint32_t           splitInstanceBindRegionCount;
+    const VkRect2D*    pSplitInstanceBindRegions;
+} VkBindImageMemoryDeviceGroupInfo;
+
+typedef struct VkPhysicalDeviceGroupProperties {
+    VkStructureType     sType;
+    void*               pNext;
+    uint32_t            physicalDeviceCount;
+    VkPhysicalDevice    physicalDevices[VK_MAX_DEVICE_GROUP_SIZE];
+    VkBool32            subsetAllocation;
+} VkPhysicalDeviceGroupProperties;
+
+typedef struct VkDeviceGroupDeviceCreateInfo {
+    VkStructureType            sType;
+    const void*                pNext;
+    uint32_t                   physicalDeviceCount;
+    const VkPhysicalDevice*    pPhysicalDevices;
+} VkDeviceGroupDeviceCreateInfo;
+
+typedef struct VkBufferMemoryRequirementsInfo2 {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+} VkBufferMemoryRequirementsInfo2;
+
+typedef struct VkImageMemoryRequirementsInfo2 {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+} VkImageMemoryRequirementsInfo2;
+
+typedef struct VkImageSparseMemoryRequirementsInfo2 {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+} VkImageSparseMemoryRequirementsInfo2;
+
+typedef struct VkMemoryRequirements2 {
+    VkStructureType         sType;
+    void*                   pNext;
+    VkMemoryRequirements    memoryRequirements;
+} VkMemoryRequirements2;
+
+typedef struct VkSparseImageMemoryRequirements2 {
+    VkStructureType                    sType;
+    void*                              pNext;
+    VkSparseImageMemoryRequirements    memoryRequirements;
+} VkSparseImageMemoryRequirements2;
+
+typedef struct VkPhysicalDeviceFeatures2 {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkPhysicalDeviceFeatures    features;
+} VkPhysicalDeviceFeatures2;
+
+typedef struct VkPhysicalDeviceProperties2 {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkPhysicalDeviceProperties    properties;
+} VkPhysicalDeviceProperties2;
+
+typedef struct VkFormatProperties2 {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkFormatProperties    formatProperties;
+} VkFormatProperties2;
+
+typedef struct VkImageFormatProperties2 {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkImageFormatProperties    imageFormatProperties;
+} VkImageFormatProperties2;
+
+typedef struct VkPhysicalDeviceImageFormatInfo2 {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkFormat              format;
+    VkImageType           type;
+    VkImageTiling         tiling;
+    VkImageUsageFlags     usage;
+    VkImageCreateFlags    flags;
+} VkPhysicalDeviceImageFormatInfo2;
+
+typedef struct VkQueueFamilyProperties2 {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkQueueFamilyProperties    queueFamilyProperties;
+} VkQueueFamilyProperties2;
+
+typedef struct VkPhysicalDeviceMemoryProperties2 {
+    VkStructureType                     sType;
+    void*                               pNext;
+    VkPhysicalDeviceMemoryProperties    memoryProperties;
+} VkPhysicalDeviceMemoryProperties2;
+
+typedef struct VkSparseImageFormatProperties2 {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkSparseImageFormatProperties    properties;
+} VkSparseImageFormatProperties2;
+
+typedef struct VkPhysicalDeviceSparseImageFormatInfo2 {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkFormat                 format;
+    VkImageType              type;
+    VkSampleCountFlagBits    samples;
+    VkImageUsageFlags        usage;
+    VkImageTiling            tiling;
+} VkPhysicalDeviceSparseImageFormatInfo2;
+
+typedef struct VkPhysicalDevicePointClippingProperties {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkPointClippingBehavior    pointClippingBehavior;
+} VkPhysicalDevicePointClippingProperties;
+
+typedef struct VkInputAttachmentAspectReference {
+    uint32_t              subpass;
+    uint32_t              inputAttachmentIndex;
+    VkImageAspectFlags    aspectMask;
+} VkInputAttachmentAspectReference;
+
+typedef struct VkRenderPassInputAttachmentAspectCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   aspectReferenceCount;
+    const VkInputAttachmentAspectReference*    pAspectReferences;
+} VkRenderPassInputAttachmentAspectCreateInfo;
+
+typedef struct VkImageViewUsageCreateInfo {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkImageUsageFlags    usage;
+} VkImageViewUsageCreateInfo;
+
+typedef struct VkPipelineTessellationDomainOriginStateCreateInfo {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkTessellationDomainOrigin    domainOrigin;
+} VkPipelineTessellationDomainOriginStateCreateInfo;
+
+typedef struct VkRenderPassMultiviewCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           subpassCount;
+    const uint32_t*    pViewMasks;
+    uint32_t           dependencyCount;
+    const int32_t*     pViewOffsets;
+    uint32_t           correlationMaskCount;
+    const uint32_t*    pCorrelationMasks;
+} VkRenderPassMultiviewCreateInfo;
+
+typedef struct VkPhysicalDeviceMultiviewFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           multiview;
+    VkBool32           multiviewGeometryShader;
+    VkBool32           multiviewTessellationShader;
+} VkPhysicalDeviceMultiviewFeatures;
+
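+// Illustrative sketch of an extended feature query: chain the multiview
+// feature struct above into VkPhysicalDeviceFeatures2 via pNext and query it
+// with vkGetPhysicalDeviceFeatures2 (not normative; assumes a valid
+// VkPhysicalDevice `physicalDevice`):
+//
+//     VkPhysicalDeviceMultiviewFeatures multiview = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES };
+//     VkPhysicalDeviceFeatures2 features2 = { VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2 };
+//     features2.pNext = &multiview;
+//     vkGetPhysicalDeviceFeatures2(physicalDevice, &features2);
+//     /* multiview.multiview now reports whether multiview rendering is supported */
+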
+typedef struct VkPhysicalDeviceMultiviewProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxMultiviewViewCount;
+    uint32_t           maxMultiviewInstanceIndex;
+} VkPhysicalDeviceMultiviewProperties;
+
+typedef struct VkPhysicalDeviceVariablePointersFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           variablePointersStorageBuffer;
+    VkBool32           variablePointers;
+} VkPhysicalDeviceVariablePointersFeatures;
+
+typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeatures;
+
+typedef struct VkPhysicalDeviceProtectedMemoryFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           protectedMemory;
+} VkPhysicalDeviceProtectedMemoryFeatures;
+
+typedef struct VkPhysicalDeviceProtectedMemoryProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           protectedNoFault;
+} VkPhysicalDeviceProtectedMemoryProperties;
+
+typedef struct VkDeviceQueueInfo2 {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkDeviceQueueCreateFlags    flags;
+    uint32_t                    queueFamilyIndex;
+    uint32_t                    queueIndex;
+} VkDeviceQueueInfo2;
+
+typedef struct VkProtectedSubmitInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           protectedSubmit;
+} VkProtectedSubmitInfo;
+
+typedef struct VkSamplerYcbcrConversionCreateInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkFormat                         format;
+    VkSamplerYcbcrModelConversion    ycbcrModel;
+    VkSamplerYcbcrRange              ycbcrRange;
+    VkComponentMapping               components;
+    VkChromaLocation                 xChromaOffset;
+    VkChromaLocation                 yChromaOffset;
+    VkFilter                         chromaFilter;
+    VkBool32                         forceExplicitReconstruction;
+} VkSamplerYcbcrConversionCreateInfo;
+
+typedef struct VkSamplerYcbcrConversionInfo {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkSamplerYcbcrConversion    conversion;
+} VkSamplerYcbcrConversionInfo;
+
+typedef struct VkBindImagePlaneMemoryInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageAspectFlagBits    planeAspect;
+} VkBindImagePlaneMemoryInfo;
+
+typedef struct VkImagePlaneMemoryRequirementsInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageAspectFlagBits    planeAspect;
+} VkImagePlaneMemoryRequirementsInfo;
+
+typedef struct VkPhysicalDeviceSamplerYcbcrConversionFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           samplerYcbcrConversion;
+} VkPhysicalDeviceSamplerYcbcrConversionFeatures;
+
+typedef struct VkSamplerYcbcrConversionImageFormatProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           combinedImageSamplerDescriptorCount;
+} VkSamplerYcbcrConversionImageFormatProperties;
+
+typedef struct VkDescriptorUpdateTemplateEntry {
+    uint32_t            dstBinding;
+    uint32_t            dstArrayElement;
+    uint32_t            descriptorCount;
+    VkDescriptorType    descriptorType;
+    size_t              offset;
+    size_t              stride;
+} VkDescriptorUpdateTemplateEntry;
+
+typedef struct VkDescriptorUpdateTemplateCreateInfo {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkDescriptorUpdateTemplateCreateFlags     flags;
+    uint32_t                                  descriptorUpdateEntryCount;
+    const VkDescriptorUpdateTemplateEntry*    pDescriptorUpdateEntries;
+    VkDescriptorUpdateTemplateType            templateType;
+    VkDescriptorSetLayout                     descriptorSetLayout;
+    VkPipelineBindPoint                       pipelineBindPoint;
+    VkPipelineLayout                          pipelineLayout;
+    uint32_t                                  set;
+} VkDescriptorUpdateTemplateCreateInfo;
+
+typedef struct VkExternalMemoryProperties {
+    VkExternalMemoryFeatureFlags       externalMemoryFeatures;
+    VkExternalMemoryHandleTypeFlags    exportFromImportedHandleTypes;
+    VkExternalMemoryHandleTypeFlags    compatibleHandleTypes;
+} VkExternalMemoryProperties;
+
+typedef struct VkPhysicalDeviceExternalImageFormatInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkPhysicalDeviceExternalImageFormatInfo;
+
+typedef struct VkExternalImageFormatProperties {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkExternalMemoryProperties    externalMemoryProperties;
+} VkExternalImageFormatProperties;
+
+typedef struct VkPhysicalDeviceExternalBufferInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkBufferCreateFlags                   flags;
+    VkBufferUsageFlags                    usage;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkPhysicalDeviceExternalBufferInfo;
+
+typedef struct VkExternalBufferProperties {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkExternalMemoryProperties    externalMemoryProperties;
+} VkExternalBufferProperties;
+
+typedef struct VkPhysicalDeviceIDProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint8_t            deviceUUID[VK_UUID_SIZE];
+    uint8_t            driverUUID[VK_UUID_SIZE];
+    uint8_t            deviceLUID[VK_LUID_SIZE];
+    uint32_t           deviceNodeMask;
+    VkBool32           deviceLUIDValid;
+} VkPhysicalDeviceIDProperties;
+
+typedef struct VkExternalMemoryImageCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkExternalMemoryHandleTypeFlags    handleTypes;
+} VkExternalMemoryImageCreateInfo;
+
+typedef struct VkExternalMemoryBufferCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkExternalMemoryHandleTypeFlags    handleTypes;
+} VkExternalMemoryBufferCreateInfo;
+
+typedef struct VkExportMemoryAllocateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkExternalMemoryHandleTypeFlags    handleTypes;
+} VkExportMemoryAllocateInfo;
+
+typedef struct VkPhysicalDeviceExternalFenceInfo {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+} VkPhysicalDeviceExternalFenceInfo;
+
+typedef struct VkExternalFenceProperties {
+    VkStructureType                   sType;
+    void*                             pNext;
+    VkExternalFenceHandleTypeFlags    exportFromImportedHandleTypes;
+    VkExternalFenceHandleTypeFlags    compatibleHandleTypes;
+    VkExternalFenceFeatureFlags       externalFenceFeatures;
+} VkExternalFenceProperties;
+
+typedef struct VkExportFenceCreateInfo {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkExternalFenceHandleTypeFlags    handleTypes;
+} VkExportFenceCreateInfo;
+
+typedef struct VkExportSemaphoreCreateInfo {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalSemaphoreHandleTypeFlags    handleTypes;
+} VkExportSemaphoreCreateInfo;
+
+typedef struct VkPhysicalDeviceExternalSemaphoreInfo {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+} VkPhysicalDeviceExternalSemaphoreInfo;
+
+typedef struct VkExternalSemaphoreProperties {
+    VkStructureType                       sType;
+    void*                                 pNext;
+    VkExternalSemaphoreHandleTypeFlags    exportFromImportedHandleTypes;
+    VkExternalSemaphoreHandleTypeFlags    compatibleHandleTypes;
+    VkExternalSemaphoreFeatureFlags       externalSemaphoreFeatures;
+} VkExternalSemaphoreProperties;
+
+typedef struct VkPhysicalDeviceMaintenance3Properties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxPerSetDescriptors;
+    VkDeviceSize       maxMemoryAllocationSize;
+} VkPhysicalDeviceMaintenance3Properties;
+
+typedef struct VkDescriptorSetLayoutSupport {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           supported;
+} VkDescriptorSetLayoutSupport;
+
+typedef struct VkPhysicalDeviceShaderDrawParametersFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderDrawParameters;
+} VkPhysicalDeviceShaderDrawParametersFeatures;
+
+typedef VkPhysicalDeviceShaderDrawParametersFeatures VkPhysicalDeviceShaderDrawParameterFeatures;
+
+typedef VkResult (VKAPI_PTR *PFN_vkEnumerateInstanceVersion)(uint32_t* pApiVersion);
+typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeatures)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMask)(VkCommandBuffer commandBuffer, uint32_t deviceMask);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchBase)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroups)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
+typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties);
+typedef void (VKAPI_PTR *PFN_vkTrimCommandPool)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceQueue2)(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversion)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion);
+typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversion)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplate)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplate)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplate)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFenceProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphoreProperties)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupport)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceVersion(
+    uint32_t*                                   pApiVersion);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeatures(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMask(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBase(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroups(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkTrimCommandPool(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceQueue2(
+    VkDevice                                    device,
+    const VkDeviceQueueInfo2*                   pQueueInfo,
+    VkQueue*                                    pQueue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversion(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversion(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplate(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplate(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFenceProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphoreProperties(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupport(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+#endif
+
+
+#define VK_VERSION_1_2 1
+// Vulkan 1.2 version number
+#define VK_API_VERSION_1_2 VK_MAKE_API_VERSION(0, 1, 2, 0)// Patch version should always be set to 0
+
+#define VK_MAX_DRIVER_NAME_SIZE           256U
+#define VK_MAX_DRIVER_INFO_SIZE           256U
+
+typedef enum VkDriverId {
+    VK_DRIVER_ID_AMD_PROPRIETARY = 1,
+    VK_DRIVER_ID_AMD_OPEN_SOURCE = 2,
+    VK_DRIVER_ID_MESA_RADV = 3,
+    VK_DRIVER_ID_NVIDIA_PROPRIETARY = 4,
+    VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS = 5,
+    VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA = 6,
+    VK_DRIVER_ID_IMAGINATION_PROPRIETARY = 7,
+    VK_DRIVER_ID_QUALCOMM_PROPRIETARY = 8,
+    VK_DRIVER_ID_ARM_PROPRIETARY = 9,
+    VK_DRIVER_ID_GOOGLE_SWIFTSHADER = 10,
+    VK_DRIVER_ID_GGP_PROPRIETARY = 11,
+    VK_DRIVER_ID_BROADCOM_PROPRIETARY = 12,
+    VK_DRIVER_ID_MESA_LLVMPIPE = 13,
+    VK_DRIVER_ID_MOLTENVK = 14,
+    VK_DRIVER_ID_COREAVI_PROPRIETARY = 15,
+    VK_DRIVER_ID_JUICE_PROPRIETARY = 16,
+    VK_DRIVER_ID_VERISILICON_PROPRIETARY = 17,
+    VK_DRIVER_ID_MESA_TURNIP = 18,
+    VK_DRIVER_ID_MESA_V3DV = 19,
+    VK_DRIVER_ID_MESA_PANVK = 20,
+    VK_DRIVER_ID_SAMSUNG_PROPRIETARY = 21,
+    VK_DRIVER_ID_MESA_VENUS = 22,
+    VK_DRIVER_ID_MESA_DOZEN = 23,
+    VK_DRIVER_ID_MESA_NVK = 24,
+    VK_DRIVER_ID_AMD_PROPRIETARY_KHR = VK_DRIVER_ID_AMD_PROPRIETARY,
+    VK_DRIVER_ID_AMD_OPEN_SOURCE_KHR = VK_DRIVER_ID_AMD_OPEN_SOURCE,
+    VK_DRIVER_ID_MESA_RADV_KHR = VK_DRIVER_ID_MESA_RADV,
+    VK_DRIVER_ID_NVIDIA_PROPRIETARY_KHR = VK_DRIVER_ID_NVIDIA_PROPRIETARY,
+    VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS_KHR = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS,
+    VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA_KHR = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA,
+    VK_DRIVER_ID_IMAGINATION_PROPRIETARY_KHR = VK_DRIVER_ID_IMAGINATION_PROPRIETARY,
+    VK_DRIVER_ID_QUALCOMM_PROPRIETARY_KHR = VK_DRIVER_ID_QUALCOMM_PROPRIETARY,
+    VK_DRIVER_ID_ARM_PROPRIETARY_KHR = VK_DRIVER_ID_ARM_PROPRIETARY,
+    VK_DRIVER_ID_GOOGLE_SWIFTSHADER_KHR = VK_DRIVER_ID_GOOGLE_SWIFTSHADER,
+    VK_DRIVER_ID_GGP_PROPRIETARY_KHR = VK_DRIVER_ID_GGP_PROPRIETARY,
+    VK_DRIVER_ID_BROADCOM_PROPRIETARY_KHR = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
+    VK_DRIVER_ID_MAX_ENUM = 0x7FFFFFFF
+} VkDriverId;
+
+typedef enum VkShaderFloatControlsIndependence {
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY = 0,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL = 1,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE = 2,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE_KHR = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE,
+    VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_MAX_ENUM = 0x7FFFFFFF
+} VkShaderFloatControlsIndependence;
+
+typedef enum VkSamplerReductionMode {
+    VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE = 0,
+    VK_SAMPLER_REDUCTION_MODE_MIN = 1,
+    VK_SAMPLER_REDUCTION_MODE_MAX = 2,
+    VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE_EXT = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
+    VK_SAMPLER_REDUCTION_MODE_MIN_EXT = VK_SAMPLER_REDUCTION_MODE_MIN,
+    VK_SAMPLER_REDUCTION_MODE_MAX_EXT = VK_SAMPLER_REDUCTION_MODE_MAX,
+    VK_SAMPLER_REDUCTION_MODE_MAX_ENUM = 0x7FFFFFFF
+} VkSamplerReductionMode;
+
+typedef enum VkSemaphoreType {
+    VK_SEMAPHORE_TYPE_BINARY = 0,
+    VK_SEMAPHORE_TYPE_TIMELINE = 1,
+    VK_SEMAPHORE_TYPE_BINARY_KHR = VK_SEMAPHORE_TYPE_BINARY,
+    VK_SEMAPHORE_TYPE_TIMELINE_KHR = VK_SEMAPHORE_TYPE_TIMELINE,
+    VK_SEMAPHORE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} VkSemaphoreType;
+
+typedef enum VkResolveModeFlagBits {
+    VK_RESOLVE_MODE_NONE = 0,
+    VK_RESOLVE_MODE_SAMPLE_ZERO_BIT = 0x00000001,
+    VK_RESOLVE_MODE_AVERAGE_BIT = 0x00000002,
+    VK_RESOLVE_MODE_MIN_BIT = 0x00000004,
+    VK_RESOLVE_MODE_MAX_BIT = 0x00000008,
+    VK_RESOLVE_MODE_NONE_KHR = VK_RESOLVE_MODE_NONE,
+    VK_RESOLVE_MODE_SAMPLE_ZERO_BIT_KHR = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
+    VK_RESOLVE_MODE_AVERAGE_BIT_KHR = VK_RESOLVE_MODE_AVERAGE_BIT,
+    VK_RESOLVE_MODE_MIN_BIT_KHR = VK_RESOLVE_MODE_MIN_BIT,
+    VK_RESOLVE_MODE_MAX_BIT_KHR = VK_RESOLVE_MODE_MAX_BIT,
+    VK_RESOLVE_MODE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkResolveModeFlagBits;
+typedef VkFlags VkResolveModeFlags;
+
+typedef enum VkDescriptorBindingFlagBits {
+    VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT = 0x00000001,
+    VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT = 0x00000002,
+    VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT = 0x00000004,
+    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT = 0x00000008,
+    VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
+    VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT_EXT = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT,
+    VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT_EXT = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT,
+    VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT_EXT = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT,
+    VK_DESCRIPTOR_BINDING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkDescriptorBindingFlagBits;
+typedef VkFlags VkDescriptorBindingFlags;
+
+typedef enum VkSemaphoreWaitFlagBits {
+    VK_SEMAPHORE_WAIT_ANY_BIT = 0x00000001,
+    VK_SEMAPHORE_WAIT_ANY_BIT_KHR = VK_SEMAPHORE_WAIT_ANY_BIT,
+    VK_SEMAPHORE_WAIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSemaphoreWaitFlagBits;
+typedef VkFlags VkSemaphoreWaitFlags;
+typedef struct VkPhysicalDeviceVulkan11Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           storageBuffer16BitAccess;
+    VkBool32           uniformAndStorageBuffer16BitAccess;
+    VkBool32           storagePushConstant16;
+    VkBool32           storageInputOutput16;
+    VkBool32           multiview;
+    VkBool32           multiviewGeometryShader;
+    VkBool32           multiviewTessellationShader;
+    VkBool32           variablePointersStorageBuffer;
+    VkBool32           variablePointers;
+    VkBool32           protectedMemory;
+    VkBool32           samplerYcbcrConversion;
+    VkBool32           shaderDrawParameters;
+} VkPhysicalDeviceVulkan11Features;
+
+typedef struct VkPhysicalDeviceVulkan11Properties {
+    VkStructureType            sType;
+    void*                      pNext;
+    uint8_t                    deviceUUID[VK_UUID_SIZE];
+    uint8_t                    driverUUID[VK_UUID_SIZE];
+    uint8_t                    deviceLUID[VK_LUID_SIZE];
+    uint32_t                   deviceNodeMask;
+    VkBool32                   deviceLUIDValid;
+    uint32_t                   subgroupSize;
+    VkShaderStageFlags         subgroupSupportedStages;
+    VkSubgroupFeatureFlags     subgroupSupportedOperations;
+    VkBool32                   subgroupQuadOperationsInAllStages;
+    VkPointClippingBehavior    pointClippingBehavior;
+    uint32_t                   maxMultiviewViewCount;
+    uint32_t                   maxMultiviewInstanceIndex;
+    VkBool32                   protectedNoFault;
+    uint32_t                   maxPerSetDescriptors;
+    VkDeviceSize               maxMemoryAllocationSize;
+} VkPhysicalDeviceVulkan11Properties;
+
+typedef struct VkPhysicalDeviceVulkan12Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           samplerMirrorClampToEdge;
+    VkBool32           drawIndirectCount;
+    VkBool32           storageBuffer8BitAccess;
+    VkBool32           uniformAndStorageBuffer8BitAccess;
+    VkBool32           storagePushConstant8;
+    VkBool32           shaderBufferInt64Atomics;
+    VkBool32           shaderSharedInt64Atomics;
+    VkBool32           shaderFloat16;
+    VkBool32           shaderInt8;
+    VkBool32           descriptorIndexing;
+    VkBool32           shaderInputAttachmentArrayDynamicIndexing;
+    VkBool32           shaderUniformTexelBufferArrayDynamicIndexing;
+    VkBool32           shaderStorageTexelBufferArrayDynamicIndexing;
+    VkBool32           shaderUniformBufferArrayNonUniformIndexing;
+    VkBool32           shaderSampledImageArrayNonUniformIndexing;
+    VkBool32           shaderStorageBufferArrayNonUniformIndexing;
+    VkBool32           shaderStorageImageArrayNonUniformIndexing;
+    VkBool32           shaderInputAttachmentArrayNonUniformIndexing;
+    VkBool32           shaderUniformTexelBufferArrayNonUniformIndexing;
+    VkBool32           shaderStorageTexelBufferArrayNonUniformIndexing;
+    VkBool32           descriptorBindingUniformBufferUpdateAfterBind;
+    VkBool32           descriptorBindingSampledImageUpdateAfterBind;
+    VkBool32           descriptorBindingStorageImageUpdateAfterBind;
+    VkBool32           descriptorBindingStorageBufferUpdateAfterBind;
+    VkBool32           descriptorBindingUniformTexelBufferUpdateAfterBind;
+    VkBool32           descriptorBindingStorageTexelBufferUpdateAfterBind;
+    VkBool32           descriptorBindingUpdateUnusedWhilePending;
+    VkBool32           descriptorBindingPartiallyBound;
+    VkBool32           descriptorBindingVariableDescriptorCount;
+    VkBool32           runtimeDescriptorArray;
+    VkBool32           samplerFilterMinmax;
+    VkBool32           scalarBlockLayout;
+    VkBool32           imagelessFramebuffer;
+    VkBool32           uniformBufferStandardLayout;
+    VkBool32           shaderSubgroupExtendedTypes;
+    VkBool32           separateDepthStencilLayouts;
+    VkBool32           hostQueryReset;
+    VkBool32           timelineSemaphore;
+    VkBool32           bufferDeviceAddress;
+    VkBool32           bufferDeviceAddressCaptureReplay;
+    VkBool32           bufferDeviceAddressMultiDevice;
+    VkBool32           vulkanMemoryModel;
+    VkBool32           vulkanMemoryModelDeviceScope;
+    VkBool32           vulkanMemoryModelAvailabilityVisibilityChains;
+    VkBool32           shaderOutputViewportIndex;
+    VkBool32           shaderOutputLayer;
+    VkBool32           subgroupBroadcastDynamicId;
+} VkPhysicalDeviceVulkan12Features;
+
+typedef struct VkConformanceVersion {
+    uint8_t    major;
+    uint8_t    minor;
+    uint8_t    subminor;
+    uint8_t    patch;
+} VkConformanceVersion;
+
+typedef struct VkPhysicalDeviceVulkan12Properties {
+    VkStructureType                      sType;
+    void*                                pNext;
+    VkDriverId                           driverID;
+    char                                 driverName[VK_MAX_DRIVER_NAME_SIZE];
+    char                                 driverInfo[VK_MAX_DRIVER_INFO_SIZE];
+    VkConformanceVersion                 conformanceVersion;
+    VkShaderFloatControlsIndependence    denormBehaviorIndependence;
+    VkShaderFloatControlsIndependence    roundingModeIndependence;
+    VkBool32                             shaderSignedZeroInfNanPreserveFloat16;
+    VkBool32                             shaderSignedZeroInfNanPreserveFloat32;
+    VkBool32                             shaderSignedZeroInfNanPreserveFloat64;
+    VkBool32                             shaderDenormPreserveFloat16;
+    VkBool32                             shaderDenormPreserveFloat32;
+    VkBool32                             shaderDenormPreserveFloat64;
+    VkBool32                             shaderDenormFlushToZeroFloat16;
+    VkBool32                             shaderDenormFlushToZeroFloat32;
+    VkBool32                             shaderDenormFlushToZeroFloat64;
+    VkBool32                             shaderRoundingModeRTEFloat16;
+    VkBool32                             shaderRoundingModeRTEFloat32;
+    VkBool32                             shaderRoundingModeRTEFloat64;
+    VkBool32                             shaderRoundingModeRTZFloat16;
+    VkBool32                             shaderRoundingModeRTZFloat32;
+    VkBool32                             shaderRoundingModeRTZFloat64;
+    uint32_t                             maxUpdateAfterBindDescriptorsInAllPools;
+    VkBool32                             shaderUniformBufferArrayNonUniformIndexingNative;
+    VkBool32                             shaderSampledImageArrayNonUniformIndexingNative;
+    VkBool32                             shaderStorageBufferArrayNonUniformIndexingNative;
+    VkBool32                             shaderStorageImageArrayNonUniformIndexingNative;
+    VkBool32                             shaderInputAttachmentArrayNonUniformIndexingNative;
+    VkBool32                             robustBufferAccessUpdateAfterBind;
+    VkBool32                             quadDivergentImplicitLod;
+    uint32_t                             maxPerStageDescriptorUpdateAfterBindSamplers;
+    uint32_t                             maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+    uint32_t                             maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+    uint32_t                             maxPerStageDescriptorUpdateAfterBindSampledImages;
+    uint32_t                             maxPerStageDescriptorUpdateAfterBindStorageImages;
+    uint32_t                             maxPerStageDescriptorUpdateAfterBindInputAttachments;
+    uint32_t                             maxPerStageUpdateAfterBindResources;
+    uint32_t                             maxDescriptorSetUpdateAfterBindSamplers;
+    uint32_t                             maxDescriptorSetUpdateAfterBindUniformBuffers;
+    uint32_t                             maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+    uint32_t                             maxDescriptorSetUpdateAfterBindStorageBuffers;
+    uint32_t                             maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+    uint32_t                             maxDescriptorSetUpdateAfterBindSampledImages;
+    uint32_t                             maxDescriptorSetUpdateAfterBindStorageImages;
+    uint32_t                             maxDescriptorSetUpdateAfterBindInputAttachments;
+    VkResolveModeFlags                   supportedDepthResolveModes;
+    VkResolveModeFlags                   supportedStencilResolveModes;
+    VkBool32                             independentResolveNone;
+    VkBool32                             independentResolve;
+    VkBool32                             filterMinmaxSingleComponentFormats;
+    VkBool32                             filterMinmaxImageComponentMapping;
+    uint64_t                             maxTimelineSemaphoreValueDifference;
+    VkSampleCountFlags                   framebufferIntegerColorSampleCounts;
+} VkPhysicalDeviceVulkan12Properties;
+
+typedef struct VkImageFormatListCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           viewFormatCount;
+    const VkFormat*    pViewFormats;
+} VkImageFormatListCreateInfo;
+
+typedef struct VkAttachmentDescription2 {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkAttachmentDescriptionFlags    flags;
+    VkFormat                        format;
+    VkSampleCountFlagBits           samples;
+    VkAttachmentLoadOp              loadOp;
+    VkAttachmentStoreOp             storeOp;
+    VkAttachmentLoadOp              stencilLoadOp;
+    VkAttachmentStoreOp             stencilStoreOp;
+    VkImageLayout                   initialLayout;
+    VkImageLayout                   finalLayout;
+} VkAttachmentDescription2;
+
+typedef struct VkAttachmentReference2 {
+    VkStructureType       sType;
+    const void*           pNext;
+    uint32_t              attachment;
+    VkImageLayout         layout;
+    VkImageAspectFlags    aspectMask;
+} VkAttachmentReference2;
+
+typedef struct VkSubpassDescription2 {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkSubpassDescriptionFlags        flags;
+    VkPipelineBindPoint              pipelineBindPoint;
+    uint32_t                         viewMask;
+    uint32_t                         inputAttachmentCount;
+    const VkAttachmentReference2*    pInputAttachments;
+    uint32_t                         colorAttachmentCount;
+    const VkAttachmentReference2*    pColorAttachments;
+    const VkAttachmentReference2*    pResolveAttachments;
+    const VkAttachmentReference2*    pDepthStencilAttachment;
+    uint32_t                         preserveAttachmentCount;
+    const uint32_t*                  pPreserveAttachments;
+} VkSubpassDescription2;
+
+typedef struct VkSubpassDependency2 {
+    VkStructureType         sType;
+    const void*             pNext;
+    uint32_t                srcSubpass;
+    uint32_t                dstSubpass;
+    VkPipelineStageFlags    srcStageMask;
+    VkPipelineStageFlags    dstStageMask;
+    VkAccessFlags           srcAccessMask;
+    VkAccessFlags           dstAccessMask;
+    VkDependencyFlags       dependencyFlags;
+    int32_t                 viewOffset;
+} VkSubpassDependency2;
+
+typedef struct VkRenderPassCreateInfo2 {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkRenderPassCreateFlags            flags;
+    uint32_t                           attachmentCount;
+    const VkAttachmentDescription2*    pAttachments;
+    uint32_t                           subpassCount;
+    const VkSubpassDescription2*       pSubpasses;
+    uint32_t                           dependencyCount;
+    const VkSubpassDependency2*        pDependencies;
+    uint32_t                           correlatedViewMaskCount;
+    const uint32_t*                    pCorrelatedViewMasks;
+} VkRenderPassCreateInfo2;
+
+typedef struct VkSubpassBeginInfo {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkSubpassContents    contents;
+} VkSubpassBeginInfo;
+
+typedef struct VkSubpassEndInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+} VkSubpassEndInfo;
+
+typedef struct VkPhysicalDevice8BitStorageFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           storageBuffer8BitAccess;
+    VkBool32           uniformAndStorageBuffer8BitAccess;
+    VkBool32           storagePushConstant8;
+} VkPhysicalDevice8BitStorageFeatures;
+
+typedef struct VkPhysicalDeviceDriverProperties {
+    VkStructureType         sType;
+    void*                   pNext;
+    VkDriverId              driverID;
+    char                    driverName[VK_MAX_DRIVER_NAME_SIZE];
+    char                    driverInfo[VK_MAX_DRIVER_INFO_SIZE];
+    VkConformanceVersion    conformanceVersion;
+} VkPhysicalDeviceDriverProperties;
+
+typedef struct VkPhysicalDeviceShaderAtomicInt64Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderBufferInt64Atomics;
+    VkBool32           shaderSharedInt64Atomics;
+} VkPhysicalDeviceShaderAtomicInt64Features;
+
+typedef struct VkPhysicalDeviceShaderFloat16Int8Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderFloat16;
+    VkBool32           shaderInt8;
+} VkPhysicalDeviceShaderFloat16Int8Features;
+
+typedef struct VkPhysicalDeviceFloatControlsProperties {
+    VkStructureType                      sType;
+    void*                                pNext;
+    VkShaderFloatControlsIndependence    denormBehaviorIndependence;
+    VkShaderFloatControlsIndependence    roundingModeIndependence;
+    VkBool32                             shaderSignedZeroInfNanPreserveFloat16;
+    VkBool32                             shaderSignedZeroInfNanPreserveFloat32;
+    VkBool32                             shaderSignedZeroInfNanPreserveFloat64;
+    VkBool32                             shaderDenormPreserveFloat16;
+    VkBool32                             shaderDenormPreserveFloat32;
+    VkBool32                             shaderDenormPreserveFloat64;
+    VkBool32                             shaderDenormFlushToZeroFloat16;
+    VkBool32                             shaderDenormFlushToZeroFloat32;
+    VkBool32                             shaderDenormFlushToZeroFloat64;
+    VkBool32                             shaderRoundingModeRTEFloat16;
+    VkBool32                             shaderRoundingModeRTEFloat32;
+    VkBool32                             shaderRoundingModeRTEFloat64;
+    VkBool32                             shaderRoundingModeRTZFloat16;
+    VkBool32                             shaderRoundingModeRTZFloat32;
+    VkBool32                             shaderRoundingModeRTZFloat64;
+} VkPhysicalDeviceFloatControlsProperties;
+
+typedef struct VkDescriptorSetLayoutBindingFlagsCreateInfo {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    uint32_t                           bindingCount;
+    const VkDescriptorBindingFlags*    pBindingFlags;
+} VkDescriptorSetLayoutBindingFlagsCreateInfo;
+
+typedef struct VkPhysicalDeviceDescriptorIndexingFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderInputAttachmentArrayDynamicIndexing;
+    VkBool32           shaderUniformTexelBufferArrayDynamicIndexing;
+    VkBool32           shaderStorageTexelBufferArrayDynamicIndexing;
+    VkBool32           shaderUniformBufferArrayNonUniformIndexing;
+    VkBool32           shaderSampledImageArrayNonUniformIndexing;
+    VkBool32           shaderStorageBufferArrayNonUniformIndexing;
+    VkBool32           shaderStorageImageArrayNonUniformIndexing;
+    VkBool32           shaderInputAttachmentArrayNonUniformIndexing;
+    VkBool32           shaderUniformTexelBufferArrayNonUniformIndexing;
+    VkBool32           shaderStorageTexelBufferArrayNonUniformIndexing;
+    VkBool32           descriptorBindingUniformBufferUpdateAfterBind;
+    VkBool32           descriptorBindingSampledImageUpdateAfterBind;
+    VkBool32           descriptorBindingStorageImageUpdateAfterBind;
+    VkBool32           descriptorBindingStorageBufferUpdateAfterBind;
+    VkBool32           descriptorBindingUniformTexelBufferUpdateAfterBind;
+    VkBool32           descriptorBindingStorageTexelBufferUpdateAfterBind;
+    VkBool32           descriptorBindingUpdateUnusedWhilePending;
+    VkBool32           descriptorBindingPartiallyBound;
+    VkBool32           descriptorBindingVariableDescriptorCount;
+    VkBool32           runtimeDescriptorArray;
+} VkPhysicalDeviceDescriptorIndexingFeatures;
+
+typedef struct VkPhysicalDeviceDescriptorIndexingProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxUpdateAfterBindDescriptorsInAllPools;
+    VkBool32           shaderUniformBufferArrayNonUniformIndexingNative;
+    VkBool32           shaderSampledImageArrayNonUniformIndexingNative;
+    VkBool32           shaderStorageBufferArrayNonUniformIndexingNative;
+    VkBool32           shaderStorageImageArrayNonUniformIndexingNative;
+    VkBool32           shaderInputAttachmentArrayNonUniformIndexingNative;
+    VkBool32           robustBufferAccessUpdateAfterBind;
+    VkBool32           quadDivergentImplicitLod;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindSamplers;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindUniformBuffers;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindStorageBuffers;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindSampledImages;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindStorageImages;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindInputAttachments;
+    uint32_t           maxPerStageUpdateAfterBindResources;
+    uint32_t           maxDescriptorSetUpdateAfterBindSamplers;
+    uint32_t           maxDescriptorSetUpdateAfterBindUniformBuffers;
+    uint32_t           maxDescriptorSetUpdateAfterBindUniformBuffersDynamic;
+    uint32_t           maxDescriptorSetUpdateAfterBindStorageBuffers;
+    uint32_t           maxDescriptorSetUpdateAfterBindStorageBuffersDynamic;
+    uint32_t           maxDescriptorSetUpdateAfterBindSampledImages;
+    uint32_t           maxDescriptorSetUpdateAfterBindStorageImages;
+    uint32_t           maxDescriptorSetUpdateAfterBindInputAttachments;
+} VkPhysicalDeviceDescriptorIndexingProperties;
+
+typedef struct VkDescriptorSetVariableDescriptorCountAllocateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           descriptorSetCount;
+    const uint32_t*    pDescriptorCounts;
+} VkDescriptorSetVariableDescriptorCountAllocateInfo;
+
+typedef struct VkDescriptorSetVariableDescriptorCountLayoutSupport {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxVariableDescriptorCount;
+} VkDescriptorSetVariableDescriptorCountLayoutSupport;
+
+typedef struct VkSubpassDescriptionDepthStencilResolve {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkResolveModeFlagBits            depthResolveMode;
+    VkResolveModeFlagBits            stencilResolveMode;
+    const VkAttachmentReference2*    pDepthStencilResolveAttachment;
+} VkSubpassDescriptionDepthStencilResolve;
+
+typedef struct VkPhysicalDeviceDepthStencilResolveProperties {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkResolveModeFlags    supportedDepthResolveModes;
+    VkResolveModeFlags    supportedStencilResolveModes;
+    VkBool32              independentResolveNone;
+    VkBool32              independentResolve;
+} VkPhysicalDeviceDepthStencilResolveProperties;
+
+typedef struct VkPhysicalDeviceScalarBlockLayoutFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           scalarBlockLayout;
+} VkPhysicalDeviceScalarBlockLayoutFeatures;
+
+typedef struct VkImageStencilUsageCreateInfo {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkImageUsageFlags    stencilUsage;
+} VkImageStencilUsageCreateInfo;
+
+typedef struct VkSamplerReductionModeCreateInfo {
+    VkStructureType           sType;
+    const void*               pNext;
+    VkSamplerReductionMode    reductionMode;
+} VkSamplerReductionModeCreateInfo;
+
+typedef struct VkPhysicalDeviceSamplerFilterMinmaxProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           filterMinmaxSingleComponentFormats;
+    VkBool32           filterMinmaxImageComponentMapping;
+} VkPhysicalDeviceSamplerFilterMinmaxProperties;
+
+typedef struct VkPhysicalDeviceVulkanMemoryModelFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           vulkanMemoryModel;
+    VkBool32           vulkanMemoryModelDeviceScope;
+    VkBool32           vulkanMemoryModelAvailabilityVisibilityChains;
+} VkPhysicalDeviceVulkanMemoryModelFeatures;
+
+typedef struct VkPhysicalDeviceImagelessFramebufferFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           imagelessFramebuffer;
+} VkPhysicalDeviceImagelessFramebufferFeatures;
+
+typedef struct VkFramebufferAttachmentImageInfo {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkImageCreateFlags    flags;
+    VkImageUsageFlags     usage;
+    uint32_t              width;
+    uint32_t              height;
+    uint32_t              layerCount;
+    uint32_t              viewFormatCount;
+    const VkFormat*       pViewFormats;
+} VkFramebufferAttachmentImageInfo;
+
+typedef struct VkFramebufferAttachmentsCreateInfo {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   attachmentImageInfoCount;
+    const VkFramebufferAttachmentImageInfo*    pAttachmentImageInfos;
+} VkFramebufferAttachmentsCreateInfo;
+
+typedef struct VkRenderPassAttachmentBeginInfo {
+    VkStructureType       sType;
+    const void*           pNext;
+    uint32_t              attachmentCount;
+    const VkImageView*    pAttachments;
+} VkRenderPassAttachmentBeginInfo;
+
+typedef struct VkPhysicalDeviceUniformBufferStandardLayoutFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           uniformBufferStandardLayout;
+} VkPhysicalDeviceUniformBufferStandardLayoutFeatures;
+
+typedef struct VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderSubgroupExtendedTypes;
+} VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+
+typedef struct VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           separateDepthStencilLayouts;
+} VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+
+typedef struct VkAttachmentReferenceStencilLayout {
+    VkStructureType    sType;
+    void*              pNext;
+    VkImageLayout      stencilLayout;
+} VkAttachmentReferenceStencilLayout;
+
+typedef struct VkAttachmentDescriptionStencilLayout {
+    VkStructureType    sType;
+    void*              pNext;
+    VkImageLayout      stencilInitialLayout;
+    VkImageLayout      stencilFinalLayout;
+} VkAttachmentDescriptionStencilLayout;
+
+typedef struct VkPhysicalDeviceHostQueryResetFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           hostQueryReset;
+} VkPhysicalDeviceHostQueryResetFeatures;
+
+typedef struct VkPhysicalDeviceTimelineSemaphoreFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           timelineSemaphore;
+} VkPhysicalDeviceTimelineSemaphoreFeatures;
+
+typedef struct VkPhysicalDeviceTimelineSemaphoreProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           maxTimelineSemaphoreValueDifference;
+} VkPhysicalDeviceTimelineSemaphoreProperties;
+
+typedef struct VkSemaphoreTypeCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSemaphoreType    semaphoreType;
+    uint64_t           initialValue;
+} VkSemaphoreTypeCreateInfo;
+
+typedef struct VkTimelineSemaphoreSubmitInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           waitSemaphoreValueCount;
+    const uint64_t*    pWaitSemaphoreValues;
+    uint32_t           signalSemaphoreValueCount;
+    const uint64_t*    pSignalSemaphoreValues;
+} VkTimelineSemaphoreSubmitInfo;
+
+typedef struct VkSemaphoreWaitInfo {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkSemaphoreWaitFlags    flags;
+    uint32_t                semaphoreCount;
+    const VkSemaphore*      pSemaphores;
+    const uint64_t*         pValues;
+} VkSemaphoreWaitInfo;
+
+typedef struct VkSemaphoreSignalInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSemaphore        semaphore;
+    uint64_t           value;
+} VkSemaphoreSignalInfo;
+
+typedef struct VkPhysicalDeviceBufferDeviceAddressFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           bufferDeviceAddress;
+    VkBool32           bufferDeviceAddressCaptureReplay;
+    VkBool32           bufferDeviceAddressMultiDevice;
+} VkPhysicalDeviceBufferDeviceAddressFeatures;
+
+typedef struct VkBufferDeviceAddressInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+} VkBufferDeviceAddressInfo;
+
+typedef struct VkBufferOpaqueCaptureAddressCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           opaqueCaptureAddress;
+} VkBufferOpaqueCaptureAddressCreateInfo;
+
+typedef struct VkMemoryOpaqueCaptureAddressAllocateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           opaqueCaptureAddress;
+} VkMemoryOpaqueCaptureAddressAllocateInfo;
+
+typedef struct VkDeviceMemoryOpaqueCaptureAddressInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceMemory     memory;
+} VkDeviceMemoryOpaqueCaptureAddressInfo;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCount)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo*      pRenderPassBegin, const VkSubpassBeginInfo*      pSubpassBeginInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo*      pSubpassBeginInfo, const VkSubpassEndInfo*        pSubpassEndInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo*        pSubpassEndInfo);
+typedef void (VKAPI_PTR *PFN_vkResetQueryPool)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValue)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue);
+typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphores)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout);
+typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphore)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo);
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddress)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddress)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCount(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCount(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfo*                   pSubpassBeginInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfo*                   pSubpassBeginInfo,
+    const VkSubpassEndInfo*                     pSubpassEndInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfo*                     pSubpassEndInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkResetQueryPool(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValue(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphores(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfo*                  pWaitInfo,
+    uint64_t                                    timeout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphore(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfo*                pSignalInfo);
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddress(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfo*            pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddress(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfo*            pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddress(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo);
+#endif
+
+
+#define VK_VERSION_1_3 1
+// Vulkan 1.3 version number
+#define VK_API_VERSION_1_3 VK_MAKE_API_VERSION(0, 1, 3, 0)// Patch version should always be set to 0
+
+typedef uint64_t VkFlags64;
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPrivateDataSlot)
+
+typedef enum VkPipelineCreationFeedbackFlagBits {
+    VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT = 0x00000001,
+    VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT = 0x00000002,
+    VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT = 0x00000004,
+    VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT,
+    VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT,
+    VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT_EXT = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT,
+    VK_PIPELINE_CREATION_FEEDBACK_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkPipelineCreationFeedbackFlagBits;
+typedef VkFlags VkPipelineCreationFeedbackFlags;
+
+typedef enum VkToolPurposeFlagBits {
+    VK_TOOL_PURPOSE_VALIDATION_BIT = 0x00000001,
+    VK_TOOL_PURPOSE_PROFILING_BIT = 0x00000002,
+    VK_TOOL_PURPOSE_TRACING_BIT = 0x00000004,
+    VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT = 0x00000008,
+    VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT = 0x00000010,
+    VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT = 0x00000020,
+    VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT = 0x00000040,
+    VK_TOOL_PURPOSE_VALIDATION_BIT_EXT = VK_TOOL_PURPOSE_VALIDATION_BIT,
+    VK_TOOL_PURPOSE_PROFILING_BIT_EXT = VK_TOOL_PURPOSE_PROFILING_BIT,
+    VK_TOOL_PURPOSE_TRACING_BIT_EXT = VK_TOOL_PURPOSE_TRACING_BIT,
+    VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT_EXT = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT,
+    VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT_EXT = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT,
+    VK_TOOL_PURPOSE_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkToolPurposeFlagBits;
+typedef VkFlags VkToolPurposeFlags;
+typedef VkFlags VkPrivateDataSlotCreateFlags;
+typedef VkFlags64 VkPipelineStageFlags2;
+
+// Flag bits for VkPipelineStageFlagBits2
+typedef VkFlags64 VkPipelineStageFlagBits2;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE = 0ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_NONE_KHR = 0ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT = 0x00000001ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT_KHR = 0x00000001ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT = 0x00000002ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT_KHR = 0x00000002ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT = 0x00000004ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT_KHR = 0x00000004ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT = 0x00000008ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT_KHR = 0x00000008ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT = 0x00000010ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT_KHR = 0x00000010ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT = 0x00000020ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT_KHR = 0x00000020ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT = 0x00000040ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT_KHR = 0x00000040ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT = 0x00000080ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT_KHR = 0x00000080ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT = 0x00000100ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT_KHR = 0x00000100ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT = 0x00000200ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT_KHR = 0x00000200ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT = 0x00000400ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT_KHR = 0x00000400ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT = 0x00000800ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT_KHR = 0x00000800ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT = 0x00001000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT_KHR = 0x00001000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT = 0x00001000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR = 0x00001000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT = 0x00002000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT_KHR = 0x00002000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT = 0x00004000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_HOST_BIT_KHR = 0x00004000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT = 0x00008000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT_KHR = 0x00008000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT = 0x00010000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT_KHR = 0x00010000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT = 0x100000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COPY_BIT_KHR = 0x100000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT = 0x200000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RESOLVE_BIT_KHR = 0x200000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT = 0x400000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_BLIT_BIT_KHR = 0x400000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT = 0x800000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CLEAR_BIT_KHR = 0x800000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT = 0x1000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT_KHR = 0x1000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT = 0x2000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT_KHR = 0x2000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT = 0x4000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT_KHR = 0x4000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR = 0x04000000ULL;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR = 0x08000000ULL;
+#endif
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT = 0x01000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT = 0x00040000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV = 0x00020000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x00400000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV = 0x00400000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR = 0x02000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR = 0x00200000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV = 0x00200000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV = 0x02000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT = 0x00800000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV = 0x00080000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV = 0x00100000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT = 0x00080000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT = 0x00100000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI = 0x8000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI = 0x10000000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR = 0x10000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT = 0x40000000ULL;
+static const VkPipelineStageFlagBits2 VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV = 0x20000000ULL;
+
+typedef VkFlags64 VkAccessFlags2;
+
+// Flag bits for VkAccessFlagBits2
+typedef VkFlags64 VkAccessFlagBits2;
+static const VkAccessFlagBits2 VK_ACCESS_2_NONE = 0ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_NONE_KHR = 0ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT = 0x00000001ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT_KHR = 0x00000001ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT = 0x00000002ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INDEX_READ_BIT_KHR = 0x00000002ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT = 0x00000004ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT_KHR = 0x00000004ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT = 0x00000008ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_UNIFORM_READ_BIT_KHR = 0x00000008ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT = 0x00000010ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT_KHR = 0x00000010ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT = 0x00000020ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_READ_BIT_KHR = 0x00000020ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT = 0x00000040ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_WRITE_BIT_KHR = 0x00000040ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT = 0x00000080ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT_KHR = 0x00000080ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT = 0x00000100ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT_KHR = 0x00000100ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT = 0x00000200ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT_KHR = 0x00000200ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT = 0x00000400ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT_KHR = 0x00000400ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT = 0x00000800ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_READ_BIT_KHR = 0x00000800ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT = 0x00001000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR = 0x00001000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT = 0x00002000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_HOST_READ_BIT_KHR = 0x00002000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT = 0x00004000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_HOST_WRITE_BIT_KHR = 0x00004000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT = 0x00008000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_READ_BIT_KHR = 0x00008000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT = 0x00010000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MEMORY_WRITE_BIT_KHR = 0x00010000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT = 0x100000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT_KHR = 0x100000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT = 0x200000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_READ_BIT_KHR = 0x200000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT = 0x400000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT_KHR = 0x400000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR = 0x800000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR = 0x1000000000ULL;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR = 0x2000000000ULL;
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkAccessFlagBits2 VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR = 0x4000000000ULL;
+#endif
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT = 0x02000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT = 0x04000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT = 0x08000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT = 0x00100000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV = 0x00020000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV = 0x00040000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR = 0x00800000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV = 0x00800000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR = 0x00200000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR = 0x00400000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV = 0x00200000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV = 0x00400000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT = 0x01000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT = 0x00080000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT = 0x20000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI = 0x8000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR = 0x10000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_READ_BIT_EXT = 0x100000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT = 0x200000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV = 0x40000000000ULL;
+static const VkAccessFlagBits2 VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV = 0x80000000000ULL;
+
+
+typedef enum VkSubmitFlagBits {
+    VK_SUBMIT_PROTECTED_BIT = 0x00000001,
+    VK_SUBMIT_PROTECTED_BIT_KHR = VK_SUBMIT_PROTECTED_BIT,
+    VK_SUBMIT_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkSubmitFlagBits;
+typedef VkFlags VkSubmitFlags;
+
+typedef enum VkRenderingFlagBits {
+    VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT = 0x00000001,
+    VK_RENDERING_SUSPENDING_BIT = 0x00000002,
+    VK_RENDERING_RESUMING_BIT = 0x00000004,
+    VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT = 0x00000008,
+    VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT_KHR = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT,
+    VK_RENDERING_SUSPENDING_BIT_KHR = VK_RENDERING_SUSPENDING_BIT,
+    VK_RENDERING_RESUMING_BIT_KHR = VK_RENDERING_RESUMING_BIT,
+    VK_RENDERING_FLAG_BITS_MAX_ENUM = 0x7FFFFFFF
+} VkRenderingFlagBits;
+typedef VkFlags VkRenderingFlags;
+typedef VkFlags64 VkFormatFeatureFlags2;
+
+// Flag bits for VkFormatFeatureFlagBits2
+typedef VkFlags64 VkFormatFeatureFlagBits2;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT = 0x00000001ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT_KHR = 0x00000001ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT = 0x00000002ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT_KHR = 0x00000002ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT = 0x00000004ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT_KHR = 0x00000004ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT = 0x00000008ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT_KHR = 0x00000008ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT = 0x00000010ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT_KHR = 0x00000010ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT = 0x00000020ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT_KHR = 0x00000020ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT = 0x00000040ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT_KHR = 0x00000040ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT = 0x00000080ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT_KHR = 0x00000080ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT = 0x00000100ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT_KHR = 0x00000100ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT = 0x00000200ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT_KHR = 0x00000200ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT = 0x00000400ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_SRC_BIT_KHR = 0x00000400ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT = 0x00000800ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLIT_DST_BIT_KHR = 0x00000800ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT = 0x00001000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT_KHR = 0x00001000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT = 0x00002000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT = 0x00002000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT = 0x00004000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT_KHR = 0x00004000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT = 0x00008000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT_KHR = 0x00008000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT = 0x00010000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT_KHR = 0x00010000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT = 0x00020000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR = 0x00020000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT = 0x00040000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR = 0x00040000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT = 0x00080000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR = 0x00080000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT = 0x00100000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR = 0x00100000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT = 0x00200000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR = 0x00200000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT = 0x00400000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_DISJOINT_BIT_KHR = 0x00400000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT = 0x00800000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT_KHR = 0x00800000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT = 0x80000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT_KHR = 0x80000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT = 0x100000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT_KHR = 0x100000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT = 0x200000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT_KHR = 0x200000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR = 0x02000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR = 0x04000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR = 0x20000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT = 0x01000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR = 0x40000000ULL;
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR = 0x08000000ULL;
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR = 0x10000000ULL;
+#endif
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV = 0x4000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM = 0x400000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM = 0x800000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM = 0x1000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM = 0x2000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV = 0x10000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV = 0x20000000000ULL;
+static const VkFormatFeatureFlagBits2 VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV = 0x40000000000ULL;
+
+typedef struct VkPhysicalDeviceVulkan13Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           robustImageAccess;
+    VkBool32           inlineUniformBlock;
+    VkBool32           descriptorBindingInlineUniformBlockUpdateAfterBind;
+    VkBool32           pipelineCreationCacheControl;
+    VkBool32           privateData;
+    VkBool32           shaderDemoteToHelperInvocation;
+    VkBool32           shaderTerminateInvocation;
+    VkBool32           subgroupSizeControl;
+    VkBool32           computeFullSubgroups;
+    VkBool32           synchronization2;
+    VkBool32           textureCompressionASTC_HDR;
+    VkBool32           shaderZeroInitializeWorkgroupMemory;
+    VkBool32           dynamicRendering;
+    VkBool32           shaderIntegerDotProduct;
+    VkBool32           maintenance4;
+} VkPhysicalDeviceVulkan13Features;
+
+typedef struct VkPhysicalDeviceVulkan13Properties {
+    VkStructureType       sType;
+    void*                 pNext;
+    uint32_t              minSubgroupSize;
+    uint32_t              maxSubgroupSize;
+    uint32_t              maxComputeWorkgroupSubgroups;
+    VkShaderStageFlags    requiredSubgroupSizeStages;
+    uint32_t              maxInlineUniformBlockSize;
+    uint32_t              maxPerStageDescriptorInlineUniformBlocks;
+    uint32_t              maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
+    uint32_t              maxDescriptorSetInlineUniformBlocks;
+    uint32_t              maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
+    uint32_t              maxInlineUniformTotalSize;
+    VkBool32              integerDotProduct8BitUnsignedAccelerated;
+    VkBool32              integerDotProduct8BitSignedAccelerated;
+    VkBool32              integerDotProduct8BitMixedSignednessAccelerated;
+    VkBool32              integerDotProduct4x8BitPackedUnsignedAccelerated;
+    VkBool32              integerDotProduct4x8BitPackedSignedAccelerated;
+    VkBool32              integerDotProduct4x8BitPackedMixedSignednessAccelerated;
+    VkBool32              integerDotProduct16BitUnsignedAccelerated;
+    VkBool32              integerDotProduct16BitSignedAccelerated;
+    VkBool32              integerDotProduct16BitMixedSignednessAccelerated;
+    VkBool32              integerDotProduct32BitUnsignedAccelerated;
+    VkBool32              integerDotProduct32BitSignedAccelerated;
+    VkBool32              integerDotProduct32BitMixedSignednessAccelerated;
+    VkBool32              integerDotProduct64BitUnsignedAccelerated;
+    VkBool32              integerDotProduct64BitSignedAccelerated;
+    VkBool32              integerDotProduct64BitMixedSignednessAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating8BitUnsignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating8BitSignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating16BitUnsignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating16BitSignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating32BitUnsignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating32BitSignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating64BitUnsignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating64BitSignedAccelerated;
+    VkBool32              integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated;
+    VkDeviceSize          storageTexelBufferOffsetAlignmentBytes;
+    VkBool32              storageTexelBufferOffsetSingleTexelAlignment;
+    VkDeviceSize          uniformTexelBufferOffsetAlignmentBytes;
+    VkBool32              uniformTexelBufferOffsetSingleTexelAlignment;
+    VkDeviceSize          maxBufferSize;
+} VkPhysicalDeviceVulkan13Properties;
+
+typedef struct VkPipelineCreationFeedback {
+    VkPipelineCreationFeedbackFlags    flags;
+    uint64_t                           duration;
+} VkPipelineCreationFeedback;
+
+typedef struct VkPipelineCreationFeedbackCreateInfo {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkPipelineCreationFeedback*    pPipelineCreationFeedback;
+    uint32_t                       pipelineStageCreationFeedbackCount;
+    VkPipelineCreationFeedback*    pPipelineStageCreationFeedbacks;
+} VkPipelineCreationFeedbackCreateInfo;
+
+typedef struct VkPhysicalDeviceShaderTerminateInvocationFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderTerminateInvocation;
+} VkPhysicalDeviceShaderTerminateInvocationFeatures;
+
+typedef struct VkPhysicalDeviceToolProperties {
+    VkStructureType       sType;
+    void*                 pNext;
+    char                  name[VK_MAX_EXTENSION_NAME_SIZE];
+    char                  version[VK_MAX_EXTENSION_NAME_SIZE];
+    VkToolPurposeFlags    purposes;
+    char                  description[VK_MAX_DESCRIPTION_SIZE];
+    char                  layer[VK_MAX_EXTENSION_NAME_SIZE];
+} VkPhysicalDeviceToolProperties;
+
+typedef struct VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderDemoteToHelperInvocation;
+} VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+
+typedef struct VkPhysicalDevicePrivateDataFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           privateData;
+} VkPhysicalDevicePrivateDataFeatures;
+
+typedef struct VkDevicePrivateDataCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           privateDataSlotRequestCount;
+} VkDevicePrivateDataCreateInfo;
+
+typedef struct VkPrivateDataSlotCreateInfo {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkPrivateDataSlotCreateFlags    flags;
+} VkPrivateDataSlotCreateInfo;
+
+typedef struct VkPhysicalDevicePipelineCreationCacheControlFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelineCreationCacheControl;
+} VkPhysicalDevicePipelineCreationCacheControlFeatures;
+
+typedef struct VkMemoryBarrier2 {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkPipelineStageFlags2    srcStageMask;
+    VkAccessFlags2           srcAccessMask;
+    VkPipelineStageFlags2    dstStageMask;
+    VkAccessFlags2           dstAccessMask;
+} VkMemoryBarrier2;
+
+typedef struct VkBufferMemoryBarrier2 {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkPipelineStageFlags2    srcStageMask;
+    VkAccessFlags2           srcAccessMask;
+    VkPipelineStageFlags2    dstStageMask;
+    VkAccessFlags2           dstAccessMask;
+    uint32_t                 srcQueueFamilyIndex;
+    uint32_t                 dstQueueFamilyIndex;
+    VkBuffer                 buffer;
+    VkDeviceSize             offset;
+    VkDeviceSize             size;
+} VkBufferMemoryBarrier2;
+
+typedef struct VkImageMemoryBarrier2 {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkPipelineStageFlags2      srcStageMask;
+    VkAccessFlags2             srcAccessMask;
+    VkPipelineStageFlags2      dstStageMask;
+    VkAccessFlags2             dstAccessMask;
+    VkImageLayout              oldLayout;
+    VkImageLayout              newLayout;
+    uint32_t                   srcQueueFamilyIndex;
+    uint32_t                   dstQueueFamilyIndex;
+    VkImage                    image;
+    VkImageSubresourceRange    subresourceRange;
+} VkImageMemoryBarrier2;
+
+typedef struct VkDependencyInfo {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDependencyFlags                dependencyFlags;
+    uint32_t                         memoryBarrierCount;
+    const VkMemoryBarrier2*          pMemoryBarriers;
+    uint32_t                         bufferMemoryBarrierCount;
+    const VkBufferMemoryBarrier2*    pBufferMemoryBarriers;
+    uint32_t                         imageMemoryBarrierCount;
+    const VkImageMemoryBarrier2*     pImageMemoryBarriers;
+} VkDependencyInfo;
+
+typedef struct VkSemaphoreSubmitInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkSemaphore              semaphore;
+    uint64_t                 value;
+    VkPipelineStageFlags2    stageMask;
+    uint32_t                 deviceIndex;
+} VkSemaphoreSubmitInfo;
+
+typedef struct VkCommandBufferSubmitInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkCommandBuffer    commandBuffer;
+    uint32_t           deviceMask;
+} VkCommandBufferSubmitInfo;
+
+typedef struct VkSubmitInfo2 {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkSubmitFlags                       flags;
+    uint32_t                            waitSemaphoreInfoCount;
+    const VkSemaphoreSubmitInfo*        pWaitSemaphoreInfos;
+    uint32_t                            commandBufferInfoCount;
+    const VkCommandBufferSubmitInfo*    pCommandBufferInfos;
+    uint32_t                            signalSemaphoreInfoCount;
+    const VkSemaphoreSubmitInfo*        pSignalSemaphoreInfos;
+} VkSubmitInfo2;
+
+typedef struct VkPhysicalDeviceSynchronization2Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           synchronization2;
+} VkPhysicalDeviceSynchronization2Features;
+
+typedef struct VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderZeroInitializeWorkgroupMemory;
+} VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+
+typedef struct VkPhysicalDeviceImageRobustnessFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           robustImageAccess;
+} VkPhysicalDeviceImageRobustnessFeatures;
+
+typedef struct VkBufferCopy2 {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceSize       srcOffset;
+    VkDeviceSize       dstOffset;
+    VkDeviceSize       size;
+} VkBufferCopy2;
+
+typedef struct VkCopyBufferInfo2 {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkBuffer                srcBuffer;
+    VkBuffer                dstBuffer;
+    uint32_t                regionCount;
+    const VkBufferCopy2*    pRegions;
+} VkCopyBufferInfo2;
+
+typedef struct VkImageCopy2 {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffset;
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffset;
+    VkExtent3D                  extent;
+} VkImageCopy2;
+
+typedef struct VkCopyImageInfo2 {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkImage                srcImage;
+    VkImageLayout          srcImageLayout;
+    VkImage                dstImage;
+    VkImageLayout          dstImageLayout;
+    uint32_t               regionCount;
+    const VkImageCopy2*    pRegions;
+} VkCopyImageInfo2;
+
+typedef struct VkBufferImageCopy2 {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkDeviceSize                bufferOffset;
+    uint32_t                    bufferRowLength;
+    uint32_t                    bufferImageHeight;
+    VkImageSubresourceLayers    imageSubresource;
+    VkOffset3D                  imageOffset;
+    VkExtent3D                  imageExtent;
+} VkBufferImageCopy2;
+
+typedef struct VkCopyBufferToImageInfo2 {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkBuffer                     srcBuffer;
+    VkImage                      dstImage;
+    VkImageLayout                dstImageLayout;
+    uint32_t                     regionCount;
+    const VkBufferImageCopy2*    pRegions;
+} VkCopyBufferToImageInfo2;
+
+typedef struct VkCopyImageToBufferInfo2 {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkImage                      srcImage;
+    VkImageLayout                srcImageLayout;
+    VkBuffer                     dstBuffer;
+    uint32_t                     regionCount;
+    const VkBufferImageCopy2*    pRegions;
+} VkCopyImageToBufferInfo2;
+
+typedef struct VkImageBlit2 {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffsets[2];
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffsets[2];
+} VkImageBlit2;
+
+typedef struct VkBlitImageInfo2 {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkImage                srcImage;
+    VkImageLayout          srcImageLayout;
+    VkImage                dstImage;
+    VkImageLayout          dstImageLayout;
+    uint32_t               regionCount;
+    const VkImageBlit2*    pRegions;
+    VkFilter               filter;
+} VkBlitImageInfo2;
+
+typedef struct VkImageResolve2 {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkImageSubresourceLayers    srcSubresource;
+    VkOffset3D                  srcOffset;
+    VkImageSubresourceLayers    dstSubresource;
+    VkOffset3D                  dstOffset;
+    VkExtent3D                  extent;
+} VkImageResolve2;
+
+typedef struct VkResolveImageInfo2 {
+    VkStructureType           sType;
+    const void*               pNext;
+    VkImage                   srcImage;
+    VkImageLayout             srcImageLayout;
+    VkImage                   dstImage;
+    VkImageLayout             dstImageLayout;
+    uint32_t                  regionCount;
+    const VkImageResolve2*    pRegions;
+} VkResolveImageInfo2;
+
+typedef struct VkPhysicalDeviceSubgroupSizeControlFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           subgroupSizeControl;
+    VkBool32           computeFullSubgroups;
+} VkPhysicalDeviceSubgroupSizeControlFeatures;
+
+typedef struct VkPhysicalDeviceSubgroupSizeControlProperties {
+    VkStructureType       sType;
+    void*                 pNext;
+    uint32_t              minSubgroupSize;
+    uint32_t              maxSubgroupSize;
+    uint32_t              maxComputeWorkgroupSubgroups;
+    VkShaderStageFlags    requiredSubgroupSizeStages;
+} VkPhysicalDeviceSubgroupSizeControlProperties;
+
+typedef struct VkPipelineShaderStageRequiredSubgroupSizeCreateInfo {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           requiredSubgroupSize;
+} VkPipelineShaderStageRequiredSubgroupSizeCreateInfo;
+
+typedef struct VkPhysicalDeviceInlineUniformBlockFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           inlineUniformBlock;
+    VkBool32           descriptorBindingInlineUniformBlockUpdateAfterBind;
+} VkPhysicalDeviceInlineUniformBlockFeatures;
+
+typedef struct VkPhysicalDeviceInlineUniformBlockProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxInlineUniformBlockSize;
+    uint32_t           maxPerStageDescriptorInlineUniformBlocks;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindInlineUniformBlocks;
+    uint32_t           maxDescriptorSetInlineUniformBlocks;
+    uint32_t           maxDescriptorSetUpdateAfterBindInlineUniformBlocks;
+} VkPhysicalDeviceInlineUniformBlockProperties;
+
+typedef struct VkWriteDescriptorSetInlineUniformBlock {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           dataSize;
+    const void*        pData;
+} VkWriteDescriptorSetInlineUniformBlock;
+
+typedef struct VkDescriptorPoolInlineUniformBlockCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           maxInlineUniformBlockBindings;
+} VkDescriptorPoolInlineUniformBlockCreateInfo;
+
+typedef struct VkPhysicalDeviceTextureCompressionASTCHDRFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           textureCompressionASTC_HDR;
+} VkPhysicalDeviceTextureCompressionASTCHDRFeatures;
+
+typedef struct VkRenderingAttachmentInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkImageView              imageView;
+    VkImageLayout            imageLayout;
+    VkResolveModeFlagBits    resolveMode;
+    VkImageView              resolveImageView;
+    VkImageLayout            resolveImageLayout;
+    VkAttachmentLoadOp       loadOp;
+    VkAttachmentStoreOp      storeOp;
+    VkClearValue             clearValue;
+} VkRenderingAttachmentInfo;
+
+typedef struct VkRenderingInfo {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkRenderingFlags                    flags;
+    VkRect2D                            renderArea;
+    uint32_t                            layerCount;
+    uint32_t                            viewMask;
+    uint32_t                            colorAttachmentCount;
+    const VkRenderingAttachmentInfo*    pColorAttachments;
+    const VkRenderingAttachmentInfo*    pDepthAttachment;
+    const VkRenderingAttachmentInfo*    pStencilAttachment;
+} VkRenderingInfo;
+
+typedef struct VkPipelineRenderingCreateInfo {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           viewMask;
+    uint32_t           colorAttachmentCount;
+    const VkFormat*    pColorAttachmentFormats;
+    VkFormat           depthAttachmentFormat;
+    VkFormat           stencilAttachmentFormat;
+} VkPipelineRenderingCreateInfo;
+
+typedef struct VkPhysicalDeviceDynamicRenderingFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           dynamicRendering;
+} VkPhysicalDeviceDynamicRenderingFeatures;
+
+typedef struct VkCommandBufferInheritanceRenderingInfo {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkRenderingFlags         flags;
+    uint32_t                 viewMask;
+    uint32_t                 colorAttachmentCount;
+    const VkFormat*          pColorAttachmentFormats;
+    VkFormat                 depthAttachmentFormat;
+    VkFormat                 stencilAttachmentFormat;
+    VkSampleCountFlagBits    rasterizationSamples;
+} VkCommandBufferInheritanceRenderingInfo;
+
+typedef struct VkPhysicalDeviceShaderIntegerDotProductFeatures {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderIntegerDotProduct;
+} VkPhysicalDeviceShaderIntegerDotProductFeatures;
+
+typedef struct VkPhysicalDeviceShaderIntegerDotProductProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           integerDotProduct8BitUnsignedAccelerated;
+    VkBool32           integerDotProduct8BitSignedAccelerated;
+    VkBool32           integerDotProduct8BitMixedSignednessAccelerated;
+    VkBool32           integerDotProduct4x8BitPackedUnsignedAccelerated;
+    VkBool32           integerDotProduct4x8BitPackedSignedAccelerated;
+    VkBool32           integerDotProduct4x8BitPackedMixedSignednessAccelerated;
+    VkBool32           integerDotProduct16BitUnsignedAccelerated;
+    VkBool32           integerDotProduct16BitSignedAccelerated;
+    VkBool32           integerDotProduct16BitMixedSignednessAccelerated;
+    VkBool32           integerDotProduct32BitUnsignedAccelerated;
+    VkBool32           integerDotProduct32BitSignedAccelerated;
+    VkBool32           integerDotProduct32BitMixedSignednessAccelerated;
+    VkBool32           integerDotProduct64BitUnsignedAccelerated;
+    VkBool32           integerDotProduct64BitSignedAccelerated;
+    VkBool32           integerDotProduct64BitMixedSignednessAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating8BitUnsignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating8BitSignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating8BitMixedSignednessAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating4x8BitPackedUnsignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating4x8BitPackedSignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating4x8BitPackedMixedSignednessAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating16BitUnsignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating16BitSignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating16BitMixedSignednessAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating32BitUnsignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating32BitSignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating32BitMixedSignednessAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating64BitUnsignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating64BitSignedAccelerated;
+    VkBool32           integerDotProductAccumulatingSaturating64BitMixedSignednessAccelerated;
+} VkPhysicalDeviceShaderIntegerDotProductProperties;
+
+typedef struct VkPhysicalDeviceTexelBufferAlignmentProperties {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       storageTexelBufferOffsetAlignmentBytes;
+    VkBool32           storageTexelBufferOffsetSingleTexelAlignment;
+    VkDeviceSize       uniformTexelBufferOffsetAlignmentBytes;
+    VkBool32           uniformTexelBufferOffsetSingleTexelAlignment;
+} VkPhysicalDeviceTexelBufferAlignmentProperties;
+
+typedef struct VkFormatProperties3 {
+    VkStructureType          sType;
+    void*                    pNext;
+    VkFormatFeatureFlags2    linearTilingFeatures;
+    VkFormatFeatureFlags2    optimalTilingFeatures;
+    VkFormatFeatureFlags2    bufferFeatures;
+} VkFormatProperties3;
+
+typedef struct VkPhysicalDeviceMaintenance4Features {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           maintenance4;
+} VkPhysicalDeviceMaintenance4Features;
+
+typedef struct VkPhysicalDeviceMaintenance4Properties {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       maxBufferSize;
+} VkPhysicalDeviceMaintenance4Properties;
+
+typedef struct VkDeviceBufferMemoryRequirements {
+    VkStructureType              sType;
+    const void*                  pNext;
+    const VkBufferCreateInfo*    pCreateInfo;
+} VkDeviceBufferMemoryRequirements;
+
+typedef struct VkDeviceImageMemoryRequirements {
+    VkStructureType             sType;
+    const void*                 pNext;
+    const VkImageCreateInfo*    pCreateInfo;
+    VkImageAspectFlagBits       planeAspect;
+} VkDeviceImageMemoryRequirements;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolProperties)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlot)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot);
+typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlot)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data);
+typedef void (VKAPI_PTR *PFN_vkGetPrivateData)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2)(VkCommandBuffer                   commandBuffer, VkEvent                                             event, const VkDependencyInfo*                             pDependencyInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2)(VkCommandBuffer                   commandBuffer, VkEvent                                             event, VkPipelineStageFlags2               stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2)(VkCommandBuffer                   commandBuffer, uint32_t                                            eventCount, const VkEvent*                     pEvents, const VkDependencyInfo*            pDependencyInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2)(VkCommandBuffer                   commandBuffer, const VkDependencyInfo*                             pDependencyInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2)(VkCommandBuffer                   commandBuffer, VkPipelineStageFlags2               stage, VkQueryPool                                         queryPool, uint32_t                                            query);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2)(VkQueue                           queue, uint32_t                            submitCount, const VkSubmitInfo2*              pSubmits, VkFence           fence);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRendering)(VkCommandBuffer                   commandBuffer, const VkRenderingInfo*                              pRenderingInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRendering)(VkCommandBuffer                   commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCullMode)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFace)(VkCommandBuffer commandBuffer, VkFrontFace frontFace);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopology)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCount)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports);
+typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCount)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors);
+typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnable)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnable)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOp)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnable)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnable)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOp)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnable)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnable)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnable)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirements)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirements)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirements)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolProperties(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolProperties*             pToolProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlot(
+    VkDevice                                    device,
+    const VkPrivateDataSlotCreateInfo*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPrivateDataSlot*                          pPrivateDataSlot);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlot(
+    VkDevice                                    device,
+    VkPrivateDataSlot                           privateDataSlot,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateData(
+    VkDevice                                    device,
+    VkObjectType                                objectType,
+    uint64_t                                    objectHandle,
+    VkPrivateDataSlot                           privateDataSlot,
+    uint64_t                                    data);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPrivateData(
+    VkDevice                                    device,
+    VkObjectType                                objectType,
+    uint64_t                                    objectHandle,
+    VkPrivateDataSlot                           privateDataSlot,
+    uint64_t*                                   pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    const VkDependencyInfo*                     pDependencyInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags2                       stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    const VkDependencyInfo*                     pDependencyInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2(
+    VkCommandBuffer                             commandBuffer,
+    const VkDependencyInfo*                     pDependencyInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags2                       stage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo2*                        pSubmits,
+    VkFence                                     fence);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyBufferInfo2*                    pCopyBufferInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyImageInfo2*                     pCopyImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyBufferToImageInfo2*             pCopyBufferToImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyImageToBufferInfo2*             pCopyImageToBufferInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2(
+    VkCommandBuffer                             commandBuffer,
+    const VkBlitImageInfo2*                     pBlitImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2(
+    VkCommandBuffer                             commandBuffer,
+    const VkResolveImageInfo2*                  pResolveImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRendering(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderingInfo*                      pRenderingInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRendering(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCullMode(
+    VkCommandBuffer                             commandBuffer,
+    VkCullModeFlags                             cullMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFace(
+    VkCommandBuffer                             commandBuffer,
+    VkFrontFace                                 frontFace);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopology(
+    VkCommandBuffer                             commandBuffer,
+    VkPrimitiveTopology                         primitiveTopology);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCount(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCount(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes,
+    const VkDeviceSize*                         pStrides);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthWriteEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOp(
+    VkCommandBuffer                             commandBuffer,
+    VkCompareOp                                 depthCompareOp);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthBoundsTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    stencilTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOp(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    VkStencilOp                                 failOp,
+    VkStencilOp                                 passOp,
+    VkStencilOp                                 depthFailOp,
+    VkCompareOp                                 compareOp);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    rasterizerDiscardEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthBiasEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnable(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    primitiveRestartEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirements(
+    VkDevice                                    device,
+    const VkDeviceBufferMemoryRequirements*     pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirements(
+    VkDevice                                    device,
+    const VkDeviceImageMemoryRequirements*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirements(
+    VkDevice                                    device,
+    const VkDeviceImageMemoryRequirements*      pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+#endif
+
+
+#define VK_KHR_surface 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSurfaceKHR)
+#define VK_KHR_SURFACE_SPEC_VERSION       25
+#define VK_KHR_SURFACE_EXTENSION_NAME     "VK_KHR_surface"
+
+typedef enum VkPresentModeKHR {
+    VK_PRESENT_MODE_IMMEDIATE_KHR = 0,
+    VK_PRESENT_MODE_MAILBOX_KHR = 1,
+    VK_PRESENT_MODE_FIFO_KHR = 2,
+    VK_PRESENT_MODE_FIFO_RELAXED_KHR = 3,
+    VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR = 1000111000,
+    VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR = 1000111001,
+    VK_PRESENT_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPresentModeKHR;
+
+typedef enum VkColorSpaceKHR {
+    VK_COLOR_SPACE_SRGB_NONLINEAR_KHR = 0,
+    VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT = 1000104001,
+    VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT = 1000104002,
+    VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT = 1000104003,
+    VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT = 1000104004,
+    VK_COLOR_SPACE_BT709_LINEAR_EXT = 1000104005,
+    VK_COLOR_SPACE_BT709_NONLINEAR_EXT = 1000104006,
+    VK_COLOR_SPACE_BT2020_LINEAR_EXT = 1000104007,
+    VK_COLOR_SPACE_HDR10_ST2084_EXT = 1000104008,
+    VK_COLOR_SPACE_DOLBYVISION_EXT = 1000104009,
+    VK_COLOR_SPACE_HDR10_HLG_EXT = 1000104010,
+    VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT = 1000104011,
+    VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT = 1000104012,
+    VK_COLOR_SPACE_PASS_THROUGH_EXT = 1000104013,
+    VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT = 1000104014,
+    VK_COLOR_SPACE_DISPLAY_NATIVE_AMD = 1000213000,
+    VK_COLORSPACE_SRGB_NONLINEAR_KHR = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    VK_COLOR_SPACE_DCI_P3_LINEAR_EXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
+    VK_COLOR_SPACE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkColorSpaceKHR;
+
+typedef enum VkSurfaceTransformFlagBitsKHR {
+    VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR = 0x00000001,
+    VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR = 0x00000002,
+    VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR = 0x00000004,
+    VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR = 0x00000008,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR = 0x00000010,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR = 0x00000020,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR = 0x00000040,
+    VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR = 0x00000080,
+    VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR = 0x00000100,
+    VK_SURFACE_TRANSFORM_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSurfaceTransformFlagBitsKHR;
+
+typedef enum VkCompositeAlphaFlagBitsKHR {
+    VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR = 0x00000001,
+    VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR = 0x00000002,
+    VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR = 0x00000004,
+    VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR = 0x00000008,
+    VK_COMPOSITE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkCompositeAlphaFlagBitsKHR;
+typedef VkFlags VkCompositeAlphaFlagsKHR;
+typedef VkFlags VkSurfaceTransformFlagsKHR;
+typedef struct VkSurfaceCapabilitiesKHR {
+    uint32_t                         minImageCount;
+    uint32_t                         maxImageCount;
+    VkExtent2D                       currentExtent;
+    VkExtent2D                       minImageExtent;
+    VkExtent2D                       maxImageExtent;
+    uint32_t                         maxImageArrayLayers;
+    VkSurfaceTransformFlagsKHR       supportedTransforms;
+    VkSurfaceTransformFlagBitsKHR    currentTransform;
+    VkCompositeAlphaFlagsKHR         supportedCompositeAlpha;
+    VkImageUsageFlags                supportedUsageFlags;
+} VkSurfaceCapabilitiesKHR;
+
+typedef struct VkSurfaceFormatKHR {
+    VkFormat           format;
+    VkColorSpaceKHR    colorSpace;
+} VkSurfaceFormatKHR;
+
+typedef void (VKAPI_PTR *PFN_vkDestroySurfaceKHR)(VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceSupportKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormatsKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfacePresentModesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkDestroySurfaceKHR(
+    VkInstance                                  instance,
+    VkSurfaceKHR                                surface,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceSupportKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    VkSurfaceKHR                                surface,
+    VkBool32*                                   pSupported);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilitiesKHR*                   pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormatsKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormatKHR*                         pSurfaceFormats);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfacePresentModesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pPresentModeCount,
+    VkPresentModeKHR*                           pPresentModes);
+#endif
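+/*
+ * Usage sketch (illustrative only, not part of the Vulkan headers): querying
+ * presentation support and surface properties with VK_KHR_surface. Assumes a
+ * valid VkPhysicalDevice, queue family index and VkSurfaceKHR obtained
+ * elsewhere; error handling is omitted.
+ *
+ *     VkBool32 supported = VK_FALSE;
+ *     vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex,
+ *                                          surface, &supported);
+ *
+ *     VkSurfaceCapabilitiesKHR caps;
+ *     vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, &caps);
+ *
+ *     // Standard two-call enumeration: query the count, then fetch the data.
+ *     uint32_t formatCount = 0;
+ *     vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface,
+ *                                          &formatCount, NULL);
+ *     VkSurfaceFormatKHR formats[8];
+ *     if (formatCount > 8) formatCount = 8;
+ *     vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface,
+ *                                          &formatCount, formats);
+ */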
+
+
+#define VK_KHR_swapchain 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkSwapchainKHR)
+#define VK_KHR_SWAPCHAIN_SPEC_VERSION     70
+#define VK_KHR_SWAPCHAIN_EXTENSION_NAME   "VK_KHR_swapchain"
+
+typedef enum VkSwapchainCreateFlagBitsKHR {
+    VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR = 0x00000001,
+    VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR = 0x00000002,
+    VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR = 0x00000004,
+    VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT = 0x00000008,
+    VK_SWAPCHAIN_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkSwapchainCreateFlagBitsKHR;
+typedef VkFlags VkSwapchainCreateFlagsKHR;
+
+typedef enum VkDeviceGroupPresentModeFlagBitsKHR {
+    VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR = 0x00000001,
+    VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR = 0x00000002,
+    VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR = 0x00000004,
+    VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR = 0x00000008,
+    VK_DEVICE_GROUP_PRESENT_MODE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkDeviceGroupPresentModeFlagBitsKHR;
+typedef VkFlags VkDeviceGroupPresentModeFlagsKHR;
+typedef struct VkSwapchainCreateInfoKHR {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkSwapchainCreateFlagsKHR        flags;
+    VkSurfaceKHR                     surface;
+    uint32_t                         minImageCount;
+    VkFormat                         imageFormat;
+    VkColorSpaceKHR                  imageColorSpace;
+    VkExtent2D                       imageExtent;
+    uint32_t                         imageArrayLayers;
+    VkImageUsageFlags                imageUsage;
+    VkSharingMode                    imageSharingMode;
+    uint32_t                         queueFamilyIndexCount;
+    const uint32_t*                  pQueueFamilyIndices;
+    VkSurfaceTransformFlagBitsKHR    preTransform;
+    VkCompositeAlphaFlagBitsKHR      compositeAlpha;
+    VkPresentModeKHR                 presentMode;
+    VkBool32                         clipped;
+    VkSwapchainKHR                   oldSwapchain;
+} VkSwapchainCreateInfoKHR;
+
+typedef struct VkPresentInfoKHR {
+    VkStructureType          sType;
+    const void*              pNext;
+    uint32_t                 waitSemaphoreCount;
+    const VkSemaphore*       pWaitSemaphores;
+    uint32_t                 swapchainCount;
+    const VkSwapchainKHR*    pSwapchains;
+    const uint32_t*          pImageIndices;
+    VkResult*                pResults;
+} VkPresentInfoKHR;
+
+typedef struct VkImageSwapchainCreateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+} VkImageSwapchainCreateInfoKHR;
+
+typedef struct VkBindImageMemorySwapchainInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+    uint32_t           imageIndex;
+} VkBindImageMemorySwapchainInfoKHR;
+
+typedef struct VkAcquireNextImageInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+    uint64_t           timeout;
+    VkSemaphore        semaphore;
+    VkFence            fence;
+    uint32_t           deviceMask;
+} VkAcquireNextImageInfoKHR;
+
+typedef struct VkDeviceGroupPresentCapabilitiesKHR {
+    VkStructureType                     sType;
+    void*                               pNext;
+    uint32_t                            presentMask[VK_MAX_DEVICE_GROUP_SIZE];
+    VkDeviceGroupPresentModeFlagsKHR    modes;
+} VkDeviceGroupPresentCapabilitiesKHR;
+
+typedef struct VkDeviceGroupPresentInfoKHR {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    uint32_t                               swapchainCount;
+    const uint32_t*                        pDeviceMasks;
+    VkDeviceGroupPresentModeFlagBitsKHR    mode;
+} VkDeviceGroupPresentInfoKHR;
+
+typedef struct VkDeviceGroupSwapchainCreateInfoKHR {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkDeviceGroupPresentModeFlagsKHR    modes;
+} VkDeviceGroupSwapchainCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSwapchainKHR)(VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain);
+typedef void (VKAPI_PTR *PFN_vkDestroySwapchainKHR)(VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainImagesKHR)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImageKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex);
+typedef VkResult (VKAPI_PTR *PFN_vkQueuePresentKHR)(VkQueue queue, const VkPresentInfoKHR* pPresentInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupPresentCapabilitiesKHR)(VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceGroupSurfacePresentModesKHR)(VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDevicePresentRectanglesKHR)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireNextImage2KHR)(VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSwapchainKHR(
+    VkDevice                                    device,
+    const VkSwapchainCreateInfoKHR*             pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchain);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySwapchainKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainImagesKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pSwapchainImageCount,
+    VkImage*                                    pSwapchainImages);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImageKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    timeout,
+    VkSemaphore                                 semaphore,
+    VkFence                                     fence,
+    uint32_t*                                   pImageIndex);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueuePresentKHR(
+    VkQueue                                     queue,
+    const VkPresentInfoKHR*                     pPresentInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupPresentCapabilitiesKHR(
+    VkDevice                                    device,
+    VkDeviceGroupPresentCapabilitiesKHR*        pDeviceGroupPresentCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModesKHR(
+    VkDevice                                    device,
+    VkSurfaceKHR                                surface,
+    VkDeviceGroupPresentModeFlagsKHR*           pModes);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDevicePresentRectanglesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    uint32_t*                                   pRectCount,
+    VkRect2D*                                   pRects);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireNextImage2KHR(
+    VkDevice                                    device,
+    const VkAcquireNextImageInfoKHR*            pAcquireInfo,
+    uint32_t*                                   pImageIndex);
+#endif
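+/*
+ * Usage sketch (illustrative only, not part of the Vulkan headers): creating a
+ * swapchain and presenting one image. The format, extent and image count below
+ * are placeholder assumptions; in practice they come from the VK_KHR_surface
+ * queries above. Assumes valid device, surface, semaphore and queue handles.
+ *
+ *     VkSwapchainCreateInfoKHR createInfo = {
+ *         .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
+ *         .surface = surface,
+ *         .minImageCount = 3,
+ *         .imageFormat = VK_FORMAT_B8G8R8A8_UNORM,
+ *         .imageColorSpace = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+ *         .imageExtent = { 1280, 720 },
+ *         .imageArrayLayers = 1,
+ *         .imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+ *         .imageSharingMode = VK_SHARING_MODE_EXCLUSIVE,
+ *         .preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+ *         .compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+ *         .presentMode = VK_PRESENT_MODE_FIFO_KHR,
+ *         .clipped = VK_TRUE,
+ *     };
+ *     VkSwapchainKHR swapchain;
+ *     vkCreateSwapchainKHR(device, &createInfo, NULL, &swapchain);
+ *
+ *     // Acquire an image, submit work that signals renderDoneSemaphore, then present.
+ *     uint32_t imageIndex;
+ *     vkAcquireNextImageKHR(device, swapchain, UINT64_MAX,
+ *                           acquireSemaphore, VK_NULL_HANDLE, &imageIndex);
+ *     VkPresentInfoKHR presentInfo = {
+ *         .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
+ *         .waitSemaphoreCount = 1,
+ *         .pWaitSemaphores = &renderDoneSemaphore,
+ *         .swapchainCount = 1,
+ *         .pSwapchains = &swapchain,
+ *         .pImageIndices = &imageIndex,
+ *     };
+ *     vkQueuePresentKHR(presentQueue, &presentInfo);
+ */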
+
+
+#define VK_KHR_display 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayKHR)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDisplayModeKHR)
+#define VK_KHR_DISPLAY_SPEC_VERSION       23
+#define VK_KHR_DISPLAY_EXTENSION_NAME     "VK_KHR_display"
+typedef VkFlags VkDisplayModeCreateFlagsKHR;
+
+typedef enum VkDisplayPlaneAlphaFlagBitsKHR {
+    VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR = 0x00000001,
+    VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR = 0x00000002,
+    VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR = 0x00000004,
+    VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR = 0x00000008,
+    VK_DISPLAY_PLANE_ALPHA_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkDisplayPlaneAlphaFlagBitsKHR;
+typedef VkFlags VkDisplayPlaneAlphaFlagsKHR;
+typedef VkFlags VkDisplaySurfaceCreateFlagsKHR;
+typedef struct VkDisplayModeParametersKHR {
+    VkExtent2D    visibleRegion;
+    uint32_t      refreshRate;
+} VkDisplayModeParametersKHR;
+
+typedef struct VkDisplayModeCreateInfoKHR {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkDisplayModeCreateFlagsKHR    flags;
+    VkDisplayModeParametersKHR     parameters;
+} VkDisplayModeCreateInfoKHR;
+
+typedef struct VkDisplayModePropertiesKHR {
+    VkDisplayModeKHR              displayMode;
+    VkDisplayModeParametersKHR    parameters;
+} VkDisplayModePropertiesKHR;
+
+typedef struct VkDisplayPlaneCapabilitiesKHR {
+    VkDisplayPlaneAlphaFlagsKHR    supportedAlpha;
+    VkOffset2D                     minSrcPosition;
+    VkOffset2D                     maxSrcPosition;
+    VkExtent2D                     minSrcExtent;
+    VkExtent2D                     maxSrcExtent;
+    VkOffset2D                     minDstPosition;
+    VkOffset2D                     maxDstPosition;
+    VkExtent2D                     minDstExtent;
+    VkExtent2D                     maxDstExtent;
+} VkDisplayPlaneCapabilitiesKHR;
+
+typedef struct VkDisplayPlanePropertiesKHR {
+    VkDisplayKHR    currentDisplay;
+    uint32_t        currentStackIndex;
+} VkDisplayPlanePropertiesKHR;
+
+typedef struct VkDisplayPropertiesKHR {
+    VkDisplayKHR                  display;
+    const char*                   displayName;
+    VkExtent2D                    physicalDimensions;
+    VkExtent2D                    physicalResolution;
+    VkSurfaceTransformFlagsKHR    supportedTransforms;
+    VkBool32                      planeReorderPossible;
+    VkBool32                      persistentContent;
+} VkDisplayPropertiesKHR;
+
+typedef struct VkDisplaySurfaceCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkDisplaySurfaceCreateFlagsKHR    flags;
+    VkDisplayModeKHR                  displayMode;
+    uint32_t                          planeIndex;
+    uint32_t                          planeStackIndex;
+    VkSurfaceTransformFlagBitsKHR     transform;
+    float                             globalAlpha;
+    VkDisplayPlaneAlphaFlagBitsKHR    alphaMode;
+    VkExtent2D                        imageExtent;
+} VkDisplaySurfaceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneSupportedDisplaysKHR)(VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModePropertiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayModeKHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilitiesKHR)(VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDisplayPlaneSurfaceKHR)(VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPropertiesKHR*                     pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlanePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlanePropertiesKHR*                pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneSupportedDisplaysKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    planeIndex,
+    uint32_t*                                   pDisplayCount,
+    VkDisplayKHR*                               pDisplays);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModePropertiesKHR*                 pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayModeKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    const VkDisplayModeCreateInfoKHR*           pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDisplayModeKHR*                           pMode);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayModeKHR                            mode,
+    uint32_t                                    planeIndex,
+    VkDisplayPlaneCapabilitiesKHR*              pCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDisplayPlaneSurfaceKHR(
+    VkInstance                                  instance,
+    const VkDisplaySurfaceCreateInfoKHR*        pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
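+/*
+ * Usage sketch (illustrative only, not part of the Vulkan headers): enumerating
+ * displays and their modes with VK_KHR_display using the two-call pattern.
+ * Dynamic allocation and error handling are omitted.
+ *
+ *     uint32_t displayCount = 0;
+ *     vkGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, &displayCount, NULL);
+ *     VkDisplayPropertiesKHR displays[4];
+ *     if (displayCount > 4) displayCount = 4;
+ *     vkGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, &displayCount, displays);
+ *
+ *     uint32_t modeCount = 0;
+ *     vkGetDisplayModePropertiesKHR(physicalDevice, displays[0].display,
+ *                                   &modeCount, NULL);
+ */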
+
+
+#define VK_KHR_display_swapchain 1
+#define VK_KHR_DISPLAY_SWAPCHAIN_SPEC_VERSION 10
+#define VK_KHR_DISPLAY_SWAPCHAIN_EXTENSION_NAME "VK_KHR_display_swapchain"
+typedef struct VkDisplayPresentInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkRect2D           srcRect;
+    VkRect2D           dstRect;
+    VkBool32           persistent;
+} VkDisplayPresentInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSharedSwapchainsKHR)(VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSharedSwapchainsKHR(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainCreateInfoKHR*             pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSwapchainKHR*                             pSwapchains);
+#endif
+
+
+#define VK_KHR_sampler_mirror_clamp_to_edge 1
+#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_SPEC_VERSION 3
+#define VK_KHR_SAMPLER_MIRROR_CLAMP_TO_EDGE_EXTENSION_NAME "VK_KHR_sampler_mirror_clamp_to_edge"
+
+
+#define VK_KHR_video_queue 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionKHR)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkVideoSessionParametersKHR)
+#define VK_KHR_VIDEO_QUEUE_SPEC_VERSION   8
+#define VK_KHR_VIDEO_QUEUE_EXTENSION_NAME "VK_KHR_video_queue"
+
+typedef enum VkQueryResultStatusKHR {
+    VK_QUERY_RESULT_STATUS_ERROR_KHR = -1,
+    VK_QUERY_RESULT_STATUS_NOT_READY_KHR = 0,
+    VK_QUERY_RESULT_STATUS_COMPLETE_KHR = 1,
+    VK_QUERY_RESULT_STATUS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkQueryResultStatusKHR;
+
+typedef enum VkVideoCodecOperationFlagBitsKHR {
+    VK_VIDEO_CODEC_OPERATION_NONE_KHR = 0,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT = 0x00010000,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_EXT = 0x00020000,
+#endif
+    VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR = 0x00000001,
+    VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR = 0x00000002,
+    VK_VIDEO_CODEC_OPERATION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCodecOperationFlagBitsKHR;
+typedef VkFlags VkVideoCodecOperationFlagsKHR;
+
+typedef enum VkVideoChromaSubsamplingFlagBitsKHR {
+    VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR = 0,
+    VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR = 0x00000001,
+    VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR = 0x00000002,
+    VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR = 0x00000004,
+    VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR = 0x00000008,
+    VK_VIDEO_CHROMA_SUBSAMPLING_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoChromaSubsamplingFlagBitsKHR;
+typedef VkFlags VkVideoChromaSubsamplingFlagsKHR;
+
+typedef enum VkVideoComponentBitDepthFlagBitsKHR {
+    VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR = 0,
+    VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR = 0x00000001,
+    VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR = 0x00000004,
+    VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR = 0x00000010,
+    VK_VIDEO_COMPONENT_BIT_DEPTH_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoComponentBitDepthFlagBitsKHR;
+typedef VkFlags VkVideoComponentBitDepthFlagsKHR;
+
+typedef enum VkVideoCapabilityFlagBitsKHR {
+    VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR = 0x00000001,
+    VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR = 0x00000002,
+    VK_VIDEO_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCapabilityFlagBitsKHR;
+typedef VkFlags VkVideoCapabilityFlagsKHR;
+
+typedef enum VkVideoSessionCreateFlagBitsKHR {
+    VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR = 0x00000001,
+    VK_VIDEO_SESSION_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoSessionCreateFlagBitsKHR;
+typedef VkFlags VkVideoSessionCreateFlagsKHR;
+typedef VkFlags VkVideoSessionParametersCreateFlagsKHR;
+typedef VkFlags VkVideoBeginCodingFlagsKHR;
+typedef VkFlags VkVideoEndCodingFlagsKHR;
+
+typedef enum VkVideoCodingControlFlagBitsKHR {
+    VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR = 0x00000001,
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR = 0x00000002,
+#endif
+#ifdef VK_ENABLE_BETA_EXTENSIONS
+    VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_LAYER_BIT_KHR = 0x00000004,
+#endif
+    VK_VIDEO_CODING_CONTROL_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoCodingControlFlagBitsKHR;
+typedef VkFlags VkVideoCodingControlFlagsKHR;
+typedef struct VkQueueFamilyQueryResultStatusPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           queryResultStatusSupport;
+} VkQueueFamilyQueryResultStatusPropertiesKHR;
+
+typedef struct VkQueueFamilyVideoPropertiesKHR {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkVideoCodecOperationFlagsKHR    videoCodecOperations;
+} VkQueueFamilyVideoPropertiesKHR;
+
+typedef struct VkVideoProfileInfoKHR {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkVideoCodecOperationFlagBitsKHR    videoCodecOperation;
+    VkVideoChromaSubsamplingFlagsKHR    chromaSubsampling;
+    VkVideoComponentBitDepthFlagsKHR    lumaBitDepth;
+    VkVideoComponentBitDepthFlagsKHR    chromaBitDepth;
+} VkVideoProfileInfoKHR;
+
+typedef struct VkVideoProfileListInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    uint32_t                        profileCount;
+    const VkVideoProfileInfoKHR*    pProfiles;
+} VkVideoProfileListInfoKHR;
+
+typedef struct VkVideoCapabilitiesKHR {
+    VkStructureType              sType;
+    void*                        pNext;
+    VkVideoCapabilityFlagsKHR    flags;
+    VkDeviceSize                 minBitstreamBufferOffsetAlignment;
+    VkDeviceSize                 minBitstreamBufferSizeAlignment;
+    VkExtent2D                   pictureAccessGranularity;
+    VkExtent2D                   minCodedExtent;
+    VkExtent2D                   maxCodedExtent;
+    uint32_t                     maxDpbSlots;
+    uint32_t                     maxActiveReferencePictures;
+    VkExtensionProperties        stdHeaderVersion;
+} VkVideoCapabilitiesKHR;
+
+typedef struct VkPhysicalDeviceVideoFormatInfoKHR {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkImageUsageFlags    imageUsage;
+} VkPhysicalDeviceVideoFormatInfoKHR;
+
+typedef struct VkVideoFormatPropertiesKHR {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkFormat              format;
+    VkComponentMapping    componentMapping;
+    VkImageCreateFlags    imageCreateFlags;
+    VkImageType           imageType;
+    VkImageTiling         imageTiling;
+    VkImageUsageFlags     imageUsageFlags;
+} VkVideoFormatPropertiesKHR;
+
+typedef struct VkVideoPictureResourceInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkOffset2D         codedOffset;
+    VkExtent2D         codedExtent;
+    uint32_t           baseArrayLayer;
+    VkImageView        imageViewBinding;
+} VkVideoPictureResourceInfoKHR;
+
+typedef struct VkVideoReferenceSlotInfoKHR {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    int32_t                                 slotIndex;
+    const VkVideoPictureResourceInfoKHR*    pPictureResource;
+} VkVideoReferenceSlotInfoKHR;
+
+typedef struct VkVideoSessionMemoryRequirementsKHR {
+    VkStructureType         sType;
+    void*                   pNext;
+    uint32_t                memoryBindIndex;
+    VkMemoryRequirements    memoryRequirements;
+} VkVideoSessionMemoryRequirementsKHR;
+
+typedef struct VkBindVideoSessionMemoryInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           memoryBindIndex;
+    VkDeviceMemory     memory;
+    VkDeviceSize       memoryOffset;
+    VkDeviceSize       memorySize;
+} VkBindVideoSessionMemoryInfoKHR;
+
+typedef struct VkVideoSessionCreateInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    uint32_t                        queueFamilyIndex;
+    VkVideoSessionCreateFlagsKHR    flags;
+    const VkVideoProfileInfoKHR*    pVideoProfile;
+    VkFormat                        pictureFormat;
+    VkExtent2D                      maxCodedExtent;
+    VkFormat                        referencePictureFormat;
+    uint32_t                        maxDpbSlots;
+    uint32_t                        maxActiveReferencePictures;
+    const VkExtensionProperties*    pStdHeaderVersion;
+} VkVideoSessionCreateInfoKHR;
+
+typedef struct VkVideoSessionParametersCreateInfoKHR {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkVideoSessionParametersCreateFlagsKHR    flags;
+    VkVideoSessionParametersKHR               videoSessionParametersTemplate;
+    VkVideoSessionKHR                         videoSession;
+} VkVideoSessionParametersCreateInfoKHR;
+
+typedef struct VkVideoSessionParametersUpdateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           updateSequenceCount;
+} VkVideoSessionParametersUpdateInfoKHR;
+
+typedef struct VkVideoBeginCodingInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkVideoBeginCodingFlagsKHR            flags;
+    VkVideoSessionKHR                     videoSession;
+    VkVideoSessionParametersKHR           videoSessionParameters;
+    uint32_t                              referenceSlotCount;
+    const VkVideoReferenceSlotInfoKHR*    pReferenceSlots;
+} VkVideoBeginCodingInfoKHR;
+
+typedef struct VkVideoEndCodingInfoKHR {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkVideoEndCodingFlagsKHR    flags;
+} VkVideoEndCodingInfoKHR;
+
+typedef struct VkVideoCodingControlInfoKHR {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkVideoCodingControlFlagsKHR    flags;
+} VkVideoCodingControlInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoCapabilitiesKHR)(VkPhysicalDevice physicalDevice, const VkVideoProfileInfoKHR* pVideoProfile, VkVideoCapabilitiesKHR* pCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceVideoFormatPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceVideoFormatInfoKHR* pVideoFormatInfo, uint32_t* pVideoFormatPropertyCount, VkVideoFormatPropertiesKHR* pVideoFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionKHR)(VkDevice device, const VkVideoSessionCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionKHR* pVideoSession);
+typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionKHR)(VkDevice device, VkVideoSessionKHR videoSession, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetVideoSessionMemoryRequirementsKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t* pMemoryRequirementsCount, VkVideoSessionMemoryRequirementsKHR* pMemoryRequirements);
+typedef VkResult (VKAPI_PTR *PFN_vkBindVideoSessionMemoryKHR)(VkDevice device, VkVideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VkBindVideoSessionMemoryInfoKHR* pBindSessionMemoryInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateVideoSessionParametersKHR)(VkDevice device, const VkVideoSessionParametersCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkVideoSessionParametersKHR* pVideoSessionParameters);
+typedef VkResult (VKAPI_PTR *PFN_vkUpdateVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo);
+typedef void (VKAPI_PTR *PFN_vkDestroyVideoSessionParametersKHR)(VkDevice device, VkVideoSessionParametersKHR videoSessionParameters, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoBeginCodingInfoKHR* pBeginInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoEndCodingInfoKHR* pEndCodingInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdControlVideoCodingKHR)(VkCommandBuffer commandBuffer, const VkVideoCodingControlInfoKHR* pCodingControlInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoCapabilitiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkVideoProfileInfoKHR*                pVideoProfile,
+    VkVideoCapabilitiesKHR*                     pCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceVideoFormatPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceVideoFormatInfoKHR*   pVideoFormatInfo,
+    uint32_t*                                   pVideoFormatPropertyCount,
+    VkVideoFormatPropertiesKHR*                 pVideoFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionKHR(
+    VkDevice                                    device,
+    const VkVideoSessionCreateInfoKHR*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkVideoSessionKHR*                          pVideoSession);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionKHR(
+    VkDevice                                    device,
+    VkVideoSessionKHR                           videoSession,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetVideoSessionMemoryRequirementsKHR(
+    VkDevice                                    device,
+    VkVideoSessionKHR                           videoSession,
+    uint32_t*                                   pMemoryRequirementsCount,
+    VkVideoSessionMemoryRequirementsKHR*        pMemoryRequirements);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindVideoSessionMemoryKHR(
+    VkDevice                                    device,
+    VkVideoSessionKHR                           videoSession,
+    uint32_t                                    bindSessionMemoryInfoCount,
+    const VkBindVideoSessionMemoryInfoKHR*      pBindSessionMemoryInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateVideoSessionParametersKHR(
+    VkDevice                                    device,
+    const VkVideoSessionParametersCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkVideoSessionParametersKHR*                pVideoSessionParameters);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkUpdateVideoSessionParametersKHR(
+    VkDevice                                    device,
+    VkVideoSessionParametersKHR                 videoSessionParameters,
+    const VkVideoSessionParametersUpdateInfoKHR* pUpdateInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyVideoSessionParametersKHR(
+    VkDevice                                    device,
+    VkVideoSessionParametersKHR                 videoSessionParameters,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginVideoCodingKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkVideoBeginCodingInfoKHR*            pBeginInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndVideoCodingKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkVideoEndCodingInfoKHR*              pEndCodingInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdControlVideoCodingKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkVideoCodingControlInfoKHR*          pCodingControlInfo);
+#endif
+
+
+#define VK_KHR_video_decode_queue 1
+#define VK_KHR_VIDEO_DECODE_QUEUE_SPEC_VERSION 7
+#define VK_KHR_VIDEO_DECODE_QUEUE_EXTENSION_NAME "VK_KHR_video_decode_queue"
+
+typedef enum VkVideoDecodeCapabilityFlagBitsKHR {
+    VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR = 0x00000001,
+    VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR = 0x00000002,
+    VK_VIDEO_DECODE_CAPABILITY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoDecodeCapabilityFlagBitsKHR;
+typedef VkFlags VkVideoDecodeCapabilityFlagsKHR;
+
+typedef enum VkVideoDecodeUsageFlagBitsKHR {
+    VK_VIDEO_DECODE_USAGE_DEFAULT_KHR = 0,
+    VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR = 0x00000001,
+    VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR = 0x00000002,
+    VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR = 0x00000004,
+    VK_VIDEO_DECODE_USAGE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoDecodeUsageFlagBitsKHR;
+typedef VkFlags VkVideoDecodeUsageFlagsKHR;
+typedef VkFlags VkVideoDecodeFlagsKHR;
+typedef struct VkVideoDecodeCapabilitiesKHR {
+    VkStructureType                    sType;
+    void*                              pNext;
+    VkVideoDecodeCapabilityFlagsKHR    flags;
+} VkVideoDecodeCapabilitiesKHR;
+
+typedef struct VkVideoDecodeUsageInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkVideoDecodeUsageFlagsKHR    videoUsageHints;
+} VkVideoDecodeUsageInfoKHR;
+
+typedef struct VkVideoDecodeInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkVideoDecodeFlagsKHR                 flags;
+    VkBuffer                              srcBuffer;
+    VkDeviceSize                          srcBufferOffset;
+    VkDeviceSize                          srcBufferRange;
+    VkVideoPictureResourceInfoKHR         dstPictureResource;
+    const VkVideoReferenceSlotInfoKHR*    pSetupReferenceSlot;
+    uint32_t                              referenceSlotCount;
+    const VkVideoReferenceSlotInfoKHR*    pReferenceSlots;
+} VkVideoDecodeInfoKHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDecodeVideoKHR)(VkCommandBuffer commandBuffer, const VkVideoDecodeInfoKHR* pDecodeInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDecodeVideoKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkVideoDecodeInfoKHR*                 pDecodeInfo);
+#endif
+
+
+#define VK_KHR_video_decode_h264 1
+#include "vk_video/vulkan_video_codec_h264std.h"
+#include "vk_video/vulkan_video_codec_h264std_decode.h"
+#define VK_KHR_VIDEO_DECODE_H264_SPEC_VERSION 8
+#define VK_KHR_VIDEO_DECODE_H264_EXTENSION_NAME "VK_KHR_video_decode_h264"
+
+typedef enum VkVideoDecodeH264PictureLayoutFlagBitsKHR {
+    VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR = 0,
+    VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR = 0x00000001,
+    VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR = 0x00000002,
+    VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkVideoDecodeH264PictureLayoutFlagBitsKHR;
+typedef VkFlags VkVideoDecodeH264PictureLayoutFlagsKHR;
+typedef struct VkVideoDecodeH264ProfileInfoKHR {
+    VkStructureType                              sType;
+    const void*                                  pNext;
+    StdVideoH264ProfileIdc                       stdProfileIdc;
+    VkVideoDecodeH264PictureLayoutFlagBitsKHR    pictureLayout;
+} VkVideoDecodeH264ProfileInfoKHR;
+
+typedef struct VkVideoDecodeH264CapabilitiesKHR {
+    VkStructureType         sType;
+    void*                   pNext;
+    StdVideoH264LevelIdc    maxLevelIdc;
+    VkOffset2D              fieldOffsetGranularity;
+} VkVideoDecodeH264CapabilitiesKHR;
+
+typedef struct VkVideoDecodeH264SessionParametersAddInfoKHR {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   stdSPSCount;
+    const StdVideoH264SequenceParameterSet*    pStdSPSs;
+    uint32_t                                   stdPPSCount;
+    const StdVideoH264PictureParameterSet*     pStdPPSs;
+} VkVideoDecodeH264SessionParametersAddInfoKHR;
+
+typedef struct VkVideoDecodeH264SessionParametersCreateInfoKHR {
+    VkStructureType                                        sType;
+    const void*                                            pNext;
+    uint32_t                                               maxStdSPSCount;
+    uint32_t                                               maxStdPPSCount;
+    const VkVideoDecodeH264SessionParametersAddInfoKHR*    pParametersAddInfo;
+} VkVideoDecodeH264SessionParametersCreateInfoKHR;
+
+typedef struct VkVideoDecodeH264PictureInfoKHR {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    const StdVideoDecodeH264PictureInfo*    pStdPictureInfo;
+    uint32_t                                sliceCount;
+    const uint32_t*                         pSliceOffsets;
+} VkVideoDecodeH264PictureInfoKHR;
+
+typedef struct VkVideoDecodeH264DpbSlotInfoKHR {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    const StdVideoDecodeH264ReferenceInfo*    pStdReferenceInfo;
+} VkVideoDecodeH264DpbSlotInfoKHR;
+
+
+
+#define VK_KHR_dynamic_rendering 1
+#define VK_KHR_DYNAMIC_RENDERING_SPEC_VERSION 1
+#define VK_KHR_DYNAMIC_RENDERING_EXTENSION_NAME "VK_KHR_dynamic_rendering"
+typedef VkRenderingFlags VkRenderingFlagsKHR;
+
+typedef VkRenderingFlagBits VkRenderingFlagBitsKHR;
+
+typedef VkRenderingInfo VkRenderingInfoKHR;
+
+typedef VkRenderingAttachmentInfo VkRenderingAttachmentInfoKHR;
+
+typedef VkPipelineRenderingCreateInfo VkPipelineRenderingCreateInfoKHR;
+
+typedef VkPhysicalDeviceDynamicRenderingFeatures VkPhysicalDeviceDynamicRenderingFeaturesKHR;
+
+typedef VkCommandBufferInheritanceRenderingInfo VkCommandBufferInheritanceRenderingInfoKHR;
+
+typedef struct VkRenderingFragmentShadingRateAttachmentInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImageView        imageView;
+    VkImageLayout      imageLayout;
+    VkExtent2D         shadingRateAttachmentTexelSize;
+} VkRenderingFragmentShadingRateAttachmentInfoKHR;
+
+typedef struct VkRenderingFragmentDensityMapAttachmentInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImageView        imageView;
+    VkImageLayout      imageLayout;
+} VkRenderingFragmentDensityMapAttachmentInfoEXT;
+
+typedef struct VkAttachmentSampleCountInfoAMD {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    uint32_t                        colorAttachmentCount;
+    const VkSampleCountFlagBits*    pColorAttachmentSamples;
+    VkSampleCountFlagBits           depthStencilAttachmentSamples;
+} VkAttachmentSampleCountInfoAMD;
+
+typedef VkAttachmentSampleCountInfoAMD VkAttachmentSampleCountInfoNV;
+
+typedef struct VkMultiviewPerViewAttributesInfoNVX {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           perViewAttributes;
+    VkBool32           perViewAttributesPositionXOnly;
+} VkMultiviewPerViewAttributesInfoNVX;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderingKHR)(VkCommandBuffer                   commandBuffer, const VkRenderingInfo*                              pRenderingInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderingKHR)(VkCommandBuffer                   commandBuffer);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderingKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderingInfo*                      pRenderingInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderingKHR(
+    VkCommandBuffer                             commandBuffer);
+#endif
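+/*
+ * Usage sketch (illustrative only, not part of the Vulkan headers): recording a
+ * render pass with VK_KHR_dynamic_rendering instead of VkRenderPass objects.
+ * VkRenderingInfo / VkRenderingAttachmentInfo are the core Vulkan 1.3 structs
+ * aliased above; field names follow that core definition. commandBuffer and
+ * colorImageView are assumed to exist.
+ *
+ *     VkRenderingAttachmentInfo colorAttachment = {
+ *         .sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
+ *         .imageView = colorImageView,
+ *         .imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+ *         .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
+ *         .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
+ *     };
+ *     VkRenderingInfo renderingInfo = {
+ *         .sType = VK_STRUCTURE_TYPE_RENDERING_INFO,
+ *         .renderArea = { { 0, 0 }, { 1280, 720 } },
+ *         .layerCount = 1,
+ *         .colorAttachmentCount = 1,
+ *         .pColorAttachments = &colorAttachment,
+ *     };
+ *     vkCmdBeginRenderingKHR(commandBuffer, &renderingInfo);
+ *     // ... bind pipeline, draw ...
+ *     vkCmdEndRenderingKHR(commandBuffer);
+ */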
+
+
+#define VK_KHR_multiview 1
+#define VK_KHR_MULTIVIEW_SPEC_VERSION     1
+#define VK_KHR_MULTIVIEW_EXTENSION_NAME   "VK_KHR_multiview"
+typedef VkRenderPassMultiviewCreateInfo VkRenderPassMultiviewCreateInfoKHR;
+
+typedef VkPhysicalDeviceMultiviewFeatures VkPhysicalDeviceMultiviewFeaturesKHR;
+
+typedef VkPhysicalDeviceMultiviewProperties VkPhysicalDeviceMultiviewPropertiesKHR;
+
+
+
+#define VK_KHR_get_physical_device_properties2 1
+#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION 2
+#define VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_physical_device_properties2"
+typedef VkPhysicalDeviceFeatures2 VkPhysicalDeviceFeatures2KHR;
+
+typedef VkPhysicalDeviceProperties2 VkPhysicalDeviceProperties2KHR;
+
+typedef VkFormatProperties2 VkFormatProperties2KHR;
+
+typedef VkImageFormatProperties2 VkImageFormatProperties2KHR;
+
+typedef VkPhysicalDeviceImageFormatInfo2 VkPhysicalDeviceImageFormatInfo2KHR;
+
+typedef VkQueueFamilyProperties2 VkQueueFamilyProperties2KHR;
+
+typedef VkPhysicalDeviceMemoryProperties2 VkPhysicalDeviceMemoryProperties2KHR;
+
+typedef VkSparseImageFormatProperties2 VkSparseImageFormatProperties2KHR;
+
+typedef VkPhysicalDeviceSparseImageFormatInfo2 VkPhysicalDeviceSparseImageFormatInfo2KHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFeatures2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceFormatProperties2KHR)(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMemoryProperties2KHR)(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFeatures2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceFeatures2*                  pFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceProperties2*                pProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkFormatProperties2*                        pFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceImageFormatInfo2*     pImageFormatInfo,
+    VkImageFormatProperties2*                   pImageFormatProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pQueueFamilyPropertyCount,
+    VkQueueFamilyProperties2*                   pQueueFamilyProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMemoryProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkPhysicalDeviceMemoryProperties2*          pMemoryProperties);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceSparseImageFormatProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo,
+    uint32_t*                                   pPropertyCount,
+    VkSparseImageFormatProperties2*             pProperties);
+#endif
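+/*
+ * Usage sketch (illustrative only, not part of the Vulkan headers): chaining an
+ * extension feature struct through pNext and querying it with
+ * vkGetPhysicalDeviceFeatures2KHR. VkPhysicalDeviceMultiviewFeatures is the
+ * core struct aliased by VK_KHR_multiview above.
+ *
+ *     VkPhysicalDeviceMultiviewFeatures multiviewFeatures = {
+ *         .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
+ *     };
+ *     VkPhysicalDeviceFeatures2 features2 = {
+ *         .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
+ *         .pNext = &multiviewFeatures,
+ *     };
+ *     vkGetPhysicalDeviceFeatures2KHR(physicalDevice, &features2);
+ *     if (multiviewFeatures.multiview) {
+ *         // multiview rendering is supported on this physical device
+ *     }
+ */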
+
+
+#define VK_KHR_device_group 1
+#define VK_KHR_DEVICE_GROUP_SPEC_VERSION  4
+#define VK_KHR_DEVICE_GROUP_EXTENSION_NAME "VK_KHR_device_group"
+typedef VkPeerMemoryFeatureFlags VkPeerMemoryFeatureFlagsKHR;
+
+typedef VkPeerMemoryFeatureFlagBits VkPeerMemoryFeatureFlagBitsKHR;
+
+typedef VkMemoryAllocateFlags VkMemoryAllocateFlagsKHR;
+
+typedef VkMemoryAllocateFlagBits VkMemoryAllocateFlagBitsKHR;
+
+typedef VkMemoryAllocateFlagsInfo VkMemoryAllocateFlagsInfoKHR;
+
+typedef VkDeviceGroupRenderPassBeginInfo VkDeviceGroupRenderPassBeginInfoKHR;
+
+typedef VkDeviceGroupCommandBufferBeginInfo VkDeviceGroupCommandBufferBeginInfoKHR;
+
+typedef VkDeviceGroupSubmitInfo VkDeviceGroupSubmitInfoKHR;
+
+typedef VkDeviceGroupBindSparseInfo VkDeviceGroupBindSparseInfoKHR;
+
+typedef VkBindBufferMemoryDeviceGroupInfo VkBindBufferMemoryDeviceGroupInfoKHR;
+
+typedef VkBindImageMemoryDeviceGroupInfo VkBindImageMemoryDeviceGroupInfoKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR)(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDeviceMaskKHR)(VkCommandBuffer commandBuffer, uint32_t deviceMask);
+typedef void (VKAPI_PTR *PFN_vkCmdDispatchBaseKHR)(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceGroupPeerMemoryFeaturesKHR(
+    VkDevice                                    device,
+    uint32_t                                    heapIndex,
+    uint32_t                                    localDeviceIndex,
+    uint32_t                                    remoteDeviceIndex,
+    VkPeerMemoryFeatureFlags*                   pPeerMemoryFeatures);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDeviceMaskKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    deviceMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDispatchBaseKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    baseGroupX,
+    uint32_t                                    baseGroupY,
+    uint32_t                                    baseGroupZ,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+#endif
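+/*
+ * Usage sketch (illustrative only, not part of the Vulkan headers): querying
+ * peer memory features between two devices in a device group and dispatching
+ * on a subset of the group. Assumes a logical device created from a two-device
+ * group and a command buffer in the recording state.
+ *
+ *     // heapIndex = 0, localDeviceIndex = 0, remoteDeviceIndex = 1
+ *     VkPeerMemoryFeatureFlags peerFeatures = 0;
+ *     vkGetDeviceGroupPeerMemoryFeaturesKHR(device, 0, 0, 1, &peerFeatures);
+ *
+ *     // Restrict subsequent commands to device 0 of the group, then dispatch
+ *     // a 16x16x1 grid of workgroups starting at base workgroup (0, 0, 0).
+ *     vkCmdSetDeviceMaskKHR(commandBuffer, 0x1);
+ *     vkCmdDispatchBaseKHR(commandBuffer, 0, 0, 0, 16, 16, 1);
+ */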
+
+
+#define VK_KHR_shader_draw_parameters 1
+#define VK_KHR_SHADER_DRAW_PARAMETERS_SPEC_VERSION 1
+#define VK_KHR_SHADER_DRAW_PARAMETERS_EXTENSION_NAME "VK_KHR_shader_draw_parameters"
+
+
+#define VK_KHR_maintenance1 1
+#define VK_KHR_MAINTENANCE_1_SPEC_VERSION 2
+#define VK_KHR_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_maintenance1"
+#define VK_KHR_MAINTENANCE1_SPEC_VERSION  VK_KHR_MAINTENANCE_1_SPEC_VERSION
+#define VK_KHR_MAINTENANCE1_EXTENSION_NAME VK_KHR_MAINTENANCE_1_EXTENSION_NAME
+typedef VkCommandPoolTrimFlags VkCommandPoolTrimFlagsKHR;
+
+typedef void (VKAPI_PTR *PFN_vkTrimCommandPoolKHR)(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkTrimCommandPoolKHR(
+    VkDevice                                    device,
+    VkCommandPool                               commandPool,
+    VkCommandPoolTrimFlags                      flags);
+#endif
+
+
+#define VK_KHR_device_group_creation 1
+#define VK_KHR_DEVICE_GROUP_CREATION_SPEC_VERSION 1
+#define VK_KHR_DEVICE_GROUP_CREATION_EXTENSION_NAME "VK_KHR_device_group_creation"
+#define VK_MAX_DEVICE_GROUP_SIZE_KHR      VK_MAX_DEVICE_GROUP_SIZE
+typedef VkPhysicalDeviceGroupProperties VkPhysicalDeviceGroupPropertiesKHR;
+
+typedef VkDeviceGroupDeviceCreateInfo VkDeviceGroupDeviceCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceGroupsKHR)(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceGroupsKHR(
+    VkInstance                                  instance,
+    uint32_t*                                   pPhysicalDeviceGroupCount,
+    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties);
+#endif
+
+
+#define VK_KHR_external_memory_capabilities 1
+#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_memory_capabilities"
+#define VK_LUID_SIZE_KHR                  VK_LUID_SIZE
+typedef VkExternalMemoryHandleTypeFlags VkExternalMemoryHandleTypeFlagsKHR;
+
+typedef VkExternalMemoryHandleTypeFlagBits VkExternalMemoryHandleTypeFlagBitsKHR;
+
+typedef VkExternalMemoryFeatureFlags VkExternalMemoryFeatureFlagsKHR;
+
+typedef VkExternalMemoryFeatureFlagBits VkExternalMemoryFeatureFlagBitsKHR;
+
+typedef VkExternalMemoryProperties VkExternalMemoryPropertiesKHR;
+
+typedef VkPhysicalDeviceExternalImageFormatInfo VkPhysicalDeviceExternalImageFormatInfoKHR;
+
+typedef VkExternalImageFormatProperties VkExternalImageFormatPropertiesKHR;
+
+typedef VkPhysicalDeviceExternalBufferInfo VkPhysicalDeviceExternalBufferInfoKHR;
+
+typedef VkExternalBufferProperties VkExternalBufferPropertiesKHR;
+
+typedef VkPhysicalDeviceIDProperties VkPhysicalDeviceIDPropertiesKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalBufferPropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalBufferInfo*   pExternalBufferInfo,
+    VkExternalBufferProperties*                 pExternalBufferProperties);
+#endif
+
+
+#define VK_KHR_external_memory 1
+#define VK_KHR_EXTERNAL_MEMORY_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_EXTENSION_NAME "VK_KHR_external_memory"
+#define VK_QUEUE_FAMILY_EXTERNAL_KHR      VK_QUEUE_FAMILY_EXTERNAL
+typedef VkExternalMemoryImageCreateInfo VkExternalMemoryImageCreateInfoKHR;
+
+typedef VkExternalMemoryBufferCreateInfo VkExternalMemoryBufferCreateInfoKHR;
+
+typedef VkExportMemoryAllocateInfo VkExportMemoryAllocateInfoKHR;
+
+
+
+#define VK_KHR_external_memory_fd 1
+#define VK_KHR_EXTERNAL_MEMORY_FD_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_MEMORY_FD_EXTENSION_NAME "VK_KHR_external_memory_fd"
+typedef struct VkImportMemoryFdInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+    int                                   fd;
+} VkImportMemoryFdInfoKHR;
+
+typedef struct VkMemoryFdPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           memoryTypeBits;
+} VkMemoryFdPropertiesKHR;
+
+typedef struct VkMemoryGetFdInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkDeviceMemory                        memory;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkMemoryGetFdInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdKHR)(VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryFdPropertiesKHR)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdKHR(
+    VkDevice                                    device,
+    const VkMemoryGetFdInfoKHR*                 pGetFdInfo,
+    int*                                        pFd);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryFdPropertiesKHR(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    int                                         fd,
+    VkMemoryFdPropertiesKHR*                    pMemoryFdProperties);
+#endif
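+/*
+ * Usage sketch (illustrative only, not part of the Vulkan headers): exporting a
+ * POSIX file descriptor for a VkDeviceMemory allocation. The memory is assumed
+ * to have been allocated with VkExportMemoryAllocateInfo (VK_KHR_external_memory
+ * above) requesting the opaque-fd handle type; the caller owns the returned fd.
+ *
+ *     VkMemoryGetFdInfoKHR getFdInfo = {
+ *         .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
+ *         .memory = memory,
+ *         .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
+ *     };
+ *     int fd = -1;
+ *     vkGetMemoryFdKHR(device, &getFdInfo, &fd);
+ */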
+
+
+#define VK_KHR_external_semaphore_capabilities 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_semaphore_capabilities"
+typedef VkExternalSemaphoreHandleTypeFlags VkExternalSemaphoreHandleTypeFlagsKHR;
+
+typedef VkExternalSemaphoreHandleTypeFlagBits VkExternalSemaphoreHandleTypeFlagBitsKHR;
+
+typedef VkExternalSemaphoreFeatureFlags VkExternalSemaphoreFeatureFlagsKHR;
+
+typedef VkExternalSemaphoreFeatureFlagBits VkExternalSemaphoreFeatureFlagBitsKHR;
+
+typedef VkPhysicalDeviceExternalSemaphoreInfo VkPhysicalDeviceExternalSemaphoreInfoKHR;
+
+typedef VkExternalSemaphoreProperties VkExternalSemaphorePropertiesKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo,
+    VkExternalSemaphoreProperties*              pExternalSemaphoreProperties);
+#endif
+
+
+#define VK_KHR_external_semaphore 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_KHR_external_semaphore"
+typedef VkSemaphoreImportFlags VkSemaphoreImportFlagsKHR;
+
+typedef VkSemaphoreImportFlagBits VkSemaphoreImportFlagBitsKHR;
+
+typedef VkExportSemaphoreCreateInfo VkExportSemaphoreCreateInfoKHR;
+
+
+
+#define VK_KHR_external_semaphore_fd 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_FD_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_SEMAPHORE_FD_EXTENSION_NAME "VK_KHR_external_semaphore_fd"
+typedef struct VkImportSemaphoreFdInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkSemaphoreImportFlags                   flags;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+    int                                      fd;
+} VkImportSemaphoreFdInfoKHR;
+
+typedef struct VkSemaphoreGetFdInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+} VkSemaphoreGetFdInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreFdKHR)(VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreFdKHR)(VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkImportSemaphoreFdInfoKHR*           pImportSemaphoreFdInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreFdKHR(
+    VkDevice                                    device,
+    const VkSemaphoreGetFdInfoKHR*              pGetFdInfo,
+    int*                                        pFd);
+#endif
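
A sketch of moving a semaphore payload between processes through an opaque fd, assuming the exporting semaphore was created with VkExportSemaphoreCreateInfo; both helper names are illustrative:

    #include <vulkan/vulkan.h>

    /* Producer side: extract an fd referencing the semaphore payload. */
    VkResult export_semaphore_fd(VkDevice device, VkSemaphore semaphore, int *outFd)
    {
        VkSemaphoreGetFdInfoKHR info = {
            .sType      = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
            .semaphore  = semaphore,
            .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
        };
        return vkGetSemaphoreFdKHR(device, &info, outFd);
    }

    /* Consumer side: ownership of fd transfers to the implementation on success. */
    VkResult import_semaphore_fd(VkDevice device, VkSemaphore semaphore, int fd)
    {
        VkImportSemaphoreFdInfoKHR info = {
            .sType      = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
            .semaphore  = semaphore,
            .flags      = 0, /* permanent import */
            .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
            .fd         = fd,
        };
        return vkImportSemaphoreFdKHR(device, &info);
    }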
+
+
+#define VK_KHR_push_descriptor 1
+#define VK_KHR_PUSH_DESCRIPTOR_SPEC_VERSION 2
+#define VK_KHR_PUSH_DESCRIPTOR_EXTENSION_NAME "VK_KHR_push_descriptor"
+typedef struct VkPhysicalDevicePushDescriptorPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxPushDescriptors;
+} VkPhysicalDevicePushDescriptorPropertiesKHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetKHR)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites);
+typedef void (VKAPI_PTR *PFN_vkCmdPushDescriptorSetWithTemplateKHR)(VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    uint32_t                                    descriptorWriteCount,
+    const VkWriteDescriptorSet*                 pDescriptorWrites);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPushDescriptorSetWithTemplateKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set,
+    const void*                                 pData);
+#endif
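
A sketch of pushing a single uniform-buffer binding at record time, assuming set 0 of the pipeline layout was built from a descriptor set layout created with VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR; push_ubo is an illustrative name:

    #include <vulkan/vulkan.h>

    void push_ubo(VkCommandBuffer cmd, VkPipelineLayout layout,
                  VkBuffer ubo, VkDeviceSize size)
    {
        VkDescriptorBufferInfo buffer = { .buffer = ubo, .offset = 0, .range = size };
        VkWriteDescriptorSet write = {
            .sType           = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
            .dstBinding      = 0,
            .descriptorCount = 1,
            .descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
            .pBufferInfo     = &buffer,
            /* dstSet is ignored for push descriptors */
        };
        vkCmdPushDescriptorSetKHR(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, layout,
                                  0 /* set */, 1, &write);
    }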
+
+
+#define VK_KHR_shader_float16_int8 1
+#define VK_KHR_SHADER_FLOAT16_INT8_SPEC_VERSION 1
+#define VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME "VK_KHR_shader_float16_int8"
+typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceShaderFloat16Int8FeaturesKHR;
+
+typedef VkPhysicalDeviceShaderFloat16Int8Features VkPhysicalDeviceFloat16Int8FeaturesKHR;
+
+
+
+#define VK_KHR_16bit_storage 1
+#define VK_KHR_16BIT_STORAGE_SPEC_VERSION 1
+#define VK_KHR_16BIT_STORAGE_EXTENSION_NAME "VK_KHR_16bit_storage"
+typedef VkPhysicalDevice16BitStorageFeatures VkPhysicalDevice16BitStorageFeaturesKHR;
+
+
+
+#define VK_KHR_incremental_present 1
+#define VK_KHR_INCREMENTAL_PRESENT_SPEC_VERSION 2
+#define VK_KHR_INCREMENTAL_PRESENT_EXTENSION_NAME "VK_KHR_incremental_present"
+typedef struct VkRectLayerKHR {
+    VkOffset2D    offset;
+    VkExtent2D    extent;
+    uint32_t      layer;
+} VkRectLayerKHR;
+
+typedef struct VkPresentRegionKHR {
+    uint32_t                 rectangleCount;
+    const VkRectLayerKHR*    pRectangles;
+} VkPresentRegionKHR;
+
+typedef struct VkPresentRegionsKHR {
+    VkStructureType              sType;
+    const void*                  pNext;
+    uint32_t                     swapchainCount;
+    const VkPresentRegionKHR*    pRegions;
+} VkPresentRegionsKHR;
+
+
+
+#define VK_KHR_descriptor_update_template 1
+typedef VkDescriptorUpdateTemplate VkDescriptorUpdateTemplateKHR;
+
+#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_SPEC_VERSION 1
+#define VK_KHR_DESCRIPTOR_UPDATE_TEMPLATE_EXTENSION_NAME "VK_KHR_descriptor_update_template"
+typedef VkDescriptorUpdateTemplateType VkDescriptorUpdateTemplateTypeKHR;
+
+typedef VkDescriptorUpdateTemplateCreateFlags VkDescriptorUpdateTemplateCreateFlagsKHR;
+
+typedef VkDescriptorUpdateTemplateEntry VkDescriptorUpdateTemplateEntryKHR;
+
+typedef VkDescriptorUpdateTemplateCreateInfo VkDescriptorUpdateTemplateCreateInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDescriptorUpdateTemplateKHR)(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate);
+typedef void (VKAPI_PTR *PFN_vkDestroyDescriptorUpdateTemplateKHR)(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkUpdateDescriptorSetWithTemplateKHR)(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDescriptorUpdateTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkUpdateDescriptorSetWithTemplateKHR(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
+    const void*                                 pData);
+#endif
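
A sketch of creating an update template that maps a host-side struct onto binding 0 of a set layout and then refreshing a descriptor set with one call; the FrameData type is an assumption and error checking is omitted:

    #include <stddef.h>
    #include <vulkan/vulkan.h>

    typedef struct FrameData { VkDescriptorBufferInfo ubo; } FrameData;

    VkDescriptorUpdateTemplate make_template(VkDevice device, VkDescriptorSetLayout setLayout)
    {
        VkDescriptorUpdateTemplateEntry entry = {
            .dstBinding      = 0,
            .descriptorCount = 1,
            .descriptorType  = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
            .offset          = offsetof(FrameData, ubo),
            .stride          = sizeof(VkDescriptorBufferInfo),
        };
        VkDescriptorUpdateTemplateCreateInfo info = {
            .sType                      = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
            .descriptorUpdateEntryCount = 1,
            .pDescriptorUpdateEntries   = &entry,
            .templateType               = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
            .descriptorSetLayout        = setLayout,
        };
        VkDescriptorUpdateTemplate tpl = VK_NULL_HANDLE;
        vkCreateDescriptorUpdateTemplateKHR(device, &info, NULL, &tpl);
        return tpl;
    }

    /* Per frame: vkUpdateDescriptorSetWithTemplateKHR(device, set, tpl, &frameData); */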
+
+
+#define VK_KHR_imageless_framebuffer 1
+#define VK_KHR_IMAGELESS_FRAMEBUFFER_SPEC_VERSION 1
+#define VK_KHR_IMAGELESS_FRAMEBUFFER_EXTENSION_NAME "VK_KHR_imageless_framebuffer"
+typedef VkPhysicalDeviceImagelessFramebufferFeatures VkPhysicalDeviceImagelessFramebufferFeaturesKHR;
+
+typedef VkFramebufferAttachmentsCreateInfo VkFramebufferAttachmentsCreateInfoKHR;
+
+typedef VkFramebufferAttachmentImageInfo VkFramebufferAttachmentImageInfoKHR;
+
+typedef VkRenderPassAttachmentBeginInfo VkRenderPassAttachmentBeginInfoKHR;
+
+
+
+#define VK_KHR_create_renderpass2 1
+#define VK_KHR_CREATE_RENDERPASS_2_SPEC_VERSION 1
+#define VK_KHR_CREATE_RENDERPASS_2_EXTENSION_NAME "VK_KHR_create_renderpass2"
+typedef VkRenderPassCreateInfo2 VkRenderPassCreateInfo2KHR;
+
+typedef VkAttachmentDescription2 VkAttachmentDescription2KHR;
+
+typedef VkAttachmentReference2 VkAttachmentReference2KHR;
+
+typedef VkSubpassDescription2 VkSubpassDescription2KHR;
+
+typedef VkSubpassDependency2 VkSubpassDependency2KHR;
+
+typedef VkSubpassBeginInfo VkSubpassBeginInfoKHR;
+
+typedef VkSubpassEndInfo VkSubpassEndInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRenderPass2KHR)(VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdNextSubpass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndRenderPass2KHR)(VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRenderPass2KHR(
+    VkDevice                                    device,
+    const VkRenderPassCreateInfo2*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkRenderPass*                               pRenderPass);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkRenderPassBeginInfo*                pRenderPassBegin,
+    const VkSubpassBeginInfo*                   pSubpassBeginInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdNextSubpass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassBeginInfo*                   pSubpassBeginInfo,
+    const VkSubpassEndInfo*                     pSubpassEndInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndRenderPass2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkSubpassEndInfo*                     pSubpassEndInfo);
+#endif
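
A sketch of recording with the *2 render-pass commands, which take the extensible VkSubpassBeginInfo/VkSubpassEndInfo structs in place of bare parameters; render-pass creation and error handling are omitted:

    #include <vulkan/vulkan.h>

    void record_pass(VkCommandBuffer cmd, const VkRenderPassBeginInfo *beginInfo)
    {
        VkSubpassBeginInfo subpassBegin = {
            .sType    = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
            .contents = VK_SUBPASS_CONTENTS_INLINE,
        };
        VkSubpassEndInfo subpassEnd = { .sType = VK_STRUCTURE_TYPE_SUBPASS_END_INFO };

        vkCmdBeginRenderPass2KHR(cmd, beginInfo, &subpassBegin);
        /* ... draws for subpass 0; vkCmdNextSubpass2KHR for further subpasses ... */
        vkCmdEndRenderPass2KHR(cmd, &subpassEnd);
    }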
+
+
+#define VK_KHR_shared_presentable_image 1
+#define VK_KHR_SHARED_PRESENTABLE_IMAGE_SPEC_VERSION 1
+#define VK_KHR_SHARED_PRESENTABLE_IMAGE_EXTENSION_NAME "VK_KHR_shared_presentable_image"
+typedef struct VkSharedPresentSurfaceCapabilitiesKHR {
+    VkStructureType      sType;
+    void*                pNext;
+    VkImageUsageFlags    sharedPresentSupportedUsageFlags;
+} VkSharedPresentSurfaceCapabilitiesKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainStatusKHR)(VkDevice device, VkSwapchainKHR swapchain);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainStatusKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain);
+#endif
+
+
+#define VK_KHR_external_fence_capabilities 1
+#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_CAPABILITIES_EXTENSION_NAME "VK_KHR_external_fence_capabilities"
+typedef VkExternalFenceHandleTypeFlags VkExternalFenceHandleTypeFlagsKHR;
+
+typedef VkExternalFenceHandleTypeFlagBits VkExternalFenceHandleTypeFlagBitsKHR;
+
+typedef VkExternalFenceFeatureFlags VkExternalFenceFeatureFlagsKHR;
+
+typedef VkExternalFenceFeatureFlagBits VkExternalFenceFeatureFlagBitsKHR;
+
+typedef VkPhysicalDeviceExternalFenceInfo VkPhysicalDeviceExternalFenceInfoKHR;
+
+typedef VkExternalFenceProperties VkExternalFencePropertiesKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceExternalFencePropertiesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceExternalFenceInfo*    pExternalFenceInfo,
+    VkExternalFenceProperties*                  pExternalFenceProperties);
+#endif
+
+
+#define VK_KHR_external_fence 1
+#define VK_KHR_EXTERNAL_FENCE_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_EXTENSION_NAME "VK_KHR_external_fence"
+typedef VkFenceImportFlags VkFenceImportFlagsKHR;
+
+typedef VkFenceImportFlagBits VkFenceImportFlagBitsKHR;
+
+typedef VkExportFenceCreateInfo VkExportFenceCreateInfoKHR;
+
+
+
+#define VK_KHR_external_fence_fd 1
+#define VK_KHR_EXTERNAL_FENCE_FD_SPEC_VERSION 1
+#define VK_KHR_EXTERNAL_FENCE_FD_EXTENSION_NAME "VK_KHR_external_fence_fd"
+typedef struct VkImportFenceFdInfoKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkFence                              fence;
+    VkFenceImportFlags                   flags;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+    int                                  fd;
+} VkImportFenceFdInfoKHR;
+
+typedef struct VkFenceGetFdInfoKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkFence                              fence;
+    VkExternalFenceHandleTypeFlagBits    handleType;
+} VkFenceGetFdInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportFenceFdKHR)(VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetFenceFdKHR)(VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportFenceFdKHR(
+    VkDevice                                    device,
+    const VkImportFenceFdInfoKHR*               pImportFenceFdInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceFdKHR(
+    VkDevice                                    device,
+    const VkFenceGetFdInfoKHR*                  pGetFdInfo,
+    int*                                        pFd);
+#endif
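
A sketch of exporting a fence payload as an opaque fd, assuming the fence was created with VkExportFenceCreateInfo; export_fence_fd is an illustrative name:

    #include <vulkan/vulkan.h>

    VkResult export_fence_fd(VkDevice device, VkFence fence, int *outFd)
    {
        VkFenceGetFdInfoKHR info = {
            .sType      = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
            .fence      = fence,
            .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
        };
        return vkGetFenceFdKHR(device, &info, outFd);
    }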
+
+
+#define VK_KHR_performance_query 1
+#define VK_KHR_PERFORMANCE_QUERY_SPEC_VERSION 1
+#define VK_KHR_PERFORMANCE_QUERY_EXTENSION_NAME "VK_KHR_performance_query"
+
+typedef enum VkPerformanceCounterUnitKHR {
+    VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR = 0,
+    VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR = 1,
+    VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR = 2,
+    VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR = 3,
+    VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR = 4,
+    VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR = 5,
+    VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR = 6,
+    VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR = 7,
+    VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR = 8,
+    VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR = 9,
+    VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR = 10,
+    VK_PERFORMANCE_COUNTER_UNIT_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterUnitKHR;
+
+typedef enum VkPerformanceCounterScopeKHR {
+    VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR = 0,
+    VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR = 1,
+    VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR = 2,
+    VK_QUERY_SCOPE_COMMAND_BUFFER_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
+    VK_QUERY_SCOPE_RENDER_PASS_KHR = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR,
+    VK_QUERY_SCOPE_COMMAND_KHR = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
+    VK_PERFORMANCE_COUNTER_SCOPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterScopeKHR;
+
+typedef enum VkPerformanceCounterStorageKHR {
+    VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR = 0,
+    VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR = 1,
+    VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR = 2,
+    VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR = 3,
+    VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR = 4,
+    VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR = 5,
+    VK_PERFORMANCE_COUNTER_STORAGE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterStorageKHR;
+
+typedef enum VkPerformanceCounterDescriptionFlagBitsKHR {
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR = 0x00000001,
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR = 0x00000002,
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR,
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_KHR = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR,
+    VK_PERFORMANCE_COUNTER_DESCRIPTION_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPerformanceCounterDescriptionFlagBitsKHR;
+typedef VkFlags VkPerformanceCounterDescriptionFlagsKHR;
+
+typedef enum VkAcquireProfilingLockFlagBitsKHR {
+    VK_ACQUIRE_PROFILING_LOCK_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAcquireProfilingLockFlagBitsKHR;
+typedef VkFlags VkAcquireProfilingLockFlagsKHR;
+typedef struct VkPhysicalDevicePerformanceQueryFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           performanceCounterQueryPools;
+    VkBool32           performanceCounterMultipleQueryPools;
+} VkPhysicalDevicePerformanceQueryFeaturesKHR;
+
+typedef struct VkPhysicalDevicePerformanceQueryPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           allowCommandBufferQueryCopies;
+} VkPhysicalDevicePerformanceQueryPropertiesKHR;
+
+typedef struct VkPerformanceCounterKHR {
+    VkStructureType                   sType;
+    void*                             pNext;
+    VkPerformanceCounterUnitKHR       unit;
+    VkPerformanceCounterScopeKHR      scope;
+    VkPerformanceCounterStorageKHR    storage;
+    uint8_t                           uuid[VK_UUID_SIZE];
+} VkPerformanceCounterKHR;
+
+typedef struct VkPerformanceCounterDescriptionKHR {
+    VkStructureType                            sType;
+    void*                                      pNext;
+    VkPerformanceCounterDescriptionFlagsKHR    flags;
+    char                                       name[VK_MAX_DESCRIPTION_SIZE];
+    char                                       category[VK_MAX_DESCRIPTION_SIZE];
+    char                                       description[VK_MAX_DESCRIPTION_SIZE];
+} VkPerformanceCounterDescriptionKHR;
+
+typedef struct VkQueryPoolPerformanceCreateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           queueFamilyIndex;
+    uint32_t           counterIndexCount;
+    const uint32_t*    pCounterIndices;
+} VkQueryPoolPerformanceCreateInfoKHR;
+
+typedef union VkPerformanceCounterResultKHR {
+    int32_t     int32;
+    int64_t     int64;
+    uint32_t    uint32;
+    uint64_t    uint64;
+    float       float32;
+    double      float64;
+} VkPerformanceCounterResultKHR;
+
+typedef struct VkAcquireProfilingLockInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkAcquireProfilingLockFlagsKHR    flags;
+    uint64_t                          timeout;
+} VkAcquireProfilingLockInfoKHR;
+
+typedef struct VkPerformanceQuerySubmitInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           counterPassIndex;
+} VkPerformanceQuerySubmitInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR)(VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireProfilingLockKHR)(VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkReleaseProfilingLockKHR)(VkDevice device);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    uint32_t*                                   pCounterCount,
+    VkPerformanceCounterKHR*                    pCounters,
+    VkPerformanceCounterDescriptionKHR*         pCounterDescriptions);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkQueryPoolPerformanceCreateInfoKHR*  pPerformanceQueryCreateInfo,
+    uint32_t*                                   pNumPasses);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireProfilingLockKHR(
+    VkDevice                                    device,
+    const VkAcquireProfilingLockInfoKHR*        pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkReleaseProfilingLockKHR(
+    VkDevice                                    device);
+#endif
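
A sketch of enumerating the performance counters of one queue family and taking the profiling lock; query-pool creation, the per-pass submit loop driven by VkPerformanceQuerySubmitInfoKHR::counterPassIndex, and allocation checks are omitted:

    #include <stdint.h>
    #include <stdlib.h>
    #include <vulkan/vulkan.h>

    void list_counters(VkPhysicalDevice phys, VkDevice device, uint32_t queueFamily)
    {
        uint32_t count = 0;
        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
            phys, queueFamily, &count, NULL, NULL);

        VkPerformanceCounterKHR *counters = calloc(count, sizeof(*counters));
        VkPerformanceCounterDescriptionKHR *descs = calloc(count, sizeof(*descs));
        for (uint32_t i = 0; i < count; ++i) {
            counters[i].sType = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR;
            descs[i].sType    = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR;
        }
        vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(
            phys, queueFamily, &count, counters, descs);

        VkAcquireProfilingLockInfoKHR lockInfo = {
            .sType   = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR,
            .timeout = UINT64_MAX,
        };
        vkAcquireProfilingLockKHR(device, &lockInfo);
        /* ... create a performance query pool, then record and submit one pass per
           vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR result ... */
        vkReleaseProfilingLockKHR(device);

        free(counters);
        free(descs);
    }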
+
+
+#define VK_KHR_maintenance2 1
+#define VK_KHR_MAINTENANCE_2_SPEC_VERSION 1
+#define VK_KHR_MAINTENANCE_2_EXTENSION_NAME "VK_KHR_maintenance2"
+#define VK_KHR_MAINTENANCE2_SPEC_VERSION  VK_KHR_MAINTENANCE_2_SPEC_VERSION
+#define VK_KHR_MAINTENANCE2_EXTENSION_NAME VK_KHR_MAINTENANCE_2_EXTENSION_NAME
+typedef VkPointClippingBehavior VkPointClippingBehaviorKHR;
+
+typedef VkTessellationDomainOrigin VkTessellationDomainOriginKHR;
+
+typedef VkPhysicalDevicePointClippingProperties VkPhysicalDevicePointClippingPropertiesKHR;
+
+typedef VkRenderPassInputAttachmentAspectCreateInfo VkRenderPassInputAttachmentAspectCreateInfoKHR;
+
+typedef VkInputAttachmentAspectReference VkInputAttachmentAspectReferenceKHR;
+
+typedef VkImageViewUsageCreateInfo VkImageViewUsageCreateInfoKHR;
+
+typedef VkPipelineTessellationDomainOriginStateCreateInfo VkPipelineTessellationDomainOriginStateCreateInfoKHR;
+
+
+
+#define VK_KHR_get_surface_capabilities2 1
+#define VK_KHR_GET_SURFACE_CAPABILITIES_2_SPEC_VERSION 1
+#define VK_KHR_GET_SURFACE_CAPABILITIES_2_EXTENSION_NAME "VK_KHR_get_surface_capabilities2"
+typedef struct VkPhysicalDeviceSurfaceInfo2KHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSurfaceKHR       surface;
+} VkPhysicalDeviceSurfaceInfo2KHR;
+
+typedef struct VkSurfaceCapabilities2KHR {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkSurfaceCapabilitiesKHR    surfaceCapabilities;
+} VkSurfaceCapabilities2KHR;
+
+typedef struct VkSurfaceFormat2KHR {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkSurfaceFormatKHR    surfaceFormat;
+} VkSurfaceFormat2KHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceFormats2KHR)(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    VkSurfaceCapabilities2KHR*                  pSurfaceCapabilities);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceFormats2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkPhysicalDeviceSurfaceInfo2KHR*      pSurfaceInfo,
+    uint32_t*                                   pSurfaceFormatCount,
+    VkSurfaceFormat2KHR*                        pSurfaceFormats);
+#endif
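
A sketch of the extensible surface query; the surface is wrapped in VkPhysicalDeviceSurfaceInfo2KHR, and capability structures (for example VkSharedPresentSurfaceCapabilitiesKHR or VkSurfaceProtectedCapabilitiesKHR) can be chained on the output's pNext:

    #include <vulkan/vulkan.h>

    VkResult query_surface_caps2(VkPhysicalDevice phys, VkSurfaceKHR surface,
                                 VkSurfaceCapabilities2KHR *outCaps)
    {
        VkPhysicalDeviceSurfaceInfo2KHR info = {
            .sType   = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
            .surface = surface,
        };
        outCaps->sType = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR;
        outCaps->pNext = NULL; /* chain extension structs here */
        return vkGetPhysicalDeviceSurfaceCapabilities2KHR(phys, &info, outCaps);
    }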
+
+
+#define VK_KHR_variable_pointers 1
+#define VK_KHR_VARIABLE_POINTERS_SPEC_VERSION 1
+#define VK_KHR_VARIABLE_POINTERS_EXTENSION_NAME "VK_KHR_variable_pointers"
+typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointerFeaturesKHR;
+
+typedef VkPhysicalDeviceVariablePointersFeatures VkPhysicalDeviceVariablePointersFeaturesKHR;
+
+
+
+#define VK_KHR_get_display_properties2 1
+#define VK_KHR_GET_DISPLAY_PROPERTIES_2_SPEC_VERSION 1
+#define VK_KHR_GET_DISPLAY_PROPERTIES_2_EXTENSION_NAME "VK_KHR_get_display_properties2"
+typedef struct VkDisplayProperties2KHR {
+    VkStructureType           sType;
+    void*                     pNext;
+    VkDisplayPropertiesKHR    displayProperties;
+} VkDisplayProperties2KHR;
+
+typedef struct VkDisplayPlaneProperties2KHR {
+    VkStructureType                sType;
+    void*                          pNext;
+    VkDisplayPlanePropertiesKHR    displayPlaneProperties;
+} VkDisplayPlaneProperties2KHR;
+
+typedef struct VkDisplayModeProperties2KHR {
+    VkStructureType               sType;
+    void*                         pNext;
+    VkDisplayModePropertiesKHR    displayModeProperties;
+} VkDisplayModeProperties2KHR;
+
+typedef struct VkDisplayPlaneInfo2KHR {
+    VkStructureType     sType;
+    const void*         pNext;
+    VkDisplayModeKHR    mode;
+    uint32_t            planeIndex;
+} VkDisplayPlaneInfo2KHR;
+
+typedef struct VkDisplayPlaneCapabilities2KHR {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkDisplayPlaneCapabilitiesKHR    capabilities;
+} VkDisplayPlaneCapabilities2KHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayModeProperties2KHR)(VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDisplayPlaneCapabilities2KHR)(VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayProperties2KHR*                    pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceDisplayPlaneProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayPlaneProperties2KHR*               pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayModeProperties2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display,
+    uint32_t*                                   pPropertyCount,
+    VkDisplayModeProperties2KHR*                pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDisplayPlaneCapabilities2KHR(
+    VkPhysicalDevice                            physicalDevice,
+    const VkDisplayPlaneInfo2KHR*               pDisplayPlaneInfo,
+    VkDisplayPlaneCapabilities2KHR*             pCapabilities);
+#endif
+
+
+#define VK_KHR_dedicated_allocation 1
+#define VK_KHR_DEDICATED_ALLOCATION_SPEC_VERSION 3
+#define VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_KHR_dedicated_allocation"
+typedef VkMemoryDedicatedRequirements VkMemoryDedicatedRequirementsKHR;
+
+typedef VkMemoryDedicatedAllocateInfo VkMemoryDedicatedAllocateInfoKHR;
+
+
+
+#define VK_KHR_storage_buffer_storage_class 1
+#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_SPEC_VERSION 1
+#define VK_KHR_STORAGE_BUFFER_STORAGE_CLASS_EXTENSION_NAME "VK_KHR_storage_buffer_storage_class"
+
+
+#define VK_KHR_relaxed_block_layout 1
+#define VK_KHR_RELAXED_BLOCK_LAYOUT_SPEC_VERSION 1
+#define VK_KHR_RELAXED_BLOCK_LAYOUT_EXTENSION_NAME "VK_KHR_relaxed_block_layout"
+
+
+#define VK_KHR_get_memory_requirements2 1
+#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_SPEC_VERSION 1
+#define VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME "VK_KHR_get_memory_requirements2"
+typedef VkBufferMemoryRequirementsInfo2 VkBufferMemoryRequirementsInfo2KHR;
+
+typedef VkImageMemoryRequirementsInfo2 VkImageMemoryRequirementsInfo2KHR;
+
+typedef VkImageSparseMemoryRequirementsInfo2 VkImageSparseMemoryRequirementsInfo2KHR;
+
+typedef VkMemoryRequirements2 VkMemoryRequirements2KHR;
+
+typedef VkSparseImageMemoryRequirements2 VkSparseImageMemoryRequirements2KHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetImageMemoryRequirements2KHR)(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetBufferMemoryRequirements2KHR)(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetImageSparseMemoryRequirements2KHR)(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetImageMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageMemoryRequirementsInfo2*       pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetBufferMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkBufferMemoryRequirementsInfo2*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetImageSparseMemoryRequirements2KHR(
+    VkDevice                                    device,
+    const VkImageSparseMemoryRequirementsInfo2* pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+#endif
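
A sketch of the extensible buffer-requirements query, chaining VkMemoryDedicatedRequirements (aliased by VK_KHR_dedicated_allocation above) to learn whether the buffer prefers or requires its own allocation:

    #include <vulkan/vulkan.h>

    VkMemoryRequirements2 query_buffer_reqs(VkDevice device, VkBuffer buffer,
                                            VkMemoryDedicatedRequirements *dedicated)
    {
        VkBufferMemoryRequirementsInfo2 info = {
            .sType  = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
            .buffer = buffer,
        };
        dedicated->sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
        dedicated->pNext = NULL;

        VkMemoryRequirements2 reqs = {
            .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
            .pNext = dedicated,
        };
        vkGetBufferMemoryRequirements2KHR(device, &info, &reqs);
        return reqs;
    }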
+
+
+#define VK_KHR_image_format_list 1
+#define VK_KHR_IMAGE_FORMAT_LIST_SPEC_VERSION 1
+#define VK_KHR_IMAGE_FORMAT_LIST_EXTENSION_NAME "VK_KHR_image_format_list"
+typedef VkImageFormatListCreateInfo VkImageFormatListCreateInfoKHR;
+
+
+
+#define VK_KHR_sampler_ycbcr_conversion 1
+typedef VkSamplerYcbcrConversion VkSamplerYcbcrConversionKHR;
+
+#define VK_KHR_SAMPLER_YCBCR_CONVERSION_SPEC_VERSION 14
+#define VK_KHR_SAMPLER_YCBCR_CONVERSION_EXTENSION_NAME "VK_KHR_sampler_ycbcr_conversion"
+typedef VkSamplerYcbcrModelConversion VkSamplerYcbcrModelConversionKHR;
+
+typedef VkSamplerYcbcrRange VkSamplerYcbcrRangeKHR;
+
+typedef VkChromaLocation VkChromaLocationKHR;
+
+typedef VkSamplerYcbcrConversionCreateInfo VkSamplerYcbcrConversionCreateInfoKHR;
+
+typedef VkSamplerYcbcrConversionInfo VkSamplerYcbcrConversionInfoKHR;
+
+typedef VkBindImagePlaneMemoryInfo VkBindImagePlaneMemoryInfoKHR;
+
+typedef VkImagePlaneMemoryRequirementsInfo VkImagePlaneMemoryRequirementsInfoKHR;
+
+typedef VkPhysicalDeviceSamplerYcbcrConversionFeatures VkPhysicalDeviceSamplerYcbcrConversionFeaturesKHR;
+
+typedef VkSamplerYcbcrConversionImageFormatProperties VkSamplerYcbcrConversionImageFormatPropertiesKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateSamplerYcbcrConversionKHR)(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion);
+typedef void (VKAPI_PTR *PFN_vkDestroySamplerYcbcrConversionKHR)(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateSamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSamplerYcbcrConversion*                   pYcbcrConversion);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroySamplerYcbcrConversionKHR(
+    VkDevice                                    device,
+    VkSamplerYcbcrConversion                    ycbcrConversion,
+    const VkAllocationCallbacks*                pAllocator);
+#endif
+
+
+#define VK_KHR_bind_memory2 1
+#define VK_KHR_BIND_MEMORY_2_SPEC_VERSION 1
+#define VK_KHR_BIND_MEMORY_2_EXTENSION_NAME "VK_KHR_bind_memory2"
+typedef VkBindBufferMemoryInfo VkBindBufferMemoryInfoKHR;
+
+typedef VkBindImageMemoryInfo VkBindImageMemoryInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkBindBufferMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkBindImageMemory2KHR)(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkBindBufferMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindBufferMemoryInfo*               pBindInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindImageMemory2KHR(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindImageMemoryInfo*                pBindInfos);
+#endif
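
A sketch of batching two buffer bindings into a single call, which is the main point of the 2 variants; each VkBindBufferMemoryInfo can also carry extension structs on its pNext:

    #include <vulkan/vulkan.h>

    VkResult bind_two_buffers(VkDevice device,
                              VkBuffer a, VkDeviceMemory memA, VkDeviceSize offA,
                              VkBuffer b, VkDeviceMemory memB, VkDeviceSize offB)
    {
        VkBindBufferMemoryInfo binds[2] = {
            { .sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
              .buffer = a, .memory = memA, .memoryOffset = offA },
            { .sType = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
              .buffer = b, .memory = memB, .memoryOffset = offB },
        };
        return vkBindBufferMemory2KHR(device, 2, binds);
    }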
+
+
+#define VK_KHR_maintenance3 1
+#define VK_KHR_MAINTENANCE_3_SPEC_VERSION 1
+#define VK_KHR_MAINTENANCE_3_EXTENSION_NAME "VK_KHR_maintenance3"
+#define VK_KHR_MAINTENANCE3_SPEC_VERSION  VK_KHR_MAINTENANCE_3_SPEC_VERSION
+#define VK_KHR_MAINTENANCE3_EXTENSION_NAME VK_KHR_MAINTENANCE_3_EXTENSION_NAME
+typedef VkPhysicalDeviceMaintenance3Properties VkPhysicalDeviceMaintenance3PropertiesKHR;
+
+typedef VkDescriptorSetLayoutSupport VkDescriptorSetLayoutSupportKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSupportKHR)(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSupportKHR(
+    VkDevice                                    device,
+    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
+    VkDescriptorSetLayoutSupport*               pSupport);
+#endif
+
+
+#define VK_KHR_draw_indirect_count 1
+#define VK_KHR_DRAW_INDIRECT_COUNT_SPEC_VERSION 1
+#define VK_KHR_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_KHR_draw_indirect_count"
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountKHR)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountKHR(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+#endif
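
A sketch of a GPU-driven draw in which the draw count is read from a device buffer rather than supplied by the CPU; maxDrawCount caps how many VkDrawIndirectCommand entries may be consumed:

    #include <vulkan/vulkan.h>

    void draw_gpu_driven(VkCommandBuffer cmd, VkBuffer args, VkBuffer countBuffer,
                         uint32_t maxDrawCount)
    {
        vkCmdDrawIndirectCountKHR(cmd,
                                  args, 0,        /* array of VkDrawIndirectCommand */
                                  countBuffer, 0, /* holds a uint32_t draw count */
                                  maxDrawCount,
                                  sizeof(VkDrawIndirectCommand));
    }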
+
+
+#define VK_KHR_shader_subgroup_extended_types 1
+#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_SPEC_VERSION 1
+#define VK_KHR_SHADER_SUBGROUP_EXTENDED_TYPES_EXTENSION_NAME "VK_KHR_shader_subgroup_extended_types"
+typedef VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures VkPhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR;
+
+
+
+#define VK_KHR_8bit_storage 1
+#define VK_KHR_8BIT_STORAGE_SPEC_VERSION  1
+#define VK_KHR_8BIT_STORAGE_EXTENSION_NAME "VK_KHR_8bit_storage"
+typedef VkPhysicalDevice8BitStorageFeatures VkPhysicalDevice8BitStorageFeaturesKHR;
+
+
+
+#define VK_KHR_shader_atomic_int64 1
+#define VK_KHR_SHADER_ATOMIC_INT64_SPEC_VERSION 1
+#define VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME "VK_KHR_shader_atomic_int64"
+typedef VkPhysicalDeviceShaderAtomicInt64Features VkPhysicalDeviceShaderAtomicInt64FeaturesKHR;
+
+
+
+#define VK_KHR_shader_clock 1
+#define VK_KHR_SHADER_CLOCK_SPEC_VERSION  1
+#define VK_KHR_SHADER_CLOCK_EXTENSION_NAME "VK_KHR_shader_clock"
+typedef struct VkPhysicalDeviceShaderClockFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderSubgroupClock;
+    VkBool32           shaderDeviceClock;
+} VkPhysicalDeviceShaderClockFeaturesKHR;
+
+
+
+#define VK_KHR_video_decode_h265 1
+#include "vk_video/vulkan_video_codec_h265std.h"
+#include "vk_video/vulkan_video_codec_h265std_decode.h"
+#define VK_KHR_VIDEO_DECODE_H265_SPEC_VERSION 7
+#define VK_KHR_VIDEO_DECODE_H265_EXTENSION_NAME "VK_KHR_video_decode_h265"
+typedef struct VkVideoDecodeH265ProfileInfoKHR {
+    VkStructureType           sType;
+    const void*               pNext;
+    StdVideoH265ProfileIdc    stdProfileIdc;
+} VkVideoDecodeH265ProfileInfoKHR;
+
+typedef struct VkVideoDecodeH265CapabilitiesKHR {
+    VkStructureType         sType;
+    void*                   pNext;
+    StdVideoH265LevelIdc    maxLevelIdc;
+} VkVideoDecodeH265CapabilitiesKHR;
+
+typedef struct VkVideoDecodeH265SessionParametersAddInfoKHR {
+    VkStructureType                            sType;
+    const void*                                pNext;
+    uint32_t                                   stdVPSCount;
+    const StdVideoH265VideoParameterSet*       pStdVPSs;
+    uint32_t                                   stdSPSCount;
+    const StdVideoH265SequenceParameterSet*    pStdSPSs;
+    uint32_t                                   stdPPSCount;
+    const StdVideoH265PictureParameterSet*     pStdPPSs;
+} VkVideoDecodeH265SessionParametersAddInfoKHR;
+
+typedef struct VkVideoDecodeH265SessionParametersCreateInfoKHR {
+    VkStructureType                                        sType;
+    const void*                                            pNext;
+    uint32_t                                               maxStdVPSCount;
+    uint32_t                                               maxStdSPSCount;
+    uint32_t                                               maxStdPPSCount;
+    const VkVideoDecodeH265SessionParametersAddInfoKHR*    pParametersAddInfo;
+} VkVideoDecodeH265SessionParametersCreateInfoKHR;
+
+typedef struct VkVideoDecodeH265PictureInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    StdVideoDecodeH265PictureInfo*    pStdPictureInfo;
+    uint32_t                          sliceSegmentCount;
+    const uint32_t*                   pSliceSegmentOffsets;
+} VkVideoDecodeH265PictureInfoKHR;
+
+typedef struct VkVideoDecodeH265DpbSlotInfoKHR {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    const StdVideoDecodeH265ReferenceInfo*    pStdReferenceInfo;
+} VkVideoDecodeH265DpbSlotInfoKHR;
+
+
+
+#define VK_KHR_global_priority 1
+#define VK_MAX_GLOBAL_PRIORITY_SIZE_KHR   16U
+#define VK_KHR_GLOBAL_PRIORITY_SPEC_VERSION 1
+#define VK_KHR_GLOBAL_PRIORITY_EXTENSION_NAME "VK_KHR_global_priority"
+
+typedef enum VkQueueGlobalPriorityKHR {
+    VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR = 128,
+    VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR = 256,
+    VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR = 512,
+    VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR = 1024,
+    VK_QUEUE_GLOBAL_PRIORITY_LOW_EXT = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR,
+    VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_EXT = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR,
+    VK_QUEUE_GLOBAL_PRIORITY_HIGH_EXT = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR,
+    VK_QUEUE_GLOBAL_PRIORITY_REALTIME_EXT = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR,
+    VK_QUEUE_GLOBAL_PRIORITY_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkQueueGlobalPriorityKHR;
+typedef struct VkDeviceQueueGlobalPriorityCreateInfoKHR {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkQueueGlobalPriorityKHR    globalPriority;
+} VkDeviceQueueGlobalPriorityCreateInfoKHR;
+
+typedef struct VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           globalPriorityQuery;
+} VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+
+typedef struct VkQueueFamilyGlobalPriorityPropertiesKHR {
+    VkStructureType             sType;
+    void*                       pNext;
+    uint32_t                    priorityCount;
+    VkQueueGlobalPriorityKHR    priorities[VK_MAX_GLOBAL_PRIORITY_SIZE_KHR];
+} VkQueueFamilyGlobalPriorityPropertiesKHR;
+
+
+
+#define VK_KHR_driver_properties 1
+#define VK_KHR_DRIVER_PROPERTIES_SPEC_VERSION 1
+#define VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME "VK_KHR_driver_properties"
+#define VK_MAX_DRIVER_NAME_SIZE_KHR       VK_MAX_DRIVER_NAME_SIZE
+#define VK_MAX_DRIVER_INFO_SIZE_KHR       VK_MAX_DRIVER_INFO_SIZE
+typedef VkDriverId VkDriverIdKHR;
+
+typedef VkConformanceVersion VkConformanceVersionKHR;
+
+typedef VkPhysicalDeviceDriverProperties VkPhysicalDeviceDriverPropertiesKHR;
+
+
+
+#define VK_KHR_shader_float_controls 1
+#define VK_KHR_SHADER_FLOAT_CONTROLS_SPEC_VERSION 4
+#define VK_KHR_SHADER_FLOAT_CONTROLS_EXTENSION_NAME "VK_KHR_shader_float_controls"
+typedef VkShaderFloatControlsIndependence VkShaderFloatControlsIndependenceKHR;
+
+typedef VkPhysicalDeviceFloatControlsProperties VkPhysicalDeviceFloatControlsPropertiesKHR;
+
+
+
+#define VK_KHR_depth_stencil_resolve 1
+#define VK_KHR_DEPTH_STENCIL_RESOLVE_SPEC_VERSION 1
+#define VK_KHR_DEPTH_STENCIL_RESOLVE_EXTENSION_NAME "VK_KHR_depth_stencil_resolve"
+typedef VkResolveModeFlagBits VkResolveModeFlagBitsKHR;
+
+typedef VkResolveModeFlags VkResolveModeFlagsKHR;
+
+typedef VkSubpassDescriptionDepthStencilResolve VkSubpassDescriptionDepthStencilResolveKHR;
+
+typedef VkPhysicalDeviceDepthStencilResolveProperties VkPhysicalDeviceDepthStencilResolvePropertiesKHR;
+
+
+
+#define VK_KHR_swapchain_mutable_format 1
+#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_SPEC_VERSION 1
+#define VK_KHR_SWAPCHAIN_MUTABLE_FORMAT_EXTENSION_NAME "VK_KHR_swapchain_mutable_format"
+
+
+#define VK_KHR_timeline_semaphore 1
+#define VK_KHR_TIMELINE_SEMAPHORE_SPEC_VERSION 2
+#define VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME "VK_KHR_timeline_semaphore"
+typedef VkSemaphoreType VkSemaphoreTypeKHR;
+
+typedef VkSemaphoreWaitFlagBits VkSemaphoreWaitFlagBitsKHR;
+
+typedef VkSemaphoreWaitFlags VkSemaphoreWaitFlagsKHR;
+
+typedef VkPhysicalDeviceTimelineSemaphoreFeatures VkPhysicalDeviceTimelineSemaphoreFeaturesKHR;
+
+typedef VkPhysicalDeviceTimelineSemaphoreProperties VkPhysicalDeviceTimelineSemaphorePropertiesKHR;
+
+typedef VkSemaphoreTypeCreateInfo VkSemaphoreTypeCreateInfoKHR;
+
+typedef VkTimelineSemaphoreSubmitInfo VkTimelineSemaphoreSubmitInfoKHR;
+
+typedef VkSemaphoreWaitInfo VkSemaphoreWaitInfoKHR;
+
+typedef VkSemaphoreSignalInfo VkSemaphoreSignalInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreCounterValueKHR)(VkDevice device, VkSemaphore semaphore, uint64_t* pValue);
+typedef VkResult (VKAPI_PTR *PFN_vkWaitSemaphoresKHR)(VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout);
+typedef VkResult (VKAPI_PTR *PFN_vkSignalSemaphoreKHR)(VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreCounterValueKHR(
+    VkDevice                                    device,
+    VkSemaphore                                 semaphore,
+    uint64_t*                                   pValue);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitSemaphoresKHR(
+    VkDevice                                    device,
+    const VkSemaphoreWaitInfo*                  pWaitInfo,
+    uint64_t                                    timeout);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSignalSemaphoreKHR(
+    VkDevice                                    device,
+    const VkSemaphoreSignalInfo*                pSignalInfo);
+#endif
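
A sketch of host-side use of a timeline semaphore, assuming it was created as VK_SEMAPHORE_TYPE_TIMELINE via VkSemaphoreTypeCreateInfo; wait_then_signal is an illustrative name:

    #include <stdint.h>
    #include <vulkan/vulkan.h>

    VkResult wait_then_signal(VkDevice device, VkSemaphore sem, uint64_t waitValue)
    {
        VkSemaphoreWaitInfo waitInfo = {
            .sType          = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
            .semaphoreCount = 1,
            .pSemaphores    = &sem,
            .pValues        = &waitValue,
        };
        VkResult r = vkWaitSemaphoresKHR(device, &waitInfo, UINT64_MAX);
        if (r != VK_SUCCESS)
            return r;

        VkSemaphoreSignalInfo signalInfo = {
            .sType     = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
            .semaphore = sem,
            .value     = waitValue + 1, /* timeline values must only increase */
        };
        return vkSignalSemaphoreKHR(device, &signalInfo);
    }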
+
+
+#define VK_KHR_vulkan_memory_model 1
+#define VK_KHR_VULKAN_MEMORY_MODEL_SPEC_VERSION 3
+#define VK_KHR_VULKAN_MEMORY_MODEL_EXTENSION_NAME "VK_KHR_vulkan_memory_model"
+typedef VkPhysicalDeviceVulkanMemoryModelFeatures VkPhysicalDeviceVulkanMemoryModelFeaturesKHR;
+
+
+
+#define VK_KHR_shader_terminate_invocation 1
+#define VK_KHR_SHADER_TERMINATE_INVOCATION_SPEC_VERSION 1
+#define VK_KHR_SHADER_TERMINATE_INVOCATION_EXTENSION_NAME "VK_KHR_shader_terminate_invocation"
+typedef VkPhysicalDeviceShaderTerminateInvocationFeatures VkPhysicalDeviceShaderTerminateInvocationFeaturesKHR;
+
+
+
+#define VK_KHR_fragment_shading_rate 1
+#define VK_KHR_FRAGMENT_SHADING_RATE_SPEC_VERSION 2
+#define VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME "VK_KHR_fragment_shading_rate"
+
+typedef enum VkFragmentShadingRateCombinerOpKHR {
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR = 0,
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR = 1,
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR = 2,
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR = 3,
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR = 4,
+    VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkFragmentShadingRateCombinerOpKHR;
+typedef struct VkFragmentShadingRateAttachmentInfoKHR {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    const VkAttachmentReference2*    pFragmentShadingRateAttachment;
+    VkExtent2D                       shadingRateAttachmentTexelSize;
+} VkFragmentShadingRateAttachmentInfoKHR;
+
+typedef struct VkPipelineFragmentShadingRateStateCreateInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExtent2D                            fragmentSize;
+    VkFragmentShadingRateCombinerOpKHR    combinerOps[2];
+} VkPipelineFragmentShadingRateStateCreateInfoKHR;
+
+typedef struct VkPhysicalDeviceFragmentShadingRateFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelineFragmentShadingRate;
+    VkBool32           primitiveFragmentShadingRate;
+    VkBool32           attachmentFragmentShadingRate;
+} VkPhysicalDeviceFragmentShadingRateFeaturesKHR;
+
+typedef struct VkPhysicalDeviceFragmentShadingRatePropertiesKHR {
+    VkStructureType          sType;
+    void*                    pNext;
+    VkExtent2D               minFragmentShadingRateAttachmentTexelSize;
+    VkExtent2D               maxFragmentShadingRateAttachmentTexelSize;
+    uint32_t                 maxFragmentShadingRateAttachmentTexelSizeAspectRatio;
+    VkBool32                 primitiveFragmentShadingRateWithMultipleViewports;
+    VkBool32                 layeredShadingRateAttachments;
+    VkBool32                 fragmentShadingRateNonTrivialCombinerOps;
+    VkExtent2D               maxFragmentSize;
+    uint32_t                 maxFragmentSizeAspectRatio;
+    uint32_t                 maxFragmentShadingRateCoverageSamples;
+    VkSampleCountFlagBits    maxFragmentShadingRateRasterizationSamples;
+    VkBool32                 fragmentShadingRateWithShaderDepthStencilWrites;
+    VkBool32                 fragmentShadingRateWithSampleMask;
+    VkBool32                 fragmentShadingRateWithShaderSampleMask;
+    VkBool32                 fragmentShadingRateWithConservativeRasterization;
+    VkBool32                 fragmentShadingRateWithFragmentShaderInterlock;
+    VkBool32                 fragmentShadingRateWithCustomSampleLocations;
+    VkBool32                 fragmentShadingRateStrictMultiplyCombiner;
+} VkPhysicalDeviceFragmentShadingRatePropertiesKHR;
+
+typedef struct VkPhysicalDeviceFragmentShadingRateKHR {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkSampleCountFlags    sampleCounts;
+    VkExtent2D            fragmentSize;
+} VkPhysicalDeviceFragmentShadingRateKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR)(VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates);
+typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateKHR)(VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2]);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceFragmentShadingRatesKHR(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pFragmentShadingRateCount,
+    VkPhysicalDeviceFragmentShadingRateKHR*     pFragmentShadingRates);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkExtent2D*                           pFragmentSize,
+    const VkFragmentShadingRateCombinerOpKHR    combinerOps[2]);
+#endif
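
A sketch of requesting a coarse 2x2 shading rate for subsequent draws; with both combiners set to KEEP, the rate set here takes effect and the primitive and attachment rates are ignored:

    #include <vulkan/vulkan.h>

    void set_coarse_rate(VkCommandBuffer cmd)
    {
        const VkExtent2D fragmentSize = { 2, 2 };
        const VkFragmentShadingRateCombinerOpKHR ops[2] = {
            VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR, /* vs. primitive rate */
            VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR, /* vs. attachment rate */
        };
        vkCmdSetFragmentShadingRateKHR(cmd, &fragmentSize, ops);
    }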
+
+
+#define VK_KHR_spirv_1_4 1
+#define VK_KHR_SPIRV_1_4_SPEC_VERSION     1
+#define VK_KHR_SPIRV_1_4_EXTENSION_NAME   "VK_KHR_spirv_1_4"
+
+
+#define VK_KHR_surface_protected_capabilities 1
+#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_SPEC_VERSION 1
+#define VK_KHR_SURFACE_PROTECTED_CAPABILITIES_EXTENSION_NAME "VK_KHR_surface_protected_capabilities"
+typedef struct VkSurfaceProtectedCapabilitiesKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           supportsProtected;
+} VkSurfaceProtectedCapabilitiesKHR;
+
+
+
+#define VK_KHR_separate_depth_stencil_layouts 1
+#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_SPEC_VERSION 1
+#define VK_KHR_SEPARATE_DEPTH_STENCIL_LAYOUTS_EXTENSION_NAME "VK_KHR_separate_depth_stencil_layouts"
+typedef VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures VkPhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR;
+
+typedef VkAttachmentReferenceStencilLayout VkAttachmentReferenceStencilLayoutKHR;
+
+typedef VkAttachmentDescriptionStencilLayout VkAttachmentDescriptionStencilLayoutKHR;
+
+
+
+#define VK_KHR_present_wait 1
+#define VK_KHR_PRESENT_WAIT_SPEC_VERSION  1
+#define VK_KHR_PRESENT_WAIT_EXTENSION_NAME "VK_KHR_present_wait"
+typedef struct VkPhysicalDevicePresentWaitFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           presentWait;
+} VkPhysicalDevicePresentWaitFeaturesKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkWaitForPresentKHR)(VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkWaitForPresentKHR(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint64_t                                    presentId,
+    uint64_t                                    timeout);
+#endif
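
A sketch of blocking until a specific presentation has reached the display, assuming the frame was queued with a matching presentId (supplied through VK_KHR_present_id at vkQueuePresentKHR time); the 100 ms timeout is an arbitrary illustrative choice:

    #include <stdint.h>
    #include <vulkan/vulkan.h>

    VkResult wait_for_frame(VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId)
    {
        return vkWaitForPresentKHR(device, swapchain, presentId,
                                   100ull * 1000 * 1000 /* ns */);
    }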
+
+
+#define VK_KHR_uniform_buffer_standard_layout 1
+#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_SPEC_VERSION 1
+#define VK_KHR_UNIFORM_BUFFER_STANDARD_LAYOUT_EXTENSION_NAME "VK_KHR_uniform_buffer_standard_layout"
+typedef VkPhysicalDeviceUniformBufferStandardLayoutFeatures VkPhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
+
+
+
+#define VK_KHR_buffer_device_address 1
+#define VK_KHR_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 1
+#define VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_KHR_buffer_device_address"
+typedef VkPhysicalDeviceBufferDeviceAddressFeatures VkPhysicalDeviceBufferDeviceAddressFeaturesKHR;
+
+typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoKHR;
+
+typedef VkBufferOpaqueCaptureAddressCreateInfo VkBufferOpaqueCaptureAddressCreateInfoKHR;
+
+typedef VkMemoryOpaqueCaptureAddressAllocateInfo VkMemoryOpaqueCaptureAddressAllocateInfoKHR;
+
+typedef VkDeviceMemoryOpaqueCaptureAddressInfo VkDeviceMemoryOpaqueCaptureAddressInfoKHR;
+
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureAddressKHR)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+typedef uint64_t (VKAPI_PTR *PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR)(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfo*            pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetBufferOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfo*            pInfo);
+
+VKAPI_ATTR uint64_t VKAPI_CALL vkGetDeviceMemoryOpaqueCaptureAddressKHR(
+    VkDevice                                    device,
+    const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo);
+#endif
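
A sketch of retrieving a buffer's 64-bit device address, assuming the buffer was created with VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT; the address can then be handed to shaders (for example through a push constant) and dereferenced via PhysicalStorageBuffer pointers:

    #include <vulkan/vulkan.h>

    VkDeviceAddress buffer_address(VkDevice device, VkBuffer buffer)
    {
        VkBufferDeviceAddressInfo info = {
            .sType  = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
            .buffer = buffer,
        };
        return vkGetBufferDeviceAddressKHR(device, &info);
    }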
+
+
+#define VK_KHR_deferred_host_operations 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDeferredOperationKHR)
+#define VK_KHR_DEFERRED_HOST_OPERATIONS_SPEC_VERSION 4
+#define VK_KHR_DEFERRED_HOST_OPERATIONS_EXTENSION_NAME "VK_KHR_deferred_host_operations"
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDeferredOperationKHR)(VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation);
+typedef void (VKAPI_PTR *PFN_vkDestroyDeferredOperationKHR)(VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator);
+typedef uint32_t (VKAPI_PTR *PFN_vkGetDeferredOperationMaxConcurrencyKHR)(VkDevice device, VkDeferredOperationKHR operation);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeferredOperationResultKHR)(VkDevice device, VkDeferredOperationKHR operation);
+typedef VkResult (VKAPI_PTR *PFN_vkDeferredOperationJoinKHR)(VkDevice device, VkDeferredOperationKHR operation);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDeferredOperationKHR(
+    VkDevice                                    device,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDeferredOperationKHR*                     pDeferredOperation);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDeferredOperationKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      operation,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR uint32_t VKAPI_CALL vkGetDeferredOperationMaxConcurrencyKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      operation);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeferredOperationResultKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      operation);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkDeferredOperationJoinKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      operation);
+#endif
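
A single-threaded sketch of the deferred-operation lifecycle; real callers would spread vkDeferredOperationJoinKHR across up to vkGetDeferredOperationMaxConcurrencyKHR threads, and the deferrable command itself (defined by other extensions) is elided:

    #include <vulkan/vulkan.h>

    VkResult run_deferred(VkDevice device)
    {
        VkDeferredOperationKHR op = VK_NULL_HANDLE;
        VkResult r = vkCreateDeferredOperationKHR(device, NULL, &op);
        if (r != VK_SUCCESS)
            return r;

        /* ... pass `op` to a deferrable command; if it returns
           VK_OPERATION_DEFERRED_KHR, help execute it on this thread: */
        VkResult join;
        do {
            join = vkDeferredOperationJoinKHR(device, op);
        } while (join == VK_THREAD_IDLE_KHR);

        r = vkGetDeferredOperationResultKHR(device, op); /* result of the deferred command */
        vkDestroyDeferredOperationKHR(device, op, NULL);
        return r;
    }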
+
+
+#define VK_KHR_pipeline_executable_properties 1
+#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_SPEC_VERSION 1
+#define VK_KHR_PIPELINE_EXECUTABLE_PROPERTIES_EXTENSION_NAME "VK_KHR_pipeline_executable_properties"
+
+typedef enum VkPipelineExecutableStatisticFormatKHR {
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR = 0,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR = 1,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR = 2,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR = 3,
+    VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkPipelineExecutableStatisticFormatKHR;
+typedef struct VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelineExecutableInfo;
+} VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+
+typedef struct VkPipelineInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkPipeline         pipeline;
+} VkPipelineInfoKHR;
+
+typedef struct VkPipelineExecutablePropertiesKHR {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkShaderStageFlags    stages;
+    char                  name[VK_MAX_DESCRIPTION_SIZE];
+    char                  description[VK_MAX_DESCRIPTION_SIZE];
+    uint32_t              subgroupSize;
+} VkPipelineExecutablePropertiesKHR;
+
+typedef struct VkPipelineExecutableInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkPipeline         pipeline;
+    uint32_t           executableIndex;
+} VkPipelineExecutableInfoKHR;
+
+typedef union VkPipelineExecutableStatisticValueKHR {
+    VkBool32    b32;
+    int64_t     i64;
+    uint64_t    u64;
+    double      f64;
+} VkPipelineExecutableStatisticValueKHR;
+
+typedef struct VkPipelineExecutableStatisticKHR {
+    VkStructureType                           sType;
+    void*                                     pNext;
+    char                                      name[VK_MAX_DESCRIPTION_SIZE];
+    char                                      description[VK_MAX_DESCRIPTION_SIZE];
+    VkPipelineExecutableStatisticFormatKHR    format;
+    VkPipelineExecutableStatisticValueKHR     value;
+} VkPipelineExecutableStatisticKHR;
+
+typedef struct VkPipelineExecutableInternalRepresentationKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    char               name[VK_MAX_DESCRIPTION_SIZE];
+    char               description[VK_MAX_DESCRIPTION_SIZE];
+    VkBool32           isText;
+    size_t             dataSize;
+    void*              pData;
+} VkPipelineExecutableInternalRepresentationKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutablePropertiesKHR)(VkDevice                        device, const VkPipelineInfoKHR*        pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableStatisticsKHR)(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelineExecutableInternalRepresentationsKHR)(VkDevice                        device, const VkPipelineExecutableInfoKHR*  pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutablePropertiesKHR(
+    VkDevice                                    device,
+    const VkPipelineInfoKHR*                    pPipelineInfo,
+    uint32_t*                                   pExecutableCount,
+    VkPipelineExecutablePropertiesKHR*          pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableStatisticsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pStatisticCount,
+    VkPipelineExecutableStatisticKHR*           pStatistics);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelineExecutableInternalRepresentationsKHR(
+    VkDevice                                    device,
+    const VkPipelineExecutableInfoKHR*          pExecutableInfo,
+    uint32_t*                                   pInternalRepresentationCount,
+    VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations);
+#endif
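
As an illustrative sketch of the two-call enumeration pattern used by vkGetPipelineExecutablePropertiesKHR (not part of the header; the helper name is hypothetical and the PFN is assumed to have been loaded with vkGetDeviceProcAddr):

#include <stdio.h>
#include <stdlib.h>
#include <vulkan/vulkan.h>

/* Illustrative only: list the executables compiled for a pipeline. */
static void print_pipeline_executables(VkDevice device, VkPipeline pipeline,
                                       PFN_vkGetPipelineExecutablePropertiesKHR pfn)
{
    VkPipelineInfoKHR info = { 0 };
    info.sType = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR;
    info.pipeline = pipeline;

    uint32_t count = 0;
    pfn(device, &info, &count, NULL);            /* first call: query the count */

    VkPipelineExecutablePropertiesKHR *props = calloc(count, sizeof(*props));
    for (uint32_t i = 0; i < count; i++)
        props[i].sType = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR;
    pfn(device, &info, &count, props);           /* second call: fill the array */

    for (uint32_t i = 0; i < count; i++)
        printf("%s: %s\n", props[i].name, props[i].description);
    free(props);
}
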
+
+
+#define VK_KHR_shader_integer_dot_product 1
+#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_SPEC_VERSION 1
+#define VK_KHR_SHADER_INTEGER_DOT_PRODUCT_EXTENSION_NAME "VK_KHR_shader_integer_dot_product"
+typedef VkPhysicalDeviceShaderIntegerDotProductFeatures VkPhysicalDeviceShaderIntegerDotProductFeaturesKHR;
+
+typedef VkPhysicalDeviceShaderIntegerDotProductProperties VkPhysicalDeviceShaderIntegerDotProductPropertiesKHR;
+
+
+
+#define VK_KHR_pipeline_library 1
+#define VK_KHR_PIPELINE_LIBRARY_SPEC_VERSION 1
+#define VK_KHR_PIPELINE_LIBRARY_EXTENSION_NAME "VK_KHR_pipeline_library"
+typedef struct VkPipelineLibraryCreateInfoKHR {
+    VkStructureType      sType;
+    const void*          pNext;
+    uint32_t             libraryCount;
+    const VkPipeline*    pLibraries;
+} VkPipelineLibraryCreateInfoKHR;
+
+
+
+#define VK_KHR_shader_non_semantic_info 1
+#define VK_KHR_SHADER_NON_SEMANTIC_INFO_SPEC_VERSION 1
+#define VK_KHR_SHADER_NON_SEMANTIC_INFO_EXTENSION_NAME "VK_KHR_shader_non_semantic_info"
+
+
+#define VK_KHR_present_id 1
+#define VK_KHR_PRESENT_ID_SPEC_VERSION    1
+#define VK_KHR_PRESENT_ID_EXTENSION_NAME  "VK_KHR_present_id"
+typedef struct VkPresentIdKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           swapchainCount;
+    const uint64_t*    pPresentIds;
+} VkPresentIdKHR;
+
+typedef struct VkPhysicalDevicePresentIdFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           presentId;
+} VkPhysicalDevicePresentIdFeaturesKHR;
+
+
+
+#define VK_KHR_synchronization2 1
+#define VK_KHR_SYNCHRONIZATION_2_SPEC_VERSION 1
+#define VK_KHR_SYNCHRONIZATION_2_EXTENSION_NAME "VK_KHR_synchronization2"
+typedef VkPipelineStageFlags2 VkPipelineStageFlags2KHR;
+
+typedef VkPipelineStageFlagBits2 VkPipelineStageFlagBits2KHR;
+
+typedef VkAccessFlags2 VkAccessFlags2KHR;
+
+typedef VkAccessFlagBits2 VkAccessFlagBits2KHR;
+
+typedef VkSubmitFlagBits VkSubmitFlagBitsKHR;
+
+typedef VkSubmitFlags VkSubmitFlagsKHR;
+
+typedef VkMemoryBarrier2 VkMemoryBarrier2KHR;
+
+typedef VkBufferMemoryBarrier2 VkBufferMemoryBarrier2KHR;
+
+typedef VkImageMemoryBarrier2 VkImageMemoryBarrier2KHR;
+
+typedef VkDependencyInfo VkDependencyInfoKHR;
+
+typedef VkSubmitInfo2 VkSubmitInfo2KHR;
+
+typedef VkSemaphoreSubmitInfo VkSemaphoreSubmitInfoKHR;
+
+typedef VkCommandBufferSubmitInfo VkCommandBufferSubmitInfoKHR;
+
+typedef VkPhysicalDeviceSynchronization2Features VkPhysicalDeviceSynchronization2FeaturesKHR;
+
+typedef struct VkQueueFamilyCheckpointProperties2NV {
+    VkStructureType          sType;
+    void*                    pNext;
+    VkPipelineStageFlags2    checkpointExecutionStageMask;
+} VkQueueFamilyCheckpointProperties2NV;
+
+typedef struct VkCheckpointData2NV {
+    VkStructureType          sType;
+    void*                    pNext;
+    VkPipelineStageFlags2    stage;
+    void*                    pCheckpointMarker;
+} VkCheckpointData2NV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetEvent2KHR)(VkCommandBuffer                   commandBuffer, VkEvent                                             event, const VkDependencyInfo*                             pDependencyInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdResetEvent2KHR)(VkCommandBuffer                   commandBuffer, VkEvent                                             event, VkPipelineStageFlags2               stageMask);
+typedef void (VKAPI_PTR *PFN_vkCmdWaitEvents2KHR)(VkCommandBuffer                   commandBuffer, uint32_t                                            eventCount, const VkEvent*                     pEvents, const VkDependencyInfo*            pDependencyInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdPipelineBarrier2KHR)(VkCommandBuffer                   commandBuffer, const VkDependencyInfo*                             pDependencyInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteTimestamp2KHR)(VkCommandBuffer                   commandBuffer, VkPipelineStageFlags2               stage, VkQueryPool                                         queryPool, uint32_t                                            query);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSubmit2KHR)(VkQueue                           queue, uint32_t                            submitCount, const VkSubmitInfo2*              pSubmits, VkFence           fence);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarker2AMD)(VkCommandBuffer                   commandBuffer, VkPipelineStageFlags2               stage, VkBuffer                                            dstBuffer, VkDeviceSize                                        dstOffset, uint32_t                                            marker);
+typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointData2NV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointData2NV* pCheckpointData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetEvent2KHR(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    const VkDependencyInfo*                     pDependencyInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResetEvent2KHR(
+    VkCommandBuffer                             commandBuffer,
+    VkEvent                                     event,
+    VkPipelineStageFlags2                       stageMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWaitEvents2KHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    eventCount,
+    const VkEvent*                              pEvents,
+    const VkDependencyInfo*                     pDependencyInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPipelineBarrier2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkDependencyInfo*                     pDependencyInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteTimestamp2KHR(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags2                       stage,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSubmit2KHR(
+    VkQueue                                     queue,
+    uint32_t                                    submitCount,
+    const VkSubmitInfo2*                        pSubmits,
+    VkFence                                     fence);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarker2AMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlags2                       stage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker);
+
+VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointData2NV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointData2NV*                        pCheckpointData);
+#endif
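
A minimal sketch of an image layout transition recorded through the synchronization2 entry point declared above (illustrative only; the helper name is hypothetical and pfnBarrier2 is assumed to have been loaded with vkGetDeviceProcAddr("vkCmdPipelineBarrier2KHR")):

#include <vulkan/vulkan.h>

/* Illustrative only: transition a color image from UNDEFINED to TRANSFER_DST. */
static void transition_to_transfer_dst(VkCommandBuffer cmd, VkImage image,
                                       PFN_vkCmdPipelineBarrier2KHR pfnBarrier2)
{
    VkImageMemoryBarrier2KHR barrier = { 0 };
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR;
    barrier.srcStageMask = VK_PIPELINE_STAGE_2_NONE_KHR;
    barrier.srcAccessMask = VK_ACCESS_2_NONE_KHR;
    barrier.dstStageMask = VK_PIPELINE_STAGE_2_COPY_BIT_KHR;
    barrier.dstAccessMask = VK_ACCESS_2_TRANSFER_WRITE_BIT_KHR;
    barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    barrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image = image;
    barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    barrier.subresourceRange.levelCount = 1;
    barrier.subresourceRange.layerCount = 1;

    VkDependencyInfoKHR dep = { 0 };
    dep.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR;
    dep.imageMemoryBarrierCount = 1;
    dep.pImageMemoryBarriers = &barrier;

    pfnBarrier2(cmd, &dep);
}
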
+
+
+#define VK_KHR_fragment_shader_barycentric 1
+#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1
+#define VK_KHR_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_KHR_fragment_shader_barycentric"
+typedef struct VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentShaderBarycentric;
+} VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+
+typedef struct VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           triStripVertexOrderIndependentOfProvokingVertex;
+} VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+
+
+
+#define VK_KHR_shader_subgroup_uniform_control_flow 1
+#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_SPEC_VERSION 1
+#define VK_KHR_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_EXTENSION_NAME "VK_KHR_shader_subgroup_uniform_control_flow"
+typedef struct VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderSubgroupUniformControlFlow;
+} VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+
+
+
+#define VK_KHR_zero_initialize_workgroup_memory 1
+#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_SPEC_VERSION 1
+#define VK_KHR_ZERO_INITIALIZE_WORKGROUP_MEMORY_EXTENSION_NAME "VK_KHR_zero_initialize_workgroup_memory"
+typedef VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR;
+
+
+
+#define VK_KHR_workgroup_memory_explicit_layout 1
+#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_SPEC_VERSION 1
+#define VK_KHR_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_EXTENSION_NAME "VK_KHR_workgroup_memory_explicit_layout"
+typedef struct VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           workgroupMemoryExplicitLayout;
+    VkBool32           workgroupMemoryExplicitLayoutScalarBlockLayout;
+    VkBool32           workgroupMemoryExplicitLayout8BitAccess;
+    VkBool32           workgroupMemoryExplicitLayout16BitAccess;
+} VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+
+
+
+#define VK_KHR_copy_commands2 1
+#define VK_KHR_COPY_COMMANDS_2_SPEC_VERSION 1
+#define VK_KHR_COPY_COMMANDS_2_EXTENSION_NAME "VK_KHR_copy_commands2"
+typedef VkCopyBufferInfo2 VkCopyBufferInfo2KHR;
+
+typedef VkCopyImageInfo2 VkCopyImageInfo2KHR;
+
+typedef VkCopyBufferToImageInfo2 VkCopyBufferToImageInfo2KHR;
+
+typedef VkCopyImageToBufferInfo2 VkCopyImageToBufferInfo2KHR;
+
+typedef VkBlitImageInfo2 VkBlitImageInfo2KHR;
+
+typedef VkResolveImageInfo2 VkResolveImageInfo2KHR;
+
+typedef VkBufferCopy2 VkBufferCopy2KHR;
+
+typedef VkImageCopy2 VkImageCopy2KHR;
+
+typedef VkImageBlit2 VkImageBlit2KHR;
+
+typedef VkBufferImageCopy2 VkBufferImageCopy2KHR;
+
+typedef VkImageResolve2 VkImageResolve2KHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyBufferToImage2KHR)(VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyImageToBuffer2KHR)(VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBlitImage2KHR)(VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdResolveImage2KHR)(VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBuffer2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyBufferInfo2*                    pCopyBufferInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImage2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyImageInfo2*                     pCopyImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyBufferToImage2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyBufferToImageInfo2*             pCopyBufferToImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyImageToBuffer2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyImageToBufferInfo2*             pCopyImageToBufferInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBlitImage2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkBlitImageInfo2*                     pBlitImageInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdResolveImage2KHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkResolveImageInfo2*                  pResolveImageInfo);
+#endif
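
A short sketch of the extensible-struct copy path declared above (illustrative only; the helper name is hypothetical and pfnCopyBuffer2 is assumed to have been loaded with vkGetDeviceProcAddr):

#include <vulkan/vulkan.h>

/* Illustrative only: record a whole-buffer copy with vkCmdCopyBuffer2KHR. */
static void copy_whole_buffer(VkCommandBuffer cmd, VkBuffer src, VkBuffer dst,
                              VkDeviceSize size, PFN_vkCmdCopyBuffer2KHR pfnCopyBuffer2)
{
    VkBufferCopy2KHR region = { 0 };
    region.sType = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR;
    region.srcOffset = 0;
    region.dstOffset = 0;
    region.size = size;

    VkCopyBufferInfo2KHR info = { 0 };
    info.sType = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR;
    info.srcBuffer = src;
    info.dstBuffer = dst;
    info.regionCount = 1;
    info.pRegions = &region;

    pfnCopyBuffer2(cmd, &info);
}
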
+
+
+#define VK_KHR_format_feature_flags2 1
+#define VK_KHR_FORMAT_FEATURE_FLAGS_2_SPEC_VERSION 2
+#define VK_KHR_FORMAT_FEATURE_FLAGS_2_EXTENSION_NAME "VK_KHR_format_feature_flags2"
+typedef VkFormatFeatureFlags2 VkFormatFeatureFlags2KHR;
+
+typedef VkFormatFeatureFlagBits2 VkFormatFeatureFlagBits2KHR;
+
+typedef VkFormatProperties3 VkFormatProperties3KHR;
+
+
+
+#define VK_KHR_ray_tracing_maintenance1 1
+#define VK_KHR_RAY_TRACING_MAINTENANCE_1_SPEC_VERSION 1
+#define VK_KHR_RAY_TRACING_MAINTENANCE_1_EXTENSION_NAME "VK_KHR_ray_tracing_maintenance1"
+typedef struct VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rayTracingMaintenance1;
+    VkBool32           rayTracingPipelineTraceRaysIndirect2;
+} VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+
+typedef struct VkTraceRaysIndirectCommand2KHR {
+    VkDeviceAddress    raygenShaderRecordAddress;
+    VkDeviceSize       raygenShaderRecordSize;
+    VkDeviceAddress    missShaderBindingTableAddress;
+    VkDeviceSize       missShaderBindingTableSize;
+    VkDeviceSize       missShaderBindingTableStride;
+    VkDeviceAddress    hitShaderBindingTableAddress;
+    VkDeviceSize       hitShaderBindingTableSize;
+    VkDeviceSize       hitShaderBindingTableStride;
+    VkDeviceAddress    callableShaderBindingTableAddress;
+    VkDeviceSize       callableShaderBindingTableSize;
+    VkDeviceSize       callableShaderBindingTableStride;
+    uint32_t           width;
+    uint32_t           height;
+    uint32_t           depth;
+} VkTraceRaysIndirectCommand2KHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirect2KHR)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirect2KHR(
+    VkCommandBuffer                             commandBuffer,
+    VkDeviceAddress                             indirectDeviceAddress);
+#endif
+
+
+#define VK_KHR_portability_enumeration 1
+#define VK_KHR_PORTABILITY_ENUMERATION_SPEC_VERSION 1
+#define VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME "VK_KHR_portability_enumeration"
+
+
+#define VK_KHR_maintenance4 1
+#define VK_KHR_MAINTENANCE_4_SPEC_VERSION 2
+#define VK_KHR_MAINTENANCE_4_EXTENSION_NAME "VK_KHR_maintenance4"
+typedef VkPhysicalDeviceMaintenance4Features VkPhysicalDeviceMaintenance4FeaturesKHR;
+
+typedef VkPhysicalDeviceMaintenance4Properties VkPhysicalDeviceMaintenance4PropertiesKHR;
+
+typedef VkDeviceBufferMemoryRequirements VkDeviceBufferMemoryRequirementsKHR;
+
+typedef VkDeviceImageMemoryRequirements VkDeviceImageMemoryRequirementsKHR;
+
+typedef void (VKAPI_PTR *PFN_vkGetDeviceBufferMemoryRequirementsKHR)(VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceImageMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceImageSparseMemoryRequirementsKHR)(VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceBufferMemoryRequirementsKHR(
+    VkDevice                                    device,
+    const VkDeviceBufferMemoryRequirements*     pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageMemoryRequirementsKHR(
+    VkDevice                                    device,
+    const VkDeviceImageMemoryRequirements*      pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceImageSparseMemoryRequirementsKHR(
+    VkDevice                                    device,
+    const VkDeviceImageMemoryRequirements*      pInfo,
+    uint32_t*                                   pSparseMemoryRequirementCount,
+    VkSparseImageMemoryRequirements2*           pSparseMemoryRequirements);
+#endif
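
A small sketch of querying memory requirements from a create-info alone, as VK_KHR_maintenance4 allows (illustrative only; the helper name is hypothetical and pfn is assumed to have been loaded with vkGetDeviceProcAddr("vkGetDeviceBufferMemoryRequirementsKHR")):

#include <vulkan/vulkan.h>

/* Illustrative only: get buffer memory requirements without creating the buffer. */
static VkMemoryRequirements query_buffer_requirements(
    VkDevice device, const VkBufferCreateInfo *bufferInfo,
    PFN_vkGetDeviceBufferMemoryRequirementsKHR pfn)
{
    VkDeviceBufferMemoryRequirementsKHR query = { 0 };
    query.sType = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR;
    query.pCreateInfo = bufferInfo;

    VkMemoryRequirements2 reqs = { 0 };
    reqs.sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;

    pfn(device, &query, &reqs);
    return reqs.memoryRequirements;
}
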
+
+
+#define VK_EXT_debug_report 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugReportCallbackEXT)
+#define VK_EXT_DEBUG_REPORT_SPEC_VERSION  10
+#define VK_EXT_DEBUG_REPORT_EXTENSION_NAME "VK_EXT_debug_report"
+
+typedef enum VkDebugReportObjectTypeEXT {
+    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT = 0,
+    VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT = 1,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT = 2,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT = 3,
+    VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT = 4,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT = 5,
+    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT = 6,
+    VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT = 7,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT = 8,
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT = 9,
+    VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT = 10,
+    VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT = 11,
+    VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT = 12,
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT = 13,
+    VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT = 14,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT = 15,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT = 16,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT = 17,
+    VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT = 18,
+    VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT = 19,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT = 20,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT = 21,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT = 22,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT = 23,
+    VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT = 24,
+    VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT = 25,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT = 26,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT = 27,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT = 28,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT = 29,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT = 30,
+    VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT = 33,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT = 1000156000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT = 1000085000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT = 1000029000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT = 1000029001,
+    VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT = 1000150000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT = 1000165000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT = 1000366000,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
+    VK_DEBUG_REPORT_OBJECT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugReportObjectTypeEXT;
+
+typedef enum VkDebugReportFlagBitsEXT {
+    VK_DEBUG_REPORT_INFORMATION_BIT_EXT = 0x00000001,
+    VK_DEBUG_REPORT_WARNING_BIT_EXT = 0x00000002,
+    VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT = 0x00000004,
+    VK_DEBUG_REPORT_ERROR_BIT_EXT = 0x00000008,
+    VK_DEBUG_REPORT_DEBUG_BIT_EXT = 0x00000010,
+    VK_DEBUG_REPORT_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugReportFlagBitsEXT;
+typedef VkFlags VkDebugReportFlagsEXT;
+typedef VkBool32 (VKAPI_PTR *PFN_vkDebugReportCallbackEXT)(
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage,
+    void*                                       pUserData);
+
+typedef struct VkDebugReportCallbackCreateInfoEXT {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkDebugReportFlagsEXT           flags;
+    PFN_vkDebugReportCallbackEXT    pfnCallback;
+    void*                           pUserData;
+} VkDebugReportCallbackCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugReportCallbackEXT)(VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback);
+typedef void (VKAPI_PTR *PFN_vkDestroyDebugReportCallbackEXT)(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkDebugReportMessageEXT)(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    const VkDebugReportCallbackCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugReportCallbackEXT*                   pCallback);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDebugReportCallbackEXT(
+    VkInstance                                  instance,
+    VkDebugReportCallbackEXT                    callback,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkDebugReportMessageEXT(
+    VkInstance                                  instance,
+    VkDebugReportFlagsEXT                       flags,
+    VkDebugReportObjectTypeEXT                  objectType,
+    uint64_t                                    object,
+    size_t                                      location,
+    int32_t                                     messageCode,
+    const char*                                 pLayerPrefix,
+    const char*                                 pMessage);
+#endif
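
A minimal sketch of installing a debug report callback with the instance-level entry point declared above (illustrative only; the function names report_cb and install_debug_report are hypothetical):

#include <stdio.h>
#include <vulkan/vulkan.h>

/* Illustrative only: log validation/debug reports to stderr. */
static VKAPI_ATTR VkBool32 VKAPI_CALL report_cb(
    VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType,
    uint64_t object, size_t location, int32_t messageCode,
    const char *pLayerPrefix, const char *pMessage, void *pUserData)
{
    fprintf(stderr, "[%s] %s\n", pLayerPrefix, pMessage);
    return VK_FALSE;    /* do not abort the call that triggered the report */
}

static VkDebugReportCallbackEXT install_debug_report(VkInstance instance)
{
    PFN_vkCreateDebugReportCallbackEXT pfnCreate =
        (PFN_vkCreateDebugReportCallbackEXT)vkGetInstanceProcAddr(
            instance, "vkCreateDebugReportCallbackEXT");

    VkDebugReportCallbackCreateInfoEXT ci = { 0 };
    ci.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
    ci.flags = VK_DEBUG_REPORT_WARNING_BIT_EXT | VK_DEBUG_REPORT_ERROR_BIT_EXT;
    ci.pfnCallback = report_cb;

    VkDebugReportCallbackEXT callback = VK_NULL_HANDLE;
    pfnCreate(instance, &ci, NULL, &callback);
    return callback;
}
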
+
+
+#define VK_NV_glsl_shader 1
+#define VK_NV_GLSL_SHADER_SPEC_VERSION    1
+#define VK_NV_GLSL_SHADER_EXTENSION_NAME  "VK_NV_glsl_shader"
+
+
+#define VK_EXT_depth_range_unrestricted 1
+#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION 1
+#define VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "VK_EXT_depth_range_unrestricted"
+
+
+#define VK_IMG_filter_cubic 1
+#define VK_IMG_FILTER_CUBIC_SPEC_VERSION  1
+#define VK_IMG_FILTER_CUBIC_EXTENSION_NAME "VK_IMG_filter_cubic"
+
+
+#define VK_AMD_rasterization_order 1
+#define VK_AMD_RASTERIZATION_ORDER_SPEC_VERSION 1
+#define VK_AMD_RASTERIZATION_ORDER_EXTENSION_NAME "VK_AMD_rasterization_order"
+
+typedef enum VkRasterizationOrderAMD {
+    VK_RASTERIZATION_ORDER_STRICT_AMD = 0,
+    VK_RASTERIZATION_ORDER_RELAXED_AMD = 1,
+    VK_RASTERIZATION_ORDER_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkRasterizationOrderAMD;
+typedef struct VkPipelineRasterizationStateRasterizationOrderAMD {
+    VkStructureType            sType;
+    const void*                pNext;
+    VkRasterizationOrderAMD    rasterizationOrder;
+} VkPipelineRasterizationStateRasterizationOrderAMD;
+
+
+
+#define VK_AMD_shader_trinary_minmax 1
+#define VK_AMD_SHADER_TRINARY_MINMAX_SPEC_VERSION 1
+#define VK_AMD_SHADER_TRINARY_MINMAX_EXTENSION_NAME "VK_AMD_shader_trinary_minmax"
+
+
+#define VK_AMD_shader_explicit_vertex_parameter 1
+#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_SPEC_VERSION 1
+#define VK_AMD_SHADER_EXPLICIT_VERTEX_PARAMETER_EXTENSION_NAME "VK_AMD_shader_explicit_vertex_parameter"
+
+
+#define VK_EXT_debug_marker 1
+#define VK_EXT_DEBUG_MARKER_SPEC_VERSION  4
+#define VK_EXT_DEBUG_MARKER_EXTENSION_NAME "VK_EXT_debug_marker"
+typedef struct VkDebugMarkerObjectNameInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkDebugReportObjectTypeEXT    objectType;
+    uint64_t                      object;
+    const char*                   pObjectName;
+} VkDebugMarkerObjectNameInfoEXT;
+
+typedef struct VkDebugMarkerObjectTagInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkDebugReportObjectTypeEXT    objectType;
+    uint64_t                      object;
+    uint64_t                      tagName;
+    size_t                        tagSize;
+    const void*                   pTag;
+} VkDebugMarkerObjectTagInfoEXT;
+
+typedef struct VkDebugMarkerMarkerInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    const char*        pMarkerName;
+    float              color[4];
+} VkDebugMarkerMarkerInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectTagEXT)(VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkDebugMarkerSetObjectNameEXT)(VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerBeginEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerEndEXT)(VkCommandBuffer commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdDebugMarkerInsertEXT)(VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectTagInfoEXT*        pTagInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkDebugMarkerSetObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugMarkerObjectNameInfoEXT*       pNameInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerBeginEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerEndEXT(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDebugMarkerInsertEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugMarkerMarkerInfoEXT*           pMarkerInfo);
+#endif
+
+
+#define VK_AMD_gcn_shader 1
+#define VK_AMD_GCN_SHADER_SPEC_VERSION    1
+#define VK_AMD_GCN_SHADER_EXTENSION_NAME  "VK_AMD_gcn_shader"
+
+
+#define VK_NV_dedicated_allocation 1
+#define VK_NV_DEDICATED_ALLOCATION_SPEC_VERSION 1
+#define VK_NV_DEDICATED_ALLOCATION_EXTENSION_NAME "VK_NV_dedicated_allocation"
+typedef struct VkDedicatedAllocationImageCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           dedicatedAllocation;
+} VkDedicatedAllocationImageCreateInfoNV;
+
+typedef struct VkDedicatedAllocationBufferCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           dedicatedAllocation;
+} VkDedicatedAllocationBufferCreateInfoNV;
+
+typedef struct VkDedicatedAllocationMemoryAllocateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+    VkBuffer           buffer;
+} VkDedicatedAllocationMemoryAllocateInfoNV;
+
+
+
+#define VK_EXT_transform_feedback 1
+#define VK_EXT_TRANSFORM_FEEDBACK_SPEC_VERSION 1
+#define VK_EXT_TRANSFORM_FEEDBACK_EXTENSION_NAME "VK_EXT_transform_feedback"
+typedef VkFlags VkPipelineRasterizationStateStreamCreateFlagsEXT;
+typedef struct VkPhysicalDeviceTransformFeedbackFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           transformFeedback;
+    VkBool32           geometryStreams;
+} VkPhysicalDeviceTransformFeedbackFeaturesEXT;
+
+typedef struct VkPhysicalDeviceTransformFeedbackPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxTransformFeedbackStreams;
+    uint32_t           maxTransformFeedbackBuffers;
+    VkDeviceSize       maxTransformFeedbackBufferSize;
+    uint32_t           maxTransformFeedbackStreamDataSize;
+    uint32_t           maxTransformFeedbackBufferDataSize;
+    uint32_t           maxTransformFeedbackBufferDataStride;
+    VkBool32           transformFeedbackQueries;
+    VkBool32           transformFeedbackStreamsLinesTriangles;
+    VkBool32           transformFeedbackRasterizationStreamSelect;
+    VkBool32           transformFeedbackDraw;
+} VkPhysicalDeviceTransformFeedbackPropertiesEXT;
+
+typedef struct VkPipelineRasterizationStateStreamCreateInfoEXT {
+    VkStructureType                                     sType;
+    const void*                                         pNext;
+    VkPipelineRasterizationStateStreamCreateFlagsEXT    flags;
+    uint32_t                                            rasterizationStream;
+} VkPipelineRasterizationStateStreamCreateInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBindTransformFeedbackBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdEndTransformFeedbackEXT)(VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index);
+typedef void (VKAPI_PTR *PFN_vkCmdEndQueryIndexedEXT)(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectByteCountEXT)(VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBindTransformFeedbackBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndTransformFeedbackEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstCounterBuffer,
+    uint32_t                                    counterBufferCount,
+    const VkBuffer*                             pCounterBuffers,
+    const VkDeviceSize*                         pCounterBufferOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    VkQueryControlFlags                         flags,
+    uint32_t                                    index);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndQueryIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    query,
+    uint32_t                                    index);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectByteCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    VkBuffer                                    counterBuffer,
+    VkDeviceSize                                counterBufferOffset,
+    uint32_t                                    counterOffset,
+    uint32_t                                    vertexStride);
+#endif
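
A brief sketch of the bind/begin/end bracketing used by the transform feedback commands declared above (illustrative only; the helper name is hypothetical, no counter buffers are used, and the three PFNs are assumed to have been loaded with vkGetDeviceProcAddr):

#include <vulkan/vulkan.h>

/* Illustrative only: capture one draw into a single transform feedback buffer. */
static void draw_with_xfb(VkCommandBuffer cmd, VkBuffer xfbBuffer, VkDeviceSize xfbSize,
                          PFN_vkCmdBindTransformFeedbackBuffersEXT pfnBind,
                          PFN_vkCmdBeginTransformFeedbackEXT pfnBegin,
                          PFN_vkCmdEndTransformFeedbackEXT pfnEnd)
{
    VkDeviceSize offset = 0;
    pfnBind(cmd, 0 /*firstBinding*/, 1, &xfbBuffer, &offset, &xfbSize);

    /* No counter buffers in this sketch, so capture starts at the buffer origin. */
    pfnBegin(cmd, 0, 0, NULL, NULL);
    vkCmdDraw(cmd, 3, 1, 0, 0);
    pfnEnd(cmd, 0, 0, NULL, NULL);
}
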
+
+
+#define VK_NVX_binary_import 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuModuleNVX)
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkCuFunctionNVX)
+#define VK_NVX_BINARY_IMPORT_SPEC_VERSION 1
+#define VK_NVX_BINARY_IMPORT_EXTENSION_NAME "VK_NVX_binary_import"
+typedef struct VkCuModuleCreateInfoNVX {
+    VkStructureType    sType;
+    const void*        pNext;
+    size_t             dataSize;
+    const void*        pData;
+} VkCuModuleCreateInfoNVX;
+
+typedef struct VkCuFunctionCreateInfoNVX {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkCuModuleNVX      module;
+    const char*        pName;
+} VkCuFunctionCreateInfoNVX;
+
+typedef struct VkCuLaunchInfoNVX {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkCuFunctionNVX        function;
+    uint32_t               gridDimX;
+    uint32_t               gridDimY;
+    uint32_t               gridDimZ;
+    uint32_t               blockDimX;
+    uint32_t               blockDimY;
+    uint32_t               blockDimZ;
+    uint32_t               sharedMemBytes;
+    size_t                 paramCount;
+    const void* const *    pParams;
+    size_t                 extraCount;
+    const void* const *    pExtras;
+} VkCuLaunchInfoNVX;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateCuModuleNVX)(VkDevice device, const VkCuModuleCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuModuleNVX* pModule);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateCuFunctionNVX)(VkDevice device, const VkCuFunctionCreateInfoNVX* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCuFunctionNVX* pFunction);
+typedef void (VKAPI_PTR *PFN_vkDestroyCuModuleNVX)(VkDevice device, VkCuModuleNVX module, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkDestroyCuFunctionNVX)(VkDevice device, VkCuFunctionNVX function, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkCmdCuLaunchKernelNVX)(VkCommandBuffer commandBuffer, const VkCuLaunchInfoNVX* pLaunchInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuModuleNVX(
+    VkDevice                                    device,
+    const VkCuModuleCreateInfoNVX*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCuModuleNVX*                              pModule);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateCuFunctionNVX(
+    VkDevice                                    device,
+    const VkCuFunctionCreateInfoNVX*            pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkCuFunctionNVX*                            pFunction);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyCuModuleNVX(
+    VkDevice                                    device,
+    VkCuModuleNVX                               module,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyCuFunctionNVX(
+    VkDevice                                    device,
+    VkCuFunctionNVX                             function,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCuLaunchKernelNVX(
+    VkCommandBuffer                             commandBuffer,
+    const VkCuLaunchInfoNVX*                    pLaunchInfo);
+#endif
+
+
+#define VK_NVX_image_view_handle 1
+#define VK_NVX_IMAGE_VIEW_HANDLE_SPEC_VERSION 2
+#define VK_NVX_IMAGE_VIEW_HANDLE_EXTENSION_NAME "VK_NVX_image_view_handle"
+typedef struct VkImageViewHandleInfoNVX {
+    VkStructureType     sType;
+    const void*         pNext;
+    VkImageView         imageView;
+    VkDescriptorType    descriptorType;
+    VkSampler           sampler;
+} VkImageViewHandleInfoNVX;
+
+typedef struct VkImageViewAddressPropertiesNVX {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceAddress    deviceAddress;
+    VkDeviceSize       size;
+} VkImageViewAddressPropertiesNVX;
+
+typedef uint32_t (VKAPI_PTR *PFN_vkGetImageViewHandleNVX)(VkDevice device, const VkImageViewHandleInfoNVX* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewAddressNVX)(VkDevice device, VkImageView imageView, VkImageViewAddressPropertiesNVX* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR uint32_t VKAPI_CALL vkGetImageViewHandleNVX(
+    VkDevice                                    device,
+    const VkImageViewHandleInfoNVX*             pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewAddressNVX(
+    VkDevice                                    device,
+    VkImageView                                 imageView,
+    VkImageViewAddressPropertiesNVX*            pProperties);
+#endif
+
+
+#define VK_AMD_draw_indirect_count 1
+#define VK_AMD_DRAW_INDIRECT_COUNT_SPEC_VERSION 2
+#define VK_AMD_DRAW_INDIRECT_COUNT_EXTENSION_NAME "VK_AMD_draw_indirect_count"
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawIndexedIndirectCountAMD)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawIndexedIndirectCountAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+#endif
+
+
+#define VK_AMD_negative_viewport_height 1
+#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_SPEC_VERSION 1
+#define VK_AMD_NEGATIVE_VIEWPORT_HEIGHT_EXTENSION_NAME "VK_AMD_negative_viewport_height"
+
+
+#define VK_AMD_gpu_shader_half_float 1
+#define VK_AMD_GPU_SHADER_HALF_FLOAT_SPEC_VERSION 2
+#define VK_AMD_GPU_SHADER_HALF_FLOAT_EXTENSION_NAME "VK_AMD_gpu_shader_half_float"
+
+
+#define VK_AMD_shader_ballot 1
+#define VK_AMD_SHADER_BALLOT_SPEC_VERSION 1
+#define VK_AMD_SHADER_BALLOT_EXTENSION_NAME "VK_AMD_shader_ballot"
+
+
+#define VK_AMD_texture_gather_bias_lod 1
+#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_SPEC_VERSION 1
+#define VK_AMD_TEXTURE_GATHER_BIAS_LOD_EXTENSION_NAME "VK_AMD_texture_gather_bias_lod"
+typedef struct VkTextureLODGatherFormatPropertiesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           supportsTextureGatherLODBiasAMD;
+} VkTextureLODGatherFormatPropertiesAMD;
+
+
+
+#define VK_AMD_shader_info 1
+#define VK_AMD_SHADER_INFO_SPEC_VERSION   1
+#define VK_AMD_SHADER_INFO_EXTENSION_NAME "VK_AMD_shader_info"
+
+typedef enum VkShaderInfoTypeAMD {
+    VK_SHADER_INFO_TYPE_STATISTICS_AMD = 0,
+    VK_SHADER_INFO_TYPE_BINARY_AMD = 1,
+    VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD = 2,
+    VK_SHADER_INFO_TYPE_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkShaderInfoTypeAMD;
+typedef struct VkShaderResourceUsageAMD {
+    uint32_t    numUsedVgprs;
+    uint32_t    numUsedSgprs;
+    uint32_t    ldsSizePerLocalWorkGroup;
+    size_t      ldsUsageSizeInBytes;
+    size_t      scratchMemUsageInBytes;
+} VkShaderResourceUsageAMD;
+
+typedef struct VkShaderStatisticsInfoAMD {
+    VkShaderStageFlags          shaderStageMask;
+    VkShaderResourceUsageAMD    resourceUsage;
+    uint32_t                    numPhysicalVgprs;
+    uint32_t                    numPhysicalSgprs;
+    uint32_t                    numAvailableVgprs;
+    uint32_t                    numAvailableSgprs;
+    uint32_t                    computeWorkGroupSize[3];
+} VkShaderStatisticsInfoAMD;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetShaderInfoAMD)(VkDevice device, VkPipeline pipeline, VkShaderStageFlagBits shaderStage, VkShaderInfoTypeAMD infoType, size_t* pInfoSize, void* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetShaderInfoAMD(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    VkShaderStageFlagBits                       shaderStage,
+    VkShaderInfoTypeAMD                         infoType,
+    size_t*                                     pInfoSize,
+    void*                                       pInfo);
+#endif
+
+
+#define VK_AMD_shader_image_load_store_lod 1
+#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_SPEC_VERSION 1
+#define VK_AMD_SHADER_IMAGE_LOAD_STORE_LOD_EXTENSION_NAME "VK_AMD_shader_image_load_store_lod"
+
+
+#define VK_NV_corner_sampled_image 1
+#define VK_NV_CORNER_SAMPLED_IMAGE_SPEC_VERSION 2
+#define VK_NV_CORNER_SAMPLED_IMAGE_EXTENSION_NAME "VK_NV_corner_sampled_image"
+typedef struct VkPhysicalDeviceCornerSampledImageFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           cornerSampledImage;
+} VkPhysicalDeviceCornerSampledImageFeaturesNV;
+
+
+
+#define VK_IMG_format_pvrtc 1
+#define VK_IMG_FORMAT_PVRTC_SPEC_VERSION  1
+#define VK_IMG_FORMAT_PVRTC_EXTENSION_NAME "VK_IMG_format_pvrtc"
+
+
+#define VK_NV_external_memory_capabilities 1
+#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_CAPABILITIES_EXTENSION_NAME "VK_NV_external_memory_capabilities"
+
+typedef enum VkExternalMemoryHandleTypeFlagBitsNV {
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV = 0x00000001,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV = 0x00000002,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV = 0x00000004,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV = 0x00000008,
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkExternalMemoryHandleTypeFlagBitsNV;
+typedef VkFlags VkExternalMemoryHandleTypeFlagsNV;
+
+typedef enum VkExternalMemoryFeatureFlagBitsNV {
+    VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV = 0x00000001,
+    VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV = 0x00000002,
+    VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV = 0x00000004,
+    VK_EXTERNAL_MEMORY_FEATURE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkExternalMemoryFeatureFlagBitsNV;
+typedef VkFlags VkExternalMemoryFeatureFlagsNV;
+typedef struct VkExternalImageFormatPropertiesNV {
+    VkImageFormatProperties              imageFormatProperties;
+    VkExternalMemoryFeatureFlagsNV       externalMemoryFeatures;
+    VkExternalMemoryHandleTypeFlagsNV    exportFromImportedHandleTypes;
+    VkExternalMemoryHandleTypeFlagsNV    compatibleHandleTypes;
+} VkExternalImageFormatPropertiesNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV)(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkExternalMemoryHandleTypeFlagsNV externalHandleType, VkExternalImageFormatPropertiesNV* pExternalImageFormatProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceExternalImageFormatPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkFormat                                    format,
+    VkImageType                                 type,
+    VkImageTiling                               tiling,
+    VkImageUsageFlags                           usage,
+    VkImageCreateFlags                          flags,
+    VkExternalMemoryHandleTypeFlagsNV           externalHandleType,
+    VkExternalImageFormatPropertiesNV*          pExternalImageFormatProperties);
+#endif
+
+
+#define VK_NV_external_memory 1
+#define VK_NV_EXTERNAL_MEMORY_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_EXTENSION_NAME "VK_NV_external_memory"
+typedef struct VkExternalMemoryImageCreateInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalMemoryHandleTypeFlagsNV    handleTypes;
+} VkExternalMemoryImageCreateInfoNV;
+
+typedef struct VkExportMemoryAllocateInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkExternalMemoryHandleTypeFlagsNV    handleTypes;
+} VkExportMemoryAllocateInfoNV;
+
+
+
+#define VK_EXT_validation_flags 1
+#define VK_EXT_VALIDATION_FLAGS_SPEC_VERSION 2
+#define VK_EXT_VALIDATION_FLAGS_EXTENSION_NAME "VK_EXT_validation_flags"
+
+typedef enum VkValidationCheckEXT {
+    VK_VALIDATION_CHECK_ALL_EXT = 0,
+    VK_VALIDATION_CHECK_SHADERS_EXT = 1,
+    VK_VALIDATION_CHECK_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationCheckEXT;
+typedef struct VkValidationFlagsEXT {
+    VkStructureType                sType;
+    const void*                    pNext;
+    uint32_t                       disabledValidationCheckCount;
+    const VkValidationCheckEXT*    pDisabledValidationChecks;
+} VkValidationFlagsEXT;
+
+
+
+#define VK_EXT_shader_subgroup_ballot 1
+#define VK_EXT_SHADER_SUBGROUP_BALLOT_SPEC_VERSION 1
+#define VK_EXT_SHADER_SUBGROUP_BALLOT_EXTENSION_NAME "VK_EXT_shader_subgroup_ballot"
+
+
+#define VK_EXT_shader_subgroup_vote 1
+#define VK_EXT_SHADER_SUBGROUP_VOTE_SPEC_VERSION 1
+#define VK_EXT_SHADER_SUBGROUP_VOTE_EXTENSION_NAME "VK_EXT_shader_subgroup_vote"
+
+
+#define VK_EXT_texture_compression_astc_hdr 1
+#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_SPEC_VERSION 1
+#define VK_EXT_TEXTURE_COMPRESSION_ASTC_HDR_EXTENSION_NAME "VK_EXT_texture_compression_astc_hdr"
+typedef VkPhysicalDeviceTextureCompressionASTCHDRFeatures VkPhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
+
+
+
+#define VK_EXT_astc_decode_mode 1
+#define VK_EXT_ASTC_DECODE_MODE_SPEC_VERSION 1
+#define VK_EXT_ASTC_DECODE_MODE_EXTENSION_NAME "VK_EXT_astc_decode_mode"
+typedef struct VkImageViewASTCDecodeModeEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkFormat           decodeMode;
+} VkImageViewASTCDecodeModeEXT;
+
+typedef struct VkPhysicalDeviceASTCDecodeFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           decodeModeSharedExponent;
+} VkPhysicalDeviceASTCDecodeFeaturesEXT;
+
+
+
+#define VK_EXT_pipeline_robustness 1
+#define VK_EXT_PIPELINE_ROBUSTNESS_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_pipeline_robustness"
+
+typedef enum VkPipelineRobustnessBufferBehaviorEXT {
+    VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT = 0,
+    VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT = 1,
+    VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT = 2,
+    VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT = 3,
+    VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkPipelineRobustnessBufferBehaviorEXT;
+
+typedef enum VkPipelineRobustnessImageBehaviorEXT {
+    VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT = 0,
+    VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT = 1,
+    VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT = 2,
+    VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT = 3,
+    VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkPipelineRobustnessImageBehaviorEXT;
+typedef struct VkPhysicalDevicePipelineRobustnessFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelineRobustness;
+} VkPhysicalDevicePipelineRobustnessFeaturesEXT;
+
+typedef struct VkPhysicalDevicePipelineRobustnessPropertiesEXT {
+    VkStructureType                          sType;
+    void*                                    pNext;
+    VkPipelineRobustnessBufferBehaviorEXT    defaultRobustnessStorageBuffers;
+    VkPipelineRobustnessBufferBehaviorEXT    defaultRobustnessUniformBuffers;
+    VkPipelineRobustnessBufferBehaviorEXT    defaultRobustnessVertexInputs;
+    VkPipelineRobustnessImageBehaviorEXT     defaultRobustnessImages;
+} VkPhysicalDevicePipelineRobustnessPropertiesEXT;
+
+typedef struct VkPipelineRobustnessCreateInfoEXT {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkPipelineRobustnessBufferBehaviorEXT    storageBuffers;
+    VkPipelineRobustnessBufferBehaviorEXT    uniformBuffers;
+    VkPipelineRobustnessBufferBehaviorEXT    vertexInputs;
+    VkPipelineRobustnessImageBehaviorEXT     images;
+} VkPipelineRobustnessCreateInfoEXT;
+
+
+
+#define VK_EXT_conditional_rendering 1
+#define VK_EXT_CONDITIONAL_RENDERING_SPEC_VERSION 2
+#define VK_EXT_CONDITIONAL_RENDERING_EXTENSION_NAME "VK_EXT_conditional_rendering"
+
+typedef enum VkConditionalRenderingFlagBitsEXT {
+    VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT = 0x00000001,
+    VK_CONDITIONAL_RENDERING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkConditionalRenderingFlagBitsEXT;
+typedef VkFlags VkConditionalRenderingFlagsEXT;
+typedef struct VkConditionalRenderingBeginInfoEXT {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkBuffer                          buffer;
+    VkDeviceSize                      offset;
+    VkConditionalRenderingFlagsEXT    flags;
+} VkConditionalRenderingBeginInfoEXT;
+
+typedef struct VkPhysicalDeviceConditionalRenderingFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           conditionalRendering;
+    VkBool32           inheritedConditionalRendering;
+} VkPhysicalDeviceConditionalRenderingFeaturesEXT;
+
+typedef struct VkCommandBufferInheritanceConditionalRenderingInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           conditionalRenderingEnable;
+} VkCommandBufferInheritanceConditionalRenderingInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBeginConditionalRenderingEXT)(VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin);
+typedef void (VKAPI_PTR *PFN_vkCmdEndConditionalRenderingEXT)(VkCommandBuffer commandBuffer);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkConditionalRenderingBeginInfoEXT*   pConditionalRenderingBegin);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndConditionalRenderingEXT(
+    VkCommandBuffer                             commandBuffer);
+#endif
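
A short sketch of predicated drawing with the begin/end pair declared above (illustrative only; the helper name is hypothetical and both PFNs are assumed to have been loaded with vkGetDeviceProcAddr):

#include <vulkan/vulkan.h>

/* Illustrative only: skip the draw on the GPU when the 32-bit predicate at
 * `offset` in `predicateBuffer` is zero. */
static void draw_if_predicate(VkCommandBuffer cmd, VkBuffer predicateBuffer, VkDeviceSize offset,
                              PFN_vkCmdBeginConditionalRenderingEXT pfnBegin,
                              PFN_vkCmdEndConditionalRenderingEXT pfnEnd)
{
    VkConditionalRenderingBeginInfoEXT info = { 0 };
    info.sType = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT;
    info.buffer = predicateBuffer;
    info.offset = offset;
    info.flags = 0;    /* VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT would invert the test */

    pfnBegin(cmd, &info);
    vkCmdDraw(cmd, 3, 1, 0, 0);
    pfnEnd(cmd);
}
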
+
+
+#define VK_NV_clip_space_w_scaling 1
+#define VK_NV_CLIP_SPACE_W_SCALING_SPEC_VERSION 1
+#define VK_NV_CLIP_SPACE_W_SCALING_EXTENSION_NAME "VK_NV_clip_space_w_scaling"
+typedef struct VkViewportWScalingNV {
+    float    xcoeff;
+    float    ycoeff;
+} VkViewportWScalingNV;
+
+typedef struct VkPipelineViewportWScalingStateCreateInfoNV {
+    VkStructureType                sType;
+    const void*                    pNext;
+    VkBool32                       viewportWScalingEnable;
+    uint32_t                       viewportCount;
+    const VkViewportWScalingNV*    pViewportWScalings;
+} VkPipelineViewportWScalingStateCreateInfoNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportWScalingNV* pViewportWScalings);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportWScalingNV*                 pViewportWScalings);
+#endif
+
+
+#define VK_EXT_direct_mode_display 1
+#define VK_EXT_DIRECT_MODE_DISPLAY_SPEC_VERSION 1
+#define VK_EXT_DIRECT_MODE_DISPLAY_EXTENSION_NAME "VK_EXT_direct_mode_display"
+typedef VkResult (VKAPI_PTR *PFN_vkReleaseDisplayEXT)(VkPhysicalDevice physicalDevice, VkDisplayKHR display);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkReleaseDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display);
+#endif
+
+
+#define VK_EXT_display_surface_counter 1
+#define VK_EXT_DISPLAY_SURFACE_COUNTER_SPEC_VERSION 1
+#define VK_EXT_DISPLAY_SURFACE_COUNTER_EXTENSION_NAME "VK_EXT_display_surface_counter"
+
+typedef enum VkSurfaceCounterFlagBitsEXT {
+    VK_SURFACE_COUNTER_VBLANK_BIT_EXT = 0x00000001,
+    VK_SURFACE_COUNTER_VBLANK_EXT = VK_SURFACE_COUNTER_VBLANK_BIT_EXT,
+    VK_SURFACE_COUNTER_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkSurfaceCounterFlagBitsEXT;
+typedef VkFlags VkSurfaceCounterFlagsEXT;
+typedef struct VkSurfaceCapabilities2EXT {
+    VkStructureType                  sType;
+    void*                            pNext;
+    uint32_t                         minImageCount;
+    uint32_t                         maxImageCount;
+    VkExtent2D                       currentExtent;
+    VkExtent2D                       minImageExtent;
+    VkExtent2D                       maxImageExtent;
+    uint32_t                         maxImageArrayLayers;
+    VkSurfaceTransformFlagsKHR       supportedTransforms;
+    VkSurfaceTransformFlagBitsKHR    currentTransform;
+    VkCompositeAlphaFlagsKHR         supportedCompositeAlpha;
+    VkImageUsageFlags                supportedUsageFlags;
+    VkSurfaceCounterFlagsEXT         supportedSurfaceCounters;
+} VkSurfaceCapabilities2EXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT)(VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSurfaceCapabilities2EXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSurfaceKHR                                surface,
+    VkSurfaceCapabilities2EXT*                  pSurfaceCapabilities);
+#endif
+
+
+#define VK_EXT_display_control 1
+#define VK_EXT_DISPLAY_CONTROL_SPEC_VERSION 1
+#define VK_EXT_DISPLAY_CONTROL_EXTENSION_NAME "VK_EXT_display_control"
+
+typedef enum VkDisplayPowerStateEXT {
+    VK_DISPLAY_POWER_STATE_OFF_EXT = 0,
+    VK_DISPLAY_POWER_STATE_SUSPEND_EXT = 1,
+    VK_DISPLAY_POWER_STATE_ON_EXT = 2,
+    VK_DISPLAY_POWER_STATE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDisplayPowerStateEXT;
+
+typedef enum VkDeviceEventTypeEXT {
+    VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT = 0,
+    VK_DEVICE_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceEventTypeEXT;
+
+typedef enum VkDisplayEventTypeEXT {
+    VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT = 0,
+    VK_DISPLAY_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDisplayEventTypeEXT;
+typedef struct VkDisplayPowerInfoEXT {
+    VkStructureType           sType;
+    const void*               pNext;
+    VkDisplayPowerStateEXT    powerState;
+} VkDisplayPowerInfoEXT;
+
+typedef struct VkDeviceEventInfoEXT {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkDeviceEventTypeEXT    deviceEvent;
+} VkDeviceEventInfoEXT;
+
+typedef struct VkDisplayEventInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkDisplayEventTypeEXT    displayEvent;
+} VkDisplayEventInfoEXT;
+
+typedef struct VkSwapchainCounterCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkSurfaceCounterFlagsEXT    surfaceCounters;
+} VkSwapchainCounterCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkDisplayPowerControlEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkRegisterDeviceEventEXT)(VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef VkResult (VKAPI_PTR *PFN_vkRegisterDisplayEventEXT)(VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSwapchainCounterEXT)(VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkDisplayPowerControlEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayPowerInfoEXT*                pDisplayPowerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDeviceEventEXT(
+    VkDevice                                    device,
+    const VkDeviceEventInfoEXT*                 pDeviceEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkRegisterDisplayEventEXT(
+    VkDevice                                    device,
+    VkDisplayKHR                                display,
+    const VkDisplayEventInfoEXT*                pDisplayEventInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkFence*                                    pFence);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSwapchainCounterEXT(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkSurfaceCounterFlagBitsEXT                 counter,
+    uint64_t*                                   pCounterValue);
+#endif
+
+
+#define VK_GOOGLE_display_timing 1
+#define VK_GOOGLE_DISPLAY_TIMING_SPEC_VERSION 1
+#define VK_GOOGLE_DISPLAY_TIMING_EXTENSION_NAME "VK_GOOGLE_display_timing"
+typedef struct VkRefreshCycleDurationGOOGLE {
+    uint64_t    refreshDuration;
+} VkRefreshCycleDurationGOOGLE;
+
+typedef struct VkPastPresentationTimingGOOGLE {
+    uint32_t    presentID;
+    uint64_t    desiredPresentTime;
+    uint64_t    actualPresentTime;
+    uint64_t    earliestPresentTime;
+    uint64_t    presentMargin;
+} VkPastPresentationTimingGOOGLE;
+
+typedef struct VkPresentTimeGOOGLE {
+    uint32_t    presentID;
+    uint64_t    desiredPresentTime;
+} VkPresentTimeGOOGLE;
+
+typedef struct VkPresentTimesInfoGOOGLE {
+    VkStructureType               sType;
+    const void*                   pNext;
+    uint32_t                      swapchainCount;
+    const VkPresentTimeGOOGLE*    pTimes;
+} VkPresentTimesInfoGOOGLE;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetRefreshCycleDurationGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, VkRefreshCycleDurationGOOGLE* pDisplayTimingProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPastPresentationTimingGOOGLE)(VkDevice device, VkSwapchainKHR swapchain, uint32_t* pPresentationTimingCount, VkPastPresentationTimingGOOGLE* pPresentationTimings);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRefreshCycleDurationGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    VkRefreshCycleDurationGOOGLE*               pDisplayTimingProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPastPresentationTimingGOOGLE(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapchain,
+    uint32_t*                                   pPresentationTimingCount,
+    VkPastPresentationTimingGOOGLE*             pPresentationTimings);
+#endif
+
+
+#define VK_NV_sample_mask_override_coverage 1
+#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_SPEC_VERSION 1
+#define VK_NV_SAMPLE_MASK_OVERRIDE_COVERAGE_EXTENSION_NAME "VK_NV_sample_mask_override_coverage"
+
+
+#define VK_NV_geometry_shader_passthrough 1
+#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_SPEC_VERSION 1
+#define VK_NV_GEOMETRY_SHADER_PASSTHROUGH_EXTENSION_NAME "VK_NV_geometry_shader_passthrough"
+
+
+#define VK_NV_viewport_array2 1
+#define VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION 1
+#define VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME "VK_NV_viewport_array2"
+#define VK_NV_VIEWPORT_ARRAY2_SPEC_VERSION VK_NV_VIEWPORT_ARRAY_2_SPEC_VERSION
+#define VK_NV_VIEWPORT_ARRAY2_EXTENSION_NAME VK_NV_VIEWPORT_ARRAY_2_EXTENSION_NAME
+
+
+#define VK_NVX_multiview_per_view_attributes 1
+#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_SPEC_VERSION 1
+#define VK_NVX_MULTIVIEW_PER_VIEW_ATTRIBUTES_EXTENSION_NAME "VK_NVX_multiview_per_view_attributes"
+typedef struct VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           perViewPositionAllComponents;
+} VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+
+
+#define VK_NV_viewport_swizzle 1
+#define VK_NV_VIEWPORT_SWIZZLE_SPEC_VERSION 1
+#define VK_NV_VIEWPORT_SWIZZLE_EXTENSION_NAME "VK_NV_viewport_swizzle"
+
+typedef enum VkViewportCoordinateSwizzleNV {
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV = 0,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV = 1,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV = 2,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV = 3,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV = 4,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV = 5,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV = 6,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV = 7,
+    VK_VIEWPORT_COORDINATE_SWIZZLE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkViewportCoordinateSwizzleNV;
+typedef VkFlags VkPipelineViewportSwizzleStateCreateFlagsNV;
+typedef struct VkViewportSwizzleNV {
+    VkViewportCoordinateSwizzleNV    x;
+    VkViewportCoordinateSwizzleNV    y;
+    VkViewportCoordinateSwizzleNV    z;
+    VkViewportCoordinateSwizzleNV    w;
+} VkViewportSwizzleNV;
+
+typedef struct VkPipelineViewportSwizzleStateCreateInfoNV {
+    VkStructureType                                sType;
+    const void*                                    pNext;
+    VkPipelineViewportSwizzleStateCreateFlagsNV    flags;
+    uint32_t                                       viewportCount;
+    const VkViewportSwizzleNV*                     pViewportSwizzles;
+} VkPipelineViewportSwizzleStateCreateInfoNV;
+
+
+
+#define VK_EXT_discard_rectangles 1
+#define VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION 1
+#define VK_EXT_DISCARD_RECTANGLES_EXTENSION_NAME "VK_EXT_discard_rectangles"
+
+typedef enum VkDiscardRectangleModeEXT {
+    VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT = 0,
+    VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT = 1,
+    VK_DISCARD_RECTANGLE_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDiscardRectangleModeEXT;
+typedef VkFlags VkPipelineDiscardRectangleStateCreateFlagsEXT;
+typedef struct VkPhysicalDeviceDiscardRectanglePropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxDiscardRectangles;
+} VkPhysicalDeviceDiscardRectanglePropertiesEXT;
+
+typedef struct VkPipelineDiscardRectangleStateCreateInfoEXT {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    VkPipelineDiscardRectangleStateCreateFlagsEXT    flags;
+    VkDiscardRectangleModeEXT                        discardRectangleMode;
+    uint32_t                                         discardRectangleCount;
+    const VkRect2D*                                  pDiscardRectangles;
+} VkPipelineDiscardRectangleStateCreateInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetDiscardRectangleEXT)(VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDiscardRectangleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstDiscardRectangle,
+    uint32_t                                    discardRectangleCount,
+    const VkRect2D*                             pDiscardRectangles);
+#endif
+
+
+#define VK_EXT_conservative_rasterization 1
+#define VK_EXT_CONSERVATIVE_RASTERIZATION_SPEC_VERSION 1
+#define VK_EXT_CONSERVATIVE_RASTERIZATION_EXTENSION_NAME "VK_EXT_conservative_rasterization"
+
+typedef enum VkConservativeRasterizationModeEXT {
+    VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT = 0,
+    VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT = 1,
+    VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT = 2,
+    VK_CONSERVATIVE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkConservativeRasterizationModeEXT;
+typedef VkFlags VkPipelineRasterizationConservativeStateCreateFlagsEXT;
+typedef struct VkPhysicalDeviceConservativeRasterizationPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    float              primitiveOverestimationSize;
+    float              maxExtraPrimitiveOverestimationSize;
+    float              extraPrimitiveOverestimationSizeGranularity;
+    VkBool32           primitiveUnderestimation;
+    VkBool32           conservativePointAndLineRasterization;
+    VkBool32           degenerateTrianglesRasterized;
+    VkBool32           degenerateLinesRasterized;
+    VkBool32           fullyCoveredFragmentShaderInputVariable;
+    VkBool32           conservativeRasterizationPostDepthCoverage;
+} VkPhysicalDeviceConservativeRasterizationPropertiesEXT;
+
+typedef struct VkPipelineRasterizationConservativeStateCreateInfoEXT {
+    VkStructureType                                           sType;
+    const void*                                               pNext;
+    VkPipelineRasterizationConservativeStateCreateFlagsEXT    flags;
+    VkConservativeRasterizationModeEXT                        conservativeRasterizationMode;
+    float                                                     extraPrimitiveOverestimationSize;
+} VkPipelineRasterizationConservativeStateCreateInfoEXT;
+
+
+
+#define VK_EXT_depth_clip_enable 1
+#define VK_EXT_DEPTH_CLIP_ENABLE_SPEC_VERSION 1
+#define VK_EXT_DEPTH_CLIP_ENABLE_EXTENSION_NAME "VK_EXT_depth_clip_enable"
+typedef VkFlags VkPipelineRasterizationDepthClipStateCreateFlagsEXT;
+typedef struct VkPhysicalDeviceDepthClipEnableFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           depthClipEnable;
+} VkPhysicalDeviceDepthClipEnableFeaturesEXT;
+
+typedef struct VkPipelineRasterizationDepthClipStateCreateInfoEXT {
+    VkStructureType                                        sType;
+    const void*                                            pNext;
+    VkPipelineRasterizationDepthClipStateCreateFlagsEXT    flags;
+    VkBool32                                               depthClipEnable;
+} VkPipelineRasterizationDepthClipStateCreateInfoEXT;
+
+
+
+#define VK_EXT_swapchain_colorspace 1
+#define VK_EXT_SWAPCHAIN_COLOR_SPACE_SPEC_VERSION 4
+#define VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME "VK_EXT_swapchain_colorspace"
+
+
+#define VK_EXT_hdr_metadata 1
+#define VK_EXT_HDR_METADATA_SPEC_VERSION  2
+#define VK_EXT_HDR_METADATA_EXTENSION_NAME "VK_EXT_hdr_metadata"
+typedef struct VkXYColorEXT {
+    float    x;
+    float    y;
+} VkXYColorEXT;
+
+typedef struct VkHdrMetadataEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkXYColorEXT       displayPrimaryRed;
+    VkXYColorEXT       displayPrimaryGreen;
+    VkXYColorEXT       displayPrimaryBlue;
+    VkXYColorEXT       whitePoint;
+    float              maxLuminance;
+    float              minLuminance;
+    float              maxContentLightLevel;
+    float              maxFrameAverageLightLevel;
+} VkHdrMetadataEXT;
+
+typedef void (VKAPI_PTR *PFN_vkSetHdrMetadataEXT)(VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkSetHdrMetadataEXT(
+    VkDevice                                    device,
+    uint32_t                                    swapchainCount,
+    const VkSwapchainKHR*                       pSwapchains,
+    const VkHdrMetadataEXT*                     pMetadata);
+#endif
+
+
+#define VK_EXT_external_memory_dma_buf 1
+#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_SPEC_VERSION 1
+#define VK_EXT_EXTERNAL_MEMORY_DMA_BUF_EXTENSION_NAME "VK_EXT_external_memory_dma_buf"
+
+
+#define VK_EXT_queue_family_foreign 1
+#define VK_EXT_QUEUE_FAMILY_FOREIGN_SPEC_VERSION 1
+#define VK_EXT_QUEUE_FAMILY_FOREIGN_EXTENSION_NAME "VK_EXT_queue_family_foreign"
+#define VK_QUEUE_FAMILY_FOREIGN_EXT       (~2U)
+
+
+#define VK_EXT_debug_utils 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkDebugUtilsMessengerEXT)
+#define VK_EXT_DEBUG_UTILS_SPEC_VERSION   2
+#define VK_EXT_DEBUG_UTILS_EXTENSION_NAME "VK_EXT_debug_utils"
+typedef VkFlags VkDebugUtilsMessengerCallbackDataFlagsEXT;
+
+typedef enum VkDebugUtilsMessageSeverityFlagBitsEXT {
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT = 0x00000001,
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT = 0x00000010,
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT = 0x00000100,
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT = 0x00001000,
+    VK_DEBUG_UTILS_MESSAGE_SEVERITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugUtilsMessageSeverityFlagBitsEXT;
+
+typedef enum VkDebugUtilsMessageTypeFlagBitsEXT {
+    VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT = 0x00000001,
+    VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT = 0x00000002,
+    VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT = 0x00000004,
+    VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT = 0x00000008,
+    VK_DEBUG_UTILS_MESSAGE_TYPE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDebugUtilsMessageTypeFlagBitsEXT;
+typedef VkFlags VkDebugUtilsMessageTypeFlagsEXT;
+typedef VkFlags VkDebugUtilsMessageSeverityFlagsEXT;
+typedef VkFlags VkDebugUtilsMessengerCreateFlagsEXT;
+typedef struct VkDebugUtilsLabelEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    const char*        pLabelName;
+    float              color[4];
+} VkDebugUtilsLabelEXT;
+
+typedef struct VkDebugUtilsObjectNameInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkObjectType       objectType;
+    uint64_t           objectHandle;
+    const char*        pObjectName;
+} VkDebugUtilsObjectNameInfoEXT;
+
+typedef struct VkDebugUtilsMessengerCallbackDataEXT {
+    VkStructureType                              sType;
+    const void*                                  pNext;
+    VkDebugUtilsMessengerCallbackDataFlagsEXT    flags;
+    const char*                                  pMessageIdName;
+    int32_t                                      messageIdNumber;
+    const char*                                  pMessage;
+    uint32_t                                     queueLabelCount;
+    const VkDebugUtilsLabelEXT*                  pQueueLabels;
+    uint32_t                                     cmdBufLabelCount;
+    const VkDebugUtilsLabelEXT*                  pCmdBufLabels;
+    uint32_t                                     objectCount;
+    const VkDebugUtilsObjectNameInfoEXT*         pObjects;
+} VkDebugUtilsMessengerCallbackDataEXT;
+
+typedef VkBool32 (VKAPI_PTR *PFN_vkDebugUtilsMessengerCallbackEXT)(
+    VkDebugUtilsMessageSeverityFlagBitsEXT           messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT                  messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT*      pCallbackData,
+    void*                                            pUserData);
+
+typedef struct VkDebugUtilsMessengerCreateInfoEXT {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkDebugUtilsMessengerCreateFlagsEXT     flags;
+    VkDebugUtilsMessageSeverityFlagsEXT     messageSeverity;
+    VkDebugUtilsMessageTypeFlagsEXT         messageType;
+    PFN_vkDebugUtilsMessengerCallbackEXT    pfnUserCallback;
+    void*                                   pUserData;
+} VkDebugUtilsMessengerCreateInfoEXT;
+
+typedef struct VkDebugUtilsObjectTagInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkObjectType       objectType;
+    uint64_t           objectHandle;
+    uint64_t           tagName;
+    size_t             tagSize;
+    const void*        pTag;
+} VkDebugUtilsObjectTagInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectNameEXT)(VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkSetDebugUtilsObjectTagEXT)(VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo);
+typedef void (VKAPI_PTR *PFN_vkQueueBeginDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo);
+typedef void (VKAPI_PTR *PFN_vkQueueEndDebugUtilsLabelEXT)(VkQueue queue);
+typedef void (VKAPI_PTR *PFN_vkQueueInsertDebugUtilsLabelEXT)(VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBeginDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdEndDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer);
+typedef void (VKAPI_PTR *PFN_vkCmdInsertDebugUtilsLabelEXT)(VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDebugUtilsMessengerEXT)(VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger);
+typedef void (VKAPI_PTR *PFN_vkDestroyDebugUtilsMessengerEXT)(VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkSubmitDebugUtilsMessageEXT)(VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectNameEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectNameInfoEXT*        pNameInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetDebugUtilsObjectTagEXT(
+    VkDevice                                    device,
+    const VkDebugUtilsObjectTagInfoEXT*         pTagInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkQueueBeginDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkQueueEndDebugUtilsLabelEXT(
+    VkQueue                                     queue);
+
+VKAPI_ATTR void VKAPI_CALL vkQueueInsertDebugUtilsLabelEXT(
+    VkQueue                                     queue,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBeginDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdEndDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdInsertDebugUtilsLabelEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkDebugUtilsLabelEXT*                 pLabelInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    const VkDebugUtilsMessengerCreateInfoEXT*   pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkDebugUtilsMessengerEXT*                   pMessenger);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyDebugUtilsMessengerEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessengerEXT                    messenger,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkSubmitDebugUtilsMessageEXT(
+    VkInstance                                  instance,
+    VkDebugUtilsMessageSeverityFlagBitsEXT      messageSeverity,
+    VkDebugUtilsMessageTypeFlagsEXT             messageTypes,
+    const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData);
+#endif
+
+
+#define VK_EXT_sampler_filter_minmax 1
+#define VK_EXT_SAMPLER_FILTER_MINMAX_SPEC_VERSION 2
+#define VK_EXT_SAMPLER_FILTER_MINMAX_EXTENSION_NAME "VK_EXT_sampler_filter_minmax"
+typedef VkSamplerReductionMode VkSamplerReductionModeEXT;
+
+typedef VkSamplerReductionModeCreateInfo VkSamplerReductionModeCreateInfoEXT;
+
+typedef VkPhysicalDeviceSamplerFilterMinmaxProperties VkPhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
+
+
+
+#define VK_AMD_gpu_shader_int16 1
+#define VK_AMD_GPU_SHADER_INT16_SPEC_VERSION 2
+#define VK_AMD_GPU_SHADER_INT16_EXTENSION_NAME "VK_AMD_gpu_shader_int16"
+
+
+#define VK_AMD_mixed_attachment_samples 1
+#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_SPEC_VERSION 1
+#define VK_AMD_MIXED_ATTACHMENT_SAMPLES_EXTENSION_NAME "VK_AMD_mixed_attachment_samples"
+
+
+#define VK_AMD_shader_fragment_mask 1
+#define VK_AMD_SHADER_FRAGMENT_MASK_SPEC_VERSION 1
+#define VK_AMD_SHADER_FRAGMENT_MASK_EXTENSION_NAME "VK_AMD_shader_fragment_mask"
+
+
+#define VK_EXT_inline_uniform_block 1
+#define VK_EXT_INLINE_UNIFORM_BLOCK_SPEC_VERSION 1
+#define VK_EXT_INLINE_UNIFORM_BLOCK_EXTENSION_NAME "VK_EXT_inline_uniform_block"
+typedef VkPhysicalDeviceInlineUniformBlockFeatures VkPhysicalDeviceInlineUniformBlockFeaturesEXT;
+
+typedef VkPhysicalDeviceInlineUniformBlockProperties VkPhysicalDeviceInlineUniformBlockPropertiesEXT;
+
+typedef VkWriteDescriptorSetInlineUniformBlock VkWriteDescriptorSetInlineUniformBlockEXT;
+
+typedef VkDescriptorPoolInlineUniformBlockCreateInfo VkDescriptorPoolInlineUniformBlockCreateInfoEXT;
+
+
+
+#define VK_EXT_shader_stencil_export 1
+#define VK_EXT_SHADER_STENCIL_EXPORT_SPEC_VERSION 1
+#define VK_EXT_SHADER_STENCIL_EXPORT_EXTENSION_NAME "VK_EXT_shader_stencil_export"
+
+
+#define VK_EXT_sample_locations 1
+#define VK_EXT_SAMPLE_LOCATIONS_SPEC_VERSION 1
+#define VK_EXT_SAMPLE_LOCATIONS_EXTENSION_NAME "VK_EXT_sample_locations"
+typedef struct VkSampleLocationEXT {
+    float    x;
+    float    y;
+} VkSampleLocationEXT;
+
+typedef struct VkSampleLocationsInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkSampleCountFlagBits         sampleLocationsPerPixel;
+    VkExtent2D                    sampleLocationGridSize;
+    uint32_t                      sampleLocationsCount;
+    const VkSampleLocationEXT*    pSampleLocations;
+} VkSampleLocationsInfoEXT;
+
+typedef struct VkAttachmentSampleLocationsEXT {
+    uint32_t                    attachmentIndex;
+    VkSampleLocationsInfoEXT    sampleLocationsInfo;
+} VkAttachmentSampleLocationsEXT;
+
+typedef struct VkSubpassSampleLocationsEXT {
+    uint32_t                    subpassIndex;
+    VkSampleLocationsInfoEXT    sampleLocationsInfo;
+} VkSubpassSampleLocationsEXT;
+
+typedef struct VkRenderPassSampleLocationsBeginInfoEXT {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    uint32_t                                 attachmentInitialSampleLocationsCount;
+    const VkAttachmentSampleLocationsEXT*    pAttachmentInitialSampleLocations;
+    uint32_t                                 postSubpassSampleLocationsCount;
+    const VkSubpassSampleLocationsEXT*       pPostSubpassSampleLocations;
+} VkRenderPassSampleLocationsBeginInfoEXT;
+
+typedef struct VkPipelineSampleLocationsStateCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkBool32                    sampleLocationsEnable;
+    VkSampleLocationsInfoEXT    sampleLocationsInfo;
+} VkPipelineSampleLocationsStateCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceSampleLocationsPropertiesEXT {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkSampleCountFlags    sampleLocationSampleCounts;
+    VkExtent2D            maxSampleLocationGridSize;
+    float                 sampleLocationCoordinateRange[2];
+    uint32_t              sampleLocationSubPixelBits;
+    VkBool32              variableSampleLocations;
+} VkPhysicalDeviceSampleLocationsPropertiesEXT;
+
+typedef struct VkMultisamplePropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent2D         maxSampleLocationGridSize;
+} VkMultisamplePropertiesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEXT)(VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo);
+typedef void (VKAPI_PTR *PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT)(VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkSampleLocationsInfoEXT*             pSampleLocationsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPhysicalDeviceMultisamplePropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    VkSampleCountFlagBits                       samples,
+    VkMultisamplePropertiesEXT*                 pMultisampleProperties);
+#endif
+
+
+#define VK_EXT_blend_operation_advanced 1
+#define VK_EXT_BLEND_OPERATION_ADVANCED_SPEC_VERSION 2
+#define VK_EXT_BLEND_OPERATION_ADVANCED_EXTENSION_NAME "VK_EXT_blend_operation_advanced"
+
+typedef enum VkBlendOverlapEXT {
+    VK_BLEND_OVERLAP_UNCORRELATED_EXT = 0,
+    VK_BLEND_OVERLAP_DISJOINT_EXT = 1,
+    VK_BLEND_OVERLAP_CONJOINT_EXT = 2,
+    VK_BLEND_OVERLAP_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkBlendOverlapEXT;
+typedef struct VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           advancedBlendCoherentOperations;
+} VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+
+typedef struct VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           advancedBlendMaxColorAttachments;
+    VkBool32           advancedBlendIndependentBlend;
+    VkBool32           advancedBlendNonPremultipliedSrcColor;
+    VkBool32           advancedBlendNonPremultipliedDstColor;
+    VkBool32           advancedBlendCorrelatedOverlap;
+    VkBool32           advancedBlendAllOperations;
+} VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+
+typedef struct VkPipelineColorBlendAdvancedStateCreateInfoEXT {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkBool32             srcPremultiplied;
+    VkBool32             dstPremultiplied;
+    VkBlendOverlapEXT    blendOverlap;
+} VkPipelineColorBlendAdvancedStateCreateInfoEXT;
+
+
+
+#define VK_NV_fragment_coverage_to_color 1
+#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_SPEC_VERSION 1
+#define VK_NV_FRAGMENT_COVERAGE_TO_COLOR_EXTENSION_NAME "VK_NV_fragment_coverage_to_color"
+typedef VkFlags VkPipelineCoverageToColorStateCreateFlagsNV;
+typedef struct VkPipelineCoverageToColorStateCreateInfoNV {
+    VkStructureType                                sType;
+    const void*                                    pNext;
+    VkPipelineCoverageToColorStateCreateFlagsNV    flags;
+    VkBool32                                       coverageToColorEnable;
+    uint32_t                                       coverageToColorLocation;
+} VkPipelineCoverageToColorStateCreateInfoNV;
+
+
+
+#define VK_NV_framebuffer_mixed_samples 1
+#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_SPEC_VERSION 1
+#define VK_NV_FRAMEBUFFER_MIXED_SAMPLES_EXTENSION_NAME "VK_NV_framebuffer_mixed_samples"
+
+typedef enum VkCoverageModulationModeNV {
+    VK_COVERAGE_MODULATION_MODE_NONE_NV = 0,
+    VK_COVERAGE_MODULATION_MODE_RGB_NV = 1,
+    VK_COVERAGE_MODULATION_MODE_ALPHA_NV = 2,
+    VK_COVERAGE_MODULATION_MODE_RGBA_NV = 3,
+    VK_COVERAGE_MODULATION_MODE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkCoverageModulationModeNV;
+typedef VkFlags VkPipelineCoverageModulationStateCreateFlagsNV;
+typedef struct VkPipelineCoverageModulationStateCreateInfoNV {
+    VkStructureType                                   sType;
+    const void*                                       pNext;
+    VkPipelineCoverageModulationStateCreateFlagsNV    flags;
+    VkCoverageModulationModeNV                        coverageModulationMode;
+    VkBool32                                          coverageModulationTableEnable;
+    uint32_t                                          coverageModulationTableCount;
+    const float*                                      pCoverageModulationTable;
+} VkPipelineCoverageModulationStateCreateInfoNV;
+
+
+
+#define VK_NV_fill_rectangle 1
+#define VK_NV_FILL_RECTANGLE_SPEC_VERSION 1
+#define VK_NV_FILL_RECTANGLE_EXTENSION_NAME "VK_NV_fill_rectangle"
+
+
+#define VK_NV_shader_sm_builtins 1
+#define VK_NV_SHADER_SM_BUILTINS_SPEC_VERSION 1
+#define VK_NV_SHADER_SM_BUILTINS_EXTENSION_NAME "VK_NV_shader_sm_builtins"
+typedef struct VkPhysicalDeviceShaderSMBuiltinsPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           shaderSMCount;
+    uint32_t           shaderWarpsPerSM;
+} VkPhysicalDeviceShaderSMBuiltinsPropertiesNV;
+
+typedef struct VkPhysicalDeviceShaderSMBuiltinsFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderSMBuiltins;
+} VkPhysicalDeviceShaderSMBuiltinsFeaturesNV;
+
+
+
+#define VK_EXT_post_depth_coverage 1
+#define VK_EXT_POST_DEPTH_COVERAGE_SPEC_VERSION 1
+#define VK_EXT_POST_DEPTH_COVERAGE_EXTENSION_NAME "VK_EXT_post_depth_coverage"
+
+
+#define VK_EXT_image_drm_format_modifier 1
+#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_SPEC_VERSION 2
+#define VK_EXT_IMAGE_DRM_FORMAT_MODIFIER_EXTENSION_NAME "VK_EXT_image_drm_format_modifier"
+typedef struct VkDrmFormatModifierPropertiesEXT {
+    uint64_t                drmFormatModifier;
+    uint32_t                drmFormatModifierPlaneCount;
+    VkFormatFeatureFlags    drmFormatModifierTilingFeatures;
+} VkDrmFormatModifierPropertiesEXT;
+
+typedef struct VkDrmFormatModifierPropertiesListEXT {
+    VkStructureType                      sType;
+    void*                                pNext;
+    uint32_t                             drmFormatModifierCount;
+    VkDrmFormatModifierPropertiesEXT*    pDrmFormatModifierProperties;
+} VkDrmFormatModifierPropertiesListEXT;
+
+typedef struct VkPhysicalDeviceImageDrmFormatModifierInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           drmFormatModifier;
+    VkSharingMode      sharingMode;
+    uint32_t           queueFamilyIndexCount;
+    const uint32_t*    pQueueFamilyIndices;
+} VkPhysicalDeviceImageDrmFormatModifierInfoEXT;
+
+typedef struct VkImageDrmFormatModifierListCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           drmFormatModifierCount;
+    const uint64_t*    pDrmFormatModifiers;
+} VkImageDrmFormatModifierListCreateInfoEXT;
+
+typedef struct VkImageDrmFormatModifierExplicitCreateInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    uint64_t                      drmFormatModifier;
+    uint32_t                      drmFormatModifierPlaneCount;
+    const VkSubresourceLayout*    pPlaneLayouts;
+} VkImageDrmFormatModifierExplicitCreateInfoEXT;
+
+typedef struct VkImageDrmFormatModifierPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           drmFormatModifier;
+} VkImageDrmFormatModifierPropertiesEXT;
+
+typedef struct VkDrmFormatModifierProperties2EXT {
+    uint64_t                 drmFormatModifier;
+    uint32_t                 drmFormatModifierPlaneCount;
+    VkFormatFeatureFlags2    drmFormatModifierTilingFeatures;
+} VkDrmFormatModifierProperties2EXT;
+
+typedef struct VkDrmFormatModifierPropertiesList2EXT {
+    VkStructureType                       sType;
+    void*                                 pNext;
+    uint32_t                              drmFormatModifierCount;
+    VkDrmFormatModifierProperties2EXT*    pDrmFormatModifierProperties;
+} VkDrmFormatModifierPropertiesList2EXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetImageDrmFormatModifierPropertiesEXT)(VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetImageDrmFormatModifierPropertiesEXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    VkImageDrmFormatModifierPropertiesEXT*      pProperties);
+#endif
+
+
+#define VK_EXT_validation_cache 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkValidationCacheEXT)
+#define VK_EXT_VALIDATION_CACHE_SPEC_VERSION 1
+#define VK_EXT_VALIDATION_CACHE_EXTENSION_NAME "VK_EXT_validation_cache"
+
+typedef enum VkValidationCacheHeaderVersionEXT {
+    VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT = 1,
+    VK_VALIDATION_CACHE_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationCacheHeaderVersionEXT;
+typedef VkFlags VkValidationCacheCreateFlagsEXT;
+typedef struct VkValidationCacheCreateInfoEXT {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkValidationCacheCreateFlagsEXT    flags;
+    size_t                             initialDataSize;
+    const void*                        pInitialData;
+} VkValidationCacheCreateInfoEXT;
+
+typedef struct VkShaderModuleValidationCacheCreateInfoEXT {
+    VkStructureType         sType;
+    const void*             pNext;
+    VkValidationCacheEXT    validationCache;
+} VkShaderModuleValidationCacheCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateValidationCacheEXT)(VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache);
+typedef void (VKAPI_PTR *PFN_vkDestroyValidationCacheEXT)(VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkMergeValidationCachesEXT)(VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches);
+typedef VkResult (VKAPI_PTR *PFN_vkGetValidationCacheDataEXT)(VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateValidationCacheEXT(
+    VkDevice                                    device,
+    const VkValidationCacheCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkValidationCacheEXT*                       pValidationCache);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyValidationCacheEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkMergeValidationCachesEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        dstCache,
+    uint32_t                                    srcCacheCount,
+    const VkValidationCacheEXT*                 pSrcCaches);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetValidationCacheDataEXT(
+    VkDevice                                    device,
+    VkValidationCacheEXT                        validationCache,
+    size_t*                                     pDataSize,
+    void*                                       pData);
+#endif
+
+
+#define VK_EXT_descriptor_indexing 1
+#define VK_EXT_DESCRIPTOR_INDEXING_SPEC_VERSION 2
+#define VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME "VK_EXT_descriptor_indexing"
+typedef VkDescriptorBindingFlagBits VkDescriptorBindingFlagBitsEXT;
+
+typedef VkDescriptorBindingFlags VkDescriptorBindingFlagsEXT;
+
+typedef VkDescriptorSetLayoutBindingFlagsCreateInfo VkDescriptorSetLayoutBindingFlagsCreateInfoEXT;
+
+typedef VkPhysicalDeviceDescriptorIndexingFeatures VkPhysicalDeviceDescriptorIndexingFeaturesEXT;
+
+typedef VkPhysicalDeviceDescriptorIndexingProperties VkPhysicalDeviceDescriptorIndexingPropertiesEXT;
+
+typedef VkDescriptorSetVariableDescriptorCountAllocateInfo VkDescriptorSetVariableDescriptorCountAllocateInfoEXT;
+
+typedef VkDescriptorSetVariableDescriptorCountLayoutSupport VkDescriptorSetVariableDescriptorCountLayoutSupportEXT;
+
+
+
+#define VK_EXT_shader_viewport_index_layer 1
+#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_SPEC_VERSION 1
+#define VK_EXT_SHADER_VIEWPORT_INDEX_LAYER_EXTENSION_NAME "VK_EXT_shader_viewport_index_layer"
+
+
+#define VK_NV_shading_rate_image 1
+#define VK_NV_SHADING_RATE_IMAGE_SPEC_VERSION 3
+#define VK_NV_SHADING_RATE_IMAGE_EXTENSION_NAME "VK_NV_shading_rate_image"
+
+typedef enum VkShadingRatePaletteEntryNV {
+    VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV = 0,
+    VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV = 1,
+    VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV = 2,
+    VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV = 3,
+    VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV = 4,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV = 5,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV = 6,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV = 7,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV = 8,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV = 9,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV = 10,
+    VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV = 11,
+    VK_SHADING_RATE_PALETTE_ENTRY_MAX_ENUM_NV = 0x7FFFFFFF
+} VkShadingRatePaletteEntryNV;
+
+typedef enum VkCoarseSampleOrderTypeNV {
+    VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV = 0,
+    VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV = 1,
+    VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV = 2,
+    VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV = 3,
+    VK_COARSE_SAMPLE_ORDER_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkCoarseSampleOrderTypeNV;
+typedef struct VkShadingRatePaletteNV {
+    uint32_t                              shadingRatePaletteEntryCount;
+    const VkShadingRatePaletteEntryNV*    pShadingRatePaletteEntries;
+} VkShadingRatePaletteNV;
+
+typedef struct VkPipelineViewportShadingRateImageStateCreateInfoNV {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkBool32                         shadingRateImageEnable;
+    uint32_t                         viewportCount;
+    const VkShadingRatePaletteNV*    pShadingRatePalettes;
+} VkPipelineViewportShadingRateImageStateCreateInfoNV;
+
+typedef struct VkPhysicalDeviceShadingRateImageFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shadingRateImage;
+    VkBool32           shadingRateCoarseSampleOrder;
+} VkPhysicalDeviceShadingRateImageFeaturesNV;
+
+typedef struct VkPhysicalDeviceShadingRateImagePropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent2D         shadingRateTexelSize;
+    uint32_t           shadingRatePaletteSize;
+    uint32_t           shadingRateMaxCoarseSamples;
+} VkPhysicalDeviceShadingRateImagePropertiesNV;
+
+typedef struct VkCoarseSampleLocationNV {
+    uint32_t    pixelX;
+    uint32_t    pixelY;
+    uint32_t    sample;
+} VkCoarseSampleLocationNV;
+
+typedef struct VkCoarseSampleOrderCustomNV {
+    VkShadingRatePaletteEntryNV        shadingRate;
+    uint32_t                           sampleCount;
+    uint32_t                           sampleLocationCount;
+    const VkCoarseSampleLocationNV*    pSampleLocations;
+} VkCoarseSampleOrderCustomNV;
+
+typedef struct VkPipelineViewportCoarseSampleOrderStateCreateInfoNV {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkCoarseSampleOrderTypeNV             sampleOrderType;
+    uint32_t                              customSampleOrderCount;
+    const VkCoarseSampleOrderCustomNV*    pCustomSampleOrders;
+} VkPipelineViewportCoarseSampleOrderStateCreateInfoNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBindShadingRateImageNV)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportShadingRatePaletteNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkShadingRatePaletteNV* pShadingRatePalettes);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoarseSampleOrderNV)(VkCommandBuffer commandBuffer, VkCoarseSampleOrderTypeNV sampleOrderType, uint32_t customSampleOrderCount, const VkCoarseSampleOrderCustomNV* pCustomSampleOrders);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBindShadingRateImageNV(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportShadingRatePaletteNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkShadingRatePaletteNV*               pShadingRatePalettes);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoarseSampleOrderNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoarseSampleOrderTypeNV                   sampleOrderType,
+    uint32_t                                    customSampleOrderCount,
+    const VkCoarseSampleOrderCustomNV*          pCustomSampleOrders);
+#endif
+
+
+#define VK_NV_ray_tracing 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureNV)
+#define VK_NV_RAY_TRACING_SPEC_VERSION    3
+#define VK_NV_RAY_TRACING_EXTENSION_NAME  "VK_NV_ray_tracing"
+#define VK_SHADER_UNUSED_KHR              (~0U)
+#define VK_SHADER_UNUSED_NV               VK_SHADER_UNUSED_KHR
+
+typedef enum VkRayTracingShaderGroupTypeKHR {
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR = 0,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR = 1,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR = 2,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_NV = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR,
+    VK_RAY_TRACING_SHADER_GROUP_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkRayTracingShaderGroupTypeKHR;
+typedef VkRayTracingShaderGroupTypeKHR VkRayTracingShaderGroupTypeNV;
+
+
+typedef enum VkGeometryTypeKHR {
+    VK_GEOMETRY_TYPE_TRIANGLES_KHR = 0,
+    VK_GEOMETRY_TYPE_AABBS_KHR = 1,
+    VK_GEOMETRY_TYPE_INSTANCES_KHR = 2,
+    VK_GEOMETRY_TYPE_TRIANGLES_NV = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
+    VK_GEOMETRY_TYPE_AABBS_NV = VK_GEOMETRY_TYPE_AABBS_KHR,
+    VK_GEOMETRY_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkGeometryTypeKHR;
+typedef VkGeometryTypeKHR VkGeometryTypeNV;
+
+
+typedef enum VkAccelerationStructureTypeKHR {
+    VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR = 0,
+    VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR = 1,
+    VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR = 2,
+    VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR,
+    VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_NV = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR,
+    VK_ACCELERATION_STRUCTURE_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureTypeKHR;
+typedef VkAccelerationStructureTypeKHR VkAccelerationStructureTypeNV;
+
+
+typedef enum VkCopyAccelerationStructureModeKHR {
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR = 0,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR = 1,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR = 2,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR = 3,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_NV = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR,
+    VK_COPY_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkCopyAccelerationStructureModeKHR;
+typedef VkCopyAccelerationStructureModeKHR VkCopyAccelerationStructureModeNV;
+
+
+typedef enum VkAccelerationStructureMemoryRequirementsTypeNV {
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV = 0,
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV = 1,
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV = 2,
+    VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkAccelerationStructureMemoryRequirementsTypeNV;
+
+typedef enum VkGeometryFlagBitsKHR {
+    VK_GEOMETRY_OPAQUE_BIT_KHR = 0x00000001,
+    VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR = 0x00000002,
+    VK_GEOMETRY_OPAQUE_BIT_NV = VK_GEOMETRY_OPAQUE_BIT_KHR,
+    VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_NV = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR,
+    VK_GEOMETRY_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkGeometryFlagBitsKHR;
+typedef VkFlags VkGeometryFlagsKHR;
+typedef VkGeometryFlagsKHR VkGeometryFlagsNV;
+
+typedef VkGeometryFlagBitsKHR VkGeometryFlagBitsNV;
+
+
+typedef enum VkGeometryInstanceFlagBitsKHR {
+    VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR = 0x00000001,
+    VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR = 0x00000002,
+    VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR = 0x00000004,
+    VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR = 0x00000008,
+    VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT = 0x00000010,
+    VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000020,
+    VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR,
+    VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
+    VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
+    VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
+    VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_NV = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
+    VK_GEOMETRY_INSTANCE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkGeometryInstanceFlagBitsKHR;
+typedef VkFlags VkGeometryInstanceFlagsKHR;
+typedef VkGeometryInstanceFlagsKHR VkGeometryInstanceFlagsNV;
+
+typedef VkGeometryInstanceFlagBitsKHR VkGeometryInstanceFlagBitsNV;
+
+
+typedef enum VkBuildAccelerationStructureFlagBitsKHR {
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR = 0x00000001,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR = 0x00000002,
+    VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR = 0x00000004,
+    VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR = 0x00000008,
+    VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR = 0x00000010,
+    VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV = 0x00000020,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT = 0x00000040,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT = 0x00000080,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT = 0x00000100,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
+    VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
+    VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
+    VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
+    VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_NV = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR,
+    VK_BUILD_ACCELERATION_STRUCTURE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkBuildAccelerationStructureFlagBitsKHR;
+typedef VkFlags VkBuildAccelerationStructureFlagsKHR;
+typedef VkBuildAccelerationStructureFlagsKHR VkBuildAccelerationStructureFlagsNV;
+
+typedef VkBuildAccelerationStructureFlagBitsKHR VkBuildAccelerationStructureFlagBitsNV;
+
+typedef struct VkRayTracingShaderGroupCreateInfoNV {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkRayTracingShaderGroupTypeKHR    type;
+    uint32_t                          generalShader;
+    uint32_t                          closestHitShader;
+    uint32_t                          anyHitShader;
+    uint32_t                          intersectionShader;
+} VkRayTracingShaderGroupCreateInfoNV;
+
+typedef struct VkRayTracingPipelineCreateInfoNV {
+    VkStructureType                               sType;
+    const void*                                   pNext;
+    VkPipelineCreateFlags                         flags;
+    uint32_t                                      stageCount;
+    const VkPipelineShaderStageCreateInfo*        pStages;
+    uint32_t                                      groupCount;
+    const VkRayTracingShaderGroupCreateInfoNV*    pGroups;
+    uint32_t                                      maxRecursionDepth;
+    VkPipelineLayout                              layout;
+    VkPipeline                                    basePipelineHandle;
+    int32_t                                       basePipelineIndex;
+} VkRayTracingPipelineCreateInfoNV;
+
+typedef struct VkGeometryTrianglesNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           vertexData;
+    VkDeviceSize       vertexOffset;
+    uint32_t           vertexCount;
+    VkDeviceSize       vertexStride;
+    VkFormat           vertexFormat;
+    VkBuffer           indexData;
+    VkDeviceSize       indexOffset;
+    uint32_t           indexCount;
+    VkIndexType        indexType;
+    VkBuffer           transformData;
+    VkDeviceSize       transformOffset;
+} VkGeometryTrianglesNV;
+
+typedef struct VkGeometryAABBNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           aabbData;
+    uint32_t           numAABBs;
+    uint32_t           stride;
+    VkDeviceSize       offset;
+} VkGeometryAABBNV;
+
+typedef struct VkGeometryDataNV {
+    VkGeometryTrianglesNV    triangles;
+    VkGeometryAABBNV         aabbs;
+} VkGeometryDataNV;
+
+typedef struct VkGeometryNV {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkGeometryTypeKHR     geometryType;
+    VkGeometryDataNV      geometry;
+    VkGeometryFlagsKHR    flags;
+} VkGeometryNV;
+
+typedef struct VkAccelerationStructureInfoNV {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkAccelerationStructureTypeNV          type;
+    VkBuildAccelerationStructureFlagsNV    flags;
+    uint32_t                               instanceCount;
+    uint32_t                               geometryCount;
+    const VkGeometryNV*                    pGeometries;
+} VkAccelerationStructureInfoNV;
+
+typedef struct VkAccelerationStructureCreateInfoNV {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDeviceSize                     compactedSize;
+    VkAccelerationStructureInfoNV    info;
+} VkAccelerationStructureCreateInfoNV;
+
+typedef struct VkBindAccelerationStructureMemoryInfoNV {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkAccelerationStructureNV    accelerationStructure;
+    VkDeviceMemory               memory;
+    VkDeviceSize                 memoryOffset;
+    uint32_t                     deviceIndexCount;
+    const uint32_t*              pDeviceIndices;
+} VkBindAccelerationStructureMemoryInfoNV;
+
+typedef struct VkWriteDescriptorSetAccelerationStructureNV {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    uint32_t                            accelerationStructureCount;
+    const VkAccelerationStructureNV*    pAccelerationStructures;
+} VkWriteDescriptorSetAccelerationStructureNV;
+
+typedef struct VkAccelerationStructureMemoryRequirementsInfoNV {
+    VkStructureType                                    sType;
+    const void*                                        pNext;
+    VkAccelerationStructureMemoryRequirementsTypeNV    type;
+    VkAccelerationStructureNV                          accelerationStructure;
+} VkAccelerationStructureMemoryRequirementsInfoNV;
+
+typedef struct VkPhysicalDeviceRayTracingPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           shaderGroupHandleSize;
+    uint32_t           maxRecursionDepth;
+    uint32_t           maxShaderGroupStride;
+    uint32_t           shaderGroupBaseAlignment;
+    uint64_t           maxGeometryCount;
+    uint64_t           maxInstanceCount;
+    uint64_t           maxTriangleCount;
+    uint32_t           maxDescriptorSetAccelerationStructures;
+} VkPhysicalDeviceRayTracingPropertiesNV;
+
+typedef struct VkTransformMatrixKHR {
+    float    matrix[3][4];
+} VkTransformMatrixKHR;
+
+typedef VkTransformMatrixKHR VkTransformMatrixNV;
+
+typedef struct VkAabbPositionsKHR {
+    float    minX;
+    float    minY;
+    float    minZ;
+    float    maxX;
+    float    maxY;
+    float    maxZ;
+} VkAabbPositionsKHR;
+
+typedef VkAabbPositionsKHR VkAabbPositionsNV;
+
+typedef struct VkAccelerationStructureInstanceKHR {
+    VkTransformMatrixKHR          transform;
+    uint32_t                      instanceCustomIndex:24;
+    uint32_t                      mask:8;
+    uint32_t                      instanceShaderBindingTableRecordOffset:24;
+    VkGeometryInstanceFlagsKHR    flags:8;
+    uint64_t                      accelerationStructureReference;
+} VkAccelerationStructureInstanceKHR;
+
+typedef VkAccelerationStructureInstanceKHR VkAccelerationStructureInstanceNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureNV)(VkDevice device, const VkAccelerationStructureCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureNV* pAccelerationStructure);
+typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureMemoryRequirementsNV)(VkDevice device, const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2KHR* pMemoryRequirements);
+typedef VkResult (VKAPI_PTR *PFN_vkBindAccelerationStructureMemoryNV)(VkDevice device, uint32_t bindInfoCount, const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructureNV)(VkCommandBuffer commandBuffer, const VkAccelerationStructureInfoNV* pInfo, VkBuffer instanceData, VkDeviceSize instanceOffset, VkBool32 update, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkBuffer scratch, VkDeviceSize scratchOffset);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureNV)(VkCommandBuffer commandBuffer, VkAccelerationStructureNV dst, VkAccelerationStructureNV src, VkCopyAccelerationStructureModeKHR mode);
+typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysNV)(VkCommandBuffer commandBuffer, VkBuffer raygenShaderBindingTableBuffer, VkDeviceSize raygenShaderBindingOffset, VkBuffer missShaderBindingTableBuffer, VkDeviceSize missShaderBindingOffset, VkDeviceSize missShaderBindingStride, VkBuffer hitShaderBindingTableBuffer, VkDeviceSize hitShaderBindingOffset, VkDeviceSize hitShaderBindingStride, VkBuffer callableShaderBindingTableBuffer, VkDeviceSize callableShaderBindingOffset, VkDeviceSize callableShaderBindingStride, uint32_t width, uint32_t height, uint32_t depth);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesNV)(VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoNV* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupHandlesNV)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureHandleNV)(VkDevice device, VkAccelerationStructureNV accelerationStructure, size_t dataSize, void* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesNV)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureNV* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
+typedef VkResult (VKAPI_PTR *PFN_vkCompileDeferredNV)(VkDevice device, VkPipeline pipeline, uint32_t shader);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoNV*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureNV*                  pAccelerationStructure);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkAccelerationStructureMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2KHR*                   pMemoryRequirements);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindAccelerationStructureMemoryNV(
+    VkDevice                                    device,
+    uint32_t                                    bindInfoCount,
+    const VkBindAccelerationStructureMemoryInfoNV* pBindInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkAccelerationStructureInfoNV*        pInfo,
+    VkBuffer                                    instanceData,
+    VkDeviceSize                                instanceOffset,
+    VkBool32                                    update,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkBuffer                                    scratch,
+    VkDeviceSize                                scratchOffset);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureNV(
+    VkCommandBuffer                             commandBuffer,
+    VkAccelerationStructureNV                   dst,
+    VkAccelerationStructureNV                   src,
+    VkCopyAccelerationStructureModeKHR          mode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    raygenShaderBindingTableBuffer,
+    VkDeviceSize                                raygenShaderBindingOffset,
+    VkBuffer                                    missShaderBindingTableBuffer,
+    VkDeviceSize                                missShaderBindingOffset,
+    VkDeviceSize                                missShaderBindingStride,
+    VkBuffer                                    hitShaderBindingTableBuffer,
+    VkDeviceSize                                hitShaderBindingOffset,
+    VkDeviceSize                                hitShaderBindingStride,
+    VkBuffer                                    callableShaderBindingTableBuffer,
+    VkDeviceSize                                callableShaderBindingOffset,
+    VkDeviceSize                                callableShaderBindingStride,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesNV(
+    VkDevice                                    device,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoNV*     pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesKHR(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingShaderGroupHandlesNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureHandleNV(
+    VkDevice                                    device,
+    VkAccelerationStructureNV                   accelerationStructure,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureNV*            pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCompileDeferredNV(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    shader);
+#endif
+
+
+#define VK_NV_representative_fragment_test 1
+#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_SPEC_VERSION 2
+#define VK_NV_REPRESENTATIVE_FRAGMENT_TEST_EXTENSION_NAME "VK_NV_representative_fragment_test"
+typedef struct VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           representativeFragmentTest;
+} VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+
+typedef struct VkPipelineRepresentativeFragmentTestStateCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           representativeFragmentTestEnable;
+} VkPipelineRepresentativeFragmentTestStateCreateInfoNV;
+
+
+
+#define VK_EXT_filter_cubic 1
+#define VK_EXT_FILTER_CUBIC_SPEC_VERSION  3
+#define VK_EXT_FILTER_CUBIC_EXTENSION_NAME "VK_EXT_filter_cubic"
+typedef struct VkPhysicalDeviceImageViewImageFormatInfoEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkImageViewType    imageViewType;
+} VkPhysicalDeviceImageViewImageFormatInfoEXT;
+
+typedef struct VkFilterCubicImageViewImageFormatPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           filterCubic;
+    VkBool32           filterCubicMinmax;
+} VkFilterCubicImageViewImageFormatPropertiesEXT;
+
+
+
+#define VK_QCOM_render_pass_shader_resolve 1
+#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_SPEC_VERSION 4
+#define VK_QCOM_RENDER_PASS_SHADER_RESOLVE_EXTENSION_NAME "VK_QCOM_render_pass_shader_resolve"
+
+
+#define VK_EXT_global_priority 1
+#define VK_EXT_GLOBAL_PRIORITY_SPEC_VERSION 2
+#define VK_EXT_GLOBAL_PRIORITY_EXTENSION_NAME "VK_EXT_global_priority"
+typedef VkQueueGlobalPriorityKHR VkQueueGlobalPriorityEXT;
+
+typedef VkDeviceQueueGlobalPriorityCreateInfoKHR VkDeviceQueueGlobalPriorityCreateInfoEXT;
+
+
+
+#define VK_EXT_external_memory_host 1
+#define VK_EXT_EXTERNAL_MEMORY_HOST_SPEC_VERSION 1
+#define VK_EXT_EXTERNAL_MEMORY_HOST_EXTENSION_NAME "VK_EXT_external_memory_host"
+typedef struct VkImportMemoryHostPointerInfoEXT {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+    void*                                 pHostPointer;
+} VkImportMemoryHostPointerInfoEXT;
+
+typedef struct VkMemoryHostPointerPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           memoryTypeBits;
+} VkMemoryHostPointerPropertiesEXT;
+
+typedef struct VkPhysicalDeviceExternalMemoryHostPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       minImportedHostPointerAlignment;
+} VkPhysicalDeviceExternalMemoryHostPropertiesEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryHostPointerPropertiesEXT)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryHostPointerPropertiesEXT(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    const void*                                 pHostPointer,
+    VkMemoryHostPointerPropertiesEXT*           pMemoryHostPointerProperties);
+#endif
+
+
+#define VK_AMD_buffer_marker 1
+#define VK_AMD_BUFFER_MARKER_SPEC_VERSION 1
+#define VK_AMD_BUFFER_MARKER_EXTENSION_NAME "VK_AMD_buffer_marker"
+typedef void (VKAPI_PTR *PFN_vkCmdWriteBufferMarkerAMD)(VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkBuffer dstBuffer, VkDeviceSize dstOffset, uint32_t marker);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteBufferMarkerAMD(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineStageFlagBits                     pipelineStage,
+    VkBuffer                                    dstBuffer,
+    VkDeviceSize                                dstOffset,
+    uint32_t                                    marker);
+#endif
+
+
+#define VK_AMD_pipeline_compiler_control 1
+#define VK_AMD_PIPELINE_COMPILER_CONTROL_SPEC_VERSION 1
+#define VK_AMD_PIPELINE_COMPILER_CONTROL_EXTENSION_NAME "VK_AMD_pipeline_compiler_control"
+
+typedef enum VkPipelineCompilerControlFlagBitsAMD {
+    VK_PIPELINE_COMPILER_CONTROL_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkPipelineCompilerControlFlagBitsAMD;
+typedef VkFlags VkPipelineCompilerControlFlagsAMD;
+typedef struct VkPipelineCompilerControlCreateInfoAMD {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkPipelineCompilerControlFlagsAMD    compilerControlFlags;
+} VkPipelineCompilerControlCreateInfoAMD;
+
+
+
+#define VK_EXT_calibrated_timestamps 1
+#define VK_EXT_CALIBRATED_TIMESTAMPS_SPEC_VERSION 2
+#define VK_EXT_CALIBRATED_TIMESTAMPS_EXTENSION_NAME "VK_EXT_calibrated_timestamps"
+
+typedef enum VkTimeDomainEXT {
+    VK_TIME_DOMAIN_DEVICE_EXT = 0,
+    VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT = 1,
+    VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT = 2,
+    VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT = 3,
+    VK_TIME_DOMAIN_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkTimeDomainEXT;
+typedef struct VkCalibratedTimestampInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkTimeDomainEXT    timeDomain;
+} VkCalibratedTimestampInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT)(VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainEXT* pTimeDomains);
+typedef VkResult (VKAPI_PTR *PFN_vkGetCalibratedTimestampsEXT)(VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoEXT* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pTimeDomainCount,
+    VkTimeDomainEXT*                            pTimeDomains);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetCalibratedTimestampsEXT(
+    VkDevice                                    device,
+    uint32_t                                    timestampCount,
+    const VkCalibratedTimestampInfoEXT*         pTimestampInfos,
+    uint64_t*                                   pTimestamps,
+    uint64_t*                                   pMaxDeviation);
+#endif
+
+
+#define VK_AMD_shader_core_properties 1
+#define VK_AMD_SHADER_CORE_PROPERTIES_SPEC_VERSION 2
+#define VK_AMD_SHADER_CORE_PROPERTIES_EXTENSION_NAME "VK_AMD_shader_core_properties"
+typedef struct VkPhysicalDeviceShaderCorePropertiesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           shaderEngineCount;
+    uint32_t           shaderArraysPerEngineCount;
+    uint32_t           computeUnitsPerShaderArray;
+    uint32_t           simdPerComputeUnit;
+    uint32_t           wavefrontsPerSimd;
+    uint32_t           wavefrontSize;
+    uint32_t           sgprsPerSimd;
+    uint32_t           minSgprAllocation;
+    uint32_t           maxSgprAllocation;
+    uint32_t           sgprAllocationGranularity;
+    uint32_t           vgprsPerSimd;
+    uint32_t           minVgprAllocation;
+    uint32_t           maxVgprAllocation;
+    uint32_t           vgprAllocationGranularity;
+} VkPhysicalDeviceShaderCorePropertiesAMD;
+
+
+
+#define VK_AMD_memory_overallocation_behavior 1
+#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_SPEC_VERSION 1
+#define VK_AMD_MEMORY_OVERALLOCATION_BEHAVIOR_EXTENSION_NAME "VK_AMD_memory_overallocation_behavior"
+
+typedef enum VkMemoryOverallocationBehaviorAMD {
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD = 0,
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD = 1,
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD = 2,
+    VK_MEMORY_OVERALLOCATION_BEHAVIOR_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkMemoryOverallocationBehaviorAMD;
+typedef struct VkDeviceMemoryOverallocationCreateInfoAMD {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkMemoryOverallocationBehaviorAMD    overallocationBehavior;
+} VkDeviceMemoryOverallocationCreateInfoAMD;
+
+
+
+#define VK_EXT_vertex_attribute_divisor 1
+#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_SPEC_VERSION 3
+#define VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME "VK_EXT_vertex_attribute_divisor"
+typedef struct VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxVertexAttribDivisor;
+} VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+
+typedef struct VkVertexInputBindingDivisorDescriptionEXT {
+    uint32_t    binding;
+    uint32_t    divisor;
+} VkVertexInputBindingDivisorDescriptionEXT;
+
+typedef struct VkPipelineVertexInputDivisorStateCreateInfoEXT {
+    VkStructureType                                     sType;
+    const void*                                         pNext;
+    uint32_t                                            vertexBindingDivisorCount;
+    const VkVertexInputBindingDivisorDescriptionEXT*    pVertexBindingDivisors;
+} VkPipelineVertexInputDivisorStateCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           vertexAttributeInstanceRateDivisor;
+    VkBool32           vertexAttributeInstanceRateZeroDivisor;
+} VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+
+
+
+#define VK_EXT_pipeline_creation_feedback 1
+#define VK_EXT_PIPELINE_CREATION_FEEDBACK_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_CREATION_FEEDBACK_EXTENSION_NAME "VK_EXT_pipeline_creation_feedback"
+typedef VkPipelineCreationFeedbackFlagBits VkPipelineCreationFeedbackFlagBitsEXT;
+
+typedef VkPipelineCreationFeedbackFlags VkPipelineCreationFeedbackFlagsEXT;
+
+typedef VkPipelineCreationFeedbackCreateInfo VkPipelineCreationFeedbackCreateInfoEXT;
+
+typedef VkPipelineCreationFeedback VkPipelineCreationFeedbackEXT;
+
+
+
+#define VK_NV_shader_subgroup_partitioned 1
+#define VK_NV_SHADER_SUBGROUP_PARTITIONED_SPEC_VERSION 1
+#define VK_NV_SHADER_SUBGROUP_PARTITIONED_EXTENSION_NAME "VK_NV_shader_subgroup_partitioned"
+
+
+#define VK_NV_compute_shader_derivatives 1
+#define VK_NV_COMPUTE_SHADER_DERIVATIVES_SPEC_VERSION 1
+#define VK_NV_COMPUTE_SHADER_DERIVATIVES_EXTENSION_NAME "VK_NV_compute_shader_derivatives"
+typedef struct VkPhysicalDeviceComputeShaderDerivativesFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           computeDerivativeGroupQuads;
+    VkBool32           computeDerivativeGroupLinear;
+} VkPhysicalDeviceComputeShaderDerivativesFeaturesNV;
+
+
+
+#define VK_NV_mesh_shader 1
+#define VK_NV_MESH_SHADER_SPEC_VERSION    1
+#define VK_NV_MESH_SHADER_EXTENSION_NAME  "VK_NV_mesh_shader"
+typedef struct VkPhysicalDeviceMeshShaderFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           taskShader;
+    VkBool32           meshShader;
+} VkPhysicalDeviceMeshShaderFeaturesNV;
+
+typedef struct VkPhysicalDeviceMeshShaderPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxDrawMeshTasksCount;
+    uint32_t           maxTaskWorkGroupInvocations;
+    uint32_t           maxTaskWorkGroupSize[3];
+    uint32_t           maxTaskTotalMemorySize;
+    uint32_t           maxTaskOutputCount;
+    uint32_t           maxMeshWorkGroupInvocations;
+    uint32_t           maxMeshWorkGroupSize[3];
+    uint32_t           maxMeshTotalMemorySize;
+    uint32_t           maxMeshOutputVertices;
+    uint32_t           maxMeshOutputPrimitives;
+    uint32_t           maxMeshMultiviewViewCount;
+    uint32_t           meshOutputPerVertexGranularity;
+    uint32_t           meshOutputPerPrimitiveGranularity;
+} VkPhysicalDeviceMeshShaderPropertiesNV;
+
+typedef struct VkDrawMeshTasksIndirectCommandNV {
+    uint32_t    taskCount;
+    uint32_t    firstTask;
+} VkDrawMeshTasksIndirectCommandNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksNV)(VkCommandBuffer commandBuffer, uint32_t taskCount, uint32_t firstTask);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountNV)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    taskCount,
+    uint32_t                                    firstTask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+#endif
+
+
+#define VK_NV_fragment_shader_barycentric 1
+#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_SPEC_VERSION 1
+#define VK_NV_FRAGMENT_SHADER_BARYCENTRIC_EXTENSION_NAME "VK_NV_fragment_shader_barycentric"
+typedef VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR VkPhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+
+
+
+#define VK_NV_shader_image_footprint 1
+#define VK_NV_SHADER_IMAGE_FOOTPRINT_SPEC_VERSION 2
+#define VK_NV_SHADER_IMAGE_FOOTPRINT_EXTENSION_NAME "VK_NV_shader_image_footprint"
+typedef struct VkPhysicalDeviceShaderImageFootprintFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           imageFootprint;
+} VkPhysicalDeviceShaderImageFootprintFeaturesNV;
+
+
+
+#define VK_NV_scissor_exclusive 1
+#define VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION 1
+#define VK_NV_SCISSOR_EXCLUSIVE_EXTENSION_NAME "VK_NV_scissor_exclusive"
+typedef struct VkPipelineViewportExclusiveScissorStateCreateInfoNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           exclusiveScissorCount;
+    const VkRect2D*    pExclusiveScissors;
+} VkPipelineViewportExclusiveScissorStateCreateInfoNV;
+
+typedef struct VkPhysicalDeviceExclusiveScissorFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           exclusiveScissor;
+} VkPhysicalDeviceExclusiveScissorFeaturesNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetExclusiveScissorNV)(VkCommandBuffer commandBuffer, uint32_t firstExclusiveScissor, uint32_t exclusiveScissorCount, const VkRect2D* pExclusiveScissors);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetExclusiveScissorNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstExclusiveScissor,
+    uint32_t                                    exclusiveScissorCount,
+    const VkRect2D*                             pExclusiveScissors);
+#endif
+
+
+#define VK_NV_device_diagnostic_checkpoints 1
+#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_SPEC_VERSION 2
+#define VK_NV_DEVICE_DIAGNOSTIC_CHECKPOINTS_EXTENSION_NAME "VK_NV_device_diagnostic_checkpoints"
+typedef struct VkQueueFamilyCheckpointPropertiesNV {
+    VkStructureType         sType;
+    void*                   pNext;
+    VkPipelineStageFlags    checkpointExecutionStageMask;
+} VkQueueFamilyCheckpointPropertiesNV;
+
+typedef struct VkCheckpointDataNV {
+    VkStructureType            sType;
+    void*                      pNext;
+    VkPipelineStageFlagBits    stage;
+    void*                      pCheckpointMarker;
+} VkCheckpointDataNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetCheckpointNV)(VkCommandBuffer commandBuffer, const void* pCheckpointMarker);
+typedef void (VKAPI_PTR *PFN_vkGetQueueCheckpointDataNV)(VkQueue queue, uint32_t* pCheckpointDataCount, VkCheckpointDataNV* pCheckpointData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCheckpointNV(
+    VkCommandBuffer                             commandBuffer,
+    const void*                                 pCheckpointMarker);
+
+VKAPI_ATTR void VKAPI_CALL vkGetQueueCheckpointDataNV(
+    VkQueue                                     queue,
+    uint32_t*                                   pCheckpointDataCount,
+    VkCheckpointDataNV*                         pCheckpointData);
+#endif
+
+
+#define VK_INTEL_shader_integer_functions2 1
+#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_SPEC_VERSION 1
+#define VK_INTEL_SHADER_INTEGER_FUNCTIONS_2_EXTENSION_NAME "VK_INTEL_shader_integer_functions2"
+typedef struct VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderIntegerFunctions2;
+} VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+
+
+
+#define VK_INTEL_performance_query 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkPerformanceConfigurationINTEL)
+#define VK_INTEL_PERFORMANCE_QUERY_SPEC_VERSION 2
+#define VK_INTEL_PERFORMANCE_QUERY_EXTENSION_NAME "VK_INTEL_performance_query"
+
+typedef enum VkPerformanceConfigurationTypeINTEL {
+    VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL = 0,
+    VK_PERFORMANCE_CONFIGURATION_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkPerformanceConfigurationTypeINTEL;
+
+typedef enum VkQueryPoolSamplingModeINTEL {
+    VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL = 0,
+    VK_QUERY_POOL_SAMPLING_MODE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkQueryPoolSamplingModeINTEL;
+
+typedef enum VkPerformanceOverrideTypeINTEL {
+    VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL = 0,
+    VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL = 1,
+    VK_PERFORMANCE_OVERRIDE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkPerformanceOverrideTypeINTEL;
+
+typedef enum VkPerformanceParameterTypeINTEL {
+    VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL = 0,
+    VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL = 1,
+    VK_PERFORMANCE_PARAMETER_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkPerformanceParameterTypeINTEL;
+
+typedef enum VkPerformanceValueTypeINTEL {
+    VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL = 0,
+    VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL = 1,
+    VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL = 2,
+    VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL = 3,
+    VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL = 4,
+    VK_PERFORMANCE_VALUE_TYPE_MAX_ENUM_INTEL = 0x7FFFFFFF
+} VkPerformanceValueTypeINTEL;
+typedef union VkPerformanceValueDataINTEL {
+    uint32_t       value32;
+    uint64_t       value64;
+    float          valueFloat;
+    VkBool32       valueBool;
+    const char*    valueString;
+} VkPerformanceValueDataINTEL;
+
+typedef struct VkPerformanceValueINTEL {
+    VkPerformanceValueTypeINTEL    type;
+    VkPerformanceValueDataINTEL    data;
+} VkPerformanceValueINTEL;
+
+typedef struct VkInitializePerformanceApiInfoINTEL {
+    VkStructureType    sType;
+    const void*        pNext;
+    void*              pUserData;
+} VkInitializePerformanceApiInfoINTEL;
+
+typedef struct VkQueryPoolPerformanceQueryCreateInfoINTEL {
+    VkStructureType                 sType;
+    const void*                     pNext;
+    VkQueryPoolSamplingModeINTEL    performanceCountersSampling;
+} VkQueryPoolPerformanceQueryCreateInfoINTEL;
+
+typedef VkQueryPoolPerformanceQueryCreateInfoINTEL VkQueryPoolCreateInfoINTEL;
+
+typedef struct VkPerformanceMarkerInfoINTEL {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           marker;
+} VkPerformanceMarkerInfoINTEL;
+
+typedef struct VkPerformanceStreamMarkerInfoINTEL {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           marker;
+} VkPerformanceStreamMarkerInfoINTEL;
+
+typedef struct VkPerformanceOverrideInfoINTEL {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkPerformanceOverrideTypeINTEL    type;
+    VkBool32                          enable;
+    uint64_t                          parameter;
+} VkPerformanceOverrideInfoINTEL;
+
+typedef struct VkPerformanceConfigurationAcquireInfoINTEL {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkPerformanceConfigurationTypeINTEL    type;
+} VkPerformanceConfigurationAcquireInfoINTEL;
+
+typedef VkResult (VKAPI_PTR *PFN_vkInitializePerformanceApiINTEL)(VkDevice device, const VkInitializePerformanceApiInfoINTEL* pInitializeInfo);
+typedef void (VKAPI_PTR *PFN_vkUninitializePerformanceApiINTEL)(VkDevice device);
+typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceMarkerInfoINTEL* pMarkerInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceStreamMarkerINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceStreamMarkerInfoINTEL* pMarkerInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCmdSetPerformanceOverrideINTEL)(VkCommandBuffer commandBuffer, const VkPerformanceOverrideInfoINTEL* pOverrideInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkAcquirePerformanceConfigurationINTEL)(VkDevice device, const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo, VkPerformanceConfigurationINTEL* pConfiguration);
+typedef VkResult (VKAPI_PTR *PFN_vkReleasePerformanceConfigurationINTEL)(VkDevice device, VkPerformanceConfigurationINTEL configuration);
+typedef VkResult (VKAPI_PTR *PFN_vkQueueSetPerformanceConfigurationINTEL)(VkQueue queue, VkPerformanceConfigurationINTEL configuration);
+typedef VkResult (VKAPI_PTR *PFN_vkGetPerformanceParameterINTEL)(VkDevice device, VkPerformanceParameterTypeINTEL parameter, VkPerformanceValueINTEL* pValue);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkInitializePerformanceApiINTEL(
+    VkDevice                                    device,
+    const VkInitializePerformanceApiInfoINTEL*  pInitializeInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkUninitializePerformanceApiINTEL(
+    VkDevice                                    device);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceMarkerInfoINTEL*         pMarkerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceStreamMarkerINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceStreamMarkerInfoINTEL*   pMarkerInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCmdSetPerformanceOverrideINTEL(
+    VkCommandBuffer                             commandBuffer,
+    const VkPerformanceOverrideInfoINTEL*       pOverrideInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquirePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    const VkPerformanceConfigurationAcquireInfoINTEL* pAcquireInfo,
+    VkPerformanceConfigurationINTEL*            pConfiguration);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkReleasePerformanceConfigurationINTEL(
+    VkDevice                                    device,
+    VkPerformanceConfigurationINTEL             configuration);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkQueueSetPerformanceConfigurationINTEL(
+    VkQueue                                     queue,
+    VkPerformanceConfigurationINTEL             configuration);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPerformanceParameterINTEL(
+    VkDevice                                    device,
+    VkPerformanceParameterTypeINTEL             parameter,
+    VkPerformanceValueINTEL*                    pValue);
+#endif
+
+
+#define VK_EXT_pci_bus_info 1
+#define VK_EXT_PCI_BUS_INFO_SPEC_VERSION  2
+#define VK_EXT_PCI_BUS_INFO_EXTENSION_NAME "VK_EXT_pci_bus_info"
+typedef struct VkPhysicalDevicePCIBusInfoPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           pciDomain;
+    uint32_t           pciBus;
+    uint32_t           pciDevice;
+    uint32_t           pciFunction;
+} VkPhysicalDevicePCIBusInfoPropertiesEXT;
+
+
+
+#define VK_AMD_display_native_hdr 1
+#define VK_AMD_DISPLAY_NATIVE_HDR_SPEC_VERSION 1
+#define VK_AMD_DISPLAY_NATIVE_HDR_EXTENSION_NAME "VK_AMD_display_native_hdr"
+typedef struct VkDisplayNativeHdrSurfaceCapabilitiesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           localDimmingSupport;
+} VkDisplayNativeHdrSurfaceCapabilitiesAMD;
+
+typedef struct VkSwapchainDisplayNativeHdrCreateInfoAMD {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           localDimmingEnable;
+} VkSwapchainDisplayNativeHdrCreateInfoAMD;
+
+typedef void (VKAPI_PTR *PFN_vkSetLocalDimmingAMD)(VkDevice device, VkSwapchainKHR swapChain, VkBool32 localDimmingEnable);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkSetLocalDimmingAMD(
+    VkDevice                                    device,
+    VkSwapchainKHR                              swapChain,
+    VkBool32                                    localDimmingEnable);
+#endif
+
+
+#define VK_EXT_fragment_density_map 1
+#define VK_EXT_FRAGMENT_DENSITY_MAP_SPEC_VERSION 2
+#define VK_EXT_FRAGMENT_DENSITY_MAP_EXTENSION_NAME "VK_EXT_fragment_density_map"
+typedef struct VkPhysicalDeviceFragmentDensityMapFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentDensityMap;
+    VkBool32           fragmentDensityMapDynamic;
+    VkBool32           fragmentDensityMapNonSubsampledImages;
+} VkPhysicalDeviceFragmentDensityMapFeaturesEXT;
+
+typedef struct VkPhysicalDeviceFragmentDensityMapPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent2D         minFragmentDensityTexelSize;
+    VkExtent2D         maxFragmentDensityTexelSize;
+    VkBool32           fragmentDensityInvocations;
+} VkPhysicalDeviceFragmentDensityMapPropertiesEXT;
+
+typedef struct VkRenderPassFragmentDensityMapCreateInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkAttachmentReference    fragmentDensityMapAttachment;
+} VkRenderPassFragmentDensityMapCreateInfoEXT;
+
+
+
+#define VK_EXT_scalar_block_layout 1
+#define VK_EXT_SCALAR_BLOCK_LAYOUT_SPEC_VERSION 1
+#define VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME "VK_EXT_scalar_block_layout"
+typedef VkPhysicalDeviceScalarBlockLayoutFeatures VkPhysicalDeviceScalarBlockLayoutFeaturesEXT;
+
+
+
+#define VK_GOOGLE_hlsl_functionality1 1
+#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION 1
+#define VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME "VK_GOOGLE_hlsl_functionality1"
+#define VK_GOOGLE_HLSL_FUNCTIONALITY1_SPEC_VERSION VK_GOOGLE_HLSL_FUNCTIONALITY_1_SPEC_VERSION
+#define VK_GOOGLE_HLSL_FUNCTIONALITY1_EXTENSION_NAME VK_GOOGLE_HLSL_FUNCTIONALITY_1_EXTENSION_NAME
+
+
+#define VK_GOOGLE_decorate_string 1
+#define VK_GOOGLE_DECORATE_STRING_SPEC_VERSION 1
+#define VK_GOOGLE_DECORATE_STRING_EXTENSION_NAME "VK_GOOGLE_decorate_string"
+
+
+#define VK_EXT_subgroup_size_control 1
+#define VK_EXT_SUBGROUP_SIZE_CONTROL_SPEC_VERSION 2
+#define VK_EXT_SUBGROUP_SIZE_CONTROL_EXTENSION_NAME "VK_EXT_subgroup_size_control"
+typedef VkPhysicalDeviceSubgroupSizeControlFeatures VkPhysicalDeviceSubgroupSizeControlFeaturesEXT;
+
+typedef VkPhysicalDeviceSubgroupSizeControlProperties VkPhysicalDeviceSubgroupSizeControlPropertiesEXT;
+
+typedef VkPipelineShaderStageRequiredSubgroupSizeCreateInfo VkPipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+
+
+
+#define VK_AMD_shader_core_properties2 1
+#define VK_AMD_SHADER_CORE_PROPERTIES_2_SPEC_VERSION 1
+#define VK_AMD_SHADER_CORE_PROPERTIES_2_EXTENSION_NAME "VK_AMD_shader_core_properties2"
+
+typedef enum VkShaderCorePropertiesFlagBitsAMD {
+    VK_SHADER_CORE_PROPERTIES_FLAG_BITS_MAX_ENUM_AMD = 0x7FFFFFFF
+} VkShaderCorePropertiesFlagBitsAMD;
+typedef VkFlags VkShaderCorePropertiesFlagsAMD;
+typedef struct VkPhysicalDeviceShaderCoreProperties2AMD {
+    VkStructureType                   sType;
+    void*                             pNext;
+    VkShaderCorePropertiesFlagsAMD    shaderCoreFeatures;
+    uint32_t                          activeComputeUnitCount;
+} VkPhysicalDeviceShaderCoreProperties2AMD;
+
+
+
+#define VK_AMD_device_coherent_memory 1
+#define VK_AMD_DEVICE_COHERENT_MEMORY_SPEC_VERSION 1
+#define VK_AMD_DEVICE_COHERENT_MEMORY_EXTENSION_NAME "VK_AMD_device_coherent_memory"
+typedef struct VkPhysicalDeviceCoherentMemoryFeaturesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           deviceCoherentMemory;
+} VkPhysicalDeviceCoherentMemoryFeaturesAMD;
+
+
+
+#define VK_EXT_shader_image_atomic_int64 1
+#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_SPEC_VERSION 1
+#define VK_EXT_SHADER_IMAGE_ATOMIC_INT64_EXTENSION_NAME "VK_EXT_shader_image_atomic_int64"
+typedef struct VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderImageInt64Atomics;
+    VkBool32           sparseImageInt64Atomics;
+} VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+
+
+
+#define VK_EXT_memory_budget 1
+#define VK_EXT_MEMORY_BUDGET_SPEC_VERSION 1
+#define VK_EXT_MEMORY_BUDGET_EXTENSION_NAME "VK_EXT_memory_budget"
+typedef struct VkPhysicalDeviceMemoryBudgetPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       heapBudget[VK_MAX_MEMORY_HEAPS];
+    VkDeviceSize       heapUsage[VK_MAX_MEMORY_HEAPS];
+} VkPhysicalDeviceMemoryBudgetPropertiesEXT;
+
+
+
+#define VK_EXT_memory_priority 1
+#define VK_EXT_MEMORY_PRIORITY_SPEC_VERSION 1
+#define VK_EXT_MEMORY_PRIORITY_EXTENSION_NAME "VK_EXT_memory_priority"
+typedef struct VkPhysicalDeviceMemoryPriorityFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           memoryPriority;
+} VkPhysicalDeviceMemoryPriorityFeaturesEXT;
+
+typedef struct VkMemoryPriorityAllocateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    float              priority;
+} VkMemoryPriorityAllocateInfoEXT;
+
+
+
+#define VK_NV_dedicated_allocation_image_aliasing 1
+#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_SPEC_VERSION 1
+#define VK_NV_DEDICATED_ALLOCATION_IMAGE_ALIASING_EXTENSION_NAME "VK_NV_dedicated_allocation_image_aliasing"
+typedef struct VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           dedicatedAllocationImageAliasing;
+} VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+
+
+
+#define VK_EXT_buffer_device_address 1
+#define VK_EXT_BUFFER_DEVICE_ADDRESS_SPEC_VERSION 2
+#define VK_EXT_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME "VK_EXT_buffer_device_address"
+typedef struct VkPhysicalDeviceBufferDeviceAddressFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           bufferDeviceAddress;
+    VkBool32           bufferDeviceAddressCaptureReplay;
+    VkBool32           bufferDeviceAddressMultiDevice;
+} VkPhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+typedef VkPhysicalDeviceBufferDeviceAddressFeaturesEXT VkPhysicalDeviceBufferAddressFeaturesEXT;
+
+typedef VkBufferDeviceAddressInfo VkBufferDeviceAddressInfoEXT;
+
+typedef struct VkBufferDeviceAddressCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceAddress    deviceAddress;
+} VkBufferDeviceAddressCreateInfoEXT;
+
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetBufferDeviceAddressEXT)(VkDevice device, const VkBufferDeviceAddressInfo* pInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetBufferDeviceAddressEXT(
+    VkDevice                                    device,
+    const VkBufferDeviceAddressInfo*            pInfo);
+#endif
+
+
+#define VK_EXT_tooling_info 1
+#define VK_EXT_TOOLING_INFO_SPEC_VERSION  1
+#define VK_EXT_TOOLING_INFO_EXTENSION_NAME "VK_EXT_tooling_info"
+typedef VkToolPurposeFlagBits VkToolPurposeFlagBitsEXT;
+
+typedef VkToolPurposeFlags VkToolPurposeFlagsEXT;
+
+typedef VkPhysicalDeviceToolProperties VkPhysicalDeviceToolPropertiesEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceToolPropertiesEXT)(VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceToolPropertiesEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pToolCount,
+    VkPhysicalDeviceToolProperties*             pToolProperties);
+#endif
+
+
+#define VK_EXT_separate_stencil_usage 1
+#define VK_EXT_SEPARATE_STENCIL_USAGE_SPEC_VERSION 1
+#define VK_EXT_SEPARATE_STENCIL_USAGE_EXTENSION_NAME "VK_EXT_separate_stencil_usage"
+typedef VkImageStencilUsageCreateInfo VkImageStencilUsageCreateInfoEXT;
+
+
+
+#define VK_EXT_validation_features 1
+#define VK_EXT_VALIDATION_FEATURES_SPEC_VERSION 5
+#define VK_EXT_VALIDATION_FEATURES_EXTENSION_NAME "VK_EXT_validation_features"
+
+typedef enum VkValidationFeatureEnableEXT {
+    VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT = 0,
+    VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT = 1,
+    VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT = 2,
+    VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT = 3,
+    VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT = 4,
+    VK_VALIDATION_FEATURE_ENABLE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationFeatureEnableEXT;
+
+typedef enum VkValidationFeatureDisableEXT {
+    VK_VALIDATION_FEATURE_DISABLE_ALL_EXT = 0,
+    VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT = 1,
+    VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT = 2,
+    VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT = 3,
+    VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT = 4,
+    VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT = 5,
+    VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT = 6,
+    VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT = 7,
+    VK_VALIDATION_FEATURE_DISABLE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkValidationFeatureDisableEXT;
+typedef struct VkValidationFeaturesEXT {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    uint32_t                                enabledValidationFeatureCount;
+    const VkValidationFeatureEnableEXT*     pEnabledValidationFeatures;
+    uint32_t                                disabledValidationFeatureCount;
+    const VkValidationFeatureDisableEXT*    pDisabledValidationFeatures;
+} VkValidationFeaturesEXT;
+
+
+
+#define VK_NV_cooperative_matrix 1
+#define VK_NV_COOPERATIVE_MATRIX_SPEC_VERSION 1
+#define VK_NV_COOPERATIVE_MATRIX_EXTENSION_NAME "VK_NV_cooperative_matrix"
+
+typedef enum VkComponentTypeNV {
+    VK_COMPONENT_TYPE_FLOAT16_NV = 0,
+    VK_COMPONENT_TYPE_FLOAT32_NV = 1,
+    VK_COMPONENT_TYPE_FLOAT64_NV = 2,
+    VK_COMPONENT_TYPE_SINT8_NV = 3,
+    VK_COMPONENT_TYPE_SINT16_NV = 4,
+    VK_COMPONENT_TYPE_SINT32_NV = 5,
+    VK_COMPONENT_TYPE_SINT64_NV = 6,
+    VK_COMPONENT_TYPE_UINT8_NV = 7,
+    VK_COMPONENT_TYPE_UINT16_NV = 8,
+    VK_COMPONENT_TYPE_UINT32_NV = 9,
+    VK_COMPONENT_TYPE_UINT64_NV = 10,
+    VK_COMPONENT_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkComponentTypeNV;
+
+typedef enum VkScopeNV {
+    VK_SCOPE_DEVICE_NV = 1,
+    VK_SCOPE_WORKGROUP_NV = 2,
+    VK_SCOPE_SUBGROUP_NV = 3,
+    VK_SCOPE_QUEUE_FAMILY_NV = 5,
+    VK_SCOPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkScopeNV;
+typedef struct VkCooperativeMatrixPropertiesNV {
+    VkStructureType      sType;
+    void*                pNext;
+    uint32_t             MSize;
+    uint32_t             NSize;
+    uint32_t             KSize;
+    VkComponentTypeNV    AType;
+    VkComponentTypeNV    BType;
+    VkComponentTypeNV    CType;
+    VkComponentTypeNV    DType;
+    VkScopeNV            scope;
+} VkCooperativeMatrixPropertiesNV;
+
+typedef struct VkPhysicalDeviceCooperativeMatrixFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           cooperativeMatrix;
+    VkBool32           cooperativeMatrixRobustBufferAccess;
+} VkPhysicalDeviceCooperativeMatrixFeaturesNV;
+
+typedef struct VkPhysicalDeviceCooperativeMatrixPropertiesNV {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkShaderStageFlags    cooperativeMatrixSupportedStages;
+} VkPhysicalDeviceCooperativeMatrixPropertiesNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV)(VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesNV* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceCooperativeMatrixPropertiesNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pPropertyCount,
+    VkCooperativeMatrixPropertiesNV*            pProperties);
+#endif
+
+
+#define VK_NV_coverage_reduction_mode 1
+#define VK_NV_COVERAGE_REDUCTION_MODE_SPEC_VERSION 1
+#define VK_NV_COVERAGE_REDUCTION_MODE_EXTENSION_NAME "VK_NV_coverage_reduction_mode"
+
+typedef enum VkCoverageReductionModeNV {
+    VK_COVERAGE_REDUCTION_MODE_MERGE_NV = 0,
+    VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV = 1,
+    VK_COVERAGE_REDUCTION_MODE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkCoverageReductionModeNV;
+typedef VkFlags VkPipelineCoverageReductionStateCreateFlagsNV;
+typedef struct VkPhysicalDeviceCoverageReductionModeFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           coverageReductionMode;
+} VkPhysicalDeviceCoverageReductionModeFeaturesNV;
+
+typedef struct VkPipelineCoverageReductionStateCreateInfoNV {
+    VkStructureType                                  sType;
+    const void*                                      pNext;
+    VkPipelineCoverageReductionStateCreateFlagsNV    flags;
+    VkCoverageReductionModeNV                        coverageReductionMode;
+} VkPipelineCoverageReductionStateCreateInfoNV;
+
+typedef struct VkFramebufferMixedSamplesCombinationNV {
+    VkStructureType              sType;
+    void*                        pNext;
+    VkCoverageReductionModeNV    coverageReductionMode;
+    VkSampleCountFlagBits        rasterizationSamples;
+    VkSampleCountFlags           depthStencilSamples;
+    VkSampleCountFlags           colorSamples;
+} VkFramebufferMixedSamplesCombinationNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV)(VkPhysicalDevice physicalDevice, uint32_t* pCombinationCount, VkFramebufferMixedSamplesCombinationNV* pCombinations);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t*                                   pCombinationCount,
+    VkFramebufferMixedSamplesCombinationNV*     pCombinations);
+#endif
+
+
+#define VK_EXT_fragment_shader_interlock 1
+#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_SPEC_VERSION 1
+#define VK_EXT_FRAGMENT_SHADER_INTERLOCK_EXTENSION_NAME "VK_EXT_fragment_shader_interlock"
+typedef struct VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentShaderSampleInterlock;
+    VkBool32           fragmentShaderPixelInterlock;
+    VkBool32           fragmentShaderShadingRateInterlock;
+} VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+
+
+
+#define VK_EXT_ycbcr_image_arrays 1
+#define VK_EXT_YCBCR_IMAGE_ARRAYS_SPEC_VERSION 1
+#define VK_EXT_YCBCR_IMAGE_ARRAYS_EXTENSION_NAME "VK_EXT_ycbcr_image_arrays"
+typedef struct VkPhysicalDeviceYcbcrImageArraysFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           ycbcrImageArrays;
+} VkPhysicalDeviceYcbcrImageArraysFeaturesEXT;
+
+
+
+#define VK_EXT_provoking_vertex 1
+#define VK_EXT_PROVOKING_VERTEX_SPEC_VERSION 1
+#define VK_EXT_PROVOKING_VERTEX_EXTENSION_NAME "VK_EXT_provoking_vertex"
+
+typedef enum VkProvokingVertexModeEXT {
+    VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT = 0,
+    VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT = 1,
+    VK_PROVOKING_VERTEX_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkProvokingVertexModeEXT;
+typedef struct VkPhysicalDeviceProvokingVertexFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           provokingVertexLast;
+    VkBool32           transformFeedbackPreservesProvokingVertex;
+} VkPhysicalDeviceProvokingVertexFeaturesEXT;
+
+typedef struct VkPhysicalDeviceProvokingVertexPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           provokingVertexModePerPipeline;
+    VkBool32           transformFeedbackPreservesTriangleFanProvokingVertex;
+} VkPhysicalDeviceProvokingVertexPropertiesEXT;
+
+typedef struct VkPipelineRasterizationProvokingVertexStateCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkProvokingVertexModeEXT    provokingVertexMode;
+} VkPipelineRasterizationProvokingVertexStateCreateInfoEXT;
+
+
+
+#define VK_EXT_headless_surface 1
+#define VK_EXT_HEADLESS_SURFACE_SPEC_VERSION 1
+#define VK_EXT_HEADLESS_SURFACE_EXTENSION_NAME "VK_EXT_headless_surface"
+typedef VkFlags VkHeadlessSurfaceCreateFlagsEXT;
+typedef struct VkHeadlessSurfaceCreateInfoEXT {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkHeadlessSurfaceCreateFlagsEXT    flags;
+} VkHeadlessSurfaceCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateHeadlessSurfaceEXT)(VkInstance instance, const VkHeadlessSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateHeadlessSurfaceEXT(
+    VkInstance                                  instance,
+    const VkHeadlessSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+#define VK_EXT_line_rasterization 1
+#define VK_EXT_LINE_RASTERIZATION_SPEC_VERSION 1
+#define VK_EXT_LINE_RASTERIZATION_EXTENSION_NAME "VK_EXT_line_rasterization"
+
+typedef enum VkLineRasterizationModeEXT {
+    VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT = 0,
+    VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT = 1,
+    VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT = 2,
+    VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT = 3,
+    VK_LINE_RASTERIZATION_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkLineRasterizationModeEXT;
+typedef struct VkPhysicalDeviceLineRasterizationFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rectangularLines;
+    VkBool32           bresenhamLines;
+    VkBool32           smoothLines;
+    VkBool32           stippledRectangularLines;
+    VkBool32           stippledBresenhamLines;
+    VkBool32           stippledSmoothLines;
+} VkPhysicalDeviceLineRasterizationFeaturesEXT;
+
+typedef struct VkPhysicalDeviceLineRasterizationPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           lineSubPixelPrecisionBits;
+} VkPhysicalDeviceLineRasterizationPropertiesEXT;
+
+typedef struct VkPipelineRasterizationLineStateCreateInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkLineRasterizationModeEXT    lineRasterizationMode;
+    VkBool32                      stippledLineEnable;
+    uint32_t                      lineStippleFactor;
+    uint16_t                      lineStipplePattern;
+} VkPipelineRasterizationLineStateCreateInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEXT)(VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    lineStippleFactor,
+    uint16_t                                    lineStipplePattern);
+#endif
+
+
+#define VK_EXT_shader_atomic_float 1
+#define VK_EXT_SHADER_ATOMIC_FLOAT_SPEC_VERSION 1
+#define VK_EXT_SHADER_ATOMIC_FLOAT_EXTENSION_NAME "VK_EXT_shader_atomic_float"
+typedef struct VkPhysicalDeviceShaderAtomicFloatFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderBufferFloat32Atomics;
+    VkBool32           shaderBufferFloat32AtomicAdd;
+    VkBool32           shaderBufferFloat64Atomics;
+    VkBool32           shaderBufferFloat64AtomicAdd;
+    VkBool32           shaderSharedFloat32Atomics;
+    VkBool32           shaderSharedFloat32AtomicAdd;
+    VkBool32           shaderSharedFloat64Atomics;
+    VkBool32           shaderSharedFloat64AtomicAdd;
+    VkBool32           shaderImageFloat32Atomics;
+    VkBool32           shaderImageFloat32AtomicAdd;
+    VkBool32           sparseImageFloat32Atomics;
+    VkBool32           sparseImageFloat32AtomicAdd;
+} VkPhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+
+
+#define VK_EXT_host_query_reset 1
+#define VK_EXT_HOST_QUERY_RESET_SPEC_VERSION 1
+#define VK_EXT_HOST_QUERY_RESET_EXTENSION_NAME "VK_EXT_host_query_reset"
+typedef VkPhysicalDeviceHostQueryResetFeatures VkPhysicalDeviceHostQueryResetFeaturesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkResetQueryPoolEXT)(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkResetQueryPoolEXT(
+    VkDevice                                    device,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery,
+    uint32_t                                    queryCount);
+#endif
+
+
+#define VK_EXT_index_type_uint8 1
+#define VK_EXT_INDEX_TYPE_UINT8_SPEC_VERSION 1
+#define VK_EXT_INDEX_TYPE_UINT8_EXTENSION_NAME "VK_EXT_index_type_uint8"
+typedef struct VkPhysicalDeviceIndexTypeUint8FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           indexTypeUint8;
+} VkPhysicalDeviceIndexTypeUint8FeaturesEXT;
+
+
+
+#define VK_EXT_extended_dynamic_state 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_SPEC_VERSION 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_extended_dynamic_state"
+typedef struct VkPhysicalDeviceExtendedDynamicStateFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           extendedDynamicState;
+} VkPhysicalDeviceExtendedDynamicStateFeaturesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetCullModeEXT)(VkCommandBuffer commandBuffer, VkCullModeFlags cullMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetFrontFaceEXT)(VkCommandBuffer commandBuffer, VkFrontFace frontFace);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveTopologyEXT)(VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports);
+typedef void (VKAPI_PTR *PFN_vkCmdSetScissorWithCountEXT)(VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors);
+typedef void (VKAPI_PTR *PFN_vkCmdBindVertexBuffers2EXT)(VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthWriteEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthCompareOpEXT)(VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBoundsTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilTestEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetStencilOpEXT)(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCullModeEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkCullModeFlags                             cullMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetFrontFaceEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkFrontFace                                 frontFace);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveTopologyEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkPrimitiveTopology                         primitiveTopology);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWithCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    viewportCount,
+    const VkViewport*                           pViewports);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetScissorWithCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    scissorCount,
+    const VkRect2D*                             pScissors);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindVertexBuffers2EXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstBinding,
+    uint32_t                                    bindingCount,
+    const VkBuffer*                             pBuffers,
+    const VkDeviceSize*                         pOffsets,
+    const VkDeviceSize*                         pSizes,
+    const VkDeviceSize*                         pStrides);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthTestEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthWriteEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthWriteEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthCompareOpEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkCompareOp                                 depthCompareOp);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBoundsTestEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthBoundsTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilTestEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    stencilTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetStencilOpEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkStencilFaceFlags                          faceMask,
+    VkStencilOp                                 failOp,
+    VkStencilOp                                 passOp,
+    VkStencilOp                                 depthFailOp,
+    VkCompareOp                                 compareOp);
+#endif
+
+
+#define VK_EXT_shader_atomic_float2 1
+#define VK_EXT_SHADER_ATOMIC_FLOAT_2_SPEC_VERSION 1
+#define VK_EXT_SHADER_ATOMIC_FLOAT_2_EXTENSION_NAME "VK_EXT_shader_atomic_float2"
+typedef struct VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderBufferFloat16Atomics;
+    VkBool32           shaderBufferFloat16AtomicAdd;
+    VkBool32           shaderBufferFloat16AtomicMinMax;
+    VkBool32           shaderBufferFloat32AtomicMinMax;
+    VkBool32           shaderBufferFloat64AtomicMinMax;
+    VkBool32           shaderSharedFloat16Atomics;
+    VkBool32           shaderSharedFloat16AtomicAdd;
+    VkBool32           shaderSharedFloat16AtomicMinMax;
+    VkBool32           shaderSharedFloat32AtomicMinMax;
+    VkBool32           shaderSharedFloat64AtomicMinMax;
+    VkBool32           shaderImageFloat32AtomicMinMax;
+    VkBool32           sparseImageFloat32AtomicMinMax;
+} VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+
+
+
+#define VK_EXT_surface_maintenance1 1
+#define VK_EXT_SURFACE_MAINTENANCE_1_SPEC_VERSION 1
+#define VK_EXT_SURFACE_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_surface_maintenance1"
+
+typedef enum VkPresentScalingFlagBitsEXT {
+    VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT = 0x00000001,
+    VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT = 0x00000002,
+    VK_PRESENT_SCALING_STRETCH_BIT_EXT = 0x00000004,
+    VK_PRESENT_SCALING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkPresentScalingFlagBitsEXT;
+typedef VkFlags VkPresentScalingFlagsEXT;
+
+typedef enum VkPresentGravityFlagBitsEXT {
+    VK_PRESENT_GRAVITY_MIN_BIT_EXT = 0x00000001,
+    VK_PRESENT_GRAVITY_MAX_BIT_EXT = 0x00000002,
+    VK_PRESENT_GRAVITY_CENTERED_BIT_EXT = 0x00000004,
+    VK_PRESENT_GRAVITY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkPresentGravityFlagBitsEXT;
+typedef VkFlags VkPresentGravityFlagsEXT;
+typedef struct VkSurfacePresentModeEXT {
+    VkStructureType     sType;
+    void*               pNext;
+    VkPresentModeKHR    presentMode;
+} VkSurfacePresentModeEXT;
+
+typedef struct VkSurfacePresentScalingCapabilitiesEXT {
+    VkStructureType             sType;
+    void*                       pNext;
+    VkPresentScalingFlagsEXT    supportedPresentScaling;
+    VkPresentGravityFlagsEXT    supportedPresentGravityX;
+    VkPresentGravityFlagsEXT    supportedPresentGravityY;
+    VkExtent2D                  minScaledImageExtent;
+    VkExtent2D                  maxScaledImageExtent;
+} VkSurfacePresentScalingCapabilitiesEXT;
+
+typedef struct VkSurfacePresentModeCompatibilityEXT {
+    VkStructureType      sType;
+    void*                pNext;
+    uint32_t             presentModeCount;
+    VkPresentModeKHR*    pPresentModes;
+} VkSurfacePresentModeCompatibilityEXT;
+
+
+
+#define VK_EXT_swapchain_maintenance1 1
+#define VK_EXT_SWAPCHAIN_MAINTENANCE_1_SPEC_VERSION 1
+#define VK_EXT_SWAPCHAIN_MAINTENANCE_1_EXTENSION_NAME "VK_EXT_swapchain_maintenance1"
+typedef struct VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           swapchainMaintenance1;
+} VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT;
+
+typedef struct VkSwapchainPresentFenceInfoEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           swapchainCount;
+    const VkFence*     pFences;
+} VkSwapchainPresentFenceInfoEXT;
+
+typedef struct VkSwapchainPresentModesCreateInfoEXT {
+    VkStructureType            sType;
+    void*                      pNext;
+    uint32_t                   presentModeCount;
+    const VkPresentModeKHR*    pPresentModes;
+} VkSwapchainPresentModesCreateInfoEXT;
+
+typedef struct VkSwapchainPresentModeInfoEXT {
+    VkStructureType            sType;
+    void*                      pNext;
+    uint32_t                   swapchainCount;
+    const VkPresentModeKHR*    pPresentModes;
+} VkSwapchainPresentModeInfoEXT;
+
+typedef struct VkSwapchainPresentScalingCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkPresentScalingFlagsEXT    scalingBehavior;
+    VkPresentGravityFlagsEXT    presentGravityX;
+    VkPresentGravityFlagsEXT    presentGravityY;
+} VkSwapchainPresentScalingCreateInfoEXT;
+
+typedef struct VkReleaseSwapchainImagesInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSwapchainKHR     swapchain;
+    uint32_t           imageIndexCount;
+    const uint32_t*    pImageIndices;
+} VkReleaseSwapchainImagesInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkReleaseSwapchainImagesEXT)(VkDevice device, const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkReleaseSwapchainImagesEXT(
+    VkDevice                                    device,
+    const VkReleaseSwapchainImagesInfoEXT*      pReleaseInfo);
+#endif
+
+
+#define VK_EXT_shader_demote_to_helper_invocation 1
+#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_SPEC_VERSION 1
+#define VK_EXT_SHADER_DEMOTE_TO_HELPER_INVOCATION_EXTENSION_NAME "VK_EXT_shader_demote_to_helper_invocation"
+typedef VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures VkPhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+
+
+
+#define VK_NV_device_generated_commands 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkIndirectCommandsLayoutNV)
+#define VK_NV_DEVICE_GENERATED_COMMANDS_SPEC_VERSION 3
+#define VK_NV_DEVICE_GENERATED_COMMANDS_EXTENSION_NAME "VK_NV_device_generated_commands"
+
+typedef enum VkIndirectCommandsTokenTypeNV {
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV = 0,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV = 1,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV = 2,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV = 3,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV = 4,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV = 5,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV = 6,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV = 7,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV = 1000328000,
+    VK_INDIRECT_COMMANDS_TOKEN_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkIndirectCommandsTokenTypeNV;
+
+typedef enum VkIndirectStateFlagBitsNV {
+    VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV = 0x00000001,
+    VK_INDIRECT_STATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkIndirectStateFlagBitsNV;
+typedef VkFlags VkIndirectStateFlagsNV;
+
+typedef enum VkIndirectCommandsLayoutUsageFlagBitsNV {
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV = 0x00000001,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV = 0x00000002,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV = 0x00000004,
+    VK_INDIRECT_COMMANDS_LAYOUT_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkIndirectCommandsLayoutUsageFlagBitsNV;
+typedef VkFlags VkIndirectCommandsLayoutUsageFlagsNV;
+typedef struct VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxGraphicsShaderGroupCount;
+    uint32_t           maxIndirectSequenceCount;
+    uint32_t           maxIndirectCommandsTokenCount;
+    uint32_t           maxIndirectCommandsStreamCount;
+    uint32_t           maxIndirectCommandsTokenOffset;
+    uint32_t           maxIndirectCommandsStreamStride;
+    uint32_t           minSequencesCountBufferOffsetAlignment;
+    uint32_t           minSequencesIndexBufferOffsetAlignment;
+    uint32_t           minIndirectCommandsBufferOffsetAlignment;
+} VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+
+typedef struct VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           deviceGeneratedCommands;
+} VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+
+typedef struct VkGraphicsShaderGroupCreateInfoNV {
+    VkStructureType                                 sType;
+    const void*                                     pNext;
+    uint32_t                                        stageCount;
+    const VkPipelineShaderStageCreateInfo*          pStages;
+    const VkPipelineVertexInputStateCreateInfo*     pVertexInputState;
+    const VkPipelineTessellationStateCreateInfo*    pTessellationState;
+} VkGraphicsShaderGroupCreateInfoNV;
+
+typedef struct VkGraphicsPipelineShaderGroupsCreateInfoNV {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    groupCount;
+    const VkGraphicsShaderGroupCreateInfoNV*    pGroups;
+    uint32_t                                    pipelineCount;
+    const VkPipeline*                           pPipelines;
+} VkGraphicsPipelineShaderGroupsCreateInfoNV;
+
+typedef struct VkBindShaderGroupIndirectCommandNV {
+    uint32_t    groupIndex;
+} VkBindShaderGroupIndirectCommandNV;
+
+typedef struct VkBindIndexBufferIndirectCommandNV {
+    VkDeviceAddress    bufferAddress;
+    uint32_t           size;
+    VkIndexType        indexType;
+} VkBindIndexBufferIndirectCommandNV;
+
+typedef struct VkBindVertexBufferIndirectCommandNV {
+    VkDeviceAddress    bufferAddress;
+    uint32_t           size;
+    uint32_t           stride;
+} VkBindVertexBufferIndirectCommandNV;
+
+typedef struct VkSetStateFlagsIndirectCommandNV {
+    uint32_t    data;
+} VkSetStateFlagsIndirectCommandNV;
+
+typedef struct VkIndirectCommandsStreamNV {
+    VkBuffer        buffer;
+    VkDeviceSize    offset;
+} VkIndirectCommandsStreamNV;
+
+typedef struct VkIndirectCommandsLayoutTokenNV {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkIndirectCommandsTokenTypeNV    tokenType;
+    uint32_t                         stream;
+    uint32_t                         offset;
+    uint32_t                         vertexBindingUnit;
+    VkBool32                         vertexDynamicStride;
+    VkPipelineLayout                 pushconstantPipelineLayout;
+    VkShaderStageFlags               pushconstantShaderStageFlags;
+    uint32_t                         pushconstantOffset;
+    uint32_t                         pushconstantSize;
+    VkIndirectStateFlagsNV           indirectStateFlags;
+    uint32_t                         indexTypeCount;
+    const VkIndexType*               pIndexTypes;
+    const uint32_t*                  pIndexTypeValues;
+} VkIndirectCommandsLayoutTokenNV;
+
+typedef struct VkIndirectCommandsLayoutCreateInfoNV {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkIndirectCommandsLayoutUsageFlagsNV      flags;
+    VkPipelineBindPoint                       pipelineBindPoint;
+    uint32_t                                  tokenCount;
+    const VkIndirectCommandsLayoutTokenNV*    pTokens;
+    uint32_t                                  streamCount;
+    const uint32_t*                           pStreamStrides;
+} VkIndirectCommandsLayoutCreateInfoNV;
+
+typedef struct VkGeneratedCommandsInfoNV {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    VkPipelineBindPoint                  pipelineBindPoint;
+    VkPipeline                           pipeline;
+    VkIndirectCommandsLayoutNV           indirectCommandsLayout;
+    uint32_t                             streamCount;
+    const VkIndirectCommandsStreamNV*    pStreams;
+    uint32_t                             sequencesCount;
+    VkBuffer                             preprocessBuffer;
+    VkDeviceSize                         preprocessOffset;
+    VkDeviceSize                         preprocessSize;
+    VkBuffer                             sequencesCountBuffer;
+    VkDeviceSize                         sequencesCountOffset;
+    VkBuffer                             sequencesIndexBuffer;
+    VkDeviceSize                         sequencesIndexOffset;
+} VkGeneratedCommandsInfoNV;
+
+typedef struct VkGeneratedCommandsMemoryRequirementsInfoNV {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkPipelineBindPoint           pipelineBindPoint;
+    VkPipeline                    pipeline;
+    VkIndirectCommandsLayoutNV    indirectCommandsLayout;
+    uint32_t                      maxSequencesCount;
+} VkGeneratedCommandsMemoryRequirementsInfoNV;
+
+typedef void (VKAPI_PTR *PFN_vkGetGeneratedCommandsMemoryRequirementsNV)(VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo, VkMemoryRequirements2* pMemoryRequirements);
+typedef void (VKAPI_PTR *PFN_vkCmdPreprocessGeneratedCommandsNV)(VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdExecuteGeneratedCommandsNV)(VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoNV* pGeneratedCommandsInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdBindPipelineShaderGroupNV)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline, uint32_t groupIndex);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateIndirectCommandsLayoutNV)(VkDevice device, const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutNV* pIndirectCommandsLayout);
+typedef void (VKAPI_PTR *PFN_vkDestroyIndirectCommandsLayoutNV)(VkDevice device, VkIndirectCommandsLayoutNV indirectCommandsLayout, const VkAllocationCallbacks* pAllocator);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetGeneratedCommandsMemoryRequirementsNV(
+    VkDevice                                    device,
+    const VkGeneratedCommandsMemoryRequirementsInfoNV* pInfo,
+    VkMemoryRequirements2*                      pMemoryRequirements);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdPreprocessGeneratedCommandsNV(
+    VkCommandBuffer                             commandBuffer,
+    const VkGeneratedCommandsInfoNV*            pGeneratedCommandsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdExecuteGeneratedCommandsNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    isPreprocessed,
+    const VkGeneratedCommandsInfoNV*            pGeneratedCommandsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindPipelineShaderGroupNV(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipeline                                  pipeline,
+    uint32_t                                    groupIndex);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateIndirectCommandsLayoutNV(
+    VkDevice                                    device,
+    const VkIndirectCommandsLayoutCreateInfoNV* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkIndirectCommandsLayoutNV*                 pIndirectCommandsLayout);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyIndirectCommandsLayoutNV(
+    VkDevice                                    device,
+    VkIndirectCommandsLayoutNV                  indirectCommandsLayout,
+    const VkAllocationCallbacks*                pAllocator);
+#endif
+
+
+#define VK_NV_inherited_viewport_scissor 1
+#define VK_NV_INHERITED_VIEWPORT_SCISSOR_SPEC_VERSION 1
+#define VK_NV_INHERITED_VIEWPORT_SCISSOR_EXTENSION_NAME "VK_NV_inherited_viewport_scissor"
+typedef struct VkPhysicalDeviceInheritedViewportScissorFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           inheritedViewportScissor2D;
+} VkPhysicalDeviceInheritedViewportScissorFeaturesNV;
+
+typedef struct VkCommandBufferInheritanceViewportScissorInfoNV {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkBool32             viewportScissor2D;
+    uint32_t             viewportDepthCount;
+    const VkViewport*    pViewportDepths;
+} VkCommandBufferInheritanceViewportScissorInfoNV;
+
+
+
+#define VK_EXT_texel_buffer_alignment 1
+#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_SPEC_VERSION 1
+#define VK_EXT_TEXEL_BUFFER_ALIGNMENT_EXTENSION_NAME "VK_EXT_texel_buffer_alignment"
+typedef struct VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           texelBufferAlignment;
+} VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+
+typedef VkPhysicalDeviceTexelBufferAlignmentProperties VkPhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+
+
+
+#define VK_QCOM_render_pass_transform 1
+#define VK_QCOM_RENDER_PASS_TRANSFORM_SPEC_VERSION 3
+#define VK_QCOM_RENDER_PASS_TRANSFORM_EXTENSION_NAME "VK_QCOM_render_pass_transform"
+typedef struct VkRenderPassTransformBeginInfoQCOM {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkSurfaceTransformFlagBitsKHR    transform;
+} VkRenderPassTransformBeginInfoQCOM;
+
+typedef struct VkCommandBufferInheritanceRenderPassTransformInfoQCOM {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkSurfaceTransformFlagBitsKHR    transform;
+    VkRect2D                         renderArea;
+} VkCommandBufferInheritanceRenderPassTransformInfoQCOM;
+
+
+
+#define VK_EXT_device_memory_report 1
+#define VK_EXT_DEVICE_MEMORY_REPORT_SPEC_VERSION 2
+#define VK_EXT_DEVICE_MEMORY_REPORT_EXTENSION_NAME "VK_EXT_device_memory_report"
+
+typedef enum VkDeviceMemoryReportEventTypeEXT {
+    VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT = 0,
+    VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT = 1,
+    VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT = 2,
+    VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT = 3,
+    VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT = 4,
+    VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceMemoryReportEventTypeEXT;
+typedef VkFlags VkDeviceMemoryReportFlagsEXT;
+typedef struct VkPhysicalDeviceDeviceMemoryReportFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           deviceMemoryReport;
+} VkPhysicalDeviceDeviceMemoryReportFeaturesEXT;
+
+typedef struct VkDeviceMemoryReportCallbackDataEXT {
+    VkStructureType                     sType;
+    void*                               pNext;
+    VkDeviceMemoryReportFlagsEXT        flags;
+    VkDeviceMemoryReportEventTypeEXT    type;
+    uint64_t                            memoryObjectId;
+    VkDeviceSize                        size;
+    VkObjectType                        objectType;
+    uint64_t                            objectHandle;
+    uint32_t                            heapIndex;
+} VkDeviceMemoryReportCallbackDataEXT;
+
+typedef void (VKAPI_PTR *PFN_vkDeviceMemoryReportCallbackEXT)(
+    const VkDeviceMemoryReportCallbackDataEXT*  pCallbackData,
+    void*                                       pUserData);
+
+typedef struct VkDeviceDeviceMemoryReportCreateInfoEXT {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkDeviceMemoryReportFlagsEXT           flags;
+    PFN_vkDeviceMemoryReportCallbackEXT    pfnUserCallback;
+    void*                                  pUserData;
+} VkDeviceDeviceMemoryReportCreateInfoEXT;
+
+
+
+#define VK_EXT_acquire_drm_display 1
+#define VK_EXT_ACQUIRE_DRM_DISPLAY_SPEC_VERSION 1
+#define VK_EXT_ACQUIRE_DRM_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_drm_display"
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, VkDisplayKHR display);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDrmDisplayEXT)(VkPhysicalDevice physicalDevice, int32_t drmFd, uint32_t connectorId, VkDisplayKHR* display);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireDrmDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    int32_t                                     drmFd,
+    VkDisplayKHR                                display);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDrmDisplayEXT(
+    VkPhysicalDevice                            physicalDevice,
+    int32_t                                     drmFd,
+    uint32_t                                    connectorId,
+    VkDisplayKHR*                               display);
+#endif
+
+
+#define VK_EXT_robustness2 1
+#define VK_EXT_ROBUSTNESS_2_SPEC_VERSION  1
+#define VK_EXT_ROBUSTNESS_2_EXTENSION_NAME "VK_EXT_robustness2"
+typedef struct VkPhysicalDeviceRobustness2FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           robustBufferAccess2;
+    VkBool32           robustImageAccess2;
+    VkBool32           nullDescriptor;
+} VkPhysicalDeviceRobustness2FeaturesEXT;
+
+typedef struct VkPhysicalDeviceRobustness2PropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       robustStorageBufferAccessSizeAlignment;
+    VkDeviceSize       robustUniformBufferAccessSizeAlignment;
+} VkPhysicalDeviceRobustness2PropertiesEXT;
+
+
+
+#define VK_EXT_custom_border_color 1
+#define VK_EXT_CUSTOM_BORDER_COLOR_SPEC_VERSION 12
+#define VK_EXT_CUSTOM_BORDER_COLOR_EXTENSION_NAME "VK_EXT_custom_border_color"
+typedef struct VkSamplerCustomBorderColorCreateInfoEXT {
+    VkStructureType      sType;
+    const void*          pNext;
+    VkClearColorValue    customBorderColor;
+    VkFormat             format;
+} VkSamplerCustomBorderColorCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceCustomBorderColorPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxCustomBorderColorSamplers;
+} VkPhysicalDeviceCustomBorderColorPropertiesEXT;
+
+typedef struct VkPhysicalDeviceCustomBorderColorFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           customBorderColors;
+    VkBool32           customBorderColorWithoutFormat;
+} VkPhysicalDeviceCustomBorderColorFeaturesEXT;
+
+
+
+#define VK_GOOGLE_user_type 1
+#define VK_GOOGLE_USER_TYPE_SPEC_VERSION  1
+#define VK_GOOGLE_USER_TYPE_EXTENSION_NAME "VK_GOOGLE_user_type"
+
+
+#define VK_NV_present_barrier 1
+#define VK_NV_PRESENT_BARRIER_SPEC_VERSION 1
+#define VK_NV_PRESENT_BARRIER_EXTENSION_NAME "VK_NV_present_barrier"
+typedef struct VkPhysicalDevicePresentBarrierFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           presentBarrier;
+} VkPhysicalDevicePresentBarrierFeaturesNV;
+
+typedef struct VkSurfaceCapabilitiesPresentBarrierNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           presentBarrierSupported;
+} VkSurfaceCapabilitiesPresentBarrierNV;
+
+typedef struct VkSwapchainPresentBarrierCreateInfoNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           presentBarrierEnable;
+} VkSwapchainPresentBarrierCreateInfoNV;
+
+
+
+#define VK_EXT_private_data 1
+typedef VkPrivateDataSlot VkPrivateDataSlotEXT;
+
+#define VK_EXT_PRIVATE_DATA_SPEC_VERSION  1
+#define VK_EXT_PRIVATE_DATA_EXTENSION_NAME "VK_EXT_private_data"
+typedef VkPrivateDataSlotCreateFlags VkPrivateDataSlotCreateFlagsEXT;
+
+typedef VkPhysicalDevicePrivateDataFeatures VkPhysicalDevicePrivateDataFeaturesEXT;
+
+typedef VkDevicePrivateDataCreateInfo VkDevicePrivateDataCreateInfoEXT;
+
+typedef VkPrivateDataSlotCreateInfo VkPrivateDataSlotCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreatePrivateDataSlotEXT)(VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot);
+typedef void (VKAPI_PTR *PFN_vkDestroyPrivateDataSlotEXT)(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkSetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data);
+typedef void (VKAPI_PTR *PFN_vkGetPrivateDataEXT)(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreatePrivateDataSlotEXT(
+    VkDevice                                    device,
+    const VkPrivateDataSlotCreateInfo*          pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPrivateDataSlot*                          pPrivateDataSlot);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyPrivateDataSlotEXT(
+    VkDevice                                    device,
+    VkPrivateDataSlot                           privateDataSlot,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetPrivateDataEXT(
+    VkDevice                                    device,
+    VkObjectType                                objectType,
+    uint64_t                                    objectHandle,
+    VkPrivateDataSlot                           privateDataSlot,
+    uint64_t                                    data);
+
+VKAPI_ATTR void VKAPI_CALL vkGetPrivateDataEXT(
+    VkDevice                                    device,
+    VkObjectType                                objectType,
+    uint64_t                                    objectHandle,
+    VkPrivateDataSlot                           privateDataSlot,
+    uint64_t*                                   pData);
+#endif
+
+
+#define VK_EXT_pipeline_creation_cache_control 1
+#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_SPEC_VERSION 3
+#define VK_EXT_PIPELINE_CREATION_CACHE_CONTROL_EXTENSION_NAME "VK_EXT_pipeline_creation_cache_control"
+typedef VkPhysicalDevicePipelineCreationCacheControlFeatures VkPhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+
+
+
+#define VK_NV_device_diagnostics_config 1
+#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_SPEC_VERSION 2
+#define VK_NV_DEVICE_DIAGNOSTICS_CONFIG_EXTENSION_NAME "VK_NV_device_diagnostics_config"
+
+typedef enum VkDeviceDiagnosticsConfigFlagBitsNV {
+    VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV = 0x00000001,
+    VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV = 0x00000002,
+    VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV = 0x00000004,
+    VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV = 0x00000008,
+    VK_DEVICE_DIAGNOSTICS_CONFIG_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkDeviceDiagnosticsConfigFlagBitsNV;
+typedef VkFlags VkDeviceDiagnosticsConfigFlagsNV;
+typedef struct VkPhysicalDeviceDiagnosticsConfigFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           diagnosticsConfig;
+} VkPhysicalDeviceDiagnosticsConfigFeaturesNV;
+
+typedef struct VkDeviceDiagnosticsConfigCreateInfoNV {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkDeviceDiagnosticsConfigFlagsNV    flags;
+} VkDeviceDiagnosticsConfigCreateInfoNV;
+
+
+
+#define VK_QCOM_render_pass_store_ops 1
+#define VK_QCOM_RENDER_PASS_STORE_OPS_SPEC_VERSION 2
+#define VK_QCOM_RENDER_PASS_STORE_OPS_EXTENSION_NAME "VK_QCOM_render_pass_store_ops"
+
+
+#define VK_EXT_descriptor_buffer 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkAccelerationStructureKHR)
+#define VK_EXT_DESCRIPTOR_BUFFER_SPEC_VERSION 1
+#define VK_EXT_DESCRIPTOR_BUFFER_EXTENSION_NAME "VK_EXT_descriptor_buffer"
+typedef struct VkPhysicalDeviceDescriptorBufferPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           combinedImageSamplerDescriptorSingleArray;
+    VkBool32           bufferlessPushDescriptors;
+    VkBool32           allowSamplerImageViewPostSubmitCreation;
+    VkDeviceSize       descriptorBufferOffsetAlignment;
+    uint32_t           maxDescriptorBufferBindings;
+    uint32_t           maxResourceDescriptorBufferBindings;
+    uint32_t           maxSamplerDescriptorBufferBindings;
+    uint32_t           maxEmbeddedImmutableSamplerBindings;
+    uint32_t           maxEmbeddedImmutableSamplers;
+    size_t             bufferCaptureReplayDescriptorDataSize;
+    size_t             imageCaptureReplayDescriptorDataSize;
+    size_t             imageViewCaptureReplayDescriptorDataSize;
+    size_t             samplerCaptureReplayDescriptorDataSize;
+    size_t             accelerationStructureCaptureReplayDescriptorDataSize;
+    size_t             samplerDescriptorSize;
+    size_t             combinedImageSamplerDescriptorSize;
+    size_t             sampledImageDescriptorSize;
+    size_t             storageImageDescriptorSize;
+    size_t             uniformTexelBufferDescriptorSize;
+    size_t             robustUniformTexelBufferDescriptorSize;
+    size_t             storageTexelBufferDescriptorSize;
+    size_t             robustStorageTexelBufferDescriptorSize;
+    size_t             uniformBufferDescriptorSize;
+    size_t             robustUniformBufferDescriptorSize;
+    size_t             storageBufferDescriptorSize;
+    size_t             robustStorageBufferDescriptorSize;
+    size_t             inputAttachmentDescriptorSize;
+    size_t             accelerationStructureDescriptorSize;
+    VkDeviceSize       maxSamplerDescriptorBufferRange;
+    VkDeviceSize       maxResourceDescriptorBufferRange;
+    VkDeviceSize       samplerDescriptorBufferAddressSpaceSize;
+    VkDeviceSize       resourceDescriptorBufferAddressSpaceSize;
+    VkDeviceSize       descriptorBufferAddressSpaceSize;
+} VkPhysicalDeviceDescriptorBufferPropertiesEXT;
+
+typedef struct VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    size_t             combinedImageSamplerDensityMapDescriptorSize;
+} VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
+
+typedef struct VkPhysicalDeviceDescriptorBufferFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           descriptorBuffer;
+    VkBool32           descriptorBufferCaptureReplay;
+    VkBool32           descriptorBufferImageLayoutIgnored;
+    VkBool32           descriptorBufferPushDescriptors;
+} VkPhysicalDeviceDescriptorBufferFeaturesEXT;
+
+typedef struct VkDescriptorAddressInfoEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceAddress    address;
+    VkDeviceSize       range;
+    VkFormat           format;
+} VkDescriptorAddressInfoEXT;
+
+typedef struct VkDescriptorBufferBindingInfoEXT {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkDeviceAddress       address;
+    VkBufferUsageFlags    usage;
+} VkDescriptorBufferBindingInfoEXT;
+
+typedef struct VkDescriptorBufferBindingPushDescriptorBufferHandleEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBuffer           buffer;
+} VkDescriptorBufferBindingPushDescriptorBufferHandleEXT;
+
+typedef union VkDescriptorDataEXT {
+    const VkSampler*                     pSampler;
+    const VkDescriptorImageInfo*         pCombinedImageSampler;
+    const VkDescriptorImageInfo*         pInputAttachmentImage;
+    const VkDescriptorImageInfo*         pSampledImage;
+    const VkDescriptorImageInfo*         pStorageImage;
+    const VkDescriptorAddressInfoEXT*    pUniformTexelBuffer;
+    const VkDescriptorAddressInfoEXT*    pStorageTexelBuffer;
+    const VkDescriptorAddressInfoEXT*    pUniformBuffer;
+    const VkDescriptorAddressInfoEXT*    pStorageBuffer;
+    VkDeviceAddress                      accelerationStructure;
+} VkDescriptorDataEXT;
+
+typedef struct VkDescriptorGetInfoEXT {
+    VkStructureType        sType;
+    const void*            pNext;
+    VkDescriptorType       type;
+    VkDescriptorDataEXT    data;
+} VkDescriptorGetInfoEXT;
+
+typedef struct VkBufferCaptureDescriptorDataInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBuffer           buffer;
+} VkBufferCaptureDescriptorDataInfoEXT;
+
+typedef struct VkImageCaptureDescriptorDataInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImage            image;
+} VkImageCaptureDescriptorDataInfoEXT;
+
+typedef struct VkImageViewCaptureDescriptorDataInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkImageView        imageView;
+} VkImageViewCaptureDescriptorDataInfoEXT;
+
+typedef struct VkSamplerCaptureDescriptorDataInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkSampler          sampler;
+} VkSamplerCaptureDescriptorDataInfoEXT;
+
+typedef struct VkOpaqueCaptureDescriptorDataCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    const void*        opaqueCaptureDescriptorData;
+} VkOpaqueCaptureDescriptorDataCreateInfoEXT;
+
+typedef struct VkAccelerationStructureCaptureDescriptorDataInfoEXT {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkAccelerationStructureKHR    accelerationStructure;
+    VkAccelerationStructureNV     accelerationStructureNV;
+} VkAccelerationStructureCaptureDescriptorDataInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutSizeEXT)(VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize* pLayoutSizeInBytes);
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutBindingOffsetEXT)(VkDevice device, VkDescriptorSetLayout layout, uint32_t binding, VkDeviceSize* pOffset);
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorEXT)(VkDevice device, const VkDescriptorGetInfoEXT* pDescriptorInfo, size_t dataSize, void* pDescriptor);
+typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBuffersEXT)(VkCommandBuffer commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT* pBindingInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDescriptorBufferOffsetsEXT)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t* pBufferIndices, const VkDeviceSize* pOffsets);
+typedef void (VKAPI_PTR *PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT)(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set);
+typedef VkResult (VKAPI_PTR *PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT* pInfo, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetImageOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkImageCaptureDescriptorDataInfoEXT* pInfo, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, void* pData);
+typedef VkResult (VKAPI_PTR *PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT)(VkDevice device, const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, void* pData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutSizeEXT(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       layout,
+    VkDeviceSize*                               pLayoutSizeInBytes);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutBindingOffsetEXT(
+    VkDevice                                    device,
+    VkDescriptorSetLayout                       layout,
+    uint32_t                                    binding,
+    VkDeviceSize*                               pOffset);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorEXT(
+    VkDevice                                    device,
+    const VkDescriptorGetInfoEXT*               pDescriptorInfo,
+    size_t                                      dataSize,
+    void*                                       pDescriptor);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBuffersEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    bufferCount,
+    const VkDescriptorBufferBindingInfoEXT*     pBindingInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDescriptorBufferOffsetsEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    firstSet,
+    uint32_t                                    setCount,
+    const uint32_t*                             pBufferIndices,
+    const VkDeviceSize*                         pOffsets);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBindDescriptorBufferEmbeddedSamplersEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkPipelineBindPoint                         pipelineBindPoint,
+    VkPipelineLayout                            layout,
+    uint32_t                                    set);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferOpaqueCaptureDescriptorDataEXT(
+    VkDevice                                    device,
+    const VkBufferCaptureDescriptorDataInfoEXT* pInfo,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetImageOpaqueCaptureDescriptorDataEXT(
+    VkDevice                                    device,
+    const VkImageCaptureDescriptorDataInfoEXT*  pInfo,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetImageViewOpaqueCaptureDescriptorDataEXT(
+    VkDevice                                    device,
+    const VkImageViewCaptureDescriptorDataInfoEXT* pInfo,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSamplerOpaqueCaptureDescriptorDataEXT(
+    VkDevice                                    device,
+    const VkSamplerCaptureDescriptorDataInfoEXT* pInfo,
+    void*                                       pData);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(
+    VkDevice                                    device,
+    const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo,
+    void*                                       pData);
+#endif
+
+
+#define VK_EXT_graphics_pipeline_library 1
+#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_SPEC_VERSION 1
+#define VK_EXT_GRAPHICS_PIPELINE_LIBRARY_EXTENSION_NAME "VK_EXT_graphics_pipeline_library"
+
+typedef enum VkGraphicsPipelineLibraryFlagBitsEXT {
+    VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT = 0x00000001,
+    VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT = 0x00000002,
+    VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT = 0x00000004,
+    VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT = 0x00000008,
+    VK_GRAPHICS_PIPELINE_LIBRARY_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkGraphicsPipelineLibraryFlagBitsEXT;
+typedef VkFlags VkGraphicsPipelineLibraryFlagsEXT;
+typedef struct VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           graphicsPipelineLibrary;
+} VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+
+typedef struct VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           graphicsPipelineLibraryFastLinking;
+    VkBool32           graphicsPipelineLibraryIndependentInterpolationDecoration;
+} VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+
+typedef struct VkGraphicsPipelineLibraryCreateInfoEXT {
+    VkStructureType                      sType;
+    void*                                pNext;
+    VkGraphicsPipelineLibraryFlagsEXT    flags;
+} VkGraphicsPipelineLibraryCreateInfoEXT;
+
+
+
+#define VK_AMD_shader_early_and_late_fragment_tests 1
+#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_SPEC_VERSION 1
+#define VK_AMD_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_EXTENSION_NAME "VK_AMD_shader_early_and_late_fragment_tests"
+typedef struct VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderEarlyAndLateFragmentTests;
+} VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+
+
+
+#define VK_NV_fragment_shading_rate_enums 1
+#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_SPEC_VERSION 1
+#define VK_NV_FRAGMENT_SHADING_RATE_ENUMS_EXTENSION_NAME "VK_NV_fragment_shading_rate_enums"
+
+typedef enum VkFragmentShadingRateTypeNV {
+    VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV = 0,
+    VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV = 1,
+    VK_FRAGMENT_SHADING_RATE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkFragmentShadingRateTypeNV;
+
+typedef enum VkFragmentShadingRateNV {
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV = 0,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV = 1,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV = 4,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV = 5,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV = 6,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV = 9,
+    VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV = 10,
+    VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV = 11,
+    VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV = 12,
+    VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV = 13,
+    VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV = 14,
+    VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV = 15,
+    VK_FRAGMENT_SHADING_RATE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkFragmentShadingRateNV;
+typedef struct VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentShadingRateEnums;
+    VkBool32           supersampleFragmentShadingRates;
+    VkBool32           noInvocationFragmentShadingRates;
+} VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
+
+typedef struct VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV {
+    VkStructureType          sType;
+    void*                    pNext;
+    VkSampleCountFlagBits    maxFragmentShadingRateInvocationCount;
+} VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+
+typedef struct VkPipelineFragmentShadingRateEnumStateCreateInfoNV {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkFragmentShadingRateTypeNV           shadingRateType;
+    VkFragmentShadingRateNV               shadingRate;
+    VkFragmentShadingRateCombinerOpKHR    combinerOps[2];
+} VkPipelineFragmentShadingRateEnumStateCreateInfoNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetFragmentShadingRateEnumNV)(VkCommandBuffer           commandBuffer, VkFragmentShadingRateNV                     shadingRate, const VkFragmentShadingRateCombinerOpKHR    combinerOps[2]);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetFragmentShadingRateEnumNV(
+    VkCommandBuffer                             commandBuffer,
+    VkFragmentShadingRateNV                     shadingRate,
+    const VkFragmentShadingRateCombinerOpKHR    combinerOps[2]);
+#endif
+
+
+#define VK_NV_ray_tracing_motion_blur 1
+#define VK_NV_RAY_TRACING_MOTION_BLUR_SPEC_VERSION 1
+#define VK_NV_RAY_TRACING_MOTION_BLUR_EXTENSION_NAME "VK_NV_ray_tracing_motion_blur"
+
+typedef enum VkAccelerationStructureMotionInstanceTypeNV {
+    VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV = 0,
+    VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV = 1,
+    VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV = 2,
+    VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkAccelerationStructureMotionInstanceTypeNV;
+typedef VkFlags VkAccelerationStructureMotionInfoFlagsNV;
+typedef VkFlags VkAccelerationStructureMotionInstanceFlagsNV;
+typedef union VkDeviceOrHostAddressConstKHR {
+    VkDeviceAddress    deviceAddress;
+    const void*        hostAddress;
+} VkDeviceOrHostAddressConstKHR;
+
+typedef struct VkAccelerationStructureGeometryMotionTrianglesDataNV {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDeviceOrHostAddressConstKHR    vertexData;
+} VkAccelerationStructureGeometryMotionTrianglesDataNV;
+
+typedef struct VkAccelerationStructureMotionInfoNV {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    uint32_t                                    maxInstances;
+    VkAccelerationStructureMotionInfoFlagsNV    flags;
+} VkAccelerationStructureMotionInfoNV;
+
+typedef struct VkAccelerationStructureMatrixMotionInstanceNV {
+    VkTransformMatrixKHR          transformT0;
+    VkTransformMatrixKHR          transformT1;
+    uint32_t                      instanceCustomIndex:24;
+    uint32_t                      mask:8;
+    uint32_t                      instanceShaderBindingTableRecordOffset:24;
+    VkGeometryInstanceFlagsKHR    flags:8;
+    uint64_t                      accelerationStructureReference;
+} VkAccelerationStructureMatrixMotionInstanceNV;
+
+typedef struct VkSRTDataNV {
+    float    sx;
+    float    a;
+    float    b;
+    float    pvx;
+    float    sy;
+    float    c;
+    float    pvy;
+    float    sz;
+    float    pvz;
+    float    qx;
+    float    qy;
+    float    qz;
+    float    qw;
+    float    tx;
+    float    ty;
+    float    tz;
+} VkSRTDataNV;
+
+typedef struct VkAccelerationStructureSRTMotionInstanceNV {
+    VkSRTDataNV                   transformT0;
+    VkSRTDataNV                   transformT1;
+    uint32_t                      instanceCustomIndex:24;
+    uint32_t                      mask:8;
+    uint32_t                      instanceShaderBindingTableRecordOffset:24;
+    VkGeometryInstanceFlagsKHR    flags:8;
+    uint64_t                      accelerationStructureReference;
+} VkAccelerationStructureSRTMotionInstanceNV;
+
+typedef union VkAccelerationStructureMotionInstanceDataNV {
+    VkAccelerationStructureInstanceKHR               staticInstance;
+    VkAccelerationStructureMatrixMotionInstanceNV    matrixMotionInstance;
+    VkAccelerationStructureSRTMotionInstanceNV       srtMotionInstance;
+} VkAccelerationStructureMotionInstanceDataNV;
+
+typedef struct VkAccelerationStructureMotionInstanceNV {
+    VkAccelerationStructureMotionInstanceTypeNV     type;
+    VkAccelerationStructureMotionInstanceFlagsNV    flags;
+    VkAccelerationStructureMotionInstanceDataNV     data;
+} VkAccelerationStructureMotionInstanceNV;
+
+typedef struct VkPhysicalDeviceRayTracingMotionBlurFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rayTracingMotionBlur;
+    VkBool32           rayTracingMotionBlurPipelineTraceRaysIndirect;
+} VkPhysicalDeviceRayTracingMotionBlurFeaturesNV;
+
+
+
+#define VK_EXT_ycbcr_2plane_444_formats 1
+#define VK_EXT_YCBCR_2PLANE_444_FORMATS_SPEC_VERSION 1
+#define VK_EXT_YCBCR_2PLANE_444_FORMATS_EXTENSION_NAME "VK_EXT_ycbcr_2plane_444_formats"
+typedef struct VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           ycbcr2plane444Formats;
+} VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+
+
+
+#define VK_EXT_fragment_density_map2 1
+#define VK_EXT_FRAGMENT_DENSITY_MAP_2_SPEC_VERSION 1
+#define VK_EXT_FRAGMENT_DENSITY_MAP_2_EXTENSION_NAME "VK_EXT_fragment_density_map2"
+typedef struct VkPhysicalDeviceFragmentDensityMap2FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentDensityMapDeferred;
+} VkPhysicalDeviceFragmentDensityMap2FeaturesEXT;
+
+typedef struct VkPhysicalDeviceFragmentDensityMap2PropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           subsampledLoads;
+    VkBool32           subsampledCoarseReconstructionEarlyAccess;
+    uint32_t           maxSubsampledArrayLayers;
+    uint32_t           maxDescriptorSetSubsampledSamplers;
+} VkPhysicalDeviceFragmentDensityMap2PropertiesEXT;
+
+
+
+#define VK_QCOM_rotated_copy_commands 1
+#define VK_QCOM_ROTATED_COPY_COMMANDS_SPEC_VERSION 1
+#define VK_QCOM_ROTATED_COPY_COMMANDS_EXTENSION_NAME "VK_QCOM_rotated_copy_commands"
+typedef struct VkCopyCommandTransformInfoQCOM {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkSurfaceTransformFlagBitsKHR    transform;
+} VkCopyCommandTransformInfoQCOM;
+
+
+
+#define VK_EXT_image_robustness 1
+#define VK_EXT_IMAGE_ROBUSTNESS_SPEC_VERSION 1
+#define VK_EXT_IMAGE_ROBUSTNESS_EXTENSION_NAME "VK_EXT_image_robustness"
+typedef VkPhysicalDeviceImageRobustnessFeatures VkPhysicalDeviceImageRobustnessFeaturesEXT;
+
+
+
+#define VK_EXT_image_compression_control 1
+#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SPEC_VERSION 1
+#define VK_EXT_IMAGE_COMPRESSION_CONTROL_EXTENSION_NAME "VK_EXT_image_compression_control"
+
+typedef enum VkImageCompressionFlagBitsEXT {
+    VK_IMAGE_COMPRESSION_DEFAULT_EXT = 0,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT = 0x00000001,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT = 0x00000002,
+    VK_IMAGE_COMPRESSION_DISABLED_EXT = 0x00000004,
+    VK_IMAGE_COMPRESSION_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkImageCompressionFlagBitsEXT;
+typedef VkFlags VkImageCompressionFlagsEXT;
+
+typedef enum VkImageCompressionFixedRateFlagBitsEXT {
+    VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT = 0,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT = 0x00000001,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT = 0x00000002,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT = 0x00000004,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT = 0x00000008,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT = 0x00000010,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT = 0x00000020,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT = 0x00000040,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT = 0x00000080,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT = 0x00000100,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT = 0x00000200,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT = 0x00000400,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT = 0x00000800,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT = 0x00001000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT = 0x00002000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT = 0x00004000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT = 0x00008000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT = 0x00010000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT = 0x00020000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT = 0x00040000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT = 0x00080000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT = 0x00100000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT = 0x00200000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT = 0x00400000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT = 0x00800000,
+    VK_IMAGE_COMPRESSION_FIXED_RATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkImageCompressionFixedRateFlagBitsEXT;
+typedef VkFlags VkImageCompressionFixedRateFlagsEXT;
+typedef struct VkPhysicalDeviceImageCompressionControlFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           imageCompressionControl;
+} VkPhysicalDeviceImageCompressionControlFeaturesEXT;
+
+typedef struct VkImageCompressionControlEXT {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkImageCompressionFlagsEXT              flags;
+    uint32_t                                compressionControlPlaneCount;
+    VkImageCompressionFixedRateFlagsEXT*    pFixedRateFlags;
+} VkImageCompressionControlEXT;
+
+typedef struct VkSubresourceLayout2EXT {
+    VkStructureType        sType;
+    void*                  pNext;
+    VkSubresourceLayout    subresourceLayout;
+} VkSubresourceLayout2EXT;
+
+typedef struct VkImageSubresource2EXT {
+    VkStructureType       sType;
+    void*                 pNext;
+    VkImageSubresource    imageSubresource;
+} VkImageSubresource2EXT;
+
+typedef struct VkImageCompressionPropertiesEXT {
+    VkStructureType                        sType;
+    void*                                  pNext;
+    VkImageCompressionFlagsEXT             imageCompressionFlags;
+    VkImageCompressionFixedRateFlagsEXT    imageCompressionFixedRateFlags;
+} VkImageCompressionPropertiesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkGetImageSubresourceLayout2EXT)(VkDevice device, VkImage image, const VkImageSubresource2EXT* pSubresource, VkSubresourceLayout2EXT* pLayout);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetImageSubresourceLayout2EXT(
+    VkDevice                                    device,
+    VkImage                                     image,
+    const VkImageSubresource2EXT*               pSubresource,
+    VkSubresourceLayout2EXT*                    pLayout);
+#endif
+
+
+#define VK_EXT_attachment_feedback_loop_layout 1
+#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_SPEC_VERSION 2
+#define VK_EXT_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_EXTENSION_NAME "VK_EXT_attachment_feedback_loop_layout"
+typedef struct VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           attachmentFeedbackLoopLayout;
+} VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
+
+
+#define VK_EXT_4444_formats 1
+#define VK_EXT_4444_FORMATS_SPEC_VERSION  1
+#define VK_EXT_4444_FORMATS_EXTENSION_NAME "VK_EXT_4444_formats"
+typedef struct VkPhysicalDevice4444FormatsFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           formatA4R4G4B4;
+    VkBool32           formatA4B4G4R4;
+} VkPhysicalDevice4444FormatsFeaturesEXT;
+
+
+
+#define VK_EXT_device_fault 1
+#define VK_EXT_DEVICE_FAULT_SPEC_VERSION  1
+#define VK_EXT_DEVICE_FAULT_EXTENSION_NAME "VK_EXT_device_fault"
+
+typedef enum VkDeviceFaultAddressTypeEXT {
+    VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT = 0,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT = 1,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT = 2,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT = 3,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT = 4,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT = 5,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT = 6,
+    VK_DEVICE_FAULT_ADDRESS_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceFaultAddressTypeEXT;
+
+typedef enum VkDeviceFaultVendorBinaryHeaderVersionEXT {
+    VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT = 1,
+    VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceFaultVendorBinaryHeaderVersionEXT;
+typedef struct VkPhysicalDeviceFaultFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           deviceFault;
+    VkBool32           deviceFaultVendorBinary;
+} VkPhysicalDeviceFaultFeaturesEXT;
+
+typedef struct VkDeviceFaultCountsEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           addressInfoCount;
+    uint32_t           vendorInfoCount;
+    VkDeviceSize       vendorBinarySize;
+} VkDeviceFaultCountsEXT;
+
+typedef struct VkDeviceFaultAddressInfoEXT {
+    VkDeviceFaultAddressTypeEXT    addressType;
+    VkDeviceAddress                reportedAddress;
+    VkDeviceSize                   addressPrecision;
+} VkDeviceFaultAddressInfoEXT;
+
+typedef struct VkDeviceFaultVendorInfoEXT {
+    char        description[VK_MAX_DESCRIPTION_SIZE];
+    uint64_t    vendorFaultCode;
+    uint64_t    vendorFaultData;
+} VkDeviceFaultVendorInfoEXT;
+
+typedef struct VkDeviceFaultInfoEXT {
+    VkStructureType                 sType;
+    void*                           pNext;
+    char                            description[VK_MAX_DESCRIPTION_SIZE];
+    VkDeviceFaultAddressInfoEXT*    pAddressInfos;
+    VkDeviceFaultVendorInfoEXT*     pVendorInfos;
+    void*                           pVendorBinaryData;
+} VkDeviceFaultInfoEXT;
+
+typedef struct VkDeviceFaultVendorBinaryHeaderVersionOneEXT {
+    uint32_t                                     headerSize;
+    VkDeviceFaultVendorBinaryHeaderVersionEXT    headerVersion;
+    uint32_t                                     vendorID;
+    uint32_t                                     deviceID;
+    uint32_t                                     driverVersion;
+    uint8_t                                      pipelineCacheUUID[VK_UUID_SIZE];
+    uint32_t                                     applicationNameOffset;
+    uint32_t                                     applicationVersion;
+    uint32_t                                     engineNameOffset;
+} VkDeviceFaultVendorBinaryHeaderVersionOneEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceFaultInfoEXT)(VkDevice device, VkDeviceFaultCountsEXT* pFaultCounts, VkDeviceFaultInfoEXT* pFaultInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceFaultInfoEXT(
+    VkDevice                                    device,
+    VkDeviceFaultCountsEXT*                     pFaultCounts,
+    VkDeviceFaultInfoEXT*                       pFaultInfo);
+#endif
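+/*
+    Illustrative usage sketch (assumes the deviceFault feature was enabled,
+    `device` just returned VK_ERROR_DEVICE_LOST, and allocation is left to
+    the caller): the usual enumerate-then-fetch pattern applies.
+
+        VkDeviceFaultCountsEXT counts = { VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT };
+        vkGetDeviceFaultInfoEXT(device, &counts, NULL);
+
+        VkDeviceFaultInfoEXT info = { VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT };
+        info.pAddressInfos = malloc(counts.addressInfoCount * sizeof(VkDeviceFaultAddressInfoEXT));
+        info.pVendorInfos  = malloc(counts.vendorInfoCount  * sizeof(VkDeviceFaultVendorInfoEXT));
+        vkGetDeviceFaultInfoEXT(device, &counts, &info);
+        // info.description plus the address/vendor records describe the fault.
+*/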
+
+
+#define VK_ARM_rasterization_order_attachment_access 1
+#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1
+#define VK_ARM_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_ARM_rasterization_order_attachment_access"
+typedef struct VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rasterizationOrderColorAttachmentAccess;
+    VkBool32           rasterizationOrderDepthAttachmentAccess;
+    VkBool32           rasterizationOrderStencilAttachmentAccess;
+} VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+
+typedef VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
+
+
+
+#define VK_EXT_rgba10x6_formats 1
+#define VK_EXT_RGBA10X6_FORMATS_SPEC_VERSION 1
+#define VK_EXT_RGBA10X6_FORMATS_EXTENSION_NAME "VK_EXT_rgba10x6_formats"
+typedef struct VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           formatRgba10x6WithoutYCbCrSampler;
+} VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT;
+
+
+
+#define VK_VALVE_mutable_descriptor_type 1
+#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1
+#define VK_VALVE_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_VALVE_mutable_descriptor_type"
+typedef struct VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           mutableDescriptorType;
+} VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+
+typedef VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT VkPhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
+
+typedef struct VkMutableDescriptorTypeListEXT {
+    uint32_t                   descriptorTypeCount;
+    const VkDescriptorType*    pDescriptorTypes;
+} VkMutableDescriptorTypeListEXT;
+
+typedef VkMutableDescriptorTypeListEXT VkMutableDescriptorTypeListVALVE;
+
+typedef struct VkMutableDescriptorTypeCreateInfoEXT {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    uint32_t                                 mutableDescriptorTypeListCount;
+    const VkMutableDescriptorTypeListEXT*    pMutableDescriptorTypeLists;
+} VkMutableDescriptorTypeCreateInfoEXT;
+
+typedef VkMutableDescriptorTypeCreateInfoEXT VkMutableDescriptorTypeCreateInfoVALVE;
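+/*
+    Illustrative usage sketch (assumes a caller-filled
+    VkDescriptorSetLayoutCreateInfo `layoutCreateInfo` whose single binding
+    uses VK_DESCRIPTOR_TYPE_MUTABLE_EXT, plus `device` and `layout`): the
+    create-info below lists the concrete types that binding may hold.
+
+        VkDescriptorType allowed[2] = {
+            VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE, VK_DESCRIPTOR_TYPE_STORAGE_IMAGE };
+        VkMutableDescriptorTypeListEXT list = { 2, allowed };
+        VkMutableDescriptorTypeCreateInfoEXT mutableInfo = {
+            VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT, NULL, 1, &list };
+        layoutCreateInfo.pNext = &mutableInfo;  // one list entry per binding
+        vkCreateDescriptorSetLayout(device, &layoutCreateInfo, NULL, &layout);
+*/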
+
+
+
+#define VK_EXT_vertex_input_dynamic_state 1
+#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_SPEC_VERSION 2
+#define VK_EXT_VERTEX_INPUT_DYNAMIC_STATE_EXTENSION_NAME "VK_EXT_vertex_input_dynamic_state"
+typedef struct VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           vertexInputDynamicState;
+} VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+
+typedef struct VkVertexInputBindingDescription2EXT {
+    VkStructureType      sType;
+    void*                pNext;
+    uint32_t             binding;
+    uint32_t             stride;
+    VkVertexInputRate    inputRate;
+    uint32_t             divisor;
+} VkVertexInputBindingDescription2EXT;
+
+typedef struct VkVertexInputAttributeDescription2EXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           location;
+    uint32_t           binding;
+    VkFormat           format;
+    uint32_t           offset;
+} VkVertexInputAttributeDescription2EXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetVertexInputEXT)(VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetVertexInputEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    vertexBindingDescriptionCount,
+    const VkVertexInputBindingDescription2EXT*  pVertexBindingDescriptions,
+    uint32_t                                    vertexAttributeDescriptionCount,
+    const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions);
+#endif
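+/*
+    Illustrative usage sketch (assumes the vertexInputDynamicState feature,
+    a pipeline created with VK_DYNAMIC_STATE_VERTEX_INPUT_EXT, and `cmd` in
+    the recording state): a single vec3 position stream described at record
+    time instead of at pipeline creation.
+
+        VkVertexInputBindingDescription2EXT binding = {
+            VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT, NULL,
+            0, sizeof(float) * 3, VK_VERTEX_INPUT_RATE_VERTEX, 1 };
+        VkVertexInputAttributeDescription2EXT attribute = {
+            VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT, NULL,
+            0, 0, VK_FORMAT_R32G32B32_SFLOAT, 0 };
+        vkCmdSetVertexInputEXT(cmd, 1, &binding, 1, &attribute);
+*/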
+
+
+#define VK_EXT_physical_device_drm 1
+#define VK_EXT_PHYSICAL_DEVICE_DRM_SPEC_VERSION 1
+#define VK_EXT_PHYSICAL_DEVICE_DRM_EXTENSION_NAME "VK_EXT_physical_device_drm"
+typedef struct VkPhysicalDeviceDrmPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           hasPrimary;
+    VkBool32           hasRender;
+    int64_t            primaryMajor;
+    int64_t            primaryMinor;
+    int64_t            renderMajor;
+    int64_t            renderMinor;
+} VkPhysicalDeviceDrmPropertiesEXT;
+
+
+
+#define VK_EXT_device_address_binding_report 1
+#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_SPEC_VERSION 1
+#define VK_EXT_DEVICE_ADDRESS_BINDING_REPORT_EXTENSION_NAME "VK_EXT_device_address_binding_report"
+
+typedef enum VkDeviceAddressBindingTypeEXT {
+    VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT = 0,
+    VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT = 1,
+    VK_DEVICE_ADDRESS_BINDING_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceAddressBindingTypeEXT;
+
+typedef enum VkDeviceAddressBindingFlagBitsEXT {
+    VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT = 0x00000001,
+    VK_DEVICE_ADDRESS_BINDING_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkDeviceAddressBindingFlagBitsEXT;
+typedef VkFlags VkDeviceAddressBindingFlagsEXT;
+typedef struct VkPhysicalDeviceAddressBindingReportFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           reportAddressBinding;
+} VkPhysicalDeviceAddressBindingReportFeaturesEXT;
+
+typedef struct VkDeviceAddressBindingCallbackDataEXT {
+    VkStructureType                   sType;
+    void*                             pNext;
+    VkDeviceAddressBindingFlagsEXT    flags;
+    VkDeviceAddress                   baseAddress;
+    VkDeviceSize                      size;
+    VkDeviceAddressBindingTypeEXT     bindingType;
+} VkDeviceAddressBindingCallbackDataEXT;
+
+
+
+#define VK_EXT_depth_clip_control 1
+#define VK_EXT_DEPTH_CLIP_CONTROL_SPEC_VERSION 1
+#define VK_EXT_DEPTH_CLIP_CONTROL_EXTENSION_NAME "VK_EXT_depth_clip_control"
+typedef struct VkPhysicalDeviceDepthClipControlFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           depthClipControl;
+} VkPhysicalDeviceDepthClipControlFeaturesEXT;
+
+typedef struct VkPipelineViewportDepthClipControlCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           negativeOneToOne;
+} VkPipelineViewportDepthClipControlCreateInfoEXT;
+
+
+
+#define VK_EXT_primitive_topology_list_restart 1
+#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_SPEC_VERSION 1
+#define VK_EXT_PRIMITIVE_TOPOLOGY_LIST_RESTART_EXTENSION_NAME "VK_EXT_primitive_topology_list_restart"
+typedef struct VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           primitiveTopologyListRestart;
+    VkBool32           primitiveTopologyPatchListRestart;
+} VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+
+
+
+#define VK_HUAWEI_subpass_shading 1
+#define VK_HUAWEI_SUBPASS_SHADING_SPEC_VERSION 2
+#define VK_HUAWEI_SUBPASS_SHADING_EXTENSION_NAME "VK_HUAWEI_subpass_shading"
+typedef struct VkSubpassShadingPipelineCreateInfoHUAWEI {
+    VkStructureType    sType;
+    void*              pNext;
+    VkRenderPass       renderPass;
+    uint32_t           subpass;
+} VkSubpassShadingPipelineCreateInfoHUAWEI;
+
+typedef struct VkPhysicalDeviceSubpassShadingFeaturesHUAWEI {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           subpassShading;
+} VkPhysicalDeviceSubpassShadingFeaturesHUAWEI;
+
+typedef struct VkPhysicalDeviceSubpassShadingPropertiesHUAWEI {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxSubpassShadingWorkgroupSizeAspectRatio;
+} VkPhysicalDeviceSubpassShadingPropertiesHUAWEI;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI)(VkDevice device, VkRenderPass renderpass, VkExtent2D* pMaxWorkgroupSize);
+typedef void (VKAPI_PTR *PFN_vkCmdSubpassShadingHUAWEI)(VkCommandBuffer commandBuffer);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(
+    VkDevice                                    device,
+    VkRenderPass                                renderpass,
+    VkExtent2D*                                 pMaxWorkgroupSize);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSubpassShadingHUAWEI(
+    VkCommandBuffer                             commandBuffer);
+#endif
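+/*
+    Illustrative usage sketch (assumes `device` and a caller-created
+    `renderPass`): query the largest workgroup size a subpass shading
+    pipeline may use with that render pass.
+
+        VkExtent2D maxWorkgroupSize;
+        vkGetDeviceSubpassShadingMaxWorkgroupSizeHUAWEI(device, renderPass, &maxWorkgroupSize);
+*/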
+
+
+#define VK_HUAWEI_invocation_mask 1
+#define VK_HUAWEI_INVOCATION_MASK_SPEC_VERSION 1
+#define VK_HUAWEI_INVOCATION_MASK_EXTENSION_NAME "VK_HUAWEI_invocation_mask"
+typedef struct VkPhysicalDeviceInvocationMaskFeaturesHUAWEI {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           invocationMask;
+} VkPhysicalDeviceInvocationMaskFeaturesHUAWEI;
+
+typedef void (VKAPI_PTR *PFN_vkCmdBindInvocationMaskHUAWEI)(VkCommandBuffer commandBuffer, VkImageView imageView, VkImageLayout imageLayout);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdBindInvocationMaskHUAWEI(
+    VkCommandBuffer                             commandBuffer,
+    VkImageView                                 imageView,
+    VkImageLayout                               imageLayout);
+#endif
+
+
+#define VK_NV_external_memory_rdma 1
+typedef void* VkRemoteAddressNV;
+#define VK_NV_EXTERNAL_MEMORY_RDMA_SPEC_VERSION 1
+#define VK_NV_EXTERNAL_MEMORY_RDMA_EXTENSION_NAME "VK_NV_external_memory_rdma"
+typedef struct VkMemoryGetRemoteAddressInfoNV {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkDeviceMemory                        memory;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkMemoryGetRemoteAddressInfoNV;
+
+typedef struct VkPhysicalDeviceExternalMemoryRDMAFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           externalMemoryRDMA;
+} VkPhysicalDeviceExternalMemoryRDMAFeaturesNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryRemoteAddressNV)(VkDevice device, const VkMemoryGetRemoteAddressInfoNV* pMemoryGetRemoteAddressInfo, VkRemoteAddressNV* pAddress);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryRemoteAddressNV(
+    VkDevice                                    device,
+    const VkMemoryGetRemoteAddressInfoNV*       pMemoryGetRemoteAddressInfo,
+    VkRemoteAddressNV*                          pAddress);
+#endif
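+/*
+    Illustrative usage sketch (assumes `memory` was allocated with the
+    VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV handle type exported):
+    obtain the remote address to hand to the RDMA stack.
+
+        VkMemoryGetRemoteAddressInfoNV getInfo = {
+            VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV, NULL,
+            memory, VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV };
+        VkRemoteAddressNV remoteAddress = NULL;
+        vkGetMemoryRemoteAddressNV(device, &getInfo, &remoteAddress);
+*/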
+
+
+#define VK_EXT_pipeline_properties 1
+#define VK_EXT_PIPELINE_PROPERTIES_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_PROPERTIES_EXTENSION_NAME "VK_EXT_pipeline_properties"
+typedef VkPipelineInfoKHR VkPipelineInfoEXT;
+
+typedef struct VkPipelinePropertiesIdentifierEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint8_t            pipelineIdentifier[VK_UUID_SIZE];
+} VkPipelinePropertiesIdentifierEXT;
+
+typedef struct VkPhysicalDevicePipelinePropertiesFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelinePropertiesIdentifier;
+} VkPhysicalDevicePipelinePropertiesFeaturesEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPipelinePropertiesEXT)(VkDevice device, const VkPipelineInfoEXT* pPipelineInfo, VkBaseOutStructure* pPipelineProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPipelinePropertiesEXT(
+    VkDevice                                    device,
+    const VkPipelineInfoEXT*                    pPipelineInfo,
+    VkBaseOutStructure*                         pPipelineProperties);
+#endif
+
+
+#define VK_EXT_multisampled_render_to_single_sampled 1
+#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_SPEC_VERSION 1
+#define VK_EXT_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_EXTENSION_NAME "VK_EXT_multisampled_render_to_single_sampled"
+typedef struct VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           multisampledRenderToSingleSampled;
+} VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+
+typedef struct VkSubpassResolvePerformanceQueryEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           optimal;
+} VkSubpassResolvePerformanceQueryEXT;
+
+typedef struct VkMultisampledRenderToSingleSampledInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkBool32                 multisampledRenderToSingleSampledEnable;
+    VkSampleCountFlagBits    rasterizationSamples;
+} VkMultisampledRenderToSingleSampledInfoEXT;
+
+
+
+#define VK_EXT_extended_dynamic_state2 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_SPEC_VERSION 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_2_EXTENSION_NAME "VK_EXT_extended_dynamic_state2"
+typedef struct VkPhysicalDeviceExtendedDynamicState2FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           extendedDynamicState2;
+    VkBool32           extendedDynamicState2LogicOp;
+    VkBool32           extendedDynamicState2PatchControlPoints;
+} VkPhysicalDeviceExtendedDynamicState2FeaturesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetPatchControlPointsEXT)(VkCommandBuffer commandBuffer, uint32_t patchControlPoints);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizerDiscardEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthBiasEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEXT)(VkCommandBuffer commandBuffer, VkLogicOp logicOp);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPrimitiveRestartEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPatchControlPointsEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    patchControlPoints);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizerDiscardEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    rasterizerDiscardEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthBiasEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthBiasEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkLogicOp                                   logicOp);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPrimitiveRestartEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    primitiveRestartEnable);
+#endif
+
+
+#define VK_EXT_color_write_enable 1
+#define VK_EXT_COLOR_WRITE_ENABLE_SPEC_VERSION 1
+#define VK_EXT_COLOR_WRITE_ENABLE_EXTENSION_NAME "VK_EXT_color_write_enable"
+typedef struct VkPhysicalDeviceColorWriteEnableFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           colorWriteEnable;
+} VkPhysicalDeviceColorWriteEnableFeaturesEXT;
+
+typedef struct VkPipelineColorWriteCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           attachmentCount;
+    const VkBool32*    pColorWriteEnables;
+} VkPipelineColorWriteCreateInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteEnableEXT)(VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    attachmentCount,
+    const VkBool32*                             pColorWriteEnables);
+#endif
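+/*
+    Illustrative usage sketch (assumes a pipeline created with
+    VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT, two color attachments, and `cmd`
+    in the recording state): disable writes to the second attachment for
+    subsequent draws.
+
+        VkBool32 enables[2] = { VK_TRUE, VK_FALSE };
+        vkCmdSetColorWriteEnableEXT(cmd, 2, enables);
+*/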
+
+
+#define VK_EXT_primitives_generated_query 1
+#define VK_EXT_PRIMITIVES_GENERATED_QUERY_SPEC_VERSION 1
+#define VK_EXT_PRIMITIVES_GENERATED_QUERY_EXTENSION_NAME "VK_EXT_primitives_generated_query"
+typedef struct VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           primitivesGeneratedQuery;
+    VkBool32           primitivesGeneratedQueryWithRasterizerDiscard;
+    VkBool32           primitivesGeneratedQueryWithNonZeroStreams;
+} VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+
+
+#define VK_EXT_global_priority_query 1
+#define VK_EXT_GLOBAL_PRIORITY_QUERY_SPEC_VERSION 1
+#define VK_EXT_GLOBAL_PRIORITY_QUERY_EXTENSION_NAME "VK_EXT_global_priority_query"
+#define VK_MAX_GLOBAL_PRIORITY_SIZE_EXT   VK_MAX_GLOBAL_PRIORITY_SIZE_KHR
+typedef VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR VkPhysicalDeviceGlobalPriorityQueryFeaturesEXT;
+
+typedef VkQueueFamilyGlobalPriorityPropertiesKHR VkQueueFamilyGlobalPriorityPropertiesEXT;
+
+
+
+#define VK_EXT_image_view_min_lod 1
+#define VK_EXT_IMAGE_VIEW_MIN_LOD_SPEC_VERSION 1
+#define VK_EXT_IMAGE_VIEW_MIN_LOD_EXTENSION_NAME "VK_EXT_image_view_min_lod"
+typedef struct VkPhysicalDeviceImageViewMinLodFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           minLod;
+} VkPhysicalDeviceImageViewMinLodFeaturesEXT;
+
+typedef struct VkImageViewMinLodCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    float              minLod;
+} VkImageViewMinLodCreateInfoEXT;
+
+
+
+#define VK_EXT_multi_draw 1
+#define VK_EXT_MULTI_DRAW_SPEC_VERSION    1
+#define VK_EXT_MULTI_DRAW_EXTENSION_NAME  "VK_EXT_multi_draw"
+typedef struct VkPhysicalDeviceMultiDrawFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           multiDraw;
+} VkPhysicalDeviceMultiDrawFeaturesEXT;
+
+typedef struct VkPhysicalDeviceMultiDrawPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxMultiDrawCount;
+} VkPhysicalDeviceMultiDrawPropertiesEXT;
+
+typedef struct VkMultiDrawInfoEXT {
+    uint32_t    firstVertex;
+    uint32_t    vertexCount;
+} VkMultiDrawInfoEXT;
+
+typedef struct VkMultiDrawIndexedInfoEXT {
+    uint32_t    firstIndex;
+    uint32_t    indexCount;
+    int32_t     vertexOffset;
+} VkMultiDrawIndexedInfoEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMultiIndexedEXT)(VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t* pVertexOffset);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    drawCount,
+    const VkMultiDrawInfoEXT*                   pVertexInfo,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMultiIndexedEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    drawCount,
+    const VkMultiDrawIndexedInfoEXT*            pIndexInfo,
+    uint32_t                                    instanceCount,
+    uint32_t                                    firstInstance,
+    uint32_t                                    stride,
+    const int32_t*                              pVertexOffset);
+#endif
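+/*
+    Illustrative usage sketch (assumes `cmd` is recording with a graphics
+    pipeline bound): the stride parameter is the byte distance between
+    consecutive VkMultiDrawInfoEXT records, so a tightly packed array simply
+    passes sizeof(VkMultiDrawInfoEXT).
+
+        VkMultiDrawInfoEXT draws[2] = { { 0, 3 }, { 3, 6 } };  // firstVertex, vertexCount
+        vkCmdDrawMultiEXT(cmd, 2, draws, 1, 0, sizeof(VkMultiDrawInfoEXT));
+*/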
+
+
+#define VK_EXT_image_2d_view_of_3d 1
+#define VK_EXT_IMAGE_2D_VIEW_OF_3D_SPEC_VERSION 1
+#define VK_EXT_IMAGE_2D_VIEW_OF_3D_EXTENSION_NAME "VK_EXT_image_2d_view_of_3d"
+typedef struct VkPhysicalDeviceImage2DViewOf3DFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           image2DViewOf3D;
+    VkBool32           sampler2DViewOf3D;
+} VkPhysicalDeviceImage2DViewOf3DFeaturesEXT;
+
+
+
+#define VK_EXT_opacity_micromap 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkMicromapEXT)
+#define VK_EXT_OPACITY_MICROMAP_SPEC_VERSION 2
+#define VK_EXT_OPACITY_MICROMAP_EXTENSION_NAME "VK_EXT_opacity_micromap"
+
+typedef enum VkMicromapTypeEXT {
+    VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT = 0,
+    VK_MICROMAP_TYPE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkMicromapTypeEXT;
+
+typedef enum VkBuildMicromapModeEXT {
+    VK_BUILD_MICROMAP_MODE_BUILD_EXT = 0,
+    VK_BUILD_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkBuildMicromapModeEXT;
+
+typedef enum VkCopyMicromapModeEXT {
+    VK_COPY_MICROMAP_MODE_CLONE_EXT = 0,
+    VK_COPY_MICROMAP_MODE_SERIALIZE_EXT = 1,
+    VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT = 2,
+    VK_COPY_MICROMAP_MODE_COMPACT_EXT = 3,
+    VK_COPY_MICROMAP_MODE_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkCopyMicromapModeEXT;
+
+typedef enum VkOpacityMicromapFormatEXT {
+    VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT = 1,
+    VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT = 2,
+    VK_OPACITY_MICROMAP_FORMAT_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkOpacityMicromapFormatEXT;
+
+typedef enum VkOpacityMicromapSpecialIndexEXT {
+    VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT = -1,
+    VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT = -2,
+    VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT = -3,
+    VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT = -4,
+    VK_OPACITY_MICROMAP_SPECIAL_INDEX_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkOpacityMicromapSpecialIndexEXT;
+
+typedef enum VkAccelerationStructureCompatibilityKHR {
+    VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR = 0,
+    VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR = 1,
+    VK_ACCELERATION_STRUCTURE_COMPATIBILITY_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureCompatibilityKHR;
+
+typedef enum VkAccelerationStructureBuildTypeKHR {
+    VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR = 0,
+    VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR = 1,
+    VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR = 2,
+    VK_ACCELERATION_STRUCTURE_BUILD_TYPE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureBuildTypeKHR;
+
+typedef enum VkBuildMicromapFlagBitsEXT {
+    VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT = 0x00000001,
+    VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT = 0x00000002,
+    VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT = 0x00000004,
+    VK_BUILD_MICROMAP_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkBuildMicromapFlagBitsEXT;
+typedef VkFlags VkBuildMicromapFlagsEXT;
+
+typedef enum VkMicromapCreateFlagBitsEXT {
+    VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT = 0x00000001,
+    VK_MICROMAP_CREATE_FLAG_BITS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkMicromapCreateFlagBitsEXT;
+typedef VkFlags VkMicromapCreateFlagsEXT;
+typedef struct VkMicromapUsageEXT {
+    uint32_t    count;
+    uint32_t    subdivisionLevel;
+    uint32_t    format;
+} VkMicromapUsageEXT;
+
+typedef union VkDeviceOrHostAddressKHR {
+    VkDeviceAddress    deviceAddress;
+    void*              hostAddress;
+} VkDeviceOrHostAddressKHR;
+
+typedef struct VkMicromapBuildInfoEXT {
+    VkStructureType                     sType;
+    const void*                         pNext;
+    VkMicromapTypeEXT                   type;
+    VkBuildMicromapFlagsEXT             flags;
+    VkBuildMicromapModeEXT              mode;
+    VkMicromapEXT                       dstMicromap;
+    uint32_t                            usageCountsCount;
+    const VkMicromapUsageEXT*           pUsageCounts;
+    const VkMicromapUsageEXT* const*    ppUsageCounts;
+    VkDeviceOrHostAddressConstKHR       data;
+    VkDeviceOrHostAddressKHR            scratchData;
+    VkDeviceOrHostAddressConstKHR       triangleArray;
+    VkDeviceSize                        triangleArrayStride;
+} VkMicromapBuildInfoEXT;
+
+typedef struct VkMicromapCreateInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkMicromapCreateFlagsEXT    createFlags;
+    VkBuffer                    buffer;
+    VkDeviceSize                offset;
+    VkDeviceSize                size;
+    VkMicromapTypeEXT           type;
+    VkDeviceAddress             deviceAddress;
+} VkMicromapCreateInfoEXT;
+
+typedef struct VkPhysicalDeviceOpacityMicromapFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           micromap;
+    VkBool32           micromapCaptureReplay;
+    VkBool32           micromapHostCommands;
+} VkPhysicalDeviceOpacityMicromapFeaturesEXT;
+
+typedef struct VkPhysicalDeviceOpacityMicromapPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxOpacity2StateSubdivisionLevel;
+    uint32_t           maxOpacity4StateSubdivisionLevel;
+} VkPhysicalDeviceOpacityMicromapPropertiesEXT;
+
+typedef struct VkMicromapVersionInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    const uint8_t*     pVersionData;
+} VkMicromapVersionInfoEXT;
+
+typedef struct VkCopyMicromapToMemoryInfoEXT {
+    VkStructureType             sType;
+    const void*                 pNext;
+    VkMicromapEXT               src;
+    VkDeviceOrHostAddressKHR    dst;
+    VkCopyMicromapModeEXT       mode;
+} VkCopyMicromapToMemoryInfoEXT;
+
+typedef struct VkCopyMemoryToMicromapInfoEXT {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDeviceOrHostAddressConstKHR    src;
+    VkMicromapEXT                    dst;
+    VkCopyMicromapModeEXT            mode;
+} VkCopyMemoryToMicromapInfoEXT;
+
+typedef struct VkCopyMicromapInfoEXT {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkMicromapEXT            src;
+    VkMicromapEXT            dst;
+    VkCopyMicromapModeEXT    mode;
+} VkCopyMicromapInfoEXT;
+
+typedef struct VkMicromapBuildSizesInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceSize       micromapSize;
+    VkDeviceSize       buildScratchSize;
+    VkBool32           discardable;
+} VkMicromapBuildSizesInfoEXT;
+
+typedef struct VkAccelerationStructureTrianglesOpacityMicromapEXT {
+    VkStructureType                     sType;
+    void*                               pNext;
+    VkIndexType                         indexType;
+    VkDeviceOrHostAddressConstKHR       indexBuffer;
+    VkDeviceSize                        indexStride;
+    uint32_t                            baseTriangle;
+    uint32_t                            usageCountsCount;
+    const VkMicromapUsageEXT*           pUsageCounts;
+    const VkMicromapUsageEXT* const*    ppUsageCounts;
+    VkMicromapEXT                       micromap;
+} VkAccelerationStructureTrianglesOpacityMicromapEXT;
+
+typedef struct VkMicromapTriangleEXT {
+    uint32_t    dataOffset;
+    uint16_t    subdivisionLevel;
+    uint16_t    format;
+} VkMicromapTriangleEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateMicromapEXT)(VkDevice device, const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkMicromapEXT* pMicromap);
+typedef void (VKAPI_PTR *PFN_vkDestroyMicromapEXT)(VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildMicromapsEXT)(VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkBuildMicromapsEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMicromapToMemoryEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToMicromapEXT)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkWriteMicromapsPropertiesEXT)(VkDevice device, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, size_t dataSize, void* pData, size_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMicromapToMemoryEXT)(VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToMicromapEXT)(VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteMicromapsPropertiesEXT)(VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceMicromapCompatibilityEXT)(VkDevice device, const VkMicromapVersionInfoEXT* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility);
+typedef void (VKAPI_PTR *PFN_vkGetMicromapBuildSizesEXT)(VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, VkMicromapBuildSizesInfoEXT* pSizeInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateMicromapEXT(
+    VkDevice                                    device,
+    const VkMicromapCreateInfoEXT*              pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkMicromapEXT*                              pMicromap);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyMicromapEXT(
+    VkDevice                                    device,
+    VkMicromapEXT                               micromap,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildMicromapsEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    infoCount,
+    const VkMicromapBuildInfoEXT*               pInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBuildMicromapsEXT(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    uint32_t                                    infoCount,
+    const VkMicromapBuildInfoEXT*               pInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapEXT(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    const VkCopyMicromapInfoEXT*                pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMicromapToMemoryEXT(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    const VkCopyMicromapToMemoryInfoEXT*        pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToMicromapEXT(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    const VkCopyMemoryToMicromapInfoEXT*        pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWriteMicromapsPropertiesEXT(
+    VkDevice                                    device,
+    uint32_t                                    micromapCount,
+    const VkMicromapEXT*                        pMicromaps,
+    VkQueryType                                 queryType,
+    size_t                                      dataSize,
+    void*                                       pData,
+    size_t                                      stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyMicromapInfoEXT*                pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMicromapToMemoryEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyMicromapToMemoryInfoEXT*        pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToMicromapEXT(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyMemoryToMicromapInfoEXT*        pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteMicromapsPropertiesEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    micromapCount,
+    const VkMicromapEXT*                        pMicromaps,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceMicromapCompatibilityEXT(
+    VkDevice                                    device,
+    const VkMicromapVersionInfoEXT*             pVersionInfo,
+    VkAccelerationStructureCompatibilityKHR*    pCompatibility);
+
+VKAPI_ATTR void VKAPI_CALL vkGetMicromapBuildSizesEXT(
+    VkDevice                                    device,
+    VkAccelerationStructureBuildTypeKHR         buildType,
+    const VkMicromapBuildInfoEXT*               pBuildInfo,
+    VkMicromapBuildSizesInfoEXT*                pSizeInfo);
+#endif
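+/*
+    Illustrative usage sketch (assumes the caller provides the usage counts,
+    a buffer sized from the returned build sizes, scratch/input device
+    addresses, and a recording command buffer `cmd`): size, create and build
+    an opacity micromap.
+
+        VkMicromapBuildInfoEXT build = { VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT };
+        build.type             = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT;
+        build.mode             = VK_BUILD_MICROMAP_MODE_BUILD_EXT;
+        build.usageCountsCount = usageCount;
+        build.pUsageCounts     = pUsageCounts;
+
+        VkMicromapBuildSizesInfoEXT sizes = { VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT };
+        vkGetMicromapBuildSizesEXT(device, VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR,
+                                   &build, &sizes);
+
+        VkMicromapCreateInfoEXT createInfo = { VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT };
+        createInfo.buffer = micromapBuffer;      // at least sizes.micromapSize bytes
+        createInfo.size   = sizes.micromapSize;
+        createInfo.type   = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT;
+        vkCreateMicromapEXT(device, &createInfo, NULL, &build.dstMicromap);
+
+        build.scratchData.deviceAddress   = scratchAddress;      // sizes.buildScratchSize bytes
+        build.data.deviceAddress          = opacityDataAddress;
+        build.triangleArray.deviceAddress = triangleArrayAddress;
+        build.triangleArrayStride         = sizeof(VkMicromapTriangleEXT);
+        vkCmdBuildMicromapsEXT(cmd, 1, &build);
+*/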
+
+
+#define VK_EXT_load_store_op_none 1
+#define VK_EXT_LOAD_STORE_OP_NONE_SPEC_VERSION 1
+#define VK_EXT_LOAD_STORE_OP_NONE_EXTENSION_NAME "VK_EXT_load_store_op_none"
+
+
+#define VK_EXT_border_color_swizzle 1
+#define VK_EXT_BORDER_COLOR_SWIZZLE_SPEC_VERSION 1
+#define VK_EXT_BORDER_COLOR_SWIZZLE_EXTENSION_NAME "VK_EXT_border_color_swizzle"
+typedef struct VkPhysicalDeviceBorderColorSwizzleFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           borderColorSwizzle;
+    VkBool32           borderColorSwizzleFromImage;
+} VkPhysicalDeviceBorderColorSwizzleFeaturesEXT;
+
+typedef struct VkSamplerBorderColorComponentMappingCreateInfoEXT {
+    VkStructureType       sType;
+    const void*           pNext;
+    VkComponentMapping    components;
+    VkBool32              srgb;
+} VkSamplerBorderColorComponentMappingCreateInfoEXT;
+
+
+
+#define VK_EXT_pageable_device_local_memory 1
+#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_SPEC_VERSION 1
+#define VK_EXT_PAGEABLE_DEVICE_LOCAL_MEMORY_EXTENSION_NAME "VK_EXT_pageable_device_local_memory"
+typedef struct VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pageableDeviceLocalMemory;
+} VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+
+typedef void (VKAPI_PTR *PFN_vkSetDeviceMemoryPriorityEXT)(VkDevice device, VkDeviceMemory memory, float priority);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkSetDeviceMemoryPriorityEXT(
+    VkDevice                                    device,
+    VkDeviceMemory                              memory,
+    float                                       priority);
+#endif
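+/*
+    Illustrative usage sketch (assumes `memory` is a live allocation): lower
+    the residency priority of rarely used memory so the driver pages it out
+    first; priorities range from 0.0 (lowest) to 1.0 (highest).
+
+        vkSetDeviceMemoryPriorityEXT(device, memory, 0.0f);
+*/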
+
+
+#define VK_VALVE_descriptor_set_host_mapping 1
+#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_SPEC_VERSION 1
+#define VK_VALVE_DESCRIPTOR_SET_HOST_MAPPING_EXTENSION_NAME "VK_VALVE_descriptor_set_host_mapping"
+typedef struct VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           descriptorSetHostMapping;
+} VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+
+typedef struct VkDescriptorSetBindingReferenceVALVE {
+    VkStructureType          sType;
+    const void*              pNext;
+    VkDescriptorSetLayout    descriptorSetLayout;
+    uint32_t                 binding;
+} VkDescriptorSetBindingReferenceVALVE;
+
+typedef struct VkDescriptorSetLayoutHostMappingInfoVALVE {
+    VkStructureType    sType;
+    void*              pNext;
+    size_t             descriptorOffset;
+    uint32_t           descriptorSize;
+} VkDescriptorSetLayoutHostMappingInfoVALVE;
+
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE)(VkDevice device, const VkDescriptorSetBindingReferenceVALVE* pBindingReference, VkDescriptorSetLayoutHostMappingInfoVALVE* pHostMapping);
+typedef void (VKAPI_PTR *PFN_vkGetDescriptorSetHostMappingVALVE)(VkDevice device, VkDescriptorSet descriptorSet, void** ppData);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetLayoutHostMappingInfoVALVE(
+    VkDevice                                    device,
+    const VkDescriptorSetBindingReferenceVALVE* pBindingReference,
+    VkDescriptorSetLayoutHostMappingInfoVALVE*  pHostMapping);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDescriptorSetHostMappingVALVE(
+    VkDevice                                    device,
+    VkDescriptorSet                             descriptorSet,
+    void**                                      ppData);
+#endif
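+/*
+    Illustrative usage sketch (assumes `layout`, `binding` and `descriptorSet`
+    come from the caller): resolve where a binding lives inside the set's
+    host mapping and compute a pointer to its first descriptor.
+
+        VkDescriptorSetBindingReferenceVALVE ref = {
+            VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE, NULL, layout, binding };
+        VkDescriptorSetLayoutHostMappingInfoVALVE mapping = {
+            VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE };
+        vkGetDescriptorSetLayoutHostMappingInfoVALVE(device, &ref, &mapping);
+
+        void* base = NULL;
+        vkGetDescriptorSetHostMappingVALVE(device, descriptorSet, &base);
+        void* descriptor = (char*)base + mapping.descriptorOffset;  // mapping.descriptorSize bytes each
+*/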
+
+
+#define VK_EXT_depth_clamp_zero_one 1
+#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_SPEC_VERSION 1
+#define VK_EXT_DEPTH_CLAMP_ZERO_ONE_EXTENSION_NAME "VK_EXT_depth_clamp_zero_one"
+typedef struct VkPhysicalDeviceDepthClampZeroOneFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           depthClampZeroOne;
+} VkPhysicalDeviceDepthClampZeroOneFeaturesEXT;
+
+
+
+#define VK_EXT_non_seamless_cube_map 1
+#define VK_EXT_NON_SEAMLESS_CUBE_MAP_SPEC_VERSION 1
+#define VK_EXT_NON_SEAMLESS_CUBE_MAP_EXTENSION_NAME "VK_EXT_non_seamless_cube_map"
+typedef struct VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           nonSeamlessCubeMap;
+} VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+
+
+
+#define VK_QCOM_fragment_density_map_offset 1
+#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_SPEC_VERSION 1
+#define VK_QCOM_FRAGMENT_DENSITY_MAP_OFFSET_EXTENSION_NAME "VK_QCOM_fragment_density_map_offset"
+typedef struct VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           fragmentDensityMapOffset;
+} VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
+
+typedef struct VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent2D         fragmentDensityOffsetGranularity;
+} VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
+
+typedef struct VkSubpassFragmentDensityMapOffsetEndInfoQCOM {
+    VkStructureType      sType;
+    const void*          pNext;
+    uint32_t             fragmentDensityOffsetCount;
+    const VkOffset2D*    pFragmentDensityOffsets;
+} VkSubpassFragmentDensityMapOffsetEndInfoQCOM;
+
+
+
+#define VK_NV_copy_memory_indirect 1
+#define VK_NV_COPY_MEMORY_INDIRECT_SPEC_VERSION 1
+#define VK_NV_COPY_MEMORY_INDIRECT_EXTENSION_NAME "VK_NV_copy_memory_indirect"
+typedef struct VkCopyMemoryIndirectCommandNV {
+    VkDeviceAddress    srcAddress;
+    VkDeviceAddress    dstAddress;
+    VkDeviceSize       size;
+} VkCopyMemoryIndirectCommandNV;
+
+typedef struct VkCopyMemoryToImageIndirectCommandNV {
+    VkDeviceAddress             srcAddress;
+    uint32_t                    bufferRowLength;
+    uint32_t                    bufferImageHeight;
+    VkImageSubresourceLayers    imageSubresource;
+    VkOffset3D                  imageOffset;
+    VkExtent3D                  imageExtent;
+} VkCopyMemoryToImageIndirectCommandNV;
+
+typedef struct VkPhysicalDeviceCopyMemoryIndirectFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           indirectCopy;
+} VkPhysicalDeviceCopyMemoryIndirectFeaturesNV;
+
+typedef struct VkPhysicalDeviceCopyMemoryIndirectPropertiesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkQueueFlags       supportedQueues;
+} VkPhysicalDeviceCopyMemoryIndirectPropertiesNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryIndirectNV)(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToImageIndirectNV)(VkCommandBuffer commandBuffer, VkDeviceAddress copyBufferAddress, uint32_t copyCount, uint32_t stride, VkImage dstImage, VkImageLayout dstImageLayout, const VkImageSubresourceLayers* pImageSubresources);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkDeviceAddress                             copyBufferAddress,
+    uint32_t                                    copyCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToImageIndirectNV(
+    VkCommandBuffer                             commandBuffer,
+    VkDeviceAddress                             copyBufferAddress,
+    uint32_t                                    copyCount,
+    uint32_t                                    stride,
+    VkImage                                     dstImage,
+    VkImageLayout                               dstImageLayout,
+    const VkImageSubresourceLayers*             pImageSubresources);
+#endif
+
+
+#define VK_NV_memory_decompression 1
+#define VK_NV_MEMORY_DECOMPRESSION_SPEC_VERSION 1
+#define VK_NV_MEMORY_DECOMPRESSION_EXTENSION_NAME "VK_NV_memory_decompression"
+
+// Flag bits for VkMemoryDecompressionMethodFlagBitsNV
+typedef VkFlags64 VkMemoryDecompressionMethodFlagBitsNV;
+static const VkMemoryDecompressionMethodFlagBitsNV VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV = 0x00000001ULL;
+
+typedef VkFlags64 VkMemoryDecompressionMethodFlagsNV;
+typedef struct VkDecompressMemoryRegionNV {
+    VkDeviceAddress                       srcAddress;
+    VkDeviceAddress                       dstAddress;
+    VkDeviceSize                          compressedSize;
+    VkDeviceSize                          decompressedSize;
+    VkMemoryDecompressionMethodFlagsNV    decompressionMethod;
+} VkDecompressMemoryRegionNV;
+
+typedef struct VkPhysicalDeviceMemoryDecompressionFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           memoryDecompression;
+} VkPhysicalDeviceMemoryDecompressionFeaturesNV;
+
+typedef struct VkPhysicalDeviceMemoryDecompressionPropertiesNV {
+    VkStructureType                       sType;
+    void*                                 pNext;
+    VkMemoryDecompressionMethodFlagsNV    decompressionMethods;
+    uint64_t                              maxDecompressionIndirectCount;
+} VkPhysicalDeviceMemoryDecompressionPropertiesNV;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDecompressMemoryNV)(VkCommandBuffer commandBuffer, uint32_t decompressRegionCount, const VkDecompressMemoryRegionNV* pDecompressMemoryRegions);
+typedef void (VKAPI_PTR *PFN_vkCmdDecompressMemoryIndirectCountNV)(VkCommandBuffer commandBuffer, VkDeviceAddress indirectCommandsAddress, VkDeviceAddress indirectCommandsCountAddress, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    decompressRegionCount,
+    const VkDecompressMemoryRegionNV*           pDecompressMemoryRegions);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDecompressMemoryIndirectCountNV(
+    VkCommandBuffer                             commandBuffer,
+    VkDeviceAddress                             indirectCommandsAddress,
+    VkDeviceAddress                             indirectCommandsCountAddress,
+    uint32_t                                    stride);
+#endif
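+/*
+    Illustrative usage sketch (assumes the source/destination device addresses
+    and sizes come from the caller and `cmd` is recording): record a GDeflate
+    decompression of a single region.
+
+        VkDecompressMemoryRegionNV region;
+        region.srcAddress          = compressedAddress;
+        region.dstAddress          = decompressedAddress;
+        region.compressedSize      = compressedSize;
+        region.decompressedSize    = decompressedSize;
+        region.decompressionMethod = VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV;
+        vkCmdDecompressMemoryNV(cmd, 1, &region);
+*/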
+
+
+#define VK_NV_linear_color_attachment 1
+#define VK_NV_LINEAR_COLOR_ATTACHMENT_SPEC_VERSION 1
+#define VK_NV_LINEAR_COLOR_ATTACHMENT_EXTENSION_NAME "VK_NV_linear_color_attachment"
+typedef struct VkPhysicalDeviceLinearColorAttachmentFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           linearColorAttachment;
+} VkPhysicalDeviceLinearColorAttachmentFeaturesNV;
+
+
+
+#define VK_GOOGLE_surfaceless_query 1
+#define VK_GOOGLE_SURFACELESS_QUERY_SPEC_VERSION 2
+#define VK_GOOGLE_SURFACELESS_QUERY_EXTENSION_NAME "VK_GOOGLE_surfaceless_query"
+
+
+#define VK_EXT_image_compression_control_swapchain 1
+#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_SPEC_VERSION 1
+#define VK_EXT_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_EXTENSION_NAME "VK_EXT_image_compression_control_swapchain"
+typedef struct VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           imageCompressionControlSwapchain;
+} VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+
+
+
+#define VK_QCOM_image_processing 1
+#define VK_QCOM_IMAGE_PROCESSING_SPEC_VERSION 1
+#define VK_QCOM_IMAGE_PROCESSING_EXTENSION_NAME "VK_QCOM_image_processing"
+typedef struct VkImageViewSampleWeightCreateInfoQCOM {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkOffset2D         filterCenter;
+    VkExtent2D         filterSize;
+    uint32_t           numPhases;
+} VkImageViewSampleWeightCreateInfoQCOM;
+
+typedef struct VkPhysicalDeviceImageProcessingFeaturesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           textureSampleWeighted;
+    VkBool32           textureBoxFilter;
+    VkBool32           textureBlockMatch;
+} VkPhysicalDeviceImageProcessingFeaturesQCOM;
+
+typedef struct VkPhysicalDeviceImageProcessingPropertiesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxWeightFilterPhases;
+    VkExtent2D         maxWeightFilterDimension;
+    VkExtent2D         maxBlockMatchRegion;
+    VkExtent2D         maxBoxFilterBlockSize;
+} VkPhysicalDeviceImageProcessingPropertiesQCOM;
+
+
+
+#define VK_EXT_extended_dynamic_state3 1
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_SPEC_VERSION 2
+#define VK_EXT_EXTENDED_DYNAMIC_STATE_3_EXTENSION_NAME "VK_EXT_extended_dynamic_state3"
+typedef struct VkPhysicalDeviceExtendedDynamicState3FeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           extendedDynamicState3TessellationDomainOrigin;
+    VkBool32           extendedDynamicState3DepthClampEnable;
+    VkBool32           extendedDynamicState3PolygonMode;
+    VkBool32           extendedDynamicState3RasterizationSamples;
+    VkBool32           extendedDynamicState3SampleMask;
+    VkBool32           extendedDynamicState3AlphaToCoverageEnable;
+    VkBool32           extendedDynamicState3AlphaToOneEnable;
+    VkBool32           extendedDynamicState3LogicOpEnable;
+    VkBool32           extendedDynamicState3ColorBlendEnable;
+    VkBool32           extendedDynamicState3ColorBlendEquation;
+    VkBool32           extendedDynamicState3ColorWriteMask;
+    VkBool32           extendedDynamicState3RasterizationStream;
+    VkBool32           extendedDynamicState3ConservativeRasterizationMode;
+    VkBool32           extendedDynamicState3ExtraPrimitiveOverestimationSize;
+    VkBool32           extendedDynamicState3DepthClipEnable;
+    VkBool32           extendedDynamicState3SampleLocationsEnable;
+    VkBool32           extendedDynamicState3ColorBlendAdvanced;
+    VkBool32           extendedDynamicState3ProvokingVertexMode;
+    VkBool32           extendedDynamicState3LineRasterizationMode;
+    VkBool32           extendedDynamicState3LineStippleEnable;
+    VkBool32           extendedDynamicState3DepthClipNegativeOneToOne;
+    VkBool32           extendedDynamicState3ViewportWScalingEnable;
+    VkBool32           extendedDynamicState3ViewportSwizzle;
+    VkBool32           extendedDynamicState3CoverageToColorEnable;
+    VkBool32           extendedDynamicState3CoverageToColorLocation;
+    VkBool32           extendedDynamicState3CoverageModulationMode;
+    VkBool32           extendedDynamicState3CoverageModulationTableEnable;
+    VkBool32           extendedDynamicState3CoverageModulationTable;
+    VkBool32           extendedDynamicState3CoverageReductionMode;
+    VkBool32           extendedDynamicState3RepresentativeFragmentTestEnable;
+    VkBool32           extendedDynamicState3ShadingRateImageEnable;
+} VkPhysicalDeviceExtendedDynamicState3FeaturesEXT;
+
+typedef struct VkPhysicalDeviceExtendedDynamicState3PropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           dynamicPrimitiveTopologyUnrestricted;
+} VkPhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+typedef struct VkColorBlendEquationEXT {
+    VkBlendFactor    srcColorBlendFactor;
+    VkBlendFactor    dstColorBlendFactor;
+    VkBlendOp        colorBlendOp;
+    VkBlendFactor    srcAlphaBlendFactor;
+    VkBlendFactor    dstAlphaBlendFactor;
+    VkBlendOp        alphaBlendOp;
+} VkColorBlendEquationEXT;
+
+typedef struct VkColorBlendAdvancedEXT {
+    VkBlendOp            advancedBlendOp;
+    VkBool32             srcPremultiplied;
+    VkBool32             dstPremultiplied;
+    VkBlendOverlapEXT    blendOverlap;
+    VkBool32             clampResults;
+} VkColorBlendAdvancedEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdSetTessellationDomainOriginEXT)(VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClampEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClampEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetPolygonModeEXT)(VkCommandBuffer commandBuffer, VkPolygonMode polygonMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationSamplesEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples);
+typedef void (VKAPI_PTR *PFN_vkCmdSetSampleMaskEXT)(VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask* pSampleMask);
+typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToCoverageEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetAlphaToOneEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLogicOpEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 logicOpEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEnableEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32* pColorBlendEnables);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendEquationEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT* pColorBlendEquations);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorWriteMaskEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags* pColorWriteMasks);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRasterizationStreamEXT)(VkCommandBuffer commandBuffer, uint32_t rasterizationStream);
+typedef void (VKAPI_PTR *PFN_vkCmdSetConservativeRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT)(VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 depthClipEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetSampleLocationsEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetColorBlendAdvancedEXT)(VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT* pColorBlendAdvanced);
+typedef void (VKAPI_PTR *PFN_vkCmdSetProvokingVertexModeEXT)(VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineRasterizationModeEXT)(VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetLineStippleEnableEXT)(VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetDepthClipNegativeOneToOneEXT)(VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportWScalingEnableNV)(VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetViewportSwizzleNV)(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV* pViewportSwizzles);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageToColorLocationNV)(VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationModeNV)(VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableEnableNV)(VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageModulationTableNV)(VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float* pCoverageModulationTable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetShadingRateImageEnableNV)(VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRepresentativeFragmentTestEnableNV)(VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable);
+typedef void (VKAPI_PTR *PFN_vkCmdSetCoverageReductionModeNV)(VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdSetTessellationDomainOriginEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkTessellationDomainOrigin                  domainOrigin);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClampEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthClampEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetPolygonModeEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkPolygonMode                               polygonMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationSamplesEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkSampleCountFlagBits                       rasterizationSamples);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleMaskEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkSampleCountFlagBits                       samples,
+    const VkSampleMask*                         pSampleMask);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToCoverageEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    alphaToCoverageEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetAlphaToOneEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    alphaToOneEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLogicOpEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    logicOpEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstAttachment,
+    uint32_t                                    attachmentCount,
+    const VkBool32*                             pColorBlendEnables);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendEquationEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstAttachment,
+    uint32_t                                    attachmentCount,
+    const VkColorBlendEquationEXT*              pColorBlendEquations);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorWriteMaskEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstAttachment,
+    uint32_t                                    attachmentCount,
+    const VkColorComponentFlags*                pColorWriteMasks);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRasterizationStreamEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    rasterizationStream);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetConservativeRasterizationModeEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkConservativeRasterizationModeEXT          conservativeRasterizationMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetExtraPrimitiveOverestimationSizeEXT(
+    VkCommandBuffer                             commandBuffer,
+    float                                       extraPrimitiveOverestimationSize);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    depthClipEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetSampleLocationsEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    sampleLocationsEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetColorBlendAdvancedEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstAttachment,
+    uint32_t                                    attachmentCount,
+    const VkColorBlendAdvancedEXT*              pColorBlendAdvanced);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetProvokingVertexModeEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkProvokingVertexModeEXT                    provokingVertexMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineRasterizationModeEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkLineRasterizationModeEXT                  lineRasterizationMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetLineStippleEnableEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    stippledLineEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetDepthClipNegativeOneToOneEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    negativeOneToOne);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportWScalingEnableNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    viewportWScalingEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetViewportSwizzleNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    firstViewport,
+    uint32_t                                    viewportCount,
+    const VkViewportSwizzleNV*                  pViewportSwizzles);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorEnableNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    coverageToColorEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageToColorLocationNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    coverageToColorLocation);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationModeNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoverageModulationModeNV                  coverageModulationMode);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableEnableNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    coverageModulationTableEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageModulationTableNV(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    coverageModulationTableCount,
+    const float*                                pCoverageModulationTable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetShadingRateImageEnableNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    shadingRateImageEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRepresentativeFragmentTestEnableNV(
+    VkCommandBuffer                             commandBuffer,
+    VkBool32                                    representativeFragmentTestEnable);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetCoverageReductionModeNV(
+    VkCommandBuffer                             commandBuffer,
+    VkCoverageReductionModeNV                   coverageReductionMode);
+#endif
+
+
+#define VK_EXT_subpass_merge_feedback 1
+#define VK_EXT_SUBPASS_MERGE_FEEDBACK_SPEC_VERSION 2
+#define VK_EXT_SUBPASS_MERGE_FEEDBACK_EXTENSION_NAME "VK_EXT_subpass_merge_feedback"
+
+typedef enum VkSubpassMergeStatusEXT {
+    VK_SUBPASS_MERGE_STATUS_MERGED_EXT = 0,
+    VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT = 1,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT = 2,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT = 3,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT = 4,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT = 5,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT = 6,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT = 7,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT = 8,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT = 9,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT = 10,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT = 11,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT = 12,
+    VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT = 13,
+    VK_SUBPASS_MERGE_STATUS_MAX_ENUM_EXT = 0x7FFFFFFF
+} VkSubpassMergeStatusEXT;
+typedef struct VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           subpassMergeFeedback;
+} VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+
+typedef struct VkRenderPassCreationControlEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkBool32           disallowMerging;
+} VkRenderPassCreationControlEXT;
+
+typedef struct VkRenderPassCreationFeedbackInfoEXT {
+    uint32_t    postMergeSubpassCount;
+} VkRenderPassCreationFeedbackInfoEXT;
+
+typedef struct VkRenderPassCreationFeedbackCreateInfoEXT {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkRenderPassCreationFeedbackInfoEXT*    pRenderPassFeedback;
+} VkRenderPassCreationFeedbackCreateInfoEXT;
+
+typedef struct VkRenderPassSubpassFeedbackInfoEXT {
+    VkSubpassMergeStatusEXT    subpassMergeStatus;
+    char                       description[VK_MAX_DESCRIPTION_SIZE];
+    uint32_t                   postMergeIndex;
+} VkRenderPassSubpassFeedbackInfoEXT;
+
+typedef struct VkRenderPassSubpassFeedbackCreateInfoEXT {
+    VkStructureType                        sType;
+    const void*                            pNext;
+    VkRenderPassSubpassFeedbackInfoEXT*    pSubpassFeedback;
+} VkRenderPassSubpassFeedbackCreateInfoEXT;
+
+
+
+#define VK_LUNARG_direct_driver_loading 1
+#define VK_LUNARG_DIRECT_DRIVER_LOADING_SPEC_VERSION 1
+#define VK_LUNARG_DIRECT_DRIVER_LOADING_EXTENSION_NAME "VK_LUNARG_direct_driver_loading"
+
+typedef enum VkDirectDriverLoadingModeLUNARG {
+    VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG = 0,
+    VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG = 1,
+    VK_DIRECT_DRIVER_LOADING_MODE_MAX_ENUM_LUNARG = 0x7FFFFFFF
+} VkDirectDriverLoadingModeLUNARG;
+typedef VkFlags VkDirectDriverLoadingFlagsLUNARG;
+typedef PFN_vkVoidFunction (VKAPI_PTR *PFN_vkGetInstanceProcAddrLUNARG)(
+    VkInstance instance, const char* pName);
+
+typedef struct VkDirectDriverLoadingInfoLUNARG {
+    VkStructureType                     sType;
+    void*                               pNext;
+    VkDirectDriverLoadingFlagsLUNARG    flags;
+    PFN_vkGetInstanceProcAddrLUNARG     pfnGetInstanceProcAddr;
+} VkDirectDriverLoadingInfoLUNARG;
+
+typedef struct VkDirectDriverLoadingListLUNARG {
+    VkStructureType                           sType;
+    void*                                     pNext;
+    VkDirectDriverLoadingModeLUNARG           mode;
+    uint32_t                                  driverCount;
+    const VkDirectDriverLoadingInfoLUNARG*    pDrivers;
+} VkDirectDriverLoadingListLUNARG;
+
+
+
+#define VK_EXT_shader_module_identifier 1
+#define VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT 32U
+#define VK_EXT_SHADER_MODULE_IDENTIFIER_SPEC_VERSION 1
+#define VK_EXT_SHADER_MODULE_IDENTIFIER_EXTENSION_NAME "VK_EXT_shader_module_identifier"
+typedef struct VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderModuleIdentifier;
+} VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+
+typedef struct VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint8_t            shaderModuleIdentifierAlgorithmUUID[VK_UUID_SIZE];
+} VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+
+typedef struct VkPipelineShaderStageModuleIdentifierCreateInfoEXT {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           identifierSize;
+    const uint8_t*     pIdentifier;
+} VkPipelineShaderStageModuleIdentifierCreateInfoEXT;
+
+typedef struct VkShaderModuleIdentifierEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           identifierSize;
+    uint8_t            identifier[VK_MAX_SHADER_MODULE_IDENTIFIER_SIZE_EXT];
+} VkShaderModuleIdentifierEXT;
+
+typedef void (VKAPI_PTR *PFN_vkGetShaderModuleIdentifierEXT)(VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT* pIdentifier);
+typedef void (VKAPI_PTR *PFN_vkGetShaderModuleCreateInfoIdentifierEXT)(VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModuleIdentifierEXT* pIdentifier);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleIdentifierEXT(
+    VkDevice                                    device,
+    VkShaderModule                              shaderModule,
+    VkShaderModuleIdentifierEXT*                pIdentifier);
+
+VKAPI_ATTR void VKAPI_CALL vkGetShaderModuleCreateInfoIdentifierEXT(
+    VkDevice                                    device,
+    const VkShaderModuleCreateInfo*             pCreateInfo,
+    VkShaderModuleIdentifierEXT*                pIdentifier);
+#endif
+
+
+#define VK_EXT_rasterization_order_attachment_access 1
+#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_SPEC_VERSION 1
+#define VK_EXT_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_EXTENSION_NAME "VK_EXT_rasterization_order_attachment_access"
+
+
+#define VK_NV_optical_flow 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkOpticalFlowSessionNV)
+#define VK_NV_OPTICAL_FLOW_SPEC_VERSION   1
+#define VK_NV_OPTICAL_FLOW_EXTENSION_NAME "VK_NV_optical_flow"
+
+typedef enum VkOpticalFlowPerformanceLevelNV {
+    VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV = 0,
+    VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV = 1,
+    VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV = 2,
+    VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV = 3,
+    VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowPerformanceLevelNV;
+
+typedef enum VkOpticalFlowSessionBindingPointNV {
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV = 0,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV = 1,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV = 2,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV = 3,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV = 4,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV = 5,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV = 6,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV = 7,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV = 8,
+    VK_OPTICAL_FLOW_SESSION_BINDING_POINT_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowSessionBindingPointNV;
+
+typedef enum VkOpticalFlowGridSizeFlagBitsNV {
+    VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV = 0,
+    VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV = 0x00000001,
+    VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV = 0x00000002,
+    VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV = 0x00000004,
+    VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV = 0x00000008,
+    VK_OPTICAL_FLOW_GRID_SIZE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowGridSizeFlagBitsNV;
+typedef VkFlags   VkOpticalFlowGridSizeFlagsNV;
+
+typedef enum VkOpticalFlowUsageFlagBitsNV {
+    VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV = 0,
+    VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV = 0x00000001,
+    VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV = 0x00000002,
+    VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV = 0x00000004,
+    VK_OPTICAL_FLOW_USAGE_COST_BIT_NV = 0x00000008,
+    VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV = 0x00000010,
+    VK_OPTICAL_FLOW_USAGE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowUsageFlagBitsNV;
+typedef VkFlags   VkOpticalFlowUsageFlagsNV;
+
+typedef enum VkOpticalFlowSessionCreateFlagBitsNV {
+    VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV = 0x00000001,
+    VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV = 0x00000002,
+    VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV = 0x00000004,
+    VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV = 0x00000008,
+    VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV = 0x00000010,
+    VK_OPTICAL_FLOW_SESSION_CREATE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowSessionCreateFlagBitsNV;
+typedef VkFlags   VkOpticalFlowSessionCreateFlagsNV;
+
+typedef enum VkOpticalFlowExecuteFlagBitsNV {
+    VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV = 0x00000001,
+    VK_OPTICAL_FLOW_EXECUTE_FLAG_BITS_MAX_ENUM_NV = 0x7FFFFFFF
+} VkOpticalFlowExecuteFlagBitsNV;
+typedef VkFlags   VkOpticalFlowExecuteFlagsNV;
+typedef struct VkPhysicalDeviceOpticalFlowFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           opticalFlow;
+} VkPhysicalDeviceOpticalFlowFeaturesNV;
+
+typedef struct VkPhysicalDeviceOpticalFlowPropertiesNV {
+    VkStructureType                 sType;
+    void*                           pNext;
+    VkOpticalFlowGridSizeFlagsNV    supportedOutputGridSizes;
+    VkOpticalFlowGridSizeFlagsNV    supportedHintGridSizes;
+    VkBool32                        hintSupported;
+    VkBool32                        costSupported;
+    VkBool32                        bidirectionalFlowSupported;
+    VkBool32                        globalFlowSupported;
+    uint32_t                        minWidth;
+    uint32_t                        minHeight;
+    uint32_t                        maxWidth;
+    uint32_t                        maxHeight;
+    uint32_t                        maxNumRegionsOfInterest;
+} VkPhysicalDeviceOpticalFlowPropertiesNV;
+
+typedef struct VkOpticalFlowImageFormatInfoNV {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkOpticalFlowUsageFlagsNV    usage;
+} VkOpticalFlowImageFormatInfoNV;
+
+typedef struct VkOpticalFlowImageFormatPropertiesNV {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkFormat           format;
+} VkOpticalFlowImageFormatPropertiesNV;
+
+typedef struct VkOpticalFlowSessionCreateInfoNV {
+    VkStructureType                      sType;
+    void*                                pNext;
+    uint32_t                             width;
+    uint32_t                             height;
+    VkFormat                             imageFormat;
+    VkFormat                             flowVectorFormat;
+    VkFormat                             costFormat;
+    VkOpticalFlowGridSizeFlagsNV         outputGridSize;
+    VkOpticalFlowGridSizeFlagsNV         hintGridSize;
+    VkOpticalFlowPerformanceLevelNV      performanceLevel;
+    VkOpticalFlowSessionCreateFlagsNV    flags;
+} VkOpticalFlowSessionCreateInfoNV;
+
+typedef struct VkOpticalFlowSessionCreatePrivateDataInfoNV {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           id;
+    uint32_t           size;
+    const void*        pPrivateData;
+} VkOpticalFlowSessionCreatePrivateDataInfoNV;
+
+typedef struct VkOpticalFlowExecuteInfoNV {
+    VkStructureType                sType;
+    void*                          pNext;
+    VkOpticalFlowExecuteFlagsNV    flags;
+    uint32_t                       regionCount;
+    const VkRect2D*                pRegions;
+} VkOpticalFlowExecuteInfoNV;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV)(VkPhysicalDevice physicalDevice, const VkOpticalFlowImageFormatInfoNV* pOpticalFlowImageFormatInfo, uint32_t* pFormatCount, VkOpticalFlowImageFormatPropertiesNV* pImageFormatProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateOpticalFlowSessionNV)(VkDevice device, const VkOpticalFlowSessionCreateInfoNV* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkOpticalFlowSessionNV* pSession);
+typedef void (VKAPI_PTR *PFN_vkDestroyOpticalFlowSessionNV)(VkDevice device, VkOpticalFlowSessionNV session, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkBindOpticalFlowSessionImageNV)(VkDevice device, VkOpticalFlowSessionNV session, VkOpticalFlowSessionBindingPointNV bindingPoint, VkImageView view, VkImageLayout layout);
+typedef void (VKAPI_PTR *PFN_vkCmdOpticalFlowExecuteNV)(VkCommandBuffer commandBuffer, VkOpticalFlowSessionNV session, const VkOpticalFlowExecuteInfoNV* pExecuteInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetPhysicalDeviceOpticalFlowImageFormatsNV(
+    VkPhysicalDevice                            physicalDevice,
+    const VkOpticalFlowImageFormatInfoNV*       pOpticalFlowImageFormatInfo,
+    uint32_t*                                   pFormatCount,
+    VkOpticalFlowImageFormatPropertiesNV*       pImageFormatProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateOpticalFlowSessionNV(
+    VkDevice                                    device,
+    const VkOpticalFlowSessionCreateInfoNV*     pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkOpticalFlowSessionNV*                     pSession);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyOpticalFlowSessionNV(
+    VkDevice                                    device,
+    VkOpticalFlowSessionNV                      session,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBindOpticalFlowSessionImageNV(
+    VkDevice                                    device,
+    VkOpticalFlowSessionNV                      session,
+    VkOpticalFlowSessionBindingPointNV          bindingPoint,
+    VkImageView                                 view,
+    VkImageLayout                               layout);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdOpticalFlowExecuteNV(
+    VkCommandBuffer                             commandBuffer,
+    VkOpticalFlowSessionNV                      session,
+    const VkOpticalFlowExecuteInfoNV*           pExecuteInfo);
+#endif
+
+
+#define VK_EXT_legacy_dithering 1
+#define VK_EXT_LEGACY_DITHERING_SPEC_VERSION 1
+#define VK_EXT_LEGACY_DITHERING_EXTENSION_NAME "VK_EXT_legacy_dithering"
+typedef struct VkPhysicalDeviceLegacyDitheringFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           legacyDithering;
+} VkPhysicalDeviceLegacyDitheringFeaturesEXT;
+
+
+
+#define VK_EXT_pipeline_protected_access 1
+#define VK_EXT_PIPELINE_PROTECTED_ACCESS_SPEC_VERSION 1
+#define VK_EXT_PIPELINE_PROTECTED_ACCESS_EXTENSION_NAME "VK_EXT_pipeline_protected_access"
+typedef struct VkPhysicalDevicePipelineProtectedAccessFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           pipelineProtectedAccess;
+} VkPhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+
+
+#define VK_QCOM_tile_properties 1
+#define VK_QCOM_TILE_PROPERTIES_SPEC_VERSION 1
+#define VK_QCOM_TILE_PROPERTIES_EXTENSION_NAME "VK_QCOM_tile_properties"
+typedef struct VkPhysicalDeviceTilePropertiesFeaturesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           tileProperties;
+} VkPhysicalDeviceTilePropertiesFeaturesQCOM;
+
+typedef struct VkTilePropertiesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkExtent3D         tileSize;
+    VkExtent2D         apronSize;
+    VkOffset2D         origin;
+} VkTilePropertiesQCOM;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetFramebufferTilePropertiesQCOM)(VkDevice device, VkFramebuffer framebuffer, uint32_t* pPropertiesCount, VkTilePropertiesQCOM* pProperties);
+typedef VkResult (VKAPI_PTR *PFN_vkGetDynamicRenderingTilePropertiesQCOM)(VkDevice device, const VkRenderingInfo* pRenderingInfo, VkTilePropertiesQCOM* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetFramebufferTilePropertiesQCOM(
+    VkDevice                                    device,
+    VkFramebuffer                               framebuffer,
+    uint32_t*                                   pPropertiesCount,
+    VkTilePropertiesQCOM*                       pProperties);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetDynamicRenderingTilePropertiesQCOM(
+    VkDevice                                    device,
+    const VkRenderingInfo*                      pRenderingInfo,
+    VkTilePropertiesQCOM*                       pProperties);
+#endif
+
+
+#define VK_SEC_amigo_profiling 1
+#define VK_SEC_AMIGO_PROFILING_SPEC_VERSION 1
+#define VK_SEC_AMIGO_PROFILING_EXTENSION_NAME "VK_SEC_amigo_profiling"
+typedef struct VkPhysicalDeviceAmigoProfilingFeaturesSEC {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           amigoProfiling;
+} VkPhysicalDeviceAmigoProfilingFeaturesSEC;
+
+typedef struct VkAmigoProfilingSubmitInfoSEC {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint64_t           firstDrawTimestamp;
+    uint64_t           swapBufferTimestamp;
+} VkAmigoProfilingSubmitInfoSEC;
+
+
+
+#define VK_QCOM_multiview_per_view_viewports 1
+#define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_SPEC_VERSION 1
+#define VK_QCOM_MULTIVIEW_PER_VIEW_VIEWPORTS_EXTENSION_NAME "VK_QCOM_multiview_per_view_viewports"
+typedef struct VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           multiviewPerViewViewports;
+} VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
+
+
+
+#define VK_NV_ray_tracing_invocation_reorder 1
+#define VK_NV_RAY_TRACING_INVOCATION_REORDER_SPEC_VERSION 1
+#define VK_NV_RAY_TRACING_INVOCATION_REORDER_EXTENSION_NAME "VK_NV_ray_tracing_invocation_reorder"
+
+typedef enum VkRayTracingInvocationReorderModeNV {
+    VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV = 0,
+    VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV = 1,
+    VK_RAY_TRACING_INVOCATION_REORDER_MODE_MAX_ENUM_NV = 0x7FFFFFFF
+} VkRayTracingInvocationReorderModeNV;
+typedef struct VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV {
+    VkStructureType                        sType;
+    void*                                  pNext;
+    VkRayTracingInvocationReorderModeNV    rayTracingInvocationReorderReorderingHint;
+} VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV;
+
+typedef struct VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rayTracingInvocationReorder;
+} VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV;
+
+
+
+#define VK_EXT_mutable_descriptor_type 1
+#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_SPEC_VERSION 1
+#define VK_EXT_MUTABLE_DESCRIPTOR_TYPE_EXTENSION_NAME "VK_EXT_mutable_descriptor_type"
+
+
+#define VK_ARM_shader_core_builtins 1
+#define VK_ARM_SHADER_CORE_BUILTINS_SPEC_VERSION 2
+#define VK_ARM_SHADER_CORE_BUILTINS_EXTENSION_NAME "VK_ARM_shader_core_builtins"
+typedef struct VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           shaderCoreBuiltins;
+} VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+
+typedef struct VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           shaderCoreMask;
+    uint32_t           shaderCoreCount;
+    uint32_t           shaderWarpsPerCore;
+} VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+
+
+
+#define VK_KHR_acceleration_structure 1
+#define VK_KHR_ACCELERATION_STRUCTURE_SPEC_VERSION 13
+#define VK_KHR_ACCELERATION_STRUCTURE_EXTENSION_NAME "VK_KHR_acceleration_structure"
+
+typedef enum VkBuildAccelerationStructureModeKHR {
+    VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR = 0,
+    VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR = 1,
+    VK_BUILD_ACCELERATION_STRUCTURE_MODE_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkBuildAccelerationStructureModeKHR;
+
+typedef enum VkAccelerationStructureCreateFlagBitsKHR {
+    VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR = 0x00000001,
+    VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT = 0x00000008,
+    VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV = 0x00000004,
+    VK_ACCELERATION_STRUCTURE_CREATE_FLAG_BITS_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkAccelerationStructureCreateFlagBitsKHR;
+typedef VkFlags VkAccelerationStructureCreateFlagsKHR;
+typedef struct VkAccelerationStructureBuildRangeInfoKHR {
+    uint32_t    primitiveCount;
+    uint32_t    primitiveOffset;
+    uint32_t    firstVertex;
+    uint32_t    transformOffset;
+} VkAccelerationStructureBuildRangeInfoKHR;
+
+typedef struct VkAccelerationStructureGeometryTrianglesDataKHR {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkFormat                         vertexFormat;
+    VkDeviceOrHostAddressConstKHR    vertexData;
+    VkDeviceSize                     vertexStride;
+    uint32_t                         maxVertex;
+    VkIndexType                      indexType;
+    VkDeviceOrHostAddressConstKHR    indexData;
+    VkDeviceOrHostAddressConstKHR    transformData;
+} VkAccelerationStructureGeometryTrianglesDataKHR;
+
+typedef struct VkAccelerationStructureGeometryAabbsDataKHR {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkDeviceOrHostAddressConstKHR    data;
+    VkDeviceSize                     stride;
+} VkAccelerationStructureGeometryAabbsDataKHR;
+
+typedef struct VkAccelerationStructureGeometryInstancesDataKHR {
+    VkStructureType                  sType;
+    const void*                      pNext;
+    VkBool32                         arrayOfPointers;
+    VkDeviceOrHostAddressConstKHR    data;
+} VkAccelerationStructureGeometryInstancesDataKHR;
+
+typedef union VkAccelerationStructureGeometryDataKHR {
+    VkAccelerationStructureGeometryTrianglesDataKHR    triangles;
+    VkAccelerationStructureGeometryAabbsDataKHR        aabbs;
+    VkAccelerationStructureGeometryInstancesDataKHR    instances;
+} VkAccelerationStructureGeometryDataKHR;
+
+typedef struct VkAccelerationStructureGeometryKHR {
+    VkStructureType                           sType;
+    const void*                               pNext;
+    VkGeometryTypeKHR                         geometryType;
+    VkAccelerationStructureGeometryDataKHR    geometry;
+    VkGeometryFlagsKHR                        flags;
+} VkAccelerationStructureGeometryKHR;
+
+typedef struct VkAccelerationStructureBuildGeometryInfoKHR {
+    VkStructureType                                     sType;
+    const void*                                         pNext;
+    VkAccelerationStructureTypeKHR                      type;
+    VkBuildAccelerationStructureFlagsKHR                flags;
+    VkBuildAccelerationStructureModeKHR                 mode;
+    VkAccelerationStructureKHR                          srcAccelerationStructure;
+    VkAccelerationStructureKHR                          dstAccelerationStructure;
+    uint32_t                                            geometryCount;
+    const VkAccelerationStructureGeometryKHR*           pGeometries;
+    const VkAccelerationStructureGeometryKHR* const*    ppGeometries;
+    VkDeviceOrHostAddressKHR                            scratchData;
+} VkAccelerationStructureBuildGeometryInfoKHR;
+
+typedef struct VkAccelerationStructureCreateInfoKHR {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkAccelerationStructureCreateFlagsKHR    createFlags;
+    VkBuffer                                 buffer;
+    VkDeviceSize                             offset;
+    VkDeviceSize                             size;
+    VkAccelerationStructureTypeKHR           type;
+    VkDeviceAddress                          deviceAddress;
+} VkAccelerationStructureCreateInfoKHR;
+
+typedef struct VkWriteDescriptorSetAccelerationStructureKHR {
+    VkStructureType                      sType;
+    const void*                          pNext;
+    uint32_t                             accelerationStructureCount;
+    const VkAccelerationStructureKHR*    pAccelerationStructures;
+} VkWriteDescriptorSetAccelerationStructureKHR;
+
+typedef struct VkPhysicalDeviceAccelerationStructureFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           accelerationStructure;
+    VkBool32           accelerationStructureCaptureReplay;
+    VkBool32           accelerationStructureIndirectBuild;
+    VkBool32           accelerationStructureHostCommands;
+    VkBool32           descriptorBindingAccelerationStructureUpdateAfterBind;
+} VkPhysicalDeviceAccelerationStructureFeaturesKHR;
+
+typedef struct VkPhysicalDeviceAccelerationStructurePropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           maxGeometryCount;
+    uint64_t           maxInstanceCount;
+    uint64_t           maxPrimitiveCount;
+    uint32_t           maxPerStageDescriptorAccelerationStructures;
+    uint32_t           maxPerStageDescriptorUpdateAfterBindAccelerationStructures;
+    uint32_t           maxDescriptorSetAccelerationStructures;
+    uint32_t           maxDescriptorSetUpdateAfterBindAccelerationStructures;
+    uint32_t           minAccelerationStructureScratchOffsetAlignment;
+} VkPhysicalDeviceAccelerationStructurePropertiesKHR;
+
+typedef struct VkAccelerationStructureDeviceAddressInfoKHR {
+    VkStructureType               sType;
+    const void*                   pNext;
+    VkAccelerationStructureKHR    accelerationStructure;
+} VkAccelerationStructureDeviceAddressInfoKHR;
+
+typedef struct VkAccelerationStructureVersionInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    const uint8_t*     pVersionData;
+} VkAccelerationStructureVersionInfoKHR;
+
+typedef struct VkCopyAccelerationStructureToMemoryInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkAccelerationStructureKHR            src;
+    VkDeviceOrHostAddressKHR              dst;
+    VkCopyAccelerationStructureModeKHR    mode;
+} VkCopyAccelerationStructureToMemoryInfoKHR;
+
+typedef struct VkCopyMemoryToAccelerationStructureInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkDeviceOrHostAddressConstKHR         src;
+    VkAccelerationStructureKHR            dst;
+    VkCopyAccelerationStructureModeKHR    mode;
+} VkCopyMemoryToAccelerationStructureInfoKHR;
+
+typedef struct VkCopyAccelerationStructureInfoKHR {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkAccelerationStructureKHR            src;
+    VkAccelerationStructureKHR            dst;
+    VkCopyAccelerationStructureModeKHR    mode;
+} VkCopyAccelerationStructureInfoKHR;
+
+typedef struct VkAccelerationStructureBuildSizesInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    VkDeviceSize       accelerationStructureSize;
+    VkDeviceSize       updateScratchSize;
+    VkDeviceSize       buildScratchSize;
+} VkAccelerationStructureBuildSizesInfoKHR;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateAccelerationStructureKHR)(VkDevice                                           device, const VkAccelerationStructureCreateInfoKHR*        pCreateInfo, const VkAllocationCallbacks*       pAllocator, VkAccelerationStructureKHR*                        pAccelerationStructure);
+typedef void (VKAPI_PTR *PFN_vkDestroyAccelerationStructureKHR)(VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresKHR)(VkCommandBuffer                                    commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos);
+typedef void (VKAPI_PTR *PFN_vkCmdBuildAccelerationStructuresIndirectKHR)(VkCommandBuffer                  commandBuffer, uint32_t                                           infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress*             pIndirectDeviceAddresses, const uint32_t*                    pIndirectStrides, const uint32_t* const*             ppMaxPrimitiveCounts);
+typedef VkResult (VKAPI_PTR *PFN_vkBuildAccelerationStructuresKHR)(VkDevice                                           device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyAccelerationStructureToMemoryKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkCopyMemoryToAccelerationStructureKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkWriteAccelerationStructuresPropertiesKHR)(VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType  queryType, size_t       dataSize, void* pData, size_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyAccelerationStructureToMemoryKHR)(VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdCopyMemoryToAccelerationStructureKHR)(VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo);
+typedef VkDeviceAddress (VKAPI_PTR *PFN_vkGetAccelerationStructureDeviceAddressKHR)(VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo);
+typedef void (VKAPI_PTR *PFN_vkCmdWriteAccelerationStructuresPropertiesKHR)(VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery);
+typedef void (VKAPI_PTR *PFN_vkGetDeviceAccelerationStructureCompatibilityKHR)(VkDevice device, const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility);
+typedef void (VKAPI_PTR *PFN_vkGetAccelerationStructureBuildSizesKHR)(VkDevice                                            device, VkAccelerationStructureBuildTypeKHR                 buildType, const VkAccelerationStructureBuildGeometryInfoKHR*  pBuildInfo, const uint32_t*  pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR*           pSizeInfo);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateAccelerationStructureKHR(
+    VkDevice                                    device,
+    const VkAccelerationStructureCreateInfoKHR* pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkAccelerationStructureKHR*                 pAccelerationStructure);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyAccelerationStructureKHR(
+    VkDevice                                    device,
+    VkAccelerationStructureKHR                  accelerationStructure,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    infoCount,
+    const VkAccelerationStructureBuildGeometryInfoKHR* pInfos,
+    const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdBuildAccelerationStructuresIndirectKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    infoCount,
+    const VkAccelerationStructureBuildGeometryInfoKHR* pInfos,
+    const VkDeviceAddress*                      pIndirectDeviceAddresses,
+    const uint32_t*                             pIndirectStrides,
+    const uint32_t* const*                      ppMaxPrimitiveCounts);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkBuildAccelerationStructuresKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    uint32_t                                    infoCount,
+    const VkAccelerationStructureBuildGeometryInfoKHR* pInfos,
+    const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    const VkCopyAccelerationStructureInfoKHR*   pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyAccelerationStructureToMemoryKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCopyMemoryToAccelerationStructureKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkWriteAccelerationStructuresPropertiesKHR(
+    VkDevice                                    device,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureKHR*           pAccelerationStructures,
+    VkQueryType                                 queryType,
+    size_t                                      dataSize,
+    void*                                       pData,
+    size_t                                      stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyAccelerationStructureInfoKHR*   pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyAccelerationStructureToMemoryKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdCopyMemoryToAccelerationStructureKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo);
+
+VKAPI_ATTR VkDeviceAddress VKAPI_CALL vkGetAccelerationStructureDeviceAddressKHR(
+    VkDevice                                    device,
+    const VkAccelerationStructureDeviceAddressInfoKHR* pInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdWriteAccelerationStructuresPropertiesKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    accelerationStructureCount,
+    const VkAccelerationStructureKHR*           pAccelerationStructures,
+    VkQueryType                                 queryType,
+    VkQueryPool                                 queryPool,
+    uint32_t                                    firstQuery);
+
+VKAPI_ATTR void VKAPI_CALL vkGetDeviceAccelerationStructureCompatibilityKHR(
+    VkDevice                                    device,
+    const VkAccelerationStructureVersionInfoKHR* pVersionInfo,
+    VkAccelerationStructureCompatibilityKHR*    pCompatibility);
+
+VKAPI_ATTR void VKAPI_CALL vkGetAccelerationStructureBuildSizesKHR(
+    VkDevice                                    device,
+    VkAccelerationStructureBuildTypeKHR         buildType,
+    const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo,
+    const uint32_t*                             pMaxPrimitiveCounts,
+    VkAccelerationStructureBuildSizesInfoKHR*   pSizeInfo);
+#endif
+
+
+#define VK_KHR_ray_tracing_pipeline 1
+#define VK_KHR_RAY_TRACING_PIPELINE_SPEC_VERSION 1
+#define VK_KHR_RAY_TRACING_PIPELINE_EXTENSION_NAME "VK_KHR_ray_tracing_pipeline"
+
+typedef enum VkShaderGroupShaderKHR {
+    VK_SHADER_GROUP_SHADER_GENERAL_KHR = 0,
+    VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR = 1,
+    VK_SHADER_GROUP_SHADER_ANY_HIT_KHR = 2,
+    VK_SHADER_GROUP_SHADER_INTERSECTION_KHR = 3,
+    VK_SHADER_GROUP_SHADER_MAX_ENUM_KHR = 0x7FFFFFFF
+} VkShaderGroupShaderKHR;
+typedef struct VkRayTracingShaderGroupCreateInfoKHR {
+    VkStructureType                   sType;
+    const void*                       pNext;
+    VkRayTracingShaderGroupTypeKHR    type;
+    uint32_t                          generalShader;
+    uint32_t                          closestHitShader;
+    uint32_t                          anyHitShader;
+    uint32_t                          intersectionShader;
+    const void*                       pShaderGroupCaptureReplayHandle;
+} VkRayTracingShaderGroupCreateInfoKHR;
+
+typedef struct VkRayTracingPipelineInterfaceCreateInfoKHR {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           maxPipelineRayPayloadSize;
+    uint32_t           maxPipelineRayHitAttributeSize;
+} VkRayTracingPipelineInterfaceCreateInfoKHR;
+
+typedef struct VkRayTracingPipelineCreateInfoKHR {
+    VkStructureType                                      sType;
+    const void*                                          pNext;
+    VkPipelineCreateFlags                                flags;
+    uint32_t                                             stageCount;
+    const VkPipelineShaderStageCreateInfo*               pStages;
+    uint32_t                                             groupCount;
+    const VkRayTracingShaderGroupCreateInfoKHR*          pGroups;
+    uint32_t                                             maxPipelineRayRecursionDepth;
+    const VkPipelineLibraryCreateInfoKHR*                pLibraryInfo;
+    const VkRayTracingPipelineInterfaceCreateInfoKHR*    pLibraryInterface;
+    const VkPipelineDynamicStateCreateInfo*              pDynamicState;
+    VkPipelineLayout                                     layout;
+    VkPipeline                                           basePipelineHandle;
+    int32_t                                              basePipelineIndex;
+} VkRayTracingPipelineCreateInfoKHR;
+
+typedef struct VkPhysicalDeviceRayTracingPipelineFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rayTracingPipeline;
+    VkBool32           rayTracingPipelineShaderGroupHandleCaptureReplay;
+    VkBool32           rayTracingPipelineShaderGroupHandleCaptureReplayMixed;
+    VkBool32           rayTracingPipelineTraceRaysIndirect;
+    VkBool32           rayTraversalPrimitiveCulling;
+} VkPhysicalDeviceRayTracingPipelineFeaturesKHR;
+
+typedef struct VkPhysicalDeviceRayTracingPipelinePropertiesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           shaderGroupHandleSize;
+    uint32_t           maxRayRecursionDepth;
+    uint32_t           maxShaderGroupStride;
+    uint32_t           shaderGroupBaseAlignment;
+    uint32_t           shaderGroupHandleCaptureReplaySize;
+    uint32_t           maxRayDispatchInvocationCount;
+    uint32_t           shaderGroupHandleAlignment;
+    uint32_t           maxRayHitAttributeSize;
+} VkPhysicalDeviceRayTracingPipelinePropertiesKHR;
+
+typedef struct VkStridedDeviceAddressRegionKHR {
+    VkDeviceAddress    deviceAddress;
+    VkDeviceSize       stride;
+    VkDeviceSize       size;
+} VkStridedDeviceAddressRegionKHR;
+
+typedef struct VkTraceRaysIndirectCommandKHR {
+    uint32_t    width;
+    uint32_t    height;
+    uint32_t    depth;
+} VkTraceRaysIndirectCommandKHR;
+
+typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth);
+typedef VkResult (VKAPI_PTR *PFN_vkCreateRayTracingPipelinesKHR)(VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines);
+typedef VkResult (VKAPI_PTR *PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR)(VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData);
+typedef void (VKAPI_PTR *PFN_vkCmdTraceRaysIndirectKHR)(VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress);
+typedef VkDeviceSize (VKAPI_PTR *PFN_vkGetRayTracingShaderGroupStackSizeKHR)(VkDevice device, VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader);
+typedef void (VKAPI_PTR *PFN_vkCmdSetRayTracingPipelineStackSizeKHR)(VkCommandBuffer commandBuffer, uint32_t pipelineStackSize);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkStridedDeviceAddressRegionKHR*      pRaygenShaderBindingTable,
+    const VkStridedDeviceAddressRegionKHR*      pMissShaderBindingTable,
+    const VkStridedDeviceAddressRegionKHR*      pHitShaderBindingTable,
+    const VkStridedDeviceAddressRegionKHR*      pCallableShaderBindingTable,
+    uint32_t                                    width,
+    uint32_t                                    height,
+    uint32_t                                    depth);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateRayTracingPipelinesKHR(
+    VkDevice                                    device,
+    VkDeferredOperationKHR                      deferredOperation,
+    VkPipelineCache                             pipelineCache,
+    uint32_t                                    createInfoCount,
+    const VkRayTracingPipelineCreateInfoKHR*    pCreateInfos,
+    const VkAllocationCallbacks*                pAllocator,
+    VkPipeline*                                 pPipelines);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    firstGroup,
+    uint32_t                                    groupCount,
+    size_t                                      dataSize,
+    void*                                       pData);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdTraceRaysIndirectKHR(
+    VkCommandBuffer                             commandBuffer,
+    const VkStridedDeviceAddressRegionKHR*      pRaygenShaderBindingTable,
+    const VkStridedDeviceAddressRegionKHR*      pMissShaderBindingTable,
+    const VkStridedDeviceAddressRegionKHR*      pHitShaderBindingTable,
+    const VkStridedDeviceAddressRegionKHR*      pCallableShaderBindingTable,
+    VkDeviceAddress                             indirectDeviceAddress);
+
+VKAPI_ATTR VkDeviceSize VKAPI_CALL vkGetRayTracingShaderGroupStackSizeKHR(
+    VkDevice                                    device,
+    VkPipeline                                  pipeline,
+    uint32_t                                    group,
+    VkShaderGroupShaderKHR                      groupShader);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdSetRayTracingPipelineStackSizeKHR(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    pipelineStackSize);
+#endif
+
+
+#define VK_KHR_ray_query 1
+#define VK_KHR_RAY_QUERY_SPEC_VERSION     1
+#define VK_KHR_RAY_QUERY_EXTENSION_NAME   "VK_KHR_ray_query"
+typedef struct VkPhysicalDeviceRayQueryFeaturesKHR {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           rayQuery;
+} VkPhysicalDeviceRayQueryFeaturesKHR;
+
+
+
+#define VK_EXT_mesh_shader 1
+#define VK_EXT_MESH_SHADER_SPEC_VERSION   1
+#define VK_EXT_MESH_SHADER_EXTENSION_NAME "VK_EXT_mesh_shader"
+typedef struct VkPhysicalDeviceMeshShaderFeaturesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           taskShader;
+    VkBool32           meshShader;
+    VkBool32           multiviewMeshShader;
+    VkBool32           primitiveFragmentShadingRateMeshShader;
+    VkBool32           meshShaderQueries;
+} VkPhysicalDeviceMeshShaderFeaturesEXT;
+
+typedef struct VkPhysicalDeviceMeshShaderPropertiesEXT {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           maxTaskWorkGroupTotalCount;
+    uint32_t           maxTaskWorkGroupCount[3];
+    uint32_t           maxTaskWorkGroupInvocations;
+    uint32_t           maxTaskWorkGroupSize[3];
+    uint32_t           maxTaskPayloadSize;
+    uint32_t           maxTaskSharedMemorySize;
+    uint32_t           maxTaskPayloadAndSharedMemorySize;
+    uint32_t           maxMeshWorkGroupTotalCount;
+    uint32_t           maxMeshWorkGroupCount[3];
+    uint32_t           maxMeshWorkGroupInvocations;
+    uint32_t           maxMeshWorkGroupSize[3];
+    uint32_t           maxMeshSharedMemorySize;
+    uint32_t           maxMeshPayloadAndSharedMemorySize;
+    uint32_t           maxMeshOutputMemorySize;
+    uint32_t           maxMeshPayloadAndOutputMemorySize;
+    uint32_t           maxMeshOutputComponents;
+    uint32_t           maxMeshOutputVertices;
+    uint32_t           maxMeshOutputPrimitives;
+    uint32_t           maxMeshOutputLayers;
+    uint32_t           maxMeshMultiviewViewCount;
+    uint32_t           meshOutputPerVertexGranularity;
+    uint32_t           meshOutputPerPrimitiveGranularity;
+    uint32_t           maxPreferredTaskWorkGroupInvocations;
+    uint32_t           maxPreferredMeshWorkGroupInvocations;
+    VkBool32           prefersLocalInvocationVertexOutput;
+    VkBool32           prefersLocalInvocationPrimitiveOutput;
+    VkBool32           prefersCompactVertexOutput;
+    VkBool32           prefersCompactPrimitiveOutput;
+} VkPhysicalDeviceMeshShaderPropertiesEXT;
+
+typedef struct VkDrawMeshTasksIndirectCommandEXT {
+    uint32_t    groupCountX;
+    uint32_t    groupCountY;
+    uint32_t    groupCountZ;
+} VkDrawMeshTasksIndirectCommandEXT;
+
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksEXT)(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectEXT)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride);
+typedef void (VKAPI_PTR *PFN_vkCmdDrawMeshTasksIndirectCountEXT)(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksEXT(
+    VkCommandBuffer                             commandBuffer,
+    uint32_t                                    groupCountX,
+    uint32_t                                    groupCountY,
+    uint32_t                                    groupCountZ);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    uint32_t                                    drawCount,
+    uint32_t                                    stride);
+
+VKAPI_ATTR void VKAPI_CALL vkCmdDrawMeshTasksIndirectCountEXT(
+    VkCommandBuffer                             commandBuffer,
+    VkBuffer                                    buffer,
+    VkDeviceSize                                offset,
+    VkBuffer                                    countBuffer,
+    VkDeviceSize                                countBufferOffset,
+    uint32_t                                    maxDrawCount,
+    uint32_t                                    stride);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/vulkan/vulkan_directfb.h b/include/vulkan/vulkan_directfb.h
new file mode 100644
index 0000000..ab3504e
--- /dev/null
+++ b/include/vulkan/vulkan_directfb.h
@@ -0,0 +1,54 @@
+#ifndef VULKAN_DIRECTFB_H_
+#define VULKAN_DIRECTFB_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_EXT_directfb_surface 1
+#define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1
+#define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface"
+typedef VkFlags VkDirectFBSurfaceCreateFlagsEXT;
+typedef struct VkDirectFBSurfaceCreateInfoEXT {
+    VkStructureType                    sType;
+    const void*                        pNext;
+    VkDirectFBSurfaceCreateFlagsEXT    flags;
+    IDirectFB*                         dfb;
+    IDirectFBSurface*                  surface;
+} VkDirectFBSurfaceCreateInfoEXT;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateDirectFBSurfaceEXT)(VkInstance instance, const VkDirectFBSurfaceCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+typedef VkBool32 (VKAPI_PTR *PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT)(VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, IDirectFB* dfb);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateDirectFBSurfaceEXT(
+    VkInstance                                  instance,
+    const VkDirectFBSurfaceCreateInfoEXT*       pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+
+VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceDirectFBPresentationSupportEXT(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    queueFamilyIndex,
+    IDirectFB*                                  dfb);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
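Note (illustration only, not part of the patch): a minimal usage sketch for the VK_EXT_directfb_surface entry point declared in the header above. It assumes VK_USE_PLATFORM_DIRECTFB_EXT is defined so vulkan.h pulls in this header, the instance was created with the extension enabled, and `dfb`/`dfbSurface` are pre-existing DirectFB objects created elsewhere.

    #define VK_USE_PLATFORM_DIRECTFB_EXT
    #include <vulkan/vulkan.h>

    // Hypothetical helper: wraps an existing DirectFB surface in a VkSurfaceKHR
    // using the prototype declared in the header above.
    static VkResult createDirectFBSurface(VkInstance instance,
                                          IDirectFB* dfb,
                                          IDirectFBSurface* dfbSurface,
                                          VkSurfaceKHR* outSurface)
    {
        VkDirectFBSurfaceCreateInfoEXT info = {};
        info.sType   = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT;
        info.dfb     = dfb;
        info.surface = dfbSurface;
        return vkCreateDirectFBSurfaceEXT(instance, &info, nullptr, outSurface);
    }

Presentation support for a queue family can be queried the same way through vkGetPhysicalDeviceDirectFBPresentationSupportEXT, also declared above.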
diff --git a/include/vulkan/vulkan_enums.hpp b/include/vulkan/vulkan_enums.hpp
new file mode 100644
index 0000000..ab07ed4
--- /dev/null
+++ b/include/vulkan/vulkan_enums.hpp
@@ -0,0 +1,6930 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_ENUMS_HPP
+#  define VULKAN_ENUMS_HPP
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  template <typename EnumType, EnumType value>
+  struct CppType
+  {};
+
+  //=============
+  //=== ENUMs ===
+  //=============
+
+
+  //=== VK_VERSION_1_0 ===
+
+  enum class Result
+  {
+    eSuccess = VK_SUCCESS,
+    eNotReady = VK_NOT_READY,
+    eTimeout = VK_TIMEOUT,
+    eEventSet = VK_EVENT_SET,
+    eEventReset = VK_EVENT_RESET,
+    eIncomplete = VK_INCOMPLETE,
+    eErrorOutOfHostMemory = VK_ERROR_OUT_OF_HOST_MEMORY,
+    eErrorOutOfDeviceMemory = VK_ERROR_OUT_OF_DEVICE_MEMORY,
+    eErrorInitializationFailed = VK_ERROR_INITIALIZATION_FAILED,
+    eErrorDeviceLost = VK_ERROR_DEVICE_LOST,
+    eErrorMemoryMapFailed = VK_ERROR_MEMORY_MAP_FAILED,
+    eErrorLayerNotPresent = VK_ERROR_LAYER_NOT_PRESENT,
+    eErrorExtensionNotPresent = VK_ERROR_EXTENSION_NOT_PRESENT,
+    eErrorFeatureNotPresent = VK_ERROR_FEATURE_NOT_PRESENT,
+    eErrorIncompatibleDriver = VK_ERROR_INCOMPATIBLE_DRIVER,
+    eErrorTooManyObjects = VK_ERROR_TOO_MANY_OBJECTS,
+    eErrorFormatNotSupported = VK_ERROR_FORMAT_NOT_SUPPORTED,
+    eErrorFragmentedPool = VK_ERROR_FRAGMENTED_POOL,
+    eErrorUnknown = VK_ERROR_UNKNOWN,
+    eErrorOutOfPoolMemory = VK_ERROR_OUT_OF_POOL_MEMORY,
+    eErrorInvalidExternalHandle = VK_ERROR_INVALID_EXTERNAL_HANDLE,
+    eErrorFragmentation = VK_ERROR_FRAGMENTATION,
+    eErrorInvalidOpaqueCaptureAddress = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS,
+    ePipelineCompileRequired = VK_PIPELINE_COMPILE_REQUIRED,
+    eErrorSurfaceLostKHR = VK_ERROR_SURFACE_LOST_KHR,
+    eErrorNativeWindowInUseKHR = VK_ERROR_NATIVE_WINDOW_IN_USE_KHR,
+    eSuboptimalKHR = VK_SUBOPTIMAL_KHR,
+    eErrorOutOfDateKHR = VK_ERROR_OUT_OF_DATE_KHR,
+    eErrorIncompatibleDisplayKHR = VK_ERROR_INCOMPATIBLE_DISPLAY_KHR,
+    eErrorValidationFailedEXT = VK_ERROR_VALIDATION_FAILED_EXT,
+    eErrorInvalidShaderNV = VK_ERROR_INVALID_SHADER_NV,
+    eErrorImageUsageNotSupportedKHR = VK_ERROR_IMAGE_USAGE_NOT_SUPPORTED_KHR,
+    eErrorVideoPictureLayoutNotSupportedKHR = VK_ERROR_VIDEO_PICTURE_LAYOUT_NOT_SUPPORTED_KHR,
+    eErrorVideoProfileOperationNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_OPERATION_NOT_SUPPORTED_KHR,
+    eErrorVideoProfileFormatNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_FORMAT_NOT_SUPPORTED_KHR,
+    eErrorVideoProfileCodecNotSupportedKHR = VK_ERROR_VIDEO_PROFILE_CODEC_NOT_SUPPORTED_KHR,
+    eErrorVideoStdVersionNotSupportedKHR = VK_ERROR_VIDEO_STD_VERSION_NOT_SUPPORTED_KHR,
+    eErrorInvalidDrmFormatModifierPlaneLayoutEXT = VK_ERROR_INVALID_DRM_FORMAT_MODIFIER_PLANE_LAYOUT_EXT,
+    eErrorNotPermittedKHR = VK_ERROR_NOT_PERMITTED_KHR,
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eErrorFullScreenExclusiveModeLostEXT = VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eThreadIdleKHR = VK_THREAD_IDLE_KHR,
+    eThreadDoneKHR = VK_THREAD_DONE_KHR,
+    eOperationDeferredKHR = VK_OPERATION_DEFERRED_KHR,
+    eOperationNotDeferredKHR = VK_OPERATION_NOT_DEFERRED_KHR,
+    eErrorCompressionExhaustedEXT = VK_ERROR_COMPRESSION_EXHAUSTED_EXT,
+    eErrorFragmentationEXT = VK_ERROR_FRAGMENTATION_EXT,
+    eErrorInvalidDeviceAddressEXT = VK_ERROR_INVALID_DEVICE_ADDRESS_EXT,
+    eErrorInvalidExternalHandleKHR = VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR,
+    eErrorInvalidOpaqueCaptureAddressKHR = VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR,
+    eErrorNotPermittedEXT = VK_ERROR_NOT_PERMITTED_EXT,
+    eErrorOutOfPoolMemoryKHR = VK_ERROR_OUT_OF_POOL_MEMORY_KHR,
+    eErrorPipelineCompileRequiredEXT = VK_ERROR_PIPELINE_COMPILE_REQUIRED_EXT,
+    ePipelineCompileRequiredEXT = VK_PIPELINE_COMPILE_REQUIRED_EXT
+  };
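Note (illustration only, not part of the patch): the scoped vk::Result enumerators above alias the C VkResult codes, so a value returned by a plain C entry point can be converted with a static_cast and compared against the scoped names. A minimal sketch, assuming <vulkan/vulkan.hpp> and a valid VkDevice:

    #include <vulkan/vulkan.hpp>

    // Hypothetical helper: inspects the VkResult from a C entry point through
    // the scoped enum declared above.
    static bool deviceIdleSucceeded(VkDevice device)
    {
        VkResult raw = vkDeviceWaitIdle(device);           // C API call
        vk::Result result = static_cast<vk::Result>(raw);  // same underlying values
        return result == vk::Result::eSuccess;
    }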
+
+  enum class StructureType
+  {
+    eApplicationInfo = VK_STRUCTURE_TYPE_APPLICATION_INFO,
+    eInstanceCreateInfo = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
+    eDeviceQueueCreateInfo = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
+    eDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
+    eSubmitInfo = VK_STRUCTURE_TYPE_SUBMIT_INFO,
+    eMemoryAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
+    eMappedMemoryRange = VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
+    eBindSparseInfo = VK_STRUCTURE_TYPE_BIND_SPARSE_INFO,
+    eFenceCreateInfo = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
+    eSemaphoreCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
+    eEventCreateInfo = VK_STRUCTURE_TYPE_EVENT_CREATE_INFO,
+    eQueryPoolCreateInfo = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO,
+    eBufferCreateInfo = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
+    eBufferViewCreateInfo = VK_STRUCTURE_TYPE_BUFFER_VIEW_CREATE_INFO,
+    eImageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
+    eImageViewCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
+    eShaderModuleCreateInfo = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
+    ePipelineCacheCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO,
+    ePipelineShaderStageCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
+    ePipelineVertexInputStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
+    ePipelineInputAssemblyStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
+    ePipelineTessellationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO,
+    ePipelineViewportStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
+    ePipelineRasterizationStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
+    ePipelineMultisampleStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
+    ePipelineDepthStencilStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO,
+    ePipelineColorBlendStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
+    ePipelineDynamicStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
+    eGraphicsPipelineCreateInfo = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
+    eComputePipelineCreateInfo = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,
+    ePipelineLayoutCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
+    eSamplerCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
+    eDescriptorSetLayoutCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
+    eDescriptorPoolCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
+    eDescriptorSetAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
+    eWriteDescriptorSet = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
+    eCopyDescriptorSet = VK_STRUCTURE_TYPE_COPY_DESCRIPTOR_SET,
+    eFramebufferCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
+    eRenderPassCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
+    eCommandPoolCreateInfo = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
+    eCommandBufferAllocateInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
+    eCommandBufferInheritanceInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_INFO,
+    eCommandBufferBeginInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
+    eRenderPassBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
+    eBufferMemoryBarrier = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
+    eImageMemoryBarrier = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
+    eMemoryBarrier = VK_STRUCTURE_TYPE_MEMORY_BARRIER,
+    eLoaderInstanceCreateInfo = VK_STRUCTURE_TYPE_LOADER_INSTANCE_CREATE_INFO,
+    eLoaderDeviceCreateInfo = VK_STRUCTURE_TYPE_LOADER_DEVICE_CREATE_INFO,
+    ePhysicalDeviceSubgroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_PROPERTIES,
+    eBindBufferMemoryInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO,
+    eBindImageMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO,
+    ePhysicalDevice16BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES,
+    eMemoryDedicatedRequirements = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
+    eMemoryDedicatedAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
+    eMemoryAllocateFlagsInfo = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO,
+    eDeviceGroupRenderPassBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO,
+    eDeviceGroupCommandBufferBeginInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO,
+    eDeviceGroupSubmitInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO,
+    eDeviceGroupBindSparseInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO,
+    eBindBufferMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO,
+    eBindImageMemoryDeviceGroupInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO,
+    ePhysicalDeviceGroupProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES,
+    eDeviceGroupDeviceCreateInfo = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO,
+    eBufferMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2,
+    eImageMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
+    eImageSparseMemoryRequirementsInfo2 = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2,
+    eMemoryRequirements2 = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
+    eSparseImageMemoryRequirements2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2,
+    ePhysicalDeviceFeatures2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
+    ePhysicalDeviceProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
+    eFormatProperties2 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
+    eImageFormatProperties2 = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
+    ePhysicalDeviceImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
+    eQueueFamilyProperties2 = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
+    ePhysicalDeviceMemoryProperties2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2,
+    eSparseImageFormatProperties2 = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2,
+    ePhysicalDeviceSparseImageFormatInfo2 = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2,
+    ePhysicalDevicePointClippingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES,
+    eRenderPassInputAttachmentAspectCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO,
+    eImageViewUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO,
+    ePipelineTessellationDomainOriginStateCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO,
+    eRenderPassMultiviewCreateInfo = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO,
+    ePhysicalDeviceMultiviewFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES,
+    ePhysicalDeviceMultiviewProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES,
+    ePhysicalDeviceVariablePointersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES,
+    eProtectedSubmitInfo = VK_STRUCTURE_TYPE_PROTECTED_SUBMIT_INFO,
+    ePhysicalDeviceProtectedMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES,
+    ePhysicalDeviceProtectedMemoryProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_PROPERTIES,
+    eDeviceQueueInfo2 = VK_STRUCTURE_TYPE_DEVICE_QUEUE_INFO_2,
+    eSamplerYcbcrConversionCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO,
+    eSamplerYcbcrConversionInfo = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO,
+    eBindImagePlaneMemoryInfo = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO,
+    eImagePlaneMemoryRequirementsInfo = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
+    ePhysicalDeviceSamplerYcbcrConversionFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES,
+    eSamplerYcbcrConversionImageFormatProperties = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES,
+    eDescriptorUpdateTemplateCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO,
+    ePhysicalDeviceExternalImageFormatInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
+    eExternalImageFormatProperties = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES,
+    ePhysicalDeviceExternalBufferInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO,
+    eExternalBufferProperties = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES,
+    ePhysicalDeviceIdProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES,
+    eExternalMemoryBufferCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO,
+    eExternalMemoryImageCreateInfo = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
+    eExportMemoryAllocateInfo = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO,
+    ePhysicalDeviceExternalFenceInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO,
+    eExternalFenceProperties = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES,
+    eExportFenceCreateInfo = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO,
+    eExportSemaphoreCreateInfo = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
+    ePhysicalDeviceExternalSemaphoreInfo = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
+    eExternalSemaphoreProperties = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES,
+    ePhysicalDeviceMaintenance3Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES,
+    eDescriptorSetLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
+    ePhysicalDeviceShaderDrawParametersFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES,
+    ePhysicalDeviceVulkan11Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
+    ePhysicalDeviceVulkan11Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
+    ePhysicalDeviceVulkan12Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
+    ePhysicalDeviceVulkan12Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
+    eImageFormatListCreateInfo = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO,
+    eAttachmentDescription2 = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2,
+    eAttachmentReference2 = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2,
+    eSubpassDescription2 = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2,
+    eSubpassDependency2 = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2,
+    eRenderPassCreateInfo2 = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2,
+    eSubpassBeginInfo = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO,
+    eSubpassEndInfo = VK_STRUCTURE_TYPE_SUBPASS_END_INFO,
+    ePhysicalDevice8BitStorageFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES,
+    ePhysicalDeviceDriverProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES,
+    ePhysicalDeviceShaderAtomicInt64Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES,
+    ePhysicalDeviceShaderFloat16Int8Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES,
+    ePhysicalDeviceFloatControlsProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES,
+    eDescriptorSetLayoutBindingFlagsCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO,
+    ePhysicalDeviceDescriptorIndexingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES,
+    ePhysicalDeviceDescriptorIndexingProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES,
+    eDescriptorSetVariableDescriptorCountAllocateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO,
+    eDescriptorSetVariableDescriptorCountLayoutSupport = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT,
+    ePhysicalDeviceDepthStencilResolveProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES,
+    eSubpassDescriptionDepthStencilResolve = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE,
+    ePhysicalDeviceScalarBlockLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES,
+    eImageStencilUsageCreateInfo = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO,
+    ePhysicalDeviceSamplerFilterMinmaxProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES,
+    eSamplerReductionModeCreateInfo = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO,
+    ePhysicalDeviceVulkanMemoryModelFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES,
+    ePhysicalDeviceImagelessFramebufferFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES,
+    eFramebufferAttachmentsCreateInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO,
+    eFramebufferAttachmentImageInfo = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO,
+    eRenderPassAttachmentBeginInfo = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO,
+    ePhysicalDeviceUniformBufferStandardLayoutFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES,
+    ePhysicalDeviceShaderSubgroupExtendedTypesFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES,
+    ePhysicalDeviceSeparateDepthStencilLayoutsFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES,
+    eAttachmentReferenceStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT,
+    eAttachmentDescriptionStencilLayout = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT,
+    ePhysicalDeviceHostQueryResetFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES,
+    ePhysicalDeviceTimelineSemaphoreFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES,
+    ePhysicalDeviceTimelineSemaphoreProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES,
+    eSemaphoreTypeCreateInfo = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
+    eTimelineSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO,
+    eSemaphoreWaitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
+    eSemaphoreSignalInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO,
+    ePhysicalDeviceBufferDeviceAddressFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES,
+    eBufferDeviceAddressInfo = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO,
+    eBufferOpaqueCaptureAddressCreateInfo = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO,
+    eMemoryOpaqueCaptureAddressAllocateInfo = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO,
+    eDeviceMemoryOpaqueCaptureAddressInfo = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO,
+    ePhysicalDeviceVulkan13Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES,
+    ePhysicalDeviceVulkan13Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_PROPERTIES,
+    ePipelineCreationFeedbackCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO,
+    ePhysicalDeviceShaderTerminateInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES,
+    ePhysicalDeviceToolProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES,
+    ePhysicalDeviceShaderDemoteToHelperInvocationFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES,
+    ePhysicalDevicePrivateDataFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES,
+    eDevicePrivateDataCreateInfo = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO,
+    ePrivateDataSlotCreateInfo = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO,
+    ePhysicalDevicePipelineCreationCacheControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES,
+    eMemoryBarrier2 = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2,
+    eBufferMemoryBarrier2 = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2,
+    eImageMemoryBarrier2 = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2,
+    eDependencyInfo = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
+    eSubmitInfo2 = VK_STRUCTURE_TYPE_SUBMIT_INFO_2,
+    eSemaphoreSubmitInfo = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO,
+    eCommandBufferSubmitInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO,
+    ePhysicalDeviceSynchronization2Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES,
+    ePhysicalDeviceZeroInitializeWorkgroupMemoryFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES,
+    ePhysicalDeviceImageRobustnessFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES,
+    eCopyBufferInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2,
+    eCopyImageInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2,
+    eCopyBufferToImageInfo2 = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2,
+    eCopyImageToBufferInfo2 = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2,
+    eBlitImageInfo2 = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2,
+    eResolveImageInfo2 = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2,
+    eBufferCopy2 = VK_STRUCTURE_TYPE_BUFFER_COPY_2,
+    eImageCopy2 = VK_STRUCTURE_TYPE_IMAGE_COPY_2,
+    eImageBlit2 = VK_STRUCTURE_TYPE_IMAGE_BLIT_2,
+    eBufferImageCopy2 = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2,
+    eImageResolve2 = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2,
+    ePhysicalDeviceSubgroupSizeControlProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES,
+    ePipelineShaderStageRequiredSubgroupSizeCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO,
+    ePhysicalDeviceSubgroupSizeControlFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES,
+    ePhysicalDeviceInlineUniformBlockFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES,
+    ePhysicalDeviceInlineUniformBlockProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES,
+    eWriteDescriptorSetInlineUniformBlock = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK,
+    eDescriptorPoolInlineUniformBlockCreateInfo = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO,
+    ePhysicalDeviceTextureCompressionAstcHdrFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES,
+    eRenderingInfo = VK_STRUCTURE_TYPE_RENDERING_INFO,
+    eRenderingAttachmentInfo = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO,
+    ePipelineRenderingCreateInfo = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO,
+    ePhysicalDeviceDynamicRenderingFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES,
+    eCommandBufferInheritanceRenderingInfo = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO,
+    ePhysicalDeviceShaderIntegerDotProductFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES,
+    ePhysicalDeviceShaderIntegerDotProductProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES,
+    ePhysicalDeviceTexelBufferAlignmentProperties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES,
+    eFormatProperties3 = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3,
+    ePhysicalDeviceMaintenance4Features = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES,
+    ePhysicalDeviceMaintenance4Properties = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES,
+    eDeviceBufferMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS,
+    eDeviceImageMemoryRequirements = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS,
+    eSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
+    ePresentInfoKHR = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
+    eDeviceGroupPresentCapabilitiesKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_CAPABILITIES_KHR,
+    eImageSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_SWAPCHAIN_CREATE_INFO_KHR,
+    eBindImageMemorySwapchainInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR,
+    eAcquireNextImageInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_NEXT_IMAGE_INFO_KHR,
+    eDeviceGroupPresentInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_PRESENT_INFO_KHR,
+    eDeviceGroupSwapchainCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SWAPCHAIN_CREATE_INFO_KHR,
+    eDisplayModeCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_CREATE_INFO_KHR,
+    eDisplaySurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_SURFACE_CREATE_INFO_KHR,
+    eDisplayPresentInfoKHR = VK_STRUCTURE_TYPE_DISPLAY_PRESENT_INFO_KHR,
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+    eXlibSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+    eXcbSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+    eWaylandSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WAYLAND_SURFACE_CREATE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+    eAndroidSurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eWin32SurfaceCreateInfoKHR = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eDebugReportCallbackCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT,
+    ePipelineRasterizationStateRasterizationOrderAMD = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_RASTERIZATION_ORDER_AMD,
+    eDebugMarkerObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_NAME_INFO_EXT,
+    eDebugMarkerObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_OBJECT_TAG_INFO_EXT,
+    eDebugMarkerMarkerInfoEXT = VK_STRUCTURE_TYPE_DEBUG_MARKER_MARKER_INFO_EXT,
+    eVideoProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_INFO_KHR,
+    eVideoCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_CAPABILITIES_KHR,
+    eVideoPictureResourceInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PICTURE_RESOURCE_INFO_KHR,
+    eVideoSessionMemoryRequirementsKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_MEMORY_REQUIREMENTS_KHR,
+    eBindVideoSessionMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_VIDEO_SESSION_MEMORY_INFO_KHR,
+    eVideoSessionCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_CREATE_INFO_KHR,
+    eVideoSessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_CREATE_INFO_KHR,
+    eVideoSessionParametersUpdateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_SESSION_PARAMETERS_UPDATE_INFO_KHR,
+    eVideoBeginCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_BEGIN_CODING_INFO_KHR,
+    eVideoEndCodingInfoKHR = VK_STRUCTURE_TYPE_VIDEO_END_CODING_INFO_KHR,
+    eVideoCodingControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_CODING_CONTROL_INFO_KHR,
+    eVideoReferenceSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_REFERENCE_SLOT_INFO_KHR,
+    eQueueFamilyVideoPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR,
+    eVideoProfileListInfoKHR = VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR,
+    ePhysicalDeviceVideoFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_FORMAT_INFO_KHR,
+    eVideoFormatPropertiesKHR = VK_STRUCTURE_TYPE_VIDEO_FORMAT_PROPERTIES_KHR,
+    eQueueFamilyQueryResultStatusPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_QUERY_RESULT_STATUS_PROPERTIES_KHR,
+    eVideoDecodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_INFO_KHR,
+    eVideoDecodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_CAPABILITIES_KHR,
+    eVideoDecodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_USAGE_INFO_KHR,
+    eDedicatedAllocationImageCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_IMAGE_CREATE_INFO_NV,
+    eDedicatedAllocationBufferCreateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_BUFFER_CREATE_INFO_NV,
+    eDedicatedAllocationMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_DEDICATED_ALLOCATION_MEMORY_ALLOCATE_INFO_NV,
+    ePhysicalDeviceTransformFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT,
+    ePhysicalDeviceTransformFeedbackPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT,
+    ePipelineRasterizationStateStreamCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_STREAM_CREATE_INFO_EXT,
+    eCuModuleCreateInfoNVX = VK_STRUCTURE_TYPE_CU_MODULE_CREATE_INFO_NVX,
+    eCuFunctionCreateInfoNVX = VK_STRUCTURE_TYPE_CU_FUNCTION_CREATE_INFO_NVX,
+    eCuLaunchInfoNVX = VK_STRUCTURE_TYPE_CU_LAUNCH_INFO_NVX,
+    eImageViewHandleInfoNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_HANDLE_INFO_NVX,
+    eImageViewAddressPropertiesNVX = VK_STRUCTURE_TYPE_IMAGE_VIEW_ADDRESS_PROPERTIES_NVX,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeH264CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_CAPABILITIES_EXT,
+    eVideoEncodeH264SessionParametersCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_CREATE_INFO_EXT,
+    eVideoEncodeH264SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_SESSION_PARAMETERS_ADD_INFO_EXT,
+    eVideoEncodeH264VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_VCL_FRAME_INFO_EXT,
+    eVideoEncodeH264DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_DPB_SLOT_INFO_EXT,
+    eVideoEncodeH264NaluSliceInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_NALU_SLICE_INFO_EXT,
+    eVideoEncodeH264EmitPictureParametersInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_EMIT_PICTURE_PARAMETERS_INFO_EXT,
+    eVideoEncodeH264ProfileInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_PROFILE_INFO_EXT,
+    eVideoEncodeH264RateControlInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_INFO_EXT,
+    eVideoEncodeH264RateControlLayerInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_RATE_CONTROL_LAYER_INFO_EXT,
+    eVideoEncodeH264ReferenceListsInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H264_REFERENCE_LISTS_INFO_EXT,
+    eVideoEncodeH265CapabilitiesEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_CAPABILITIES_EXT,
+    eVideoEncodeH265SessionParametersCreateInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_CREATE_INFO_EXT,
+    eVideoEncodeH265SessionParametersAddInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_SESSION_PARAMETERS_ADD_INFO_EXT,
+    eVideoEncodeH265VclFrameInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_VCL_FRAME_INFO_EXT,
+    eVideoEncodeH265DpbSlotInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_DPB_SLOT_INFO_EXT,
+    eVideoEncodeH265NaluSliceSegmentInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_NALU_SLICE_SEGMENT_INFO_EXT,
+    eVideoEncodeH265EmitPictureParametersInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_EMIT_PICTURE_PARAMETERS_INFO_EXT,
+    eVideoEncodeH265ProfileInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_PROFILE_INFO_EXT,
+    eVideoEncodeH265ReferenceListsInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_REFERENCE_LISTS_INFO_EXT,
+    eVideoEncodeH265RateControlInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_INFO_EXT,
+    eVideoEncodeH265RateControlLayerInfoEXT = VK_STRUCTURE_TYPE_VIDEO_ENCODE_H265_RATE_CONTROL_LAYER_INFO_EXT,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eVideoDecodeH264CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_CAPABILITIES_KHR,
+    eVideoDecodeH264PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PICTURE_INFO_KHR,
+    eVideoDecodeH264ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_PROFILE_INFO_KHR,
+    eVideoDecodeH264SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_CREATE_INFO_KHR,
+    eVideoDecodeH264SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_SESSION_PARAMETERS_ADD_INFO_KHR,
+    eVideoDecodeH264DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H264_DPB_SLOT_INFO_KHR,
+    eTextureLodGatherFormatPropertiesAMD = VK_STRUCTURE_TYPE_TEXTURE_LOD_GATHER_FORMAT_PROPERTIES_AMD,
+    eRenderingFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR,
+    eRenderingFragmentDensityMapAttachmentInfoEXT = VK_STRUCTURE_TYPE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_INFO_EXT,
+    eAttachmentSampleCountInfoAMD = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_AMD,
+    eMultiviewPerViewAttributesInfoNVX = VK_STRUCTURE_TYPE_MULTIVIEW_PER_VIEW_ATTRIBUTES_INFO_NVX,
+#if defined( VK_USE_PLATFORM_GGP )
+    eStreamDescriptorSurfaceCreateInfoGGP = VK_STRUCTURE_TYPE_STREAM_DESCRIPTOR_SURFACE_CREATE_INFO_GGP,
+#endif /*VK_USE_PLATFORM_GGP*/
+    ePhysicalDeviceCornerSampledImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CORNER_SAMPLED_IMAGE_FEATURES_NV,
+    eExternalMemoryImageCreateInfoNV = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_NV,
+    eExportMemoryAllocateInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_NV,
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eImportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_NV,
+    eExportMemoryWin32HandleInfoNV = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_NV,
+    eWin32KeyedMutexAcquireReleaseInfoNV = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_NV,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eValidationFlagsEXT = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT,
+#if defined( VK_USE_PLATFORM_VI_NN )
+    eViSurfaceCreateInfoNN = VK_STRUCTURE_TYPE_VI_SURFACE_CREATE_INFO_NN,
+#endif /*VK_USE_PLATFORM_VI_NN*/
+    eImageViewAstcDecodeModeEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_ASTC_DECODE_MODE_EXT,
+    ePhysicalDeviceAstcDecodeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ASTC_DECODE_FEATURES_EXT,
+    ePipelineRobustnessCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_ROBUSTNESS_CREATE_INFO_EXT,
+    ePhysicalDevicePipelineRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_FEATURES_EXT,
+    ePhysicalDevicePipelineRobustnessPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_ROBUSTNESS_PROPERTIES_EXT,
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eImportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+    eExportMemoryWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_WIN32_HANDLE_INFO_KHR,
+    eMemoryWin32HandlePropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_WIN32_HANDLE_PROPERTIES_KHR,
+    eMemoryGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eImportMemoryFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
+    eMemoryFdPropertiesKHR = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
+    eMemoryGetFdInfoKHR = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eWin32KeyedMutexAcquireReleaseInfoKHR = VK_STRUCTURE_TYPE_WIN32_KEYED_MUTEX_ACQUIRE_RELEASE_INFO_KHR,
+    eImportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+    eExportSemaphoreWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_WIN32_HANDLE_INFO_KHR,
+    eD3D12FenceSubmitInfoKHR = VK_STRUCTURE_TYPE_D3D12_FENCE_SUBMIT_INFO_KHR,
+    eSemaphoreGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eImportSemaphoreFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
+    eSemaphoreGetFdInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
+    ePhysicalDevicePushDescriptorPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR,
+    eCommandBufferInheritanceConditionalRenderingInfoEXT = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_CONDITIONAL_RENDERING_INFO_EXT,
+    ePhysicalDeviceConditionalRenderingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT,
+    eConditionalRenderingBeginInfoEXT = VK_STRUCTURE_TYPE_CONDITIONAL_RENDERING_BEGIN_INFO_EXT,
+    ePresentRegionsKHR = VK_STRUCTURE_TYPE_PRESENT_REGIONS_KHR,
+    ePipelineViewportWScalingStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_W_SCALING_STATE_CREATE_INFO_NV,
+    eSurfaceCapabilities2EXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_EXT,
+    eDisplayPowerInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_POWER_INFO_EXT,
+    eDeviceEventInfoEXT = VK_STRUCTURE_TYPE_DEVICE_EVENT_INFO_EXT,
+    eDisplayEventInfoEXT = VK_STRUCTURE_TYPE_DISPLAY_EVENT_INFO_EXT,
+    eSwapchainCounterCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_COUNTER_CREATE_INFO_EXT,
+    ePresentTimesInfoGOOGLE = VK_STRUCTURE_TYPE_PRESENT_TIMES_INFO_GOOGLE,
+    ePhysicalDeviceMultiviewPerViewAttributesPropertiesNVX = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_ATTRIBUTES_PROPERTIES_NVX,
+    ePipelineViewportSwizzleStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SWIZZLE_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceDiscardRectanglePropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DISCARD_RECTANGLE_PROPERTIES_EXT,
+    ePipelineDiscardRectangleStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_DISCARD_RECTANGLE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceConservativeRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONSERVATIVE_RASTERIZATION_PROPERTIES_EXT,
+    ePipelineRasterizationConservativeStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_CONSERVATIVE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceDepthClipEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT,
+    ePipelineRasterizationDepthClipStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_DEPTH_CLIP_STATE_CREATE_INFO_EXT,
+    eHdrMetadataEXT = VK_STRUCTURE_TYPE_HDR_METADATA_EXT,
+    eSharedPresentSurfaceCapabilitiesKHR = VK_STRUCTURE_TYPE_SHARED_PRESENT_SURFACE_CAPABILITIES_KHR,
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eImportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+    eExportFenceWin32HandleInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_WIN32_HANDLE_INFO_KHR,
+    eFenceGetWin32HandleInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_WIN32_HANDLE_INFO_KHR,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eImportFenceFdInfoKHR = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
+    eFenceGetFdInfoKHR = VK_STRUCTURE_TYPE_FENCE_GET_FD_INFO_KHR,
+    ePhysicalDevicePerformanceQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_FEATURES_KHR,
+    ePhysicalDevicePerformanceQueryPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PERFORMANCE_QUERY_PROPERTIES_KHR,
+    eQueryPoolPerformanceCreateInfoKHR = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_CREATE_INFO_KHR,
+    ePerformanceQuerySubmitInfoKHR = VK_STRUCTURE_TYPE_PERFORMANCE_QUERY_SUBMIT_INFO_KHR,
+    eAcquireProfilingLockInfoKHR = VK_STRUCTURE_TYPE_ACQUIRE_PROFILING_LOCK_INFO_KHR,
+    ePerformanceCounterKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_KHR,
+    ePerformanceCounterDescriptionKHR = VK_STRUCTURE_TYPE_PERFORMANCE_COUNTER_DESCRIPTION_KHR,
+    ePhysicalDeviceSurfaceInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SURFACE_INFO_2_KHR,
+    eSurfaceCapabilities2KHR = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_2_KHR,
+    eSurfaceFormat2KHR = VK_STRUCTURE_TYPE_SURFACE_FORMAT_2_KHR,
+    eDisplayProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PROPERTIES_2_KHR,
+    eDisplayPlaneProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_PROPERTIES_2_KHR,
+    eDisplayModeProperties2KHR = VK_STRUCTURE_TYPE_DISPLAY_MODE_PROPERTIES_2_KHR,
+    eDisplayPlaneInfo2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_INFO_2_KHR,
+    eDisplayPlaneCapabilities2KHR = VK_STRUCTURE_TYPE_DISPLAY_PLANE_CAPABILITIES_2_KHR,
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+    eIosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_IOS_SURFACE_CREATE_INFO_MVK,
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+    eMacosSurfaceCreateInfoMVK = VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK,
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+    eDebugUtilsObjectNameInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT,
+    eDebugUtilsObjectTagInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_TAG_INFO_EXT,
+    eDebugUtilsLabelEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT,
+    eDebugUtilsMessengerCallbackDataEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CALLBACK_DATA_EXT,
+    eDebugUtilsMessengerCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+    eAndroidHardwareBufferUsageANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_USAGE_ANDROID,
+    eAndroidHardwareBufferPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
+    eAndroidHardwareBufferFormatPropertiesANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID,
+    eImportAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
+    eMemoryGetAndroidHardwareBufferInfoANDROID = VK_STRUCTURE_TYPE_MEMORY_GET_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
+    eExternalFormatANDROID = VK_STRUCTURE_TYPE_EXTERNAL_FORMAT_ANDROID,
+    eAndroidHardwareBufferFormatProperties2ANDROID = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID,
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    eSampleLocationsInfoEXT = VK_STRUCTURE_TYPE_SAMPLE_LOCATIONS_INFO_EXT,
+    eRenderPassSampleLocationsBeginInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SAMPLE_LOCATIONS_BEGIN_INFO_EXT,
+    ePipelineSampleLocationsStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SAMPLE_LOCATIONS_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceSampleLocationsPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLE_LOCATIONS_PROPERTIES_EXT,
+    eMultisamplePropertiesEXT = VK_STRUCTURE_TYPE_MULTISAMPLE_PROPERTIES_EXT,
+    ePhysicalDeviceBlendOperationAdvancedFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_FEATURES_EXT,
+    ePhysicalDeviceBlendOperationAdvancedPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BLEND_OPERATION_ADVANCED_PROPERTIES_EXT,
+    ePipelineColorBlendAdvancedStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_ADVANCED_STATE_CREATE_INFO_EXT,
+    ePipelineCoverageToColorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_TO_COLOR_STATE_CREATE_INFO_NV,
+    eWriteDescriptorSetAccelerationStructureKHR = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_KHR,
+    eAccelerationStructureBuildGeometryInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_GEOMETRY_INFO_KHR,
+    eAccelerationStructureDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_DEVICE_ADDRESS_INFO_KHR,
+    eAccelerationStructureGeometryAabbsDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_AABBS_DATA_KHR,
+    eAccelerationStructureGeometryInstancesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_INSTANCES_DATA_KHR,
+    eAccelerationStructureGeometryTrianglesDataKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_TRIANGLES_DATA_KHR,
+    eAccelerationStructureGeometryKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_KHR,
+    eAccelerationStructureVersionInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_VERSION_INFO_KHR,
+    eCopyAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_INFO_KHR,
+    eCopyAccelerationStructureToMemoryInfoKHR = VK_STRUCTURE_TYPE_COPY_ACCELERATION_STRUCTURE_TO_MEMORY_INFO_KHR,
+    eCopyMemoryToAccelerationStructureInfoKHR = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_ACCELERATION_STRUCTURE_INFO_KHR,
+    ePhysicalDeviceAccelerationStructureFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR,
+    ePhysicalDeviceAccelerationStructurePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_PROPERTIES_KHR,
+    eAccelerationStructureCreateInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_KHR,
+    eAccelerationStructureBuildSizesInfoKHR = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_BUILD_SIZES_INFO_KHR,
+    ePhysicalDeviceRayTracingPipelineFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR,
+    ePhysicalDeviceRayTracingPipelinePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_PROPERTIES_KHR,
+    eRayTracingPipelineCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_KHR,
+    eRayTracingShaderGroupCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_KHR,
+    eRayTracingPipelineInterfaceCreateInfoKHR = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_INTERFACE_CREATE_INFO_KHR,
+    ePhysicalDeviceRayQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR,
+    ePipelineCoverageModulationStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_MODULATION_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceShaderSmBuiltinsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_FEATURES_NV,
+    ePhysicalDeviceShaderSmBuiltinsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SM_BUILTINS_PROPERTIES_NV,
+    eDrmFormatModifierPropertiesListEXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
+    ePhysicalDeviceImageDrmFormatModifierInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
+    eImageDrmFormatModifierListCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT,
+    eImageDrmFormatModifierExplicitCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
+    eImageDrmFormatModifierPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
+    eDrmFormatModifierPropertiesList2EXT = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_2_EXT,
+    eValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_VALIDATION_CACHE_CREATE_INFO_EXT,
+    eShaderModuleValidationCacheCreateInfoEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_VALIDATION_CACHE_CREATE_INFO_EXT,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    ePhysicalDevicePortabilitySubsetFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR,
+    ePhysicalDevicePortabilitySubsetPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_PROPERTIES_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    ePipelineViewportShadingRateImageStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_SHADING_RATE_IMAGE_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceShadingRateImageFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_FEATURES_NV,
+    ePhysicalDeviceShadingRateImagePropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADING_RATE_IMAGE_PROPERTIES_NV,
+    ePipelineViewportCoarseSampleOrderStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_COARSE_SAMPLE_ORDER_STATE_CREATE_INFO_NV,
+    eRayTracingPipelineCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_PIPELINE_CREATE_INFO_NV,
+    eAccelerationStructureCreateInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CREATE_INFO_NV,
+    eGeometryNV = VK_STRUCTURE_TYPE_GEOMETRY_NV,
+    eGeometryTrianglesNV = VK_STRUCTURE_TYPE_GEOMETRY_TRIANGLES_NV,
+    eGeometryAabbNV = VK_STRUCTURE_TYPE_GEOMETRY_AABB_NV,
+    eBindAccelerationStructureMemoryInfoNV = VK_STRUCTURE_TYPE_BIND_ACCELERATION_STRUCTURE_MEMORY_INFO_NV,
+    eWriteDescriptorSetAccelerationStructureNV = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_ACCELERATION_STRUCTURE_NV,
+    eAccelerationStructureMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_INFO_NV,
+    ePhysicalDeviceRayTracingPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PROPERTIES_NV,
+    eRayTracingShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_RAY_TRACING_SHADER_GROUP_CREATE_INFO_NV,
+    eAccelerationStructureInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_INFO_NV,
+    ePhysicalDeviceRepresentativeFragmentTestFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_REPRESENTATIVE_FRAGMENT_TEST_FEATURES_NV,
+    ePipelineRepresentativeFragmentTestStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_REPRESENTATIVE_FRAGMENT_TEST_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceImageViewImageFormatInfoEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_IMAGE_FORMAT_INFO_EXT,
+    eFilterCubicImageViewImageFormatPropertiesEXT = VK_STRUCTURE_TYPE_FILTER_CUBIC_IMAGE_VIEW_IMAGE_FORMAT_PROPERTIES_EXT,
+    eImportMemoryHostPointerInfoEXT = VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT,
+    eMemoryHostPointerPropertiesEXT = VK_STRUCTURE_TYPE_MEMORY_HOST_POINTER_PROPERTIES_EXT,
+    ePhysicalDeviceExternalMemoryHostPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT,
+    ePhysicalDeviceShaderClockFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CLOCK_FEATURES_KHR,
+    ePipelineCompilerControlCreateInfoAMD = VK_STRUCTURE_TYPE_PIPELINE_COMPILER_CONTROL_CREATE_INFO_AMD,
+    eCalibratedTimestampInfoEXT = VK_STRUCTURE_TYPE_CALIBRATED_TIMESTAMP_INFO_EXT,
+    ePhysicalDeviceShaderCorePropertiesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_AMD,
+    eVideoDecodeH265CapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_CAPABILITIES_KHR,
+    eVideoDecodeH265SessionParametersCreateInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_CREATE_INFO_KHR,
+    eVideoDecodeH265SessionParametersAddInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_SESSION_PARAMETERS_ADD_INFO_KHR,
+    eVideoDecodeH265ProfileInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PROFILE_INFO_KHR,
+    eVideoDecodeH265PictureInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_PICTURE_INFO_KHR,
+    eVideoDecodeH265DpbSlotInfoKHR = VK_STRUCTURE_TYPE_VIDEO_DECODE_H265_DPB_SLOT_INFO_KHR,
+    eDeviceQueueGlobalPriorityCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_KHR,
+    ePhysicalDeviceGlobalPriorityQueryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_KHR,
+    eQueueFamilyGlobalPriorityPropertiesKHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_KHR,
+    eDeviceMemoryOverallocationCreateInfoAMD = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OVERALLOCATION_CREATE_INFO_AMD,
+    ePhysicalDeviceVertexAttributeDivisorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT,
+    ePipelineVertexInputDivisorStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceVertexAttributeDivisorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT,
+#if defined( VK_USE_PLATFORM_GGP )
+    ePresentFrameTokenGGP = VK_STRUCTURE_TYPE_PRESENT_FRAME_TOKEN_GGP,
+#endif /*VK_USE_PLATFORM_GGP*/
+    ePhysicalDeviceComputeShaderDerivativesFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COMPUTE_SHADER_DERIVATIVES_FEATURES_NV,
+    ePhysicalDeviceMeshShaderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_NV,
+    ePhysicalDeviceMeshShaderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_NV,
+    ePhysicalDeviceShaderImageFootprintFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_FOOTPRINT_FEATURES_NV,
+    ePipelineViewportExclusiveScissorStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_EXCLUSIVE_SCISSOR_STATE_CREATE_INFO_NV,
+    ePhysicalDeviceExclusiveScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXCLUSIVE_SCISSOR_FEATURES_NV,
+    eCheckpointDataNV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_NV,
+    eQueueFamilyCheckpointPropertiesNV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_NV,
+    ePhysicalDeviceShaderIntegerFunctions2FeaturesINTEL = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_FUNCTIONS_2_FEATURES_INTEL,
+    eQueryPoolPerformanceQueryCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_PERFORMANCE_QUERY_CREATE_INFO_INTEL,
+    eInitializePerformanceApiInfoINTEL = VK_STRUCTURE_TYPE_INITIALIZE_PERFORMANCE_API_INFO_INTEL,
+    ePerformanceMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_MARKER_INFO_INTEL,
+    ePerformanceStreamMarkerInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_STREAM_MARKER_INFO_INTEL,
+    ePerformanceOverrideInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_OVERRIDE_INFO_INTEL,
+    ePerformanceConfigurationAcquireInfoINTEL = VK_STRUCTURE_TYPE_PERFORMANCE_CONFIGURATION_ACQUIRE_INFO_INTEL,
+    ePhysicalDevicePciBusInfoPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PCI_BUS_INFO_PROPERTIES_EXT,
+    eDisplayNativeHdrSurfaceCapabilitiesAMD = VK_STRUCTURE_TYPE_DISPLAY_NATIVE_HDR_SURFACE_CAPABILITIES_AMD,
+    eSwapchainDisplayNativeHdrCreateInfoAMD = VK_STRUCTURE_TYPE_SWAPCHAIN_DISPLAY_NATIVE_HDR_CREATE_INFO_AMD,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eImagepipeSurfaceCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGEPIPE_SURFACE_CREATE_INFO_FUCHSIA,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+    eMetalSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT,
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+    ePhysicalDeviceFragmentDensityMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_FEATURES_EXT,
+    ePhysicalDeviceFragmentDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_PROPERTIES_EXT,
+    eRenderPassFragmentDensityMapCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_FRAGMENT_DENSITY_MAP_CREATE_INFO_EXT,
+    eFragmentShadingRateAttachmentInfoKHR = VK_STRUCTURE_TYPE_FRAGMENT_SHADING_RATE_ATTACHMENT_INFO_KHR,
+    ePipelineFragmentShadingRateStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_STATE_CREATE_INFO_KHR,
+    ePhysicalDeviceFragmentShadingRatePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_PROPERTIES_KHR,
+    ePhysicalDeviceFragmentShadingRateFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR,
+    ePhysicalDeviceFragmentShadingRateKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_KHR,
+    ePhysicalDeviceShaderCoreProperties2AMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_PROPERTIES_2_AMD,
+    ePhysicalDeviceCoherentMemoryFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COHERENT_MEMORY_FEATURES_AMD,
+    ePhysicalDeviceShaderImageAtomicInt64FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_IMAGE_ATOMIC_INT64_FEATURES_EXT,
+    ePhysicalDeviceMemoryBudgetPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_BUDGET_PROPERTIES_EXT,
+    ePhysicalDeviceMemoryPriorityFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT,
+    eMemoryPriorityAllocateInfoEXT = VK_STRUCTURE_TYPE_MEMORY_PRIORITY_ALLOCATE_INFO_EXT,
+    eSurfaceProtectedCapabilitiesKHR = VK_STRUCTURE_TYPE_SURFACE_PROTECTED_CAPABILITIES_KHR,
+    ePhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEDICATED_ALLOCATION_IMAGE_ALIASING_FEATURES_NV,
+    ePhysicalDeviceBufferDeviceAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT,
+    eBufferDeviceAddressCreateInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_CREATE_INFO_EXT,
+    eValidationFeaturesEXT = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
+    ePhysicalDevicePresentWaitFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_WAIT_FEATURES_KHR,
+    ePhysicalDeviceCooperativeMatrixFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_NV,
+    eCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_NV,
+    ePhysicalDeviceCooperativeMatrixPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_PROPERTIES_NV,
+    ePhysicalDeviceCoverageReductionModeFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COVERAGE_REDUCTION_MODE_FEATURES_NV,
+    ePipelineCoverageReductionStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_COVERAGE_REDUCTION_STATE_CREATE_INFO_NV,
+    eFramebufferMixedSamplesCombinationNV = VK_STRUCTURE_TYPE_FRAMEBUFFER_MIXED_SAMPLES_COMBINATION_NV,
+    ePhysicalDeviceFragmentShaderInterlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_INTERLOCK_FEATURES_EXT,
+    ePhysicalDeviceYcbcrImageArraysFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_IMAGE_ARRAYS_FEATURES_EXT,
+    ePhysicalDeviceProvokingVertexFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT,
+    ePipelineRasterizationProvokingVertexStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceProvokingVertexPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT,
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    eSurfaceFullScreenExclusiveInfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_INFO_EXT,
+    eSurfaceCapabilitiesFullScreenExclusiveEXT = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_FULL_SCREEN_EXCLUSIVE_EXT,
+    eSurfaceFullScreenExclusiveWin32InfoEXT = VK_STRUCTURE_TYPE_SURFACE_FULL_SCREEN_EXCLUSIVE_WIN32_INFO_EXT,
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    eHeadlessSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_HEADLESS_SURFACE_CREATE_INFO_EXT,
+    ePhysicalDeviceLineRasterizationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT,
+    ePipelineRasterizationLineStateCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT,
+    ePhysicalDeviceLineRasterizationPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT,
+    ePhysicalDeviceShaderAtomicFloatFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT,
+    ePhysicalDeviceIndexTypeUint8FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT,
+    ePhysicalDeviceExtendedDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT,
+    ePhysicalDevicePipelineExecutablePropertiesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR,
+    ePipelineInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_INFO_KHR,
+    ePipelineExecutablePropertiesKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_PROPERTIES_KHR,
+    ePipelineExecutableInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INFO_KHR,
+    ePipelineExecutableStatisticKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_STATISTIC_KHR,
+    ePipelineExecutableInternalRepresentationKHR = VK_STRUCTURE_TYPE_PIPELINE_EXECUTABLE_INTERNAL_REPRESENTATION_KHR,
+    ePhysicalDeviceShaderAtomicFloat2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_2_FEATURES_EXT,
+    eSurfacePresentModeEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_EXT,
+    eSurfacePresentScalingCapabilitiesEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_SCALING_CAPABILITIES_EXT,
+    eSurfacePresentModeCompatibilityEXT = VK_STRUCTURE_TYPE_SURFACE_PRESENT_MODE_COMPATIBILITY_EXT,
+    ePhysicalDeviceSwapchainMaintenance1FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SWAPCHAIN_MAINTENANCE_1_FEATURES_EXT,
+    eSwapchainPresentFenceInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_FENCE_INFO_EXT,
+    eSwapchainPresentModesCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODES_CREATE_INFO_EXT,
+    eSwapchainPresentModeInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_MODE_INFO_EXT,
+    eSwapchainPresentScalingCreateInfoEXT = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_SCALING_CREATE_INFO_EXT,
+    eReleaseSwapchainImagesInfoEXT = VK_STRUCTURE_TYPE_RELEASE_SWAPCHAIN_IMAGES_INFO_EXT,
+    ePhysicalDeviceDeviceGeneratedCommandsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_PROPERTIES_NV,
+    eGraphicsShaderGroupCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_SHADER_GROUP_CREATE_INFO_NV,
+    eGraphicsPipelineShaderGroupsCreateInfoNV = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_SHADER_GROUPS_CREATE_INFO_NV,
+    eIndirectCommandsLayoutTokenNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_TOKEN_NV,
+    eIndirectCommandsLayoutCreateInfoNV = VK_STRUCTURE_TYPE_INDIRECT_COMMANDS_LAYOUT_CREATE_INFO_NV,
+    eGeneratedCommandsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_INFO_NV,
+    eGeneratedCommandsMemoryRequirementsInfoNV = VK_STRUCTURE_TYPE_GENERATED_COMMANDS_MEMORY_REQUIREMENTS_INFO_NV,
+    ePhysicalDeviceDeviceGeneratedCommandsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_GENERATED_COMMANDS_FEATURES_NV,
+    ePhysicalDeviceInheritedViewportScissorFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INHERITED_VIEWPORT_SCISSOR_FEATURES_NV,
+    eCommandBufferInheritanceViewportScissorInfoNV = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_VIEWPORT_SCISSOR_INFO_NV,
+    ePhysicalDeviceTexelBufferAlignmentFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_FEATURES_EXT,
+    eCommandBufferInheritanceRenderPassTransformInfoQCOM = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDER_PASS_TRANSFORM_INFO_QCOM,
+    eRenderPassTransformBeginInfoQCOM = VK_STRUCTURE_TYPE_RENDER_PASS_TRANSFORM_BEGIN_INFO_QCOM,
+    ePhysicalDeviceDeviceMemoryReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEVICE_MEMORY_REPORT_FEATURES_EXT,
+    eDeviceDeviceMemoryReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_DEVICE_MEMORY_REPORT_CREATE_INFO_EXT,
+    eDeviceMemoryReportCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_MEMORY_REPORT_CALLBACK_DATA_EXT,
+    ePhysicalDeviceRobustness2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_FEATURES_EXT,
+    ePhysicalDeviceRobustness2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ROBUSTNESS_2_PROPERTIES_EXT,
+    eSamplerCustomBorderColorCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT,
+    ePhysicalDeviceCustomBorderColorPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT,
+    ePhysicalDeviceCustomBorderColorFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT,
+    ePipelineLibraryCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_LIBRARY_CREATE_INFO_KHR,
+    ePhysicalDevicePresentBarrierFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_BARRIER_FEATURES_NV,
+    eSurfaceCapabilitiesPresentBarrierNV = VK_STRUCTURE_TYPE_SURFACE_CAPABILITIES_PRESENT_BARRIER_NV,
+    eSwapchainPresentBarrierCreateInfoNV = VK_STRUCTURE_TYPE_SWAPCHAIN_PRESENT_BARRIER_CREATE_INFO_NV,
+    ePresentIdKHR = VK_STRUCTURE_TYPE_PRESENT_ID_KHR,
+    ePhysicalDevicePresentIdFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENT_ID_FEATURES_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_INFO_KHR,
+    eVideoEncodeRateControlInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_INFO_KHR,
+    eVideoEncodeRateControlLayerInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_RATE_CONTROL_LAYER_INFO_KHR,
+    eVideoEncodeCapabilitiesKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_CAPABILITIES_KHR,
+    eVideoEncodeUsageInfoKHR = VK_STRUCTURE_TYPE_VIDEO_ENCODE_USAGE_INFO_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    ePhysicalDeviceDiagnosticsConfigFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DIAGNOSTICS_CONFIG_FEATURES_NV,
+    eDeviceDiagnosticsConfigCreateInfoNV = VK_STRUCTURE_TYPE_DEVICE_DIAGNOSTICS_CONFIG_CREATE_INFO_NV,
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+    eExportMetalObjectCreateInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECT_CREATE_INFO_EXT,
+    eExportMetalObjectsInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_OBJECTS_INFO_EXT,
+    eExportMetalDeviceInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_DEVICE_INFO_EXT,
+    eExportMetalCommandQueueInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_COMMAND_QUEUE_INFO_EXT,
+    eExportMetalBufferInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_BUFFER_INFO_EXT,
+    eImportMetalBufferInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_BUFFER_INFO_EXT,
+    eExportMetalTextureInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_TEXTURE_INFO_EXT,
+    eImportMetalTextureInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_TEXTURE_INFO_EXT,
+    eExportMetalIoSurfaceInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_IO_SURFACE_INFO_EXT,
+    eImportMetalIoSurfaceInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_IO_SURFACE_INFO_EXT,
+    eExportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_EXPORT_METAL_SHARED_EVENT_INFO_EXT,
+    eImportMetalSharedEventInfoEXT = VK_STRUCTURE_TYPE_IMPORT_METAL_SHARED_EVENT_INFO_EXT,
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+    eQueueFamilyCheckpointProperties2NV = VK_STRUCTURE_TYPE_QUEUE_FAMILY_CHECKPOINT_PROPERTIES_2_NV,
+    eCheckpointData2NV = VK_STRUCTURE_TYPE_CHECKPOINT_DATA_2_NV,
+    ePhysicalDeviceDescriptorBufferPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT,
+    ePhysicalDeviceDescriptorBufferDensityMapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_DENSITY_MAP_PROPERTIES_EXT,
+    ePhysicalDeviceDescriptorBufferFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_FEATURES_EXT,
+    eDescriptorAddressInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT,
+    eDescriptorGetInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT,
+    eBufferCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_BUFFER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT,
+    eImageCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT,
+    eImageViewCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_CAPTURE_DESCRIPTOR_DATA_INFO_EXT,
+    eSamplerCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_CAPTURE_DESCRIPTOR_DATA_INFO_EXT,
+    eOpaqueCaptureDescriptorDataCreateInfoEXT = VK_STRUCTURE_TYPE_OPAQUE_CAPTURE_DESCRIPTOR_DATA_CREATE_INFO_EXT,
+    eDescriptorBufferBindingInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT,
+    eDescriptorBufferBindingPushDescriptorBufferHandleEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_PUSH_DESCRIPTOR_BUFFER_HANDLE_EXT,
+    eAccelerationStructureCaptureDescriptorDataInfoEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_CAPTURE_DESCRIPTOR_DATA_INFO_EXT,
+    ePhysicalDeviceGraphicsPipelineLibraryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_FEATURES_EXT,
+    ePhysicalDeviceGraphicsPipelineLibraryPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GRAPHICS_PIPELINE_LIBRARY_PROPERTIES_EXT,
+    eGraphicsPipelineLibraryCreateInfoEXT = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_LIBRARY_CREATE_INFO_EXT,
+    ePhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EARLY_AND_LATE_FRAGMENT_TESTS_FEATURES_AMD,
+    ePhysicalDeviceFragmentShaderBarycentricFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_KHR,
+    ePhysicalDeviceFragmentShaderBarycentricPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_PROPERTIES_KHR,
+    ePhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_UNIFORM_CONTROL_FLOW_FEATURES_KHR,
+    ePhysicalDeviceFragmentShadingRateEnumsPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_PROPERTIES_NV,
+    ePhysicalDeviceFragmentShadingRateEnumsFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_ENUMS_FEATURES_NV,
+    ePipelineFragmentShadingRateEnumStateCreateInfoNV = VK_STRUCTURE_TYPE_PIPELINE_FRAGMENT_SHADING_RATE_ENUM_STATE_CREATE_INFO_NV,
+    eAccelerationStructureGeometryMotionTrianglesDataNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_GEOMETRY_MOTION_TRIANGLES_DATA_NV,
+    ePhysicalDeviceRayTracingMotionBlurFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MOTION_BLUR_FEATURES_NV,
+    eAccelerationStructureMotionInfoNV = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_MOTION_INFO_NV,
+    ePhysicalDeviceMeshShaderFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT,
+    ePhysicalDeviceMeshShaderPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_PROPERTIES_EXT,
+    ePhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_YCBCR_2_PLANE_444_FORMATS_FEATURES_EXT,
+    ePhysicalDeviceFragmentDensityMap2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_FEATURES_EXT,
+    ePhysicalDeviceFragmentDensityMap2PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_2_PROPERTIES_EXT,
+    eCopyCommandTransformInfoQCOM = VK_STRUCTURE_TYPE_COPY_COMMAND_TRANSFORM_INFO_QCOM,
+    ePhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR,
+    ePhysicalDeviceImageCompressionControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_FEATURES_EXT,
+    eImageCompressionControlEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_CONTROL_EXT,
+    eSubresourceLayout2EXT = VK_STRUCTURE_TYPE_SUBRESOURCE_LAYOUT_2_EXT,
+    eImageSubresource2EXT = VK_STRUCTURE_TYPE_IMAGE_SUBRESOURCE_2_EXT,
+    eImageCompressionPropertiesEXT = VK_STRUCTURE_TYPE_IMAGE_COMPRESSION_PROPERTIES_EXT,
+    ePhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ATTACHMENT_FEEDBACK_LOOP_LAYOUT_FEATURES_EXT,
+    ePhysicalDevice4444FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT,
+    ePhysicalDeviceFaultFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FAULT_FEATURES_EXT,
+    eDeviceFaultCountsEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_COUNTS_EXT,
+    eDeviceFaultInfoEXT = VK_STRUCTURE_TYPE_DEVICE_FAULT_INFO_EXT,
+    ePhysicalDeviceRgba10X6FormatsFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RGBA10X6_FORMATS_FEATURES_EXT,
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+    eDirectfbSurfaceCreateInfoEXT = VK_STRUCTURE_TYPE_DIRECTFB_SURFACE_CREATE_INFO_EXT,
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+    ePhysicalDeviceVertexInputDynamicStateFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT,
+    eVertexInputBindingDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_BINDING_DESCRIPTION_2_EXT,
+    eVertexInputAttributeDescription2EXT = VK_STRUCTURE_TYPE_VERTEX_INPUT_ATTRIBUTE_DESCRIPTION_2_EXT,
+    ePhysicalDeviceDrmPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT,
+    ePhysicalDeviceAddressBindingReportFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ADDRESS_BINDING_REPORT_FEATURES_EXT,
+    eDeviceAddressBindingCallbackDataEXT = VK_STRUCTURE_TYPE_DEVICE_ADDRESS_BINDING_CALLBACK_DATA_EXT,
+    ePhysicalDeviceDepthClipControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_CONTROL_FEATURES_EXT,
+    ePipelineViewportDepthClipControlCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_DEPTH_CLIP_CONTROL_CREATE_INFO_EXT,
+    ePhysicalDevicePrimitiveTopologyListRestartFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eImportMemoryZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_ZIRCON_HANDLE_INFO_FUCHSIA,
+    eMemoryZirconHandlePropertiesFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_ZIRCON_HANDLE_PROPERTIES_FUCHSIA,
+    eMemoryGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_MEMORY_GET_ZIRCON_HANDLE_INFO_FUCHSIA,
+    eImportSemaphoreZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_ZIRCON_HANDLE_INFO_FUCHSIA,
+    eSemaphoreGetZirconHandleInfoFUCHSIA = VK_STRUCTURE_TYPE_SEMAPHORE_GET_ZIRCON_HANDLE_INFO_FUCHSIA,
+    eBufferCollectionCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA,
+    eImportMemoryBufferCollectionFUCHSIA = VK_STRUCTURE_TYPE_IMPORT_MEMORY_BUFFER_COLLECTION_FUCHSIA,
+    eBufferCollectionImageCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_IMAGE_CREATE_INFO_FUCHSIA,
+    eBufferCollectionPropertiesFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_PROPERTIES_FUCHSIA,
+    eBufferConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_CONSTRAINTS_INFO_FUCHSIA,
+    eBufferCollectionBufferCreateInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_BUFFER_CREATE_INFO_FUCHSIA,
+    eImageConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGE_CONSTRAINTS_INFO_FUCHSIA,
+    eImageFormatConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_IMAGE_FORMAT_CONSTRAINTS_INFO_FUCHSIA,
+    eSysmemColorSpaceFUCHSIA = VK_STRUCTURE_TYPE_SYSMEM_COLOR_SPACE_FUCHSIA,
+    eBufferCollectionConstraintsInfoFUCHSIA = VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CONSTRAINTS_INFO_FUCHSIA,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    eSubpassShadingPipelineCreateInfoHUAWEI = VK_STRUCTURE_TYPE_SUBPASS_SHADING_PIPELINE_CREATE_INFO_HUAWEI,
+    ePhysicalDeviceSubpassShadingFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_FEATURES_HUAWEI,
+    ePhysicalDeviceSubpassShadingPropertiesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_SHADING_PROPERTIES_HUAWEI,
+    ePhysicalDeviceInvocationMaskFeaturesHUAWEI = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INVOCATION_MASK_FEATURES_HUAWEI,
+    eMemoryGetRemoteAddressInfoNV = VK_STRUCTURE_TYPE_MEMORY_GET_REMOTE_ADDRESS_INFO_NV,
+    ePhysicalDeviceExternalMemoryRdmaFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_RDMA_FEATURES_NV,
+    ePipelinePropertiesIdentifierEXT = VK_STRUCTURE_TYPE_PIPELINE_PROPERTIES_IDENTIFIER_EXT,
+    ePhysicalDevicePipelinePropertiesFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROPERTIES_FEATURES_EXT,
+    ePhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_FEATURES_EXT,
+    eSubpassResolvePerformanceQueryEXT = VK_STRUCTURE_TYPE_SUBPASS_RESOLVE_PERFORMANCE_QUERY_EXT,
+    eMultisampledRenderToSingleSampledInfoEXT = VK_STRUCTURE_TYPE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_INFO_EXT,
+    ePhysicalDeviceExtendedDynamicState2FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT,
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+    eScreenSurfaceCreateInfoQNX = VK_STRUCTURE_TYPE_SCREEN_SURFACE_CREATE_INFO_QNX,
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+    ePhysicalDeviceColorWriteEnableFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT,
+    ePipelineColorWriteCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_COLOR_WRITE_CREATE_INFO_EXT,
+    ePhysicalDevicePrimitivesGeneratedQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVES_GENERATED_QUERY_FEATURES_EXT,
+    ePhysicalDeviceRayTracingMaintenance1FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_MAINTENANCE_1_FEATURES_KHR,
+    ePhysicalDeviceImageViewMinLodFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_VIEW_MIN_LOD_FEATURES_EXT,
+    eImageViewMinLodCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_VIEW_MIN_LOD_CREATE_INFO_EXT,
+    ePhysicalDeviceMultiDrawFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT,
+    ePhysicalDeviceMultiDrawPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT,
+    ePhysicalDeviceImage2DViewOf3DFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_2D_VIEW_OF_3D_FEATURES_EXT,
+    eMicromapBuildInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_INFO_EXT,
+    eMicromapVersionInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_VERSION_INFO_EXT,
+    eCopyMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_INFO_EXT,
+    eCopyMicromapToMemoryInfoEXT = VK_STRUCTURE_TYPE_COPY_MICROMAP_TO_MEMORY_INFO_EXT,
+    eCopyMemoryToMicromapInfoEXT = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_MICROMAP_INFO_EXT,
+    ePhysicalDeviceOpacityMicromapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_FEATURES_EXT,
+    ePhysicalDeviceOpacityMicromapPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPACITY_MICROMAP_PROPERTIES_EXT,
+    eMicromapCreateInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_CREATE_INFO_EXT,
+    eMicromapBuildSizesInfoEXT = VK_STRUCTURE_TYPE_MICROMAP_BUILD_SIZES_INFO_EXT,
+    eAccelerationStructureTrianglesOpacityMicromapEXT = VK_STRUCTURE_TYPE_ACCELERATION_STRUCTURE_TRIANGLES_OPACITY_MICROMAP_EXT,
+    ePhysicalDeviceBorderColorSwizzleFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BORDER_COLOR_SWIZZLE_FEATURES_EXT,
+    eSamplerBorderColorComponentMappingCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_BORDER_COLOR_COMPONENT_MAPPING_CREATE_INFO_EXT,
+    ePhysicalDevicePageableDeviceLocalMemoryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PAGEABLE_DEVICE_LOCAL_MEMORY_FEATURES_EXT,
+    ePhysicalDeviceDescriptorSetHostMappingFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_SET_HOST_MAPPING_FEATURES_VALVE,
+    eDescriptorSetBindingReferenceVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_BINDING_REFERENCE_VALVE,
+    eDescriptorSetLayoutHostMappingInfoVALVE = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_HOST_MAPPING_INFO_VALVE,
+    ePhysicalDeviceDepthClampZeroOneFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLAMP_ZERO_ONE_FEATURES_EXT,
+    ePhysicalDeviceNonSeamlessCubeMapFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_NON_SEAMLESS_CUBE_MAP_FEATURES_EXT,
+    ePhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_FEATURES_QCOM,
+    ePhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_DENSITY_MAP_OFFSET_PROPERTIES_QCOM,
+    eSubpassFragmentDensityMapOffsetEndInfoQCOM = VK_STRUCTURE_TYPE_SUBPASS_FRAGMENT_DENSITY_MAP_OFFSET_END_INFO_QCOM,
+    ePhysicalDeviceCopyMemoryIndirectFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_FEATURES_NV,
+    ePhysicalDeviceCopyMemoryIndirectPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COPY_MEMORY_INDIRECT_PROPERTIES_NV,
+    ePhysicalDeviceMemoryDecompressionFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_FEATURES_NV,
+    ePhysicalDeviceMemoryDecompressionPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_DECOMPRESSION_PROPERTIES_NV,
+    ePhysicalDeviceLinearColorAttachmentFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINEAR_COLOR_ATTACHMENT_FEATURES_NV,
+    ePhysicalDeviceImageCompressionControlSwapchainFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_COMPRESSION_CONTROL_SWAPCHAIN_FEATURES_EXT,
+    ePhysicalDeviceImageProcessingFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_FEATURES_QCOM,
+    ePhysicalDeviceImageProcessingPropertiesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_PROCESSING_PROPERTIES_QCOM,
+    eImageViewSampleWeightCreateInfoQCOM = VK_STRUCTURE_TYPE_IMAGE_VIEW_SAMPLE_WEIGHT_CREATE_INFO_QCOM,
+    ePhysicalDeviceExtendedDynamicState3FeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_FEATURES_EXT,
+    ePhysicalDeviceExtendedDynamicState3PropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_3_PROPERTIES_EXT,
+    ePhysicalDeviceSubpassMergeFeedbackFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBPASS_MERGE_FEEDBACK_FEATURES_EXT,
+    eRenderPassCreationControlEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_CONTROL_EXT,
+    eRenderPassCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_CREATION_FEEDBACK_CREATE_INFO_EXT,
+    eRenderPassSubpassFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_RENDER_PASS_SUBPASS_FEEDBACK_CREATE_INFO_EXT,
+    eDirectDriverLoadingInfoLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_INFO_LUNARG,
+    eDirectDriverLoadingListLUNARG = VK_STRUCTURE_TYPE_DIRECT_DRIVER_LOADING_LIST_LUNARG,
+    ePhysicalDeviceShaderModuleIdentifierFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_FEATURES_EXT,
+    ePhysicalDeviceShaderModuleIdentifierPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_MODULE_IDENTIFIER_PROPERTIES_EXT,
+    ePipelineShaderStageModuleIdentifierCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_MODULE_IDENTIFIER_CREATE_INFO_EXT,
+    eShaderModuleIdentifierEXT = VK_STRUCTURE_TYPE_SHADER_MODULE_IDENTIFIER_EXT,
+    ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_EXT,
+    ePhysicalDeviceOpticalFlowFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_FEATURES_NV,
+    ePhysicalDeviceOpticalFlowPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_OPTICAL_FLOW_PROPERTIES_NV,
+    eOpticalFlowImageFormatInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_INFO_NV,
+    eOpticalFlowImageFormatPropertiesNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_IMAGE_FORMAT_PROPERTIES_NV,
+    eOpticalFlowSessionCreateInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_INFO_NV,
+    eOpticalFlowExecuteInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_EXECUTE_INFO_NV,
+    eOpticalFlowSessionCreatePrivateDataInfoNV = VK_STRUCTURE_TYPE_OPTICAL_FLOW_SESSION_CREATE_PRIVATE_DATA_INFO_NV,
+    ePhysicalDeviceLegacyDitheringFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LEGACY_DITHERING_FEATURES_EXT,
+    ePhysicalDevicePipelineProtectedAccessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_PROTECTED_ACCESS_FEATURES_EXT,
+    ePhysicalDeviceTilePropertiesFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TILE_PROPERTIES_FEATURES_QCOM,
+    eTilePropertiesQCOM = VK_STRUCTURE_TYPE_TILE_PROPERTIES_QCOM,
+    ePhysicalDeviceAmigoProfilingFeaturesSEC = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_AMIGO_PROFILING_FEATURES_SEC,
+    eAmigoProfilingSubmitInfoSEC = VK_STRUCTURE_TYPE_AMIGO_PROFILING_SUBMIT_INFO_SEC,
+    ePhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PER_VIEW_VIEWPORTS_FEATURES_QCOM,
+    ePhysicalDeviceRayTracingInvocationReorderFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_FEATURES_NV,
+    ePhysicalDeviceRayTracingInvocationReorderPropertiesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_INVOCATION_REORDER_PROPERTIES_NV,
+    ePhysicalDeviceMutableDescriptorTypeFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_EXT,
+    eMutableDescriptorTypeCreateInfoEXT = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_EXT,
+    ePhysicalDeviceShaderCoreBuiltinsFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_FEATURES_ARM,
+    ePhysicalDeviceShaderCoreBuiltinsPropertiesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_CORE_BUILTINS_PROPERTIES_ARM,
+    eAttachmentDescription2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_2_KHR,
+    eAttachmentDescriptionStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_DESCRIPTION_STENCIL_LAYOUT_KHR,
+    eAttachmentReference2KHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_2_KHR,
+    eAttachmentReferenceStencilLayoutKHR = VK_STRUCTURE_TYPE_ATTACHMENT_REFERENCE_STENCIL_LAYOUT_KHR,
+    eAttachmentSampleCountInfoNV = VK_STRUCTURE_TYPE_ATTACHMENT_SAMPLE_COUNT_INFO_NV,
+    eBindBufferMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_DEVICE_GROUP_INFO_KHR,
+    eBindBufferMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR,
+    eBindImageMemoryDeviceGroupInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_DEVICE_GROUP_INFO_KHR,
+    eBindImageMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR,
+    eBindImagePlaneMemoryInfoKHR = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO_KHR,
+    eBlitImageInfo2KHR = VK_STRUCTURE_TYPE_BLIT_IMAGE_INFO_2_KHR,
+    eBufferCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_COPY_2_KHR,
+    eBufferDeviceAddressInfoEXT = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_EXT,
+    eBufferDeviceAddressInfoKHR = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO_KHR,
+    eBufferImageCopy2KHR = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2_KHR,
+    eBufferMemoryBarrier2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2_KHR,
+    eBufferMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eBufferOpaqueCaptureAddressCreateInfoKHR = VK_STRUCTURE_TYPE_BUFFER_OPAQUE_CAPTURE_ADDRESS_CREATE_INFO_KHR,
+    eCommandBufferInheritanceRenderingInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_INHERITANCE_RENDERING_INFO_KHR,
+    eCommandBufferSubmitInfoKHR = VK_STRUCTURE_TYPE_COMMAND_BUFFER_SUBMIT_INFO_KHR,
+    eCopyBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_INFO_2_KHR,
+    eCopyBufferToImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2_KHR,
+    eCopyImageInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_INFO_2_KHR,
+    eCopyImageToBufferInfo2KHR = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_BUFFER_INFO_2_KHR,
+    eDebugReportCreateInfoEXT = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT,
+    eDependencyInfoKHR = VK_STRUCTURE_TYPE_DEPENDENCY_INFO_KHR,
+    eDescriptorPoolInlineUniformBlockCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_INLINE_UNIFORM_BLOCK_CREATE_INFO_EXT,
+    eDescriptorSetLayoutBindingFlagsCreateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_BINDING_FLAGS_CREATE_INFO_EXT,
+    eDescriptorSetLayoutSupportKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT_KHR,
+    eDescriptorSetVariableDescriptorCountAllocateInfoEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_ALLOCATE_INFO_EXT,
+    eDescriptorSetVariableDescriptorCountLayoutSupportEXT = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_VARIABLE_DESCRIPTOR_COUNT_LAYOUT_SUPPORT_EXT,
+    eDescriptorUpdateTemplateCreateInfoKHR = VK_STRUCTURE_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_CREATE_INFO_KHR,
+    eDeviceBufferMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_BUFFER_MEMORY_REQUIREMENTS_KHR,
+    eDeviceGroupBindSparseInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_BIND_SPARSE_INFO_KHR,
+    eDeviceGroupCommandBufferBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_COMMAND_BUFFER_BEGIN_INFO_KHR,
+    eDeviceGroupDeviceCreateInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_DEVICE_CREATE_INFO_KHR,
+    eDeviceGroupRenderPassBeginInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_RENDER_PASS_BEGIN_INFO_KHR,
+    eDeviceGroupSubmitInfoKHR = VK_STRUCTURE_TYPE_DEVICE_GROUP_SUBMIT_INFO_KHR,
+    eDeviceImageMemoryRequirementsKHR = VK_STRUCTURE_TYPE_DEVICE_IMAGE_MEMORY_REQUIREMENTS_KHR,
+    eDeviceMemoryOpaqueCaptureAddressInfoKHR = VK_STRUCTURE_TYPE_DEVICE_MEMORY_OPAQUE_CAPTURE_ADDRESS_INFO_KHR,
+    eDevicePrivateDataCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_PRIVATE_DATA_CREATE_INFO_EXT,
+    eDeviceQueueGlobalPriorityCreateInfoEXT = VK_STRUCTURE_TYPE_DEVICE_QUEUE_GLOBAL_PRIORITY_CREATE_INFO_EXT,
+    eExportFenceCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_FENCE_CREATE_INFO_KHR,
+    eExportMemoryAllocateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO_KHR,
+    eExportSemaphoreCreateInfoKHR = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO_KHR,
+    eExternalBufferPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_BUFFER_PROPERTIES_KHR,
+    eExternalFencePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_FENCE_PROPERTIES_KHR,
+    eExternalImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
+    eExternalMemoryBufferCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_BUFFER_CREATE_INFO_KHR,
+    eExternalMemoryImageCreateInfoKHR = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO_KHR,
+    eExternalSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES_KHR,
+    eFormatProperties2KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2_KHR,
+    eFormatProperties3KHR = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3_KHR,
+    eFramebufferAttachmentsCreateInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENTS_CREATE_INFO_KHR,
+    eFramebufferAttachmentImageInfoKHR = VK_STRUCTURE_TYPE_FRAMEBUFFER_ATTACHMENT_IMAGE_INFO_KHR,
+    eImageBlit2KHR = VK_STRUCTURE_TYPE_IMAGE_BLIT_2_KHR,
+    eImageCopy2KHR = VK_STRUCTURE_TYPE_IMAGE_COPY_2_KHR,
+    eImageFormatListCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO_KHR,
+    eImageFormatProperties2KHR = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+    eImageMemoryBarrier2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2_KHR,
+    eImageMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eImagePlaneMemoryRequirementsInfoKHR = VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO_KHR,
+    eImageResolve2KHR = VK_STRUCTURE_TYPE_IMAGE_RESOLVE_2_KHR,
+    eImageSparseMemoryRequirementsInfo2KHR = VK_STRUCTURE_TYPE_IMAGE_SPARSE_MEMORY_REQUIREMENTS_INFO_2_KHR,
+    eImageStencilUsageCreateInfoEXT = VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO_EXT,
+    eImageViewUsageCreateInfoKHR = VK_STRUCTURE_TYPE_IMAGE_VIEW_USAGE_CREATE_INFO_KHR,
+    eMemoryAllocateFlagsInfoKHR = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO_KHR,
+    eMemoryBarrier2KHR = VK_STRUCTURE_TYPE_MEMORY_BARRIER_2_KHR,
+    eMemoryDedicatedAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR,
+    eMemoryDedicatedRequirementsKHR = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
+    eMemoryOpaqueCaptureAddressAllocateInfoKHR = VK_STRUCTURE_TYPE_MEMORY_OPAQUE_CAPTURE_ADDRESS_ALLOCATE_INFO_KHR,
+    eMemoryRequirements2KHR = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
+    eMutableDescriptorTypeCreateInfoVALVE = VK_STRUCTURE_TYPE_MUTABLE_DESCRIPTOR_TYPE_CREATE_INFO_VALVE,
+    ePhysicalDevice16BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES_KHR,
+    ePhysicalDevice8BitStorageFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_8BIT_STORAGE_FEATURES_KHR,
+    ePhysicalDeviceBufferAddressFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_ADDRESS_FEATURES_EXT,
+    ePhysicalDeviceBufferDeviceAddressFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR,
+    ePhysicalDeviceDepthStencilResolvePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_STENCIL_RESOLVE_PROPERTIES_KHR,
+    ePhysicalDeviceDescriptorIndexingFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT,
+    ePhysicalDeviceDescriptorIndexingPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_PROPERTIES_EXT,
+    ePhysicalDeviceDriverPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR,
+    ePhysicalDeviceDynamicRenderingFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DYNAMIC_RENDERING_FEATURES_KHR,
+    ePhysicalDeviceExternalBufferInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_BUFFER_INFO_KHR,
+    ePhysicalDeviceExternalFenceInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FENCE_INFO_KHR,
+    ePhysicalDeviceExternalImageFormatInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO_KHR,
+    ePhysicalDeviceExternalSemaphoreInfoKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO_KHR,
+    ePhysicalDeviceFeatures2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2_KHR,
+    ePhysicalDeviceFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR,
+    ePhysicalDeviceFloatControlsPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT_CONTROLS_PROPERTIES_KHR,
+    ePhysicalDeviceFragmentShaderBarycentricFeaturesNV = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADER_BARYCENTRIC_FEATURES_NV,
+    ePhysicalDeviceGlobalPriorityQueryFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GLOBAL_PRIORITY_QUERY_FEATURES_EXT,
+    ePhysicalDeviceGroupPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_GROUP_PROPERTIES_KHR,
+    ePhysicalDeviceHostQueryResetFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES_EXT,
+    ePhysicalDeviceIdPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES_KHR,
+    ePhysicalDeviceImagelessFramebufferFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGELESS_FRAMEBUFFER_FEATURES_KHR,
+    ePhysicalDeviceImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2_KHR,
+    ePhysicalDeviceImageRobustnessFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_ROBUSTNESS_FEATURES_EXT,
+    ePhysicalDeviceInlineUniformBlockFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_FEATURES_EXT,
+    ePhysicalDeviceInlineUniformBlockPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INLINE_UNIFORM_BLOCK_PROPERTIES_EXT,
+    ePhysicalDeviceMaintenance3PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_3_PROPERTIES_KHR,
+    ePhysicalDeviceMaintenance4FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR,
+    ePhysicalDeviceMaintenance4PropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_PROPERTIES_KHR,
+    ePhysicalDeviceMemoryProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PROPERTIES_2_KHR,
+    ePhysicalDeviceMultiviewFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR,
+    ePhysicalDeviceMultiviewPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_PROPERTIES_KHR,
+    ePhysicalDeviceMutableDescriptorTypeFeaturesVALVE = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MUTABLE_DESCRIPTOR_TYPE_FEATURES_VALVE,
+    ePhysicalDevicePipelineCreationCacheControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_CREATION_CACHE_CONTROL_FEATURES_EXT,
+    ePhysicalDevicePointClippingPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES_KHR,
+    ePhysicalDevicePrivateDataFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT,
+    ePhysicalDeviceProperties2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2_KHR,
+    ePhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_FEATURES_ARM,
+    ePhysicalDeviceSamplerFilterMinmaxPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_FILTER_MINMAX_PROPERTIES_EXT,
+    ePhysicalDeviceSamplerYcbcrConversionFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES_KHR,
+    ePhysicalDeviceScalarBlockLayoutFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT,
+    ePhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SEPARATE_DEPTH_STENCIL_LAYOUTS_FEATURES_KHR,
+    ePhysicalDeviceShaderAtomicInt64FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR,
+    ePhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DEMOTE_TO_HELPER_INVOCATION_FEATURES_EXT,
+    ePhysicalDeviceShaderDrawParameterFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETER_FEATURES,
+    ePhysicalDeviceShaderFloat16Int8FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_FLOAT16_INT8_FEATURES_KHR,
+    ePhysicalDeviceShaderIntegerDotProductFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_FEATURES_KHR,
+    ePhysicalDeviceShaderIntegerDotProductPropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_INTEGER_DOT_PRODUCT_PROPERTIES_KHR,
+    ePhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_EXTENDED_TYPES_FEATURES_KHR,
+    ePhysicalDeviceShaderTerminateInvocationFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_TERMINATE_INVOCATION_FEATURES_KHR,
+    ePhysicalDeviceSparseImageFormatInfo2KHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SPARSE_IMAGE_FORMAT_INFO_2_KHR,
+    ePhysicalDeviceSubgroupSizeControlFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_FEATURES_EXT,
+    ePhysicalDeviceSubgroupSizeControlPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES_EXT,
+    ePhysicalDeviceSynchronization2FeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SYNCHRONIZATION_2_FEATURES_KHR,
+    ePhysicalDeviceTexelBufferAlignmentPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXEL_BUFFER_ALIGNMENT_PROPERTIES_EXT,
+    ePhysicalDeviceTextureCompressionAstcHdrFeaturesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TEXTURE_COMPRESSION_ASTC_HDR_FEATURES_EXT,
+    ePhysicalDeviceTimelineSemaphoreFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR,
+    ePhysicalDeviceTimelineSemaphorePropertiesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_PROPERTIES_KHR,
+    ePhysicalDeviceToolPropertiesEXT = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TOOL_PROPERTIES_EXT,
+    ePhysicalDeviceUniformBufferStandardLayoutFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_UNIFORM_BUFFER_STANDARD_LAYOUT_FEATURES_KHR,
+    ePhysicalDeviceVariablePointersFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES_KHR,
+    ePhysicalDeviceVariablePointerFeatures = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES,
+    ePhysicalDeviceVariablePointerFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTER_FEATURES_KHR,
+    ePhysicalDeviceVulkanMemoryModelFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_MEMORY_MODEL_FEATURES_KHR,
+    ePhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_WORKGROUP_MEMORY_FEATURES_KHR,
+    ePipelineCreationFeedbackCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_CREATION_FEEDBACK_CREATE_INFO_EXT,
+    ePipelineInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_INFO_EXT,
+    ePipelineRenderingCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO_KHR,
+    ePipelineShaderStageRequiredSubgroupSizeCreateInfoEXT = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_REQUIRED_SUBGROUP_SIZE_CREATE_INFO_EXT,
+    ePipelineTessellationDomainOriginStateCreateInfoKHR = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_DOMAIN_ORIGIN_STATE_CREATE_INFO_KHR,
+    ePrivateDataSlotCreateInfoEXT = VK_STRUCTURE_TYPE_PRIVATE_DATA_SLOT_CREATE_INFO_EXT,
+    eQueryPoolCreateInfoINTEL = VK_STRUCTURE_TYPE_QUERY_POOL_CREATE_INFO_INTEL,
+    eQueueFamilyGlobalPriorityPropertiesEXT = VK_STRUCTURE_TYPE_QUEUE_FAMILY_GLOBAL_PRIORITY_PROPERTIES_EXT,
+    eQueueFamilyProperties2KHR = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2_KHR,
+    eRenderingAttachmentInfoKHR = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO_KHR,
+    eRenderingInfoKHR = VK_STRUCTURE_TYPE_RENDERING_INFO_KHR,
+    eRenderPassAttachmentBeginInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_ATTACHMENT_BEGIN_INFO_KHR,
+    eRenderPassCreateInfo2KHR = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO_2_KHR,
+    eRenderPassInputAttachmentAspectCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_INPUT_ATTACHMENT_ASPECT_CREATE_INFO_KHR,
+    eRenderPassMultiviewCreateInfoKHR = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO_KHR,
+    eResolveImageInfo2KHR = VK_STRUCTURE_TYPE_RESOLVE_IMAGE_INFO_2_KHR,
+    eSamplerReductionModeCreateInfoEXT = VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO_EXT,
+    eSamplerYcbcrConversionCreateInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_CREATE_INFO_KHR,
+    eSamplerYcbcrConversionImageFormatPropertiesKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_IMAGE_FORMAT_PROPERTIES_KHR,
+    eSamplerYcbcrConversionInfoKHR = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO_KHR,
+    eSemaphoreSignalInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SIGNAL_INFO_KHR,
+    eSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_SUBMIT_INFO_KHR,
+    eSemaphoreTypeCreateInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO_KHR,
+    eSemaphoreWaitInfoKHR = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO_KHR,
+    eSparseImageFormatProperties2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_FORMAT_PROPERTIES_2_KHR,
+    eSparseImageMemoryRequirements2KHR = VK_STRUCTURE_TYPE_SPARSE_IMAGE_MEMORY_REQUIREMENTS_2_KHR,
+    eSubmitInfo2KHR = VK_STRUCTURE_TYPE_SUBMIT_INFO_2_KHR,
+    eSubpassBeginInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_BEGIN_INFO_KHR,
+    eSubpassDependency2KHR = VK_STRUCTURE_TYPE_SUBPASS_DEPENDENCY_2_KHR,
+    eSubpassDescription2KHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_2_KHR,
+    eSubpassDescriptionDepthStencilResolveKHR = VK_STRUCTURE_TYPE_SUBPASS_DESCRIPTION_DEPTH_STENCIL_RESOLVE_KHR,
+    eSubpassEndInfoKHR = VK_STRUCTURE_TYPE_SUBPASS_END_INFO_KHR,
+    eTimelineSemaphoreSubmitInfoKHR = VK_STRUCTURE_TYPE_TIMELINE_SEMAPHORE_SUBMIT_INFO_KHR,
+    eWriteDescriptorSetInlineUniformBlockEXT = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET_INLINE_UNIFORM_BLOCK_EXT
+  };
+
+  enum class PipelineCacheHeaderVersion
+  {
+    eOne = VK_PIPELINE_CACHE_HEADER_VERSION_ONE
+  };
+
+  enum class ObjectType
+  {
+    eUnknown = VK_OBJECT_TYPE_UNKNOWN,
+    eInstance = VK_OBJECT_TYPE_INSTANCE,
+    ePhysicalDevice = VK_OBJECT_TYPE_PHYSICAL_DEVICE,
+    eDevice = VK_OBJECT_TYPE_DEVICE,
+    eQueue = VK_OBJECT_TYPE_QUEUE,
+    eSemaphore = VK_OBJECT_TYPE_SEMAPHORE,
+    eCommandBuffer = VK_OBJECT_TYPE_COMMAND_BUFFER,
+    eFence = VK_OBJECT_TYPE_FENCE,
+    eDeviceMemory = VK_OBJECT_TYPE_DEVICE_MEMORY,
+    eBuffer = VK_OBJECT_TYPE_BUFFER,
+    eImage = VK_OBJECT_TYPE_IMAGE,
+    eEvent = VK_OBJECT_TYPE_EVENT,
+    eQueryPool = VK_OBJECT_TYPE_QUERY_POOL,
+    eBufferView = VK_OBJECT_TYPE_BUFFER_VIEW,
+    eImageView = VK_OBJECT_TYPE_IMAGE_VIEW,
+    eShaderModule = VK_OBJECT_TYPE_SHADER_MODULE,
+    ePipelineCache = VK_OBJECT_TYPE_PIPELINE_CACHE,
+    ePipelineLayout = VK_OBJECT_TYPE_PIPELINE_LAYOUT,
+    eRenderPass = VK_OBJECT_TYPE_RENDER_PASS,
+    ePipeline = VK_OBJECT_TYPE_PIPELINE,
+    eDescriptorSetLayout = VK_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT,
+    eSampler = VK_OBJECT_TYPE_SAMPLER,
+    eDescriptorPool = VK_OBJECT_TYPE_DESCRIPTOR_POOL,
+    eDescriptorSet = VK_OBJECT_TYPE_DESCRIPTOR_SET,
+    eFramebuffer = VK_OBJECT_TYPE_FRAMEBUFFER,
+    eCommandPool = VK_OBJECT_TYPE_COMMAND_POOL,
+    eSamplerYcbcrConversion = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
+    eDescriptorUpdateTemplate = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE,
+    ePrivateDataSlot = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT,
+    eSurfaceKHR = VK_OBJECT_TYPE_SURFACE_KHR,
+    eSwapchainKHR = VK_OBJECT_TYPE_SWAPCHAIN_KHR,
+    eDisplayKHR = VK_OBJECT_TYPE_DISPLAY_KHR,
+    eDisplayModeKHR = VK_OBJECT_TYPE_DISPLAY_MODE_KHR,
+    eDebugReportCallbackEXT = VK_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT,
+    eVideoSessionKHR = VK_OBJECT_TYPE_VIDEO_SESSION_KHR,
+    eVideoSessionParametersKHR = VK_OBJECT_TYPE_VIDEO_SESSION_PARAMETERS_KHR,
+    eCuModuleNVX = VK_OBJECT_TYPE_CU_MODULE_NVX,
+    eCuFunctionNVX = VK_OBJECT_TYPE_CU_FUNCTION_NVX,
+    eDebugUtilsMessengerEXT = VK_OBJECT_TYPE_DEBUG_UTILS_MESSENGER_EXT,
+    eAccelerationStructureKHR = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR,
+    eValidationCacheEXT = VK_OBJECT_TYPE_VALIDATION_CACHE_EXT,
+    eAccelerationStructureNV = VK_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV,
+    ePerformanceConfigurationINTEL = VK_OBJECT_TYPE_PERFORMANCE_CONFIGURATION_INTEL,
+    eDeferredOperationKHR = VK_OBJECT_TYPE_DEFERRED_OPERATION_KHR,
+    eIndirectCommandsLayoutNV = VK_OBJECT_TYPE_INDIRECT_COMMANDS_LAYOUT_NV,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eBufferCollectionFUCHSIA = VK_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    eMicromapEXT = VK_OBJECT_TYPE_MICROMAP_EXT,
+    eOpticalFlowSessionNV = VK_OBJECT_TYPE_OPTICAL_FLOW_SESSION_NV,
+    eDescriptorUpdateTemplateKHR = VK_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR,
+    ePrivateDataSlotEXT = VK_OBJECT_TYPE_PRIVATE_DATA_SLOT_EXT,
+    eSamplerYcbcrConversionKHR = VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR
+  };
+
+  enum class VendorId
+  {
+    eVIV = VK_VENDOR_ID_VIV,
+    eVSI = VK_VENDOR_ID_VSI,
+    eKazan = VK_VENDOR_ID_KAZAN,
+    eCodeplay = VK_VENDOR_ID_CODEPLAY,
+    eMESA = VK_VENDOR_ID_MESA,
+    ePocl = VK_VENDOR_ID_POCL
+  };
+
+  enum class Format
+  {
+    eUndefined = VK_FORMAT_UNDEFINED,
+    eR4G4UnormPack8 = VK_FORMAT_R4G4_UNORM_PACK8,
+    eR4G4B4A4UnormPack16 = VK_FORMAT_R4G4B4A4_UNORM_PACK16,
+    eB4G4R4A4UnormPack16 = VK_FORMAT_B4G4R4A4_UNORM_PACK16,
+    eR5G6B5UnormPack16 = VK_FORMAT_R5G6B5_UNORM_PACK16,
+    eB5G6R5UnormPack16 = VK_FORMAT_B5G6R5_UNORM_PACK16,
+    eR5G5B5A1UnormPack16 = VK_FORMAT_R5G5B5A1_UNORM_PACK16,
+    eB5G5R5A1UnormPack16 = VK_FORMAT_B5G5R5A1_UNORM_PACK16,
+    eA1R5G5B5UnormPack16 = VK_FORMAT_A1R5G5B5_UNORM_PACK16,
+    eR8Unorm = VK_FORMAT_R8_UNORM,
+    eR8Snorm = VK_FORMAT_R8_SNORM,
+    eR8Uscaled = VK_FORMAT_R8_USCALED,
+    eR8Sscaled = VK_FORMAT_R8_SSCALED,
+    eR8Uint = VK_FORMAT_R8_UINT,
+    eR8Sint = VK_FORMAT_R8_SINT,
+    eR8Srgb = VK_FORMAT_R8_SRGB,
+    eR8G8Unorm = VK_FORMAT_R8G8_UNORM,
+    eR8G8Snorm = VK_FORMAT_R8G8_SNORM,
+    eR8G8Uscaled = VK_FORMAT_R8G8_USCALED,
+    eR8G8Sscaled = VK_FORMAT_R8G8_SSCALED,
+    eR8G8Uint = VK_FORMAT_R8G8_UINT,
+    eR8G8Sint = VK_FORMAT_R8G8_SINT,
+    eR8G8Srgb = VK_FORMAT_R8G8_SRGB,
+    eR8G8B8Unorm = VK_FORMAT_R8G8B8_UNORM,
+    eR8G8B8Snorm = VK_FORMAT_R8G8B8_SNORM,
+    eR8G8B8Uscaled = VK_FORMAT_R8G8B8_USCALED,
+    eR8G8B8Sscaled = VK_FORMAT_R8G8B8_SSCALED,
+    eR8G8B8Uint = VK_FORMAT_R8G8B8_UINT,
+    eR8G8B8Sint = VK_FORMAT_R8G8B8_SINT,
+    eR8G8B8Srgb = VK_FORMAT_R8G8B8_SRGB,
+    eB8G8R8Unorm = VK_FORMAT_B8G8R8_UNORM,
+    eB8G8R8Snorm = VK_FORMAT_B8G8R8_SNORM,
+    eB8G8R8Uscaled = VK_FORMAT_B8G8R8_USCALED,
+    eB8G8R8Sscaled = VK_FORMAT_B8G8R8_SSCALED,
+    eB8G8R8Uint = VK_FORMAT_B8G8R8_UINT,
+    eB8G8R8Sint = VK_FORMAT_B8G8R8_SINT,
+    eB8G8R8Srgb = VK_FORMAT_B8G8R8_SRGB,
+    eR8G8B8A8Unorm = VK_FORMAT_R8G8B8A8_UNORM,
+    eR8G8B8A8Snorm = VK_FORMAT_R8G8B8A8_SNORM,
+    eR8G8B8A8Uscaled = VK_FORMAT_R8G8B8A8_USCALED,
+    eR8G8B8A8Sscaled = VK_FORMAT_R8G8B8A8_SSCALED,
+    eR8G8B8A8Uint = VK_FORMAT_R8G8B8A8_UINT,
+    eR8G8B8A8Sint = VK_FORMAT_R8G8B8A8_SINT,
+    eR8G8B8A8Srgb = VK_FORMAT_R8G8B8A8_SRGB,
+    eB8G8R8A8Unorm = VK_FORMAT_B8G8R8A8_UNORM,
+    eB8G8R8A8Snorm = VK_FORMAT_B8G8R8A8_SNORM,
+    eB8G8R8A8Uscaled = VK_FORMAT_B8G8R8A8_USCALED,
+    eB8G8R8A8Sscaled = VK_FORMAT_B8G8R8A8_SSCALED,
+    eB8G8R8A8Uint = VK_FORMAT_B8G8R8A8_UINT,
+    eB8G8R8A8Sint = VK_FORMAT_B8G8R8A8_SINT,
+    eB8G8R8A8Srgb = VK_FORMAT_B8G8R8A8_SRGB,
+    eA8B8G8R8UnormPack32 = VK_FORMAT_A8B8G8R8_UNORM_PACK32,
+    eA8B8G8R8SnormPack32 = VK_FORMAT_A8B8G8R8_SNORM_PACK32,
+    eA8B8G8R8UscaledPack32 = VK_FORMAT_A8B8G8R8_USCALED_PACK32,
+    eA8B8G8R8SscaledPack32 = VK_FORMAT_A8B8G8R8_SSCALED_PACK32,
+    eA8B8G8R8UintPack32 = VK_FORMAT_A8B8G8R8_UINT_PACK32,
+    eA8B8G8R8SintPack32 = VK_FORMAT_A8B8G8R8_SINT_PACK32,
+    eA8B8G8R8SrgbPack32 = VK_FORMAT_A8B8G8R8_SRGB_PACK32,
+    eA2R10G10B10UnormPack32 = VK_FORMAT_A2R10G10B10_UNORM_PACK32,
+    eA2R10G10B10SnormPack32 = VK_FORMAT_A2R10G10B10_SNORM_PACK32,
+    eA2R10G10B10UscaledPack32 = VK_FORMAT_A2R10G10B10_USCALED_PACK32,
+    eA2R10G10B10SscaledPack32 = VK_FORMAT_A2R10G10B10_SSCALED_PACK32,
+    eA2R10G10B10UintPack32 = VK_FORMAT_A2R10G10B10_UINT_PACK32,
+    eA2R10G10B10SintPack32 = VK_FORMAT_A2R10G10B10_SINT_PACK32,
+    eA2B10G10R10UnormPack32 = VK_FORMAT_A2B10G10R10_UNORM_PACK32,
+    eA2B10G10R10SnormPack32 = VK_FORMAT_A2B10G10R10_SNORM_PACK32,
+    eA2B10G10R10UscaledPack32 = VK_FORMAT_A2B10G10R10_USCALED_PACK32,
+    eA2B10G10R10SscaledPack32 = VK_FORMAT_A2B10G10R10_SSCALED_PACK32,
+    eA2B10G10R10UintPack32 = VK_FORMAT_A2B10G10R10_UINT_PACK32,
+    eA2B10G10R10SintPack32 = VK_FORMAT_A2B10G10R10_SINT_PACK32,
+    eR16Unorm = VK_FORMAT_R16_UNORM,
+    eR16Snorm = VK_FORMAT_R16_SNORM,
+    eR16Uscaled = VK_FORMAT_R16_USCALED,
+    eR16Sscaled = VK_FORMAT_R16_SSCALED,
+    eR16Uint = VK_FORMAT_R16_UINT,
+    eR16Sint = VK_FORMAT_R16_SINT,
+    eR16Sfloat = VK_FORMAT_R16_SFLOAT,
+    eR16G16Unorm = VK_FORMAT_R16G16_UNORM,
+    eR16G16Snorm = VK_FORMAT_R16G16_SNORM,
+    eR16G16Uscaled = VK_FORMAT_R16G16_USCALED,
+    eR16G16Sscaled = VK_FORMAT_R16G16_SSCALED,
+    eR16G16Uint = VK_FORMAT_R16G16_UINT,
+    eR16G16Sint = VK_FORMAT_R16G16_SINT,
+    eR16G16Sfloat = VK_FORMAT_R16G16_SFLOAT,
+    eR16G16B16Unorm = VK_FORMAT_R16G16B16_UNORM,
+    eR16G16B16Snorm = VK_FORMAT_R16G16B16_SNORM,
+    eR16G16B16Uscaled = VK_FORMAT_R16G16B16_USCALED,
+    eR16G16B16Sscaled = VK_FORMAT_R16G16B16_SSCALED,
+    eR16G16B16Uint = VK_FORMAT_R16G16B16_UINT,
+    eR16G16B16Sint = VK_FORMAT_R16G16B16_SINT,
+    eR16G16B16Sfloat = VK_FORMAT_R16G16B16_SFLOAT,
+    eR16G16B16A16Unorm = VK_FORMAT_R16G16B16A16_UNORM,
+    eR16G16B16A16Snorm = VK_FORMAT_R16G16B16A16_SNORM,
+    eR16G16B16A16Uscaled = VK_FORMAT_R16G16B16A16_USCALED,
+    eR16G16B16A16Sscaled = VK_FORMAT_R16G16B16A16_SSCALED,
+    eR16G16B16A16Uint = VK_FORMAT_R16G16B16A16_UINT,
+    eR16G16B16A16Sint = VK_FORMAT_R16G16B16A16_SINT,
+    eR16G16B16A16Sfloat = VK_FORMAT_R16G16B16A16_SFLOAT,
+    eR32Uint = VK_FORMAT_R32_UINT,
+    eR32Sint = VK_FORMAT_R32_SINT,
+    eR32Sfloat = VK_FORMAT_R32_SFLOAT,
+    eR32G32Uint = VK_FORMAT_R32G32_UINT,
+    eR32G32Sint = VK_FORMAT_R32G32_SINT,
+    eR32G32Sfloat = VK_FORMAT_R32G32_SFLOAT,
+    eR32G32B32Uint = VK_FORMAT_R32G32B32_UINT,
+    eR32G32B32Sint = VK_FORMAT_R32G32B32_SINT,
+    eR32G32B32Sfloat = VK_FORMAT_R32G32B32_SFLOAT,
+    eR32G32B32A32Uint = VK_FORMAT_R32G32B32A32_UINT,
+    eR32G32B32A32Sint = VK_FORMAT_R32G32B32A32_SINT,
+    eR32G32B32A32Sfloat = VK_FORMAT_R32G32B32A32_SFLOAT,
+    eR64Uint = VK_FORMAT_R64_UINT,
+    eR64Sint = VK_FORMAT_R64_SINT,
+    eR64Sfloat = VK_FORMAT_R64_SFLOAT,
+    eR64G64Uint = VK_FORMAT_R64G64_UINT,
+    eR64G64Sint = VK_FORMAT_R64G64_SINT,
+    eR64G64Sfloat = VK_FORMAT_R64G64_SFLOAT,
+    eR64G64B64Uint = VK_FORMAT_R64G64B64_UINT,
+    eR64G64B64Sint = VK_FORMAT_R64G64B64_SINT,
+    eR64G64B64Sfloat = VK_FORMAT_R64G64B64_SFLOAT,
+    eR64G64B64A64Uint = VK_FORMAT_R64G64B64A64_UINT,
+    eR64G64B64A64Sint = VK_FORMAT_R64G64B64A64_SINT,
+    eR64G64B64A64Sfloat = VK_FORMAT_R64G64B64A64_SFLOAT,
+    eB10G11R11UfloatPack32 = VK_FORMAT_B10G11R11_UFLOAT_PACK32,
+    eE5B9G9R9UfloatPack32 = VK_FORMAT_E5B9G9R9_UFLOAT_PACK32,
+    eD16Unorm = VK_FORMAT_D16_UNORM,
+    eX8D24UnormPack32 = VK_FORMAT_X8_D24_UNORM_PACK32,
+    eD32Sfloat = VK_FORMAT_D32_SFLOAT,
+    eS8Uint = VK_FORMAT_S8_UINT,
+    eD16UnormS8Uint = VK_FORMAT_D16_UNORM_S8_UINT,
+    eD24UnormS8Uint = VK_FORMAT_D24_UNORM_S8_UINT,
+    eD32SfloatS8Uint = VK_FORMAT_D32_SFLOAT_S8_UINT,
+    eBc1RgbUnormBlock = VK_FORMAT_BC1_RGB_UNORM_BLOCK,
+    eBc1RgbSrgbBlock = VK_FORMAT_BC1_RGB_SRGB_BLOCK,
+    eBc1RgbaUnormBlock = VK_FORMAT_BC1_RGBA_UNORM_BLOCK,
+    eBc1RgbaSrgbBlock = VK_FORMAT_BC1_RGBA_SRGB_BLOCK,
+    eBc2UnormBlock = VK_FORMAT_BC2_UNORM_BLOCK,
+    eBc2SrgbBlock = VK_FORMAT_BC2_SRGB_BLOCK,
+    eBc3UnormBlock = VK_FORMAT_BC3_UNORM_BLOCK,
+    eBc3SrgbBlock = VK_FORMAT_BC3_SRGB_BLOCK,
+    eBc4UnormBlock = VK_FORMAT_BC4_UNORM_BLOCK,
+    eBc4SnormBlock = VK_FORMAT_BC4_SNORM_BLOCK,
+    eBc5UnormBlock = VK_FORMAT_BC5_UNORM_BLOCK,
+    eBc5SnormBlock = VK_FORMAT_BC5_SNORM_BLOCK,
+    eBc6HUfloatBlock = VK_FORMAT_BC6H_UFLOAT_BLOCK,
+    eBc6HSfloatBlock = VK_FORMAT_BC6H_SFLOAT_BLOCK,
+    eBc7UnormBlock = VK_FORMAT_BC7_UNORM_BLOCK,
+    eBc7SrgbBlock = VK_FORMAT_BC7_SRGB_BLOCK,
+    eEtc2R8G8B8UnormBlock = VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK,
+    eEtc2R8G8B8SrgbBlock = VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK,
+    eEtc2R8G8B8A1UnormBlock = VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK,
+    eEtc2R8G8B8A1SrgbBlock = VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK,
+    eEtc2R8G8B8A8UnormBlock = VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK,
+    eEtc2R8G8B8A8SrgbBlock = VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK,
+    eEacR11UnormBlock = VK_FORMAT_EAC_R11_UNORM_BLOCK,
+    eEacR11SnormBlock = VK_FORMAT_EAC_R11_SNORM_BLOCK,
+    eEacR11G11UnormBlock = VK_FORMAT_EAC_R11G11_UNORM_BLOCK,
+    eEacR11G11SnormBlock = VK_FORMAT_EAC_R11G11_SNORM_BLOCK,
+    eAstc4x4UnormBlock = VK_FORMAT_ASTC_4x4_UNORM_BLOCK,
+    eAstc4x4SrgbBlock = VK_FORMAT_ASTC_4x4_SRGB_BLOCK,
+    eAstc5x4UnormBlock = VK_FORMAT_ASTC_5x4_UNORM_BLOCK,
+    eAstc5x4SrgbBlock = VK_FORMAT_ASTC_5x4_SRGB_BLOCK,
+    eAstc5x5UnormBlock = VK_FORMAT_ASTC_5x5_UNORM_BLOCK,
+    eAstc5x5SrgbBlock = VK_FORMAT_ASTC_5x5_SRGB_BLOCK,
+    eAstc6x5UnormBlock = VK_FORMAT_ASTC_6x5_UNORM_BLOCK,
+    eAstc6x5SrgbBlock = VK_FORMAT_ASTC_6x5_SRGB_BLOCK,
+    eAstc6x6UnormBlock = VK_FORMAT_ASTC_6x6_UNORM_BLOCK,
+    eAstc6x6SrgbBlock = VK_FORMAT_ASTC_6x6_SRGB_BLOCK,
+    eAstc8x5UnormBlock = VK_FORMAT_ASTC_8x5_UNORM_BLOCK,
+    eAstc8x5SrgbBlock = VK_FORMAT_ASTC_8x5_SRGB_BLOCK,
+    eAstc8x6UnormBlock = VK_FORMAT_ASTC_8x6_UNORM_BLOCK,
+    eAstc8x6SrgbBlock = VK_FORMAT_ASTC_8x6_SRGB_BLOCK,
+    eAstc8x8UnormBlock = VK_FORMAT_ASTC_8x8_UNORM_BLOCK,
+    eAstc8x8SrgbBlock = VK_FORMAT_ASTC_8x8_SRGB_BLOCK,
+    eAstc10x5UnormBlock = VK_FORMAT_ASTC_10x5_UNORM_BLOCK,
+    eAstc10x5SrgbBlock = VK_FORMAT_ASTC_10x5_SRGB_BLOCK,
+    eAstc10x6UnormBlock = VK_FORMAT_ASTC_10x6_UNORM_BLOCK,
+    eAstc10x6SrgbBlock = VK_FORMAT_ASTC_10x6_SRGB_BLOCK,
+    eAstc10x8UnormBlock = VK_FORMAT_ASTC_10x8_UNORM_BLOCK,
+    eAstc10x8SrgbBlock = VK_FORMAT_ASTC_10x8_SRGB_BLOCK,
+    eAstc10x10UnormBlock = VK_FORMAT_ASTC_10x10_UNORM_BLOCK,
+    eAstc10x10SrgbBlock = VK_FORMAT_ASTC_10x10_SRGB_BLOCK,
+    eAstc12x10UnormBlock = VK_FORMAT_ASTC_12x10_UNORM_BLOCK,
+    eAstc12x10SrgbBlock = VK_FORMAT_ASTC_12x10_SRGB_BLOCK,
+    eAstc12x12UnormBlock = VK_FORMAT_ASTC_12x12_UNORM_BLOCK,
+    eAstc12x12SrgbBlock = VK_FORMAT_ASTC_12x12_SRGB_BLOCK,
+    eG8B8G8R8422Unorm = VK_FORMAT_G8B8G8R8_422_UNORM,
+    eB8G8R8G8422Unorm = VK_FORMAT_B8G8R8G8_422_UNORM,
+    eG8B8R83Plane420Unorm = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM,
+    eG8B8R82Plane420Unorm = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM,
+    eG8B8R83Plane422Unorm = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM,
+    eG8B8R82Plane422Unorm = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM,
+    eG8B8R83Plane444Unorm = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM,
+    eR10X6UnormPack16 = VK_FORMAT_R10X6_UNORM_PACK16,
+    eR10X6G10X6Unorm2Pack16 = VK_FORMAT_R10X6G10X6_UNORM_2PACK16,
+    eR10X6G10X6B10X6A10X6Unorm4Pack16 = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16,
+    eG10X6B10X6G10X6R10X6422Unorm4Pack16 = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
+    eB10X6G10X6R10X6G10X6422Unorm4Pack16 = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16,
+    eG10X6B10X6R10X63Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16,
+    eG10X6B10X6R10X62Plane420Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
+    eG10X6B10X6R10X63Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16,
+    eG10X6B10X6R10X62Plane422Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
+    eG10X6B10X6R10X63Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16,
+    eR12X4UnormPack16 = VK_FORMAT_R12X4_UNORM_PACK16,
+    eR12X4G12X4Unorm2Pack16 = VK_FORMAT_R12X4G12X4_UNORM_2PACK16,
+    eR12X4G12X4B12X4A12X4Unorm4Pack16 = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
+    eG12X4B12X4G12X4R12X4422Unorm4Pack16 = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
+    eB12X4G12X4R12X4G12X4422Unorm4Pack16 = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16,
+    eG12X4B12X4R12X43Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16,
+    eG12X4B12X4R12X42Plane420Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
+    eG12X4B12X4R12X43Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16,
+    eG12X4B12X4R12X42Plane422Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
+    eG12X4B12X4R12X43Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16,
+    eG16B16G16R16422Unorm = VK_FORMAT_G16B16G16R16_422_UNORM,
+    eB16G16R16G16422Unorm = VK_FORMAT_B16G16R16G16_422_UNORM,
+    eG16B16R163Plane420Unorm = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM,
+    eG16B16R162Plane420Unorm = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM,
+    eG16B16R163Plane422Unorm = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM,
+    eG16B16R162Plane422Unorm = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM,
+    eG16B16R163Plane444Unorm = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM,
+    eG8B8R82Plane444Unorm = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM,
+    eG10X6B10X6R10X62Plane444Unorm3Pack16 = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16,
+    eG12X4B12X4R12X42Plane444Unorm3Pack16 = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16,
+    eG16B16R162Plane444Unorm = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM,
+    eA4R4G4B4UnormPack16 = VK_FORMAT_A4R4G4B4_UNORM_PACK16,
+    eA4B4G4R4UnormPack16 = VK_FORMAT_A4B4G4R4_UNORM_PACK16,
+    eAstc4x4SfloatBlock = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK,
+    eAstc5x4SfloatBlock = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK,
+    eAstc5x5SfloatBlock = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK,
+    eAstc6x5SfloatBlock = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK,
+    eAstc6x6SfloatBlock = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK,
+    eAstc8x5SfloatBlock = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK,
+    eAstc8x6SfloatBlock = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK,
+    eAstc8x8SfloatBlock = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK,
+    eAstc10x5SfloatBlock = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK,
+    eAstc10x6SfloatBlock = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK,
+    eAstc10x8SfloatBlock = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK,
+    eAstc10x10SfloatBlock = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK,
+    eAstc12x10SfloatBlock = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK,
+    eAstc12x12SfloatBlock = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK,
+    ePvrtc12BppUnormBlockIMG = VK_FORMAT_PVRTC1_2BPP_UNORM_BLOCK_IMG,
+    ePvrtc14BppUnormBlockIMG = VK_FORMAT_PVRTC1_4BPP_UNORM_BLOCK_IMG,
+    ePvrtc22BppUnormBlockIMG = VK_FORMAT_PVRTC2_2BPP_UNORM_BLOCK_IMG,
+    ePvrtc24BppUnormBlockIMG = VK_FORMAT_PVRTC2_4BPP_UNORM_BLOCK_IMG,
+    ePvrtc12BppSrgbBlockIMG = VK_FORMAT_PVRTC1_2BPP_SRGB_BLOCK_IMG,
+    ePvrtc14BppSrgbBlockIMG = VK_FORMAT_PVRTC1_4BPP_SRGB_BLOCK_IMG,
+    ePvrtc22BppSrgbBlockIMG = VK_FORMAT_PVRTC2_2BPP_SRGB_BLOCK_IMG,
+    ePvrtc24BppSrgbBlockIMG = VK_FORMAT_PVRTC2_4BPP_SRGB_BLOCK_IMG,
+    eR16G16S105NV = VK_FORMAT_R16G16_S10_5_NV,
+    eA4B4G4R4UnormPack16EXT = VK_FORMAT_A4B4G4R4_UNORM_PACK16_EXT,
+    eA4R4G4B4UnormPack16EXT = VK_FORMAT_A4R4G4B4_UNORM_PACK16_EXT,
+    eAstc10x10SfloatBlockEXT = VK_FORMAT_ASTC_10x10_SFLOAT_BLOCK_EXT,
+    eAstc10x5SfloatBlockEXT = VK_FORMAT_ASTC_10x5_SFLOAT_BLOCK_EXT,
+    eAstc10x6SfloatBlockEXT = VK_FORMAT_ASTC_10x6_SFLOAT_BLOCK_EXT,
+    eAstc10x8SfloatBlockEXT = VK_FORMAT_ASTC_10x8_SFLOAT_BLOCK_EXT,
+    eAstc12x10SfloatBlockEXT = VK_FORMAT_ASTC_12x10_SFLOAT_BLOCK_EXT,
+    eAstc12x12SfloatBlockEXT = VK_FORMAT_ASTC_12x12_SFLOAT_BLOCK_EXT,
+    eAstc4x4SfloatBlockEXT = VK_FORMAT_ASTC_4x4_SFLOAT_BLOCK_EXT,
+    eAstc5x4SfloatBlockEXT = VK_FORMAT_ASTC_5x4_SFLOAT_BLOCK_EXT,
+    eAstc5x5SfloatBlockEXT = VK_FORMAT_ASTC_5x5_SFLOAT_BLOCK_EXT,
+    eAstc6x5SfloatBlockEXT = VK_FORMAT_ASTC_6x5_SFLOAT_BLOCK_EXT,
+    eAstc6x6SfloatBlockEXT = VK_FORMAT_ASTC_6x6_SFLOAT_BLOCK_EXT,
+    eAstc8x5SfloatBlockEXT = VK_FORMAT_ASTC_8x5_SFLOAT_BLOCK_EXT,
+    eAstc8x6SfloatBlockEXT = VK_FORMAT_ASTC_8x6_SFLOAT_BLOCK_EXT,
+    eAstc8x8SfloatBlockEXT = VK_FORMAT_ASTC_8x8_SFLOAT_BLOCK_EXT,
+    eB10X6G10X6R10X6G10X6422Unorm4Pack16KHR = VK_FORMAT_B10X6G10X6R10X6G10X6_422_UNORM_4PACK16_KHR,
+    eB12X4G12X4R12X4G12X4422Unorm4Pack16KHR = VK_FORMAT_B12X4G12X4R12X4G12X4_422_UNORM_4PACK16_KHR,
+    eB16G16R16G16422UnormKHR = VK_FORMAT_B16G16R16G16_422_UNORM_KHR,
+    eB8G8R8G8422UnormKHR = VK_FORMAT_B8G8R8G8_422_UNORM_KHR,
+    eG10X6B10X6G10X6R10X6422Unorm4Pack16KHR = VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16_KHR,
+    eG10X6B10X6R10X62Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X62Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X62Plane444Unorm3Pack16EXT = VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16_EXT,
+    eG10X6B10X6R10X63Plane420Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X63Plane422Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16_KHR,
+    eG10X6B10X6R10X63Plane444Unorm3Pack16KHR = VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16_KHR,
+    eG12X4B12X4G12X4R12X4422Unorm4Pack16KHR = VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16_KHR,
+    eG12X4B12X4R12X42Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X42Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X42Plane444Unorm3Pack16EXT = VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16_EXT,
+    eG12X4B12X4R12X43Plane420Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X43Plane422Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16_KHR,
+    eG12X4B12X4R12X43Plane444Unorm3Pack16KHR = VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16_KHR,
+    eG16B16G16R16422UnormKHR = VK_FORMAT_G16B16G16R16_422_UNORM_KHR,
+    eG16B16R162Plane420UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_420_UNORM_KHR,
+    eG16B16R162Plane422UnormKHR = VK_FORMAT_G16_B16R16_2PLANE_422_UNORM_KHR,
+    eG16B16R162Plane444UnormEXT = VK_FORMAT_G16_B16R16_2PLANE_444_UNORM_EXT,
+    eG16B16R163Plane420UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM_KHR,
+    eG16B16R163Plane422UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM_KHR,
+    eG16B16R163Plane444UnormKHR = VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM_KHR,
+    eG8B8G8R8422UnormKHR = VK_FORMAT_G8B8G8R8_422_UNORM_KHR,
+    eG8B8R82Plane420UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_420_UNORM_KHR,
+    eG8B8R82Plane422UnormKHR = VK_FORMAT_G8_B8R8_2PLANE_422_UNORM_KHR,
+    eG8B8R82Plane444UnormEXT = VK_FORMAT_G8_B8R8_2PLANE_444_UNORM_EXT,
+    eG8B8R83Plane420UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM_KHR,
+    eG8B8R83Plane422UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM_KHR,
+    eG8B8R83Plane444UnormKHR = VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM_KHR,
+    eR10X6G10X6B10X6A10X6Unorm4Pack16KHR = VK_FORMAT_R10X6G10X6B10X6A10X6_UNORM_4PACK16_KHR,
+    eR10X6G10X6Unorm2Pack16KHR = VK_FORMAT_R10X6G10X6_UNORM_2PACK16_KHR,
+    eR10X6UnormPack16KHR = VK_FORMAT_R10X6_UNORM_PACK16_KHR,
+    eR12X4G12X4B12X4A12X4Unorm4Pack16KHR = VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16_KHR,
+    eR12X4G12X4Unorm2Pack16KHR = VK_FORMAT_R12X4G12X4_UNORM_2PACK16_KHR,
+    eR12X4UnormPack16KHR = VK_FORMAT_R12X4_UNORM_PACK16_KHR
+  };
+
+  enum class FormatFeatureFlagBits : VkFormatFeatureFlags
+  {
+    eSampledImage = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT,
+    eStorageImage = VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT,
+    eStorageImageAtomic = VK_FORMAT_FEATURE_STORAGE_IMAGE_ATOMIC_BIT,
+    eUniformTexelBuffer = VK_FORMAT_FEATURE_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_BIT,
+    eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+    eVertexBuffer = VK_FORMAT_FEATURE_VERTEX_BUFFER_BIT,
+    eColorAttachment = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT,
+    eColorAttachmentBlend = VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BLEND_BIT,
+    eDepthStencilAttachment = VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eBlitSrc = VK_FORMAT_FEATURE_BLIT_SRC_BIT,
+    eBlitDst = VK_FORMAT_FEATURE_BLIT_DST_BIT,
+    eSampledImageFilterLinear = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+    eTransferSrc = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_FORMAT_FEATURE_TRANSFER_DST_BIT,
+    eMidpointChromaSamples = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT,
+    eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+    eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+    eDisjoint = VK_FORMAT_FEATURE_DISJOINT_BIT,
+    eCositedChromaSamples = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT,
+    eSampledImageFilterMinmax = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+    eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_OUTPUT_BIT_KHR,
+    eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_DECODE_DPB_BIT_KHR,
+    eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
+    eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT,
+    eFragmentDensityMapEXT = VK_FORMAT_FEATURE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeInputKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_INPUT_BIT_KHR,
+    eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_VIDEO_ENCODE_DPB_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eCositedChromaSamplesKHR = VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT_KHR,
+    eDisjointKHR = VK_FORMAT_FEATURE_DISJOINT_BIT_KHR,
+    eMidpointChromaSamplesKHR = VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT_KHR,
+    eSampledImageFilterCubicIMG = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_CUBIC_BIT_IMG,
+    eSampledImageFilterMinmaxEXT = VK_FORMAT_FEATURE_SAMPLED_IMAGE_FILTER_MINMAX_BIT_EXT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT_KHR,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceableKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT_KHR,
+    eSampledImageYcbcrConversionLinearFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT_KHR,
+    eSampledImageYcbcrConversionSeparateReconstructionFilterKHR = VK_FORMAT_FEATURE_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT_KHR,
+    eTransferDstKHR = VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR,
+    eTransferSrcKHR = VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR
+  };
+
+  using FormatFeatureFlags = Flags<FormatFeatureFlagBits>;
+
+
+  template <> struct FlagTraits<FormatFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FormatFeatureFlags allFlags = 
+          FormatFeatureFlagBits::eSampledImage
+        | FormatFeatureFlagBits::eStorageImage
+        | FormatFeatureFlagBits::eStorageImageAtomic
+        | FormatFeatureFlagBits::eUniformTexelBuffer
+        | FormatFeatureFlagBits::eStorageTexelBuffer
+        | FormatFeatureFlagBits::eStorageTexelBufferAtomic
+        | FormatFeatureFlagBits::eVertexBuffer
+        | FormatFeatureFlagBits::eColorAttachment
+        | FormatFeatureFlagBits::eColorAttachmentBlend
+        | FormatFeatureFlagBits::eDepthStencilAttachment
+        | FormatFeatureFlagBits::eBlitSrc
+        | FormatFeatureFlagBits::eBlitDst
+        | FormatFeatureFlagBits::eSampledImageFilterLinear
+        | FormatFeatureFlagBits::eTransferSrc
+        | FormatFeatureFlagBits::eTransferDst
+        | FormatFeatureFlagBits::eMidpointChromaSamples
+        | FormatFeatureFlagBits::eSampledImageYcbcrConversionLinearFilter
+        | FormatFeatureFlagBits::eSampledImageYcbcrConversionSeparateReconstructionFilter
+        | FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicit
+        | FormatFeatureFlagBits::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable
+        | FormatFeatureFlagBits::eDisjoint
+        | FormatFeatureFlagBits::eCositedChromaSamples
+        | FormatFeatureFlagBits::eSampledImageFilterMinmax
+        | FormatFeatureFlagBits::eVideoDecodeOutputKHR
+        | FormatFeatureFlagBits::eVideoDecodeDpbKHR
+        | FormatFeatureFlagBits::eAccelerationStructureVertexBufferKHR
+        | FormatFeatureFlagBits::eSampledImageFilterCubicEXT
+        | FormatFeatureFlagBits::eFragmentDensityMapEXT
+        | FormatFeatureFlagBits::eFragmentShadingRateAttachmentKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | FormatFeatureFlagBits::eVideoEncodeInputKHR
+        | FormatFeatureFlagBits::eVideoEncodeDpbKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+;
+  };
+
+  enum class ImageCreateFlagBits : VkImageCreateFlags
+  {
+    eSparseBinding = VK_IMAGE_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_IMAGE_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_IMAGE_CREATE_SPARSE_ALIASED_BIT,
+    eMutableFormat = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT,
+    eCubeCompatible = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT,
+    eAlias = VK_IMAGE_CREATE_ALIAS_BIT,
+    eSplitInstanceBindRegions = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT,
+    e2DArrayCompatible = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT,
+    eBlockTexelViewCompatible = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT,
+    eExtendedUsage = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT,
+    eProtected = VK_IMAGE_CREATE_PROTECTED_BIT,
+    eDisjoint = VK_IMAGE_CREATE_DISJOINT_BIT,
+    eCornerSampledNV = VK_IMAGE_CREATE_CORNER_SAMPLED_BIT_NV,
+    eSampleLocationsCompatibleDepthEXT = VK_IMAGE_CREATE_SAMPLE_LOCATIONS_COMPATIBLE_DEPTH_BIT_EXT,
+    eSubsampledEXT = VK_IMAGE_CREATE_SUBSAMPLED_BIT_EXT,
+    eDescriptorBufferCaptureReplayEXT = VK_IMAGE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eMultisampledRenderToSingleSampledEXT = VK_IMAGE_CREATE_MULTISAMPLED_RENDER_TO_SINGLE_SAMPLED_BIT_EXT,
+    e2DViewCompatibleEXT = VK_IMAGE_CREATE_2D_VIEW_COMPATIBLE_BIT_EXT,
+    eFragmentDensityMapOffsetQCOM = VK_IMAGE_CREATE_FRAGMENT_DENSITY_MAP_OFFSET_BIT_QCOM,
+    e2DArrayCompatibleKHR = VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT_KHR,
+    eAliasKHR = VK_IMAGE_CREATE_ALIAS_BIT_KHR,
+    eBlockTexelViewCompatibleKHR = VK_IMAGE_CREATE_BLOCK_TEXEL_VIEW_COMPATIBLE_BIT_KHR,
+    eDisjointKHR = VK_IMAGE_CREATE_DISJOINT_BIT_KHR,
+    eExtendedUsageKHR = VK_IMAGE_CREATE_EXTENDED_USAGE_BIT_KHR,
+    eSplitInstanceBindRegionsKHR = VK_IMAGE_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR
+  };
+
+  using ImageCreateFlags = Flags<ImageCreateFlagBits>;
+
+
+  template <> struct FlagTraits<ImageCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCreateFlags allFlags = 
+          ImageCreateFlagBits::eSparseBinding
+        | ImageCreateFlagBits::eSparseResidency
+        | ImageCreateFlagBits::eSparseAliased
+        | ImageCreateFlagBits::eMutableFormat
+        | ImageCreateFlagBits::eCubeCompatible
+        | ImageCreateFlagBits::eAlias
+        | ImageCreateFlagBits::eSplitInstanceBindRegions
+        | ImageCreateFlagBits::e2DArrayCompatible
+        | ImageCreateFlagBits::eBlockTexelViewCompatible
+        | ImageCreateFlagBits::eExtendedUsage
+        | ImageCreateFlagBits::eProtected
+        | ImageCreateFlagBits::eDisjoint
+        | ImageCreateFlagBits::eCornerSampledNV
+        | ImageCreateFlagBits::eSampleLocationsCompatibleDepthEXT
+        | ImageCreateFlagBits::eSubsampledEXT
+        | ImageCreateFlagBits::eDescriptorBufferCaptureReplayEXT
+        | ImageCreateFlagBits::eMultisampledRenderToSingleSampledEXT
+        | ImageCreateFlagBits::e2DViewCompatibleEXT
+        | ImageCreateFlagBits::eFragmentDensityMapOffsetQCOM;
+  };
+
+  enum class ImageTiling
+  {
+    eOptimal = VK_IMAGE_TILING_OPTIMAL,
+    eLinear = VK_IMAGE_TILING_LINEAR,
+    eDrmFormatModifierEXT = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT
+  };
+
+  enum class ImageType
+  {
+    e1D = VK_IMAGE_TYPE_1D,
+    e2D = VK_IMAGE_TYPE_2D,
+    e3D = VK_IMAGE_TYPE_3D
+  };
+
+  enum class ImageUsageFlagBits : VkImageUsageFlags
+  {
+    eTransferSrc = VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_IMAGE_USAGE_TRANSFER_DST_BIT,
+    eSampled = VK_IMAGE_USAGE_SAMPLED_BIT,
+    eStorage = VK_IMAGE_USAGE_STORAGE_BIT,
+    eColorAttachment = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
+    eDepthStencilAttachment = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eTransientAttachment = VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
+    eInputAttachment = VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT,
+    eVideoDecodeDstKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR,
+    eVideoDecodeSrcKHR = VK_IMAGE_USAGE_VIDEO_DECODE_SRC_BIT_KHR,
+    eVideoDecodeDpbKHR = VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR,
+    eFragmentDensityMapEXT = VK_IMAGE_USAGE_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_IMAGE_USAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeDstKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DST_BIT_KHR,
+    eVideoEncodeSrcKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
+    eVideoEncodeDpbKHR = VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eAttachmentFeedbackLoopEXT = VK_IMAGE_USAGE_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+    eInvocationMaskHUAWEI = VK_IMAGE_USAGE_INVOCATION_MASK_BIT_HUAWEI,
+    eSampleWeightQCOM = VK_IMAGE_USAGE_SAMPLE_WEIGHT_BIT_QCOM,
+    eSampleBlockMatchQCOM = VK_IMAGE_USAGE_SAMPLE_BLOCK_MATCH_BIT_QCOM,
+    eShadingRateImageNV = VK_IMAGE_USAGE_SHADING_RATE_IMAGE_BIT_NV
+  };
+
+  using ImageUsageFlags = Flags<ImageUsageFlagBits>;
+
+
+  template <> struct FlagTraits<ImageUsageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageUsageFlags allFlags = 
+          ImageUsageFlagBits::eTransferSrc
+        | ImageUsageFlagBits::eTransferDst
+        | ImageUsageFlagBits::eSampled
+        | ImageUsageFlagBits::eStorage
+        | ImageUsageFlagBits::eColorAttachment
+        | ImageUsageFlagBits::eDepthStencilAttachment
+        | ImageUsageFlagBits::eTransientAttachment
+        | ImageUsageFlagBits::eInputAttachment
+        | ImageUsageFlagBits::eVideoDecodeDstKHR
+        | ImageUsageFlagBits::eVideoDecodeSrcKHR
+        | ImageUsageFlagBits::eVideoDecodeDpbKHR
+        | ImageUsageFlagBits::eFragmentDensityMapEXT
+        | ImageUsageFlagBits::eFragmentShadingRateAttachmentKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | ImageUsageFlagBits::eVideoEncodeDstKHR
+        | ImageUsageFlagBits::eVideoEncodeSrcKHR
+        | ImageUsageFlagBits::eVideoEncodeDpbKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | ImageUsageFlagBits::eAttachmentFeedbackLoopEXT
+        | ImageUsageFlagBits::eInvocationMaskHUAWEI
+        | ImageUsageFlagBits::eSampleWeightQCOM
+        | ImageUsageFlagBits::eSampleBlockMatchQCOM;
+  };
+
+  enum class InstanceCreateFlagBits : VkInstanceCreateFlags
+  {
+    eEnumeratePortabilityKHR = VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR
+  };
+
+  using InstanceCreateFlags = Flags<InstanceCreateFlagBits>;
+
+
+  template <> struct FlagTraits<InstanceCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR InstanceCreateFlags allFlags = 
+          InstanceCreateFlagBits::eEnumeratePortabilityKHR;
+  };
+
+  enum class InternalAllocationType
+  {
+    eExecutable = VK_INTERNAL_ALLOCATION_TYPE_EXECUTABLE
+  };
+
+  enum class MemoryHeapFlagBits : VkMemoryHeapFlags
+  {
+    eDeviceLocal = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
+    eMultiInstance = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT,
+    eMultiInstanceKHR = VK_MEMORY_HEAP_MULTI_INSTANCE_BIT_KHR
+  };
+
+  using MemoryHeapFlags = Flags<MemoryHeapFlagBits>;
+
+
+  template <> struct FlagTraits<MemoryHeapFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryHeapFlags allFlags = 
+          MemoryHeapFlagBits::eDeviceLocal
+        | MemoryHeapFlagBits::eMultiInstance;
+  };
+
+  enum class MemoryPropertyFlagBits : VkMemoryPropertyFlags
+  {
+    eDeviceLocal = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
+    eHostVisible = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
+    eHostCoherent = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
+    eHostCached = VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
+    eLazilyAllocated = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT,
+    eProtected = VK_MEMORY_PROPERTY_PROTECTED_BIT,
+    eDeviceCoherentAMD = VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD,
+    eDeviceUncachedAMD = VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD,
+    eRdmaCapableNV = VK_MEMORY_PROPERTY_RDMA_CAPABLE_BIT_NV
+  };
+
+  using MemoryPropertyFlags = Flags<MemoryPropertyFlagBits>;
+
+
+  template <> struct FlagTraits<MemoryPropertyFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryPropertyFlags allFlags = 
+          MemoryPropertyFlagBits::eDeviceLocal
+        | MemoryPropertyFlagBits::eHostVisible
+        | MemoryPropertyFlagBits::eHostCoherent
+        | MemoryPropertyFlagBits::eHostCached
+        | MemoryPropertyFlagBits::eLazilyAllocated
+        | MemoryPropertyFlagBits::eProtected
+        | MemoryPropertyFlagBits::eDeviceCoherentAMD
+        | MemoryPropertyFlagBits::eDeviceUncachedAMD
+        | MemoryPropertyFlagBits::eRdmaCapableNV;
+  };
+
+  enum class PhysicalDeviceType
+  {
+    eOther = VK_PHYSICAL_DEVICE_TYPE_OTHER,
+    eIntegratedGpu = VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU,
+    eDiscreteGpu = VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU,
+    eVirtualGpu = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
+    eCpu = VK_PHYSICAL_DEVICE_TYPE_CPU
+  };
+
+  enum class QueueFlagBits : VkQueueFlags
+  {
+    eGraphics = VK_QUEUE_GRAPHICS_BIT,
+    eCompute = VK_QUEUE_COMPUTE_BIT,
+    eTransfer = VK_QUEUE_TRANSFER_BIT,
+    eSparseBinding = VK_QUEUE_SPARSE_BINDING_BIT,
+    eProtected = VK_QUEUE_PROTECTED_BIT,
+    eVideoDecodeKHR = VK_QUEUE_VIDEO_DECODE_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeKHR = VK_QUEUE_VIDEO_ENCODE_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eOpticalFlowNV = VK_QUEUE_OPTICAL_FLOW_BIT_NV
+  };
+
+  using QueueFlags = Flags<QueueFlagBits>;
+
+
+  template <> struct FlagTraits<QueueFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueueFlags allFlags = 
+          QueueFlagBits::eGraphics
+        | QueueFlagBits::eCompute
+        | QueueFlagBits::eTransfer
+        | QueueFlagBits::eSparseBinding
+        | QueueFlagBits::eProtected
+        | QueueFlagBits::eVideoDecodeKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | QueueFlagBits::eVideoEncodeKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | QueueFlagBits::eOpticalFlowNV;
+  };
+
+  enum class SampleCountFlagBits : VkSampleCountFlags
+  {
+    e1 = VK_SAMPLE_COUNT_1_BIT,
+    e2 = VK_SAMPLE_COUNT_2_BIT,
+    e4 = VK_SAMPLE_COUNT_4_BIT,
+    e8 = VK_SAMPLE_COUNT_8_BIT,
+    e16 = VK_SAMPLE_COUNT_16_BIT,
+    e32 = VK_SAMPLE_COUNT_32_BIT,
+    e64 = VK_SAMPLE_COUNT_64_BIT
+  };
+
+  using SampleCountFlags = Flags<SampleCountFlagBits>;
+
+
+  template <> struct FlagTraits<SampleCountFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SampleCountFlags allFlags = 
+          SampleCountFlagBits::e1
+        | SampleCountFlagBits::e2
+        | SampleCountFlagBits::e4
+        | SampleCountFlagBits::e8
+        | SampleCountFlagBits::e16
+        | SampleCountFlagBits::e32
+        | SampleCountFlagBits::e64;
+  };
+
+  enum class SystemAllocationScope
+  {
+    eCommand = VK_SYSTEM_ALLOCATION_SCOPE_COMMAND,
+    eObject = VK_SYSTEM_ALLOCATION_SCOPE_OBJECT,
+    eCache = VK_SYSTEM_ALLOCATION_SCOPE_CACHE,
+    eDevice = VK_SYSTEM_ALLOCATION_SCOPE_DEVICE,
+    eInstance = VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE
+  };
+
+  enum class DeviceCreateFlagBits : VkDeviceCreateFlags
+  {};
+
+  using DeviceCreateFlags = Flags<DeviceCreateFlagBits>;
+
+
+  template <> struct FlagTraits<DeviceCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineStageFlagBits : VkPipelineStageFlags
+  {
+    eTopOfPipe = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+    eDrawIndirect = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
+    eVertexInput = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+    eVertexShader = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
+    eTessellationControlShader = VK_PIPELINE_STAGE_TESSELLATION_CONTROL_SHADER_BIT,
+    eTessellationEvaluationShader = VK_PIPELINE_STAGE_TESSELLATION_EVALUATION_SHADER_BIT,
+    eGeometryShader = VK_PIPELINE_STAGE_GEOMETRY_SHADER_BIT,
+    eFragmentShader = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
+    eEarlyFragmentTests = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT,
+    eLateFragmentTests = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
+    eColorAttachmentOutput = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
+    eComputeShader = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
+    eTransfer = VK_PIPELINE_STAGE_TRANSFER_BIT,
+    eBottomOfPipe = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+    eHost = VK_PIPELINE_STAGE_HOST_BIT,
+    eAllGraphics = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT,
+    eAllCommands = VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
+    eNone = VK_PIPELINE_STAGE_NONE,
+    eTransformFeedbackEXT = VK_PIPELINE_STAGE_TRANSFORM_FEEDBACK_BIT_EXT,
+    eConditionalRenderingEXT = VK_PIPELINE_STAGE_CONDITIONAL_RENDERING_BIT_EXT,
+    eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+    eRayTracingShaderKHR = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_KHR,
+    eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eCommandPreprocessNV = VK_PIPELINE_STAGE_COMMAND_PREPROCESS_BIT_NV,
+    eTaskShaderEXT = VK_PIPELINE_STAGE_TASK_SHADER_BIT_EXT,
+    eMeshShaderEXT = VK_PIPELINE_STAGE_MESH_SHADER_BIT_EXT,
+    eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+    eMeshShaderNV = VK_PIPELINE_STAGE_MESH_SHADER_BIT_NV,
+    eNoneKHR = VK_PIPELINE_STAGE_NONE_KHR,
+    eRayTracingShaderNV = VK_PIPELINE_STAGE_RAY_TRACING_SHADER_BIT_NV,
+    eShadingRateImageNV = VK_PIPELINE_STAGE_SHADING_RATE_IMAGE_BIT_NV,
+    eTaskShaderNV = VK_PIPELINE_STAGE_TASK_SHADER_BIT_NV
+  };
+
+  using PipelineStageFlags = Flags<PipelineStageFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineStageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineStageFlags allFlags = 
+          PipelineStageFlagBits::eTopOfPipe
+        | PipelineStageFlagBits::eDrawIndirect
+        | PipelineStageFlagBits::eVertexInput
+        | PipelineStageFlagBits::eVertexShader
+        | PipelineStageFlagBits::eTessellationControlShader
+        | PipelineStageFlagBits::eTessellationEvaluationShader
+        | PipelineStageFlagBits::eGeometryShader
+        | PipelineStageFlagBits::eFragmentShader
+        | PipelineStageFlagBits::eEarlyFragmentTests
+        | PipelineStageFlagBits::eLateFragmentTests
+        | PipelineStageFlagBits::eColorAttachmentOutput
+        | PipelineStageFlagBits::eComputeShader
+        | PipelineStageFlagBits::eTransfer
+        | PipelineStageFlagBits::eBottomOfPipe
+        | PipelineStageFlagBits::eHost
+        | PipelineStageFlagBits::eAllGraphics
+        | PipelineStageFlagBits::eAllCommands
+        | PipelineStageFlagBits::eNone
+        | PipelineStageFlagBits::eTransformFeedbackEXT
+        | PipelineStageFlagBits::eConditionalRenderingEXT
+        | PipelineStageFlagBits::eAccelerationStructureBuildKHR
+        | PipelineStageFlagBits::eRayTracingShaderKHR
+        | PipelineStageFlagBits::eFragmentDensityProcessEXT
+        | PipelineStageFlagBits::eFragmentShadingRateAttachmentKHR
+        | PipelineStageFlagBits::eCommandPreprocessNV
+        | PipelineStageFlagBits::eTaskShaderEXT
+        | PipelineStageFlagBits::eMeshShaderEXT;
+  };
+
+  enum class MemoryMapFlagBits : VkMemoryMapFlags
+  {};
+
+  using MemoryMapFlags = Flags<MemoryMapFlagBits>;
+
+
+  template <> struct FlagTraits<MemoryMapFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryMapFlags allFlags =  {};
+  };
+
+  enum class ImageAspectFlagBits : VkImageAspectFlags
+  {
+    eColor = VK_IMAGE_ASPECT_COLOR_BIT,
+    eDepth = VK_IMAGE_ASPECT_DEPTH_BIT,
+    eStencil = VK_IMAGE_ASPECT_STENCIL_BIT,
+    eMetadata = VK_IMAGE_ASPECT_METADATA_BIT,
+    ePlane0 = VK_IMAGE_ASPECT_PLANE_0_BIT,
+    ePlane1 = VK_IMAGE_ASPECT_PLANE_1_BIT,
+    ePlane2 = VK_IMAGE_ASPECT_PLANE_2_BIT,
+    eNone = VK_IMAGE_ASPECT_NONE,
+    eMemoryPlane0EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT,
+    eMemoryPlane1EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT,
+    eMemoryPlane2EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT,
+    eMemoryPlane3EXT = VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT,
+    eNoneKHR = VK_IMAGE_ASPECT_NONE_KHR,
+    ePlane0KHR = VK_IMAGE_ASPECT_PLANE_0_BIT_KHR,
+    ePlane1KHR = VK_IMAGE_ASPECT_PLANE_1_BIT_KHR,
+    ePlane2KHR = VK_IMAGE_ASPECT_PLANE_2_BIT_KHR
+  };
+
+  using ImageAspectFlags = Flags<ImageAspectFlagBits>;
+
+
+  template <> struct FlagTraits<ImageAspectFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageAspectFlags allFlags = 
+          ImageAspectFlagBits::eColor
+        | ImageAspectFlagBits::eDepth
+        | ImageAspectFlagBits::eStencil
+        | ImageAspectFlagBits::eMetadata
+        | ImageAspectFlagBits::ePlane0
+        | ImageAspectFlagBits::ePlane1
+        | ImageAspectFlagBits::ePlane2
+        | ImageAspectFlagBits::eNone
+        | ImageAspectFlagBits::eMemoryPlane0EXT
+        | ImageAspectFlagBits::eMemoryPlane1EXT
+        | ImageAspectFlagBits::eMemoryPlane2EXT
+        | ImageAspectFlagBits::eMemoryPlane3EXT;
+  };
+
+  enum class SparseImageFormatFlagBits : VkSparseImageFormatFlags
+  {
+    eSingleMiptail = VK_SPARSE_IMAGE_FORMAT_SINGLE_MIPTAIL_BIT,
+    eAlignedMipSize = VK_SPARSE_IMAGE_FORMAT_ALIGNED_MIP_SIZE_BIT,
+    eNonstandardBlockSize = VK_SPARSE_IMAGE_FORMAT_NONSTANDARD_BLOCK_SIZE_BIT
+  };
+
+  using SparseImageFormatFlags = Flags<SparseImageFormatFlagBits>;
+
+
+  template <> struct FlagTraits<SparseImageFormatFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SparseImageFormatFlags allFlags = 
+          SparseImageFormatFlagBits::eSingleMiptail
+        | SparseImageFormatFlagBits::eAlignedMipSize
+        | SparseImageFormatFlagBits::eNonstandardBlockSize;
+  };
+
+  enum class SparseMemoryBindFlagBits : VkSparseMemoryBindFlags
+  {
+    eMetadata = VK_SPARSE_MEMORY_BIND_METADATA_BIT
+  };
+
+  using SparseMemoryBindFlags = Flags<SparseMemoryBindFlagBits>;
+
+
+  template <> struct FlagTraits<SparseMemoryBindFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SparseMemoryBindFlags allFlags = 
+          SparseMemoryBindFlagBits::eMetadata;
+  };
+
+  enum class FenceCreateFlagBits : VkFenceCreateFlags
+  {
+    eSignaled = VK_FENCE_CREATE_SIGNALED_BIT
+  };
+
+  using FenceCreateFlags = Flags<FenceCreateFlagBits>;
+
+
+  template <> struct FlagTraits<FenceCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FenceCreateFlags allFlags = 
+          FenceCreateFlagBits::eSignaled;
+  };
+
+  enum class SemaphoreCreateFlagBits : VkSemaphoreCreateFlags
+  {};
+
+  using SemaphoreCreateFlags = Flags<SemaphoreCreateFlagBits>;
+
+
+  template <> struct FlagTraits<SemaphoreCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreCreateFlags allFlags =  {};
+  };
+
+  enum class EventCreateFlagBits : VkEventCreateFlags
+  {
+    eDeviceOnly = VK_EVENT_CREATE_DEVICE_ONLY_BIT,
+    eDeviceOnlyKHR = VK_EVENT_CREATE_DEVICE_ONLY_BIT_KHR
+  };
+
+  using EventCreateFlags = Flags<EventCreateFlagBits>;
+
+
+  template <> struct FlagTraits<EventCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR EventCreateFlags allFlags = 
+          EventCreateFlagBits::eDeviceOnly;
+  };
+
+  enum class QueryPipelineStatisticFlagBits : VkQueryPipelineStatisticFlags
+  {
+    eInputAssemblyVertices = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_VERTICES_BIT,
+    eInputAssemblyPrimitives = VK_QUERY_PIPELINE_STATISTIC_INPUT_ASSEMBLY_PRIMITIVES_BIT,
+    eVertexShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_VERTEX_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_INVOCATIONS_BIT,
+    eGeometryShaderPrimitives = VK_QUERY_PIPELINE_STATISTIC_GEOMETRY_SHADER_PRIMITIVES_BIT,
+    eClippingInvocations = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_INVOCATIONS_BIT,
+    eClippingPrimitives = VK_QUERY_PIPELINE_STATISTIC_CLIPPING_PRIMITIVES_BIT,
+    eFragmentShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_FRAGMENT_SHADER_INVOCATIONS_BIT,
+    eTessellationControlShaderPatches = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_CONTROL_SHADER_PATCHES_BIT,
+    eTessellationEvaluationShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_TESSELLATION_EVALUATION_SHADER_INVOCATIONS_BIT,
+    eComputeShaderInvocations = VK_QUERY_PIPELINE_STATISTIC_COMPUTE_SHADER_INVOCATIONS_BIT,
+    eTaskShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_TASK_SHADER_INVOCATIONS_BIT_EXT,
+    eMeshShaderInvocationsEXT = VK_QUERY_PIPELINE_STATISTIC_MESH_SHADER_INVOCATIONS_BIT_EXT
+  };
+
+  using QueryPipelineStatisticFlags = Flags<QueryPipelineStatisticFlagBits>;
+
+
+  template <> struct FlagTraits<QueryPipelineStatisticFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueryPipelineStatisticFlags allFlags = 
+          QueryPipelineStatisticFlagBits::eInputAssemblyVertices
+        | QueryPipelineStatisticFlagBits::eInputAssemblyPrimitives
+        | QueryPipelineStatisticFlagBits::eVertexShaderInvocations
+        | QueryPipelineStatisticFlagBits::eGeometryShaderInvocations
+        | QueryPipelineStatisticFlagBits::eGeometryShaderPrimitives
+        | QueryPipelineStatisticFlagBits::eClippingInvocations
+        | QueryPipelineStatisticFlagBits::eClippingPrimitives
+        | QueryPipelineStatisticFlagBits::eFragmentShaderInvocations
+        | QueryPipelineStatisticFlagBits::eTessellationControlShaderPatches
+        | QueryPipelineStatisticFlagBits::eTessellationEvaluationShaderInvocations
+        | QueryPipelineStatisticFlagBits::eComputeShaderInvocations
+        | QueryPipelineStatisticFlagBits::eTaskShaderInvocationsEXT
+        | QueryPipelineStatisticFlagBits::eMeshShaderInvocationsEXT;
+  };
+
+  enum class QueryResultFlagBits : VkQueryResultFlags
+  {
+    e64 = VK_QUERY_RESULT_64_BIT,
+    eWait = VK_QUERY_RESULT_WAIT_BIT,
+    eWithAvailability = VK_QUERY_RESULT_WITH_AVAILABILITY_BIT,
+    ePartial = VK_QUERY_RESULT_PARTIAL_BIT,
+    eWithStatusKHR = VK_QUERY_RESULT_WITH_STATUS_BIT_KHR
+  };
+
+  using QueryResultFlags = Flags<QueryResultFlagBits>;
+
+
+  template <> struct FlagTraits<QueryResultFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueryResultFlags allFlags = 
+          QueryResultFlagBits::e64
+        | QueryResultFlagBits::eWait
+        | QueryResultFlagBits::eWithAvailability
+        | QueryResultFlagBits::ePartial
+        | QueryResultFlagBits::eWithStatusKHR;
+  };
+
+  enum class QueryType
+  {
+    eOcclusion = VK_QUERY_TYPE_OCCLUSION,
+    ePipelineStatistics = VK_QUERY_TYPE_PIPELINE_STATISTICS,
+    eTimestamp = VK_QUERY_TYPE_TIMESTAMP,
+    eResultStatusOnlyKHR = VK_QUERY_TYPE_RESULT_STATUS_ONLY_KHR,
+    eTransformFeedbackStreamEXT = VK_QUERY_TYPE_TRANSFORM_FEEDBACK_STREAM_EXT,
+    ePerformanceQueryKHR = VK_QUERY_TYPE_PERFORMANCE_QUERY_KHR,
+    eAccelerationStructureCompactedSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
+    eAccelerationStructureSerializationSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_SIZE_KHR,
+    eAccelerationStructureCompactedSizeNV = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_COMPACTED_SIZE_NV,
+    ePerformanceQueryINTEL = VK_QUERY_TYPE_PERFORMANCE_QUERY_INTEL,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeBitstreamBufferRangeKHR = VK_QUERY_TYPE_VIDEO_ENCODE_BITSTREAM_BUFFER_RANGE_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eMeshPrimitivesGeneratedEXT = VK_QUERY_TYPE_MESH_PRIMITIVES_GENERATED_EXT,
+    ePrimitivesGeneratedEXT = VK_QUERY_TYPE_PRIMITIVES_GENERATED_EXT,
+    eAccelerationStructureSerializationBottomLevelPointersKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SERIALIZATION_BOTTOM_LEVEL_POINTERS_KHR,
+    eAccelerationStructureSizeKHR = VK_QUERY_TYPE_ACCELERATION_STRUCTURE_SIZE_KHR,
+    eMicromapSerializationSizeEXT = VK_QUERY_TYPE_MICROMAP_SERIALIZATION_SIZE_EXT,
+    eMicromapCompactedSizeEXT = VK_QUERY_TYPE_MICROMAP_COMPACTED_SIZE_EXT
+  };
+
+  enum class QueryPoolCreateFlagBits : VkQueryPoolCreateFlags
+  {};
+
+  using QueryPoolCreateFlags = Flags<QueryPoolCreateFlagBits>;
+
+
+  template <> struct FlagTraits<QueryPoolCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueryPoolCreateFlags allFlags =  {};
+  };
+
+  enum class BufferCreateFlagBits : VkBufferCreateFlags
+  {
+    eSparseBinding = VK_BUFFER_CREATE_SPARSE_BINDING_BIT,
+    eSparseResidency = VK_BUFFER_CREATE_SPARSE_RESIDENCY_BIT,
+    eSparseAliased = VK_BUFFER_CREATE_SPARSE_ALIASED_BIT,
+    eProtected = VK_BUFFER_CREATE_PROTECTED_BIT,
+    eDeviceAddressCaptureReplay = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT,
+    eDescriptorBufferCaptureReplayEXT = VK_BUFFER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eDeviceAddressCaptureReplayEXT = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT,
+    eDeviceAddressCaptureReplayKHR = VK_BUFFER_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR
+  };
+
+  using BufferCreateFlags = Flags<BufferCreateFlagBits>;
+
+
+  template <> struct FlagTraits<BufferCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BufferCreateFlags allFlags = 
+          BufferCreateFlagBits::eSparseBinding
+        | BufferCreateFlagBits::eSparseResidency
+        | BufferCreateFlagBits::eSparseAliased
+        | BufferCreateFlagBits::eProtected
+        | BufferCreateFlagBits::eDeviceAddressCaptureReplay
+        | BufferCreateFlagBits::eDescriptorBufferCaptureReplayEXT;
+  };
+
+  enum class BufferUsageFlagBits : VkBufferUsageFlags
+  {
+    eTransferSrc = VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
+    eTransferDst = VK_BUFFER_USAGE_TRANSFER_DST_BIT,
+    eUniformTexelBuffer = VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT,
+    eUniformBuffer = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
+    eStorageBuffer = VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
+    eIndexBuffer = VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
+    eVertexBuffer = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
+    eIndirectBuffer = VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT,
+    eShaderDeviceAddress = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT,
+    eVideoDecodeSrcKHR = VK_BUFFER_USAGE_VIDEO_DECODE_SRC_BIT_KHR,
+    eVideoDecodeDstKHR = VK_BUFFER_USAGE_VIDEO_DECODE_DST_BIT_KHR,
+    eTransformFeedbackBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_BUFFER_BIT_EXT,
+    eTransformFeedbackCounterBufferEXT = VK_BUFFER_USAGE_TRANSFORM_FEEDBACK_COUNTER_BUFFER_BIT_EXT,
+    eConditionalRenderingEXT = VK_BUFFER_USAGE_CONDITIONAL_RENDERING_BIT_EXT,
+    eAccelerationStructureBuildInputReadOnlyKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_BUILD_INPUT_READ_ONLY_BIT_KHR,
+    eAccelerationStructureStorageKHR = VK_BUFFER_USAGE_ACCELERATION_STRUCTURE_STORAGE_BIT_KHR,
+    eShaderBindingTableKHR = VK_BUFFER_USAGE_SHADER_BINDING_TABLE_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeDstKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_DST_BIT_KHR,
+    eVideoEncodeSrcKHR = VK_BUFFER_USAGE_VIDEO_ENCODE_SRC_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eSamplerDescriptorBufferEXT = VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT,
+    eResourceDescriptorBufferEXT = VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT,
+    ePushDescriptorsDescriptorBufferEXT = VK_BUFFER_USAGE_PUSH_DESCRIPTORS_DESCRIPTOR_BUFFER_BIT_EXT,
+    eMicromapBuildInputReadOnlyEXT = VK_BUFFER_USAGE_MICROMAP_BUILD_INPUT_READ_ONLY_BIT_EXT,
+    eMicromapStorageEXT = VK_BUFFER_USAGE_MICROMAP_STORAGE_BIT_EXT,
+    eRayTracingNV = VK_BUFFER_USAGE_RAY_TRACING_BIT_NV,
+    eShaderDeviceAddressEXT = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_EXT,
+    eShaderDeviceAddressKHR = VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT_KHR
+  };
+
+  using BufferUsageFlags = Flags<BufferUsageFlagBits>;
+
+
+  template <> struct FlagTraits<BufferUsageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BufferUsageFlags allFlags = 
+          BufferUsageFlagBits::eTransferSrc
+        | BufferUsageFlagBits::eTransferDst
+        | BufferUsageFlagBits::eUniformTexelBuffer
+        | BufferUsageFlagBits::eStorageTexelBuffer
+        | BufferUsageFlagBits::eUniformBuffer
+        | BufferUsageFlagBits::eStorageBuffer
+        | BufferUsageFlagBits::eIndexBuffer
+        | BufferUsageFlagBits::eVertexBuffer
+        | BufferUsageFlagBits::eIndirectBuffer
+        | BufferUsageFlagBits::eShaderDeviceAddress
+        | BufferUsageFlagBits::eVideoDecodeSrcKHR
+        | BufferUsageFlagBits::eVideoDecodeDstKHR
+        | BufferUsageFlagBits::eTransformFeedbackBufferEXT
+        | BufferUsageFlagBits::eTransformFeedbackCounterBufferEXT
+        | BufferUsageFlagBits::eConditionalRenderingEXT
+        | BufferUsageFlagBits::eAccelerationStructureBuildInputReadOnlyKHR
+        | BufferUsageFlagBits::eAccelerationStructureStorageKHR
+        | BufferUsageFlagBits::eShaderBindingTableKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | BufferUsageFlagBits::eVideoEncodeDstKHR
+        | BufferUsageFlagBits::eVideoEncodeSrcKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | BufferUsageFlagBits::eSamplerDescriptorBufferEXT
+        | BufferUsageFlagBits::eResourceDescriptorBufferEXT
+        | BufferUsageFlagBits::ePushDescriptorsDescriptorBufferEXT
+        | BufferUsageFlagBits::eMicromapBuildInputReadOnlyEXT
+        | BufferUsageFlagBits::eMicromapStorageEXT;
+  };
+
+  enum class SharingMode
+  {
+    eExclusive = VK_SHARING_MODE_EXCLUSIVE,
+    eConcurrent = VK_SHARING_MODE_CONCURRENT
+  };
+
+  enum class BufferViewCreateFlagBits : VkBufferViewCreateFlags
+  {};
+
+  using BufferViewCreateFlags = Flags<BufferViewCreateFlagBits>;
+
+
+  template <> struct FlagTraits<BufferViewCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BufferViewCreateFlags allFlags =  {};
+  };
+
+  enum class ImageLayout
+  {
+    eUndefined = VK_IMAGE_LAYOUT_UNDEFINED,
+    eGeneral = VK_IMAGE_LAYOUT_GENERAL,
+    eColorAttachmentOptimal = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
+    eDepthStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
+    eDepthStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL,
+    eShaderReadOnlyOptimal = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+    eTransferSrcOptimal = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
+    eTransferDstOptimal = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
+    ePreinitialized = VK_IMAGE_LAYOUT_PREINITIALIZED,
+    eDepthReadOnlyStencilAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL,
+    eDepthAttachmentStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL,
+    eDepthAttachmentOptimal = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL,
+    eDepthReadOnlyOptimal = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL,
+    eStencilAttachmentOptimal = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL,
+    eStencilReadOnlyOptimal = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL,
+    eReadOnlyOptimal = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL,
+    eAttachmentOptimal = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL,
+    ePresentSrcKHR = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
+    eVideoDecodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR,
+    eVideoDecodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_SRC_KHR,
+    eVideoDecodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR,
+    eSharedPresentKHR = VK_IMAGE_LAYOUT_SHARED_PRESENT_KHR,
+    eFragmentDensityMapOptimalEXT = VK_IMAGE_LAYOUT_FRAGMENT_DENSITY_MAP_OPTIMAL_EXT,
+    eFragmentShadingRateAttachmentOptimalKHR = VK_IMAGE_LAYOUT_FRAGMENT_SHADING_RATE_ATTACHMENT_OPTIMAL_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeDstKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DST_KHR,
+    eVideoEncodeSrcKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_SRC_KHR,
+    eVideoEncodeDpbKHR = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eAttachmentFeedbackLoopOptimalEXT = VK_IMAGE_LAYOUT_ATTACHMENT_FEEDBACK_LOOP_OPTIMAL_EXT,
+    eAttachmentOptimalKHR = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL_KHR,
+    eDepthAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL_KHR,
+    eDepthAttachmentStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_STENCIL_READ_ONLY_OPTIMAL_KHR,
+    eDepthReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_OPTIMAL_KHR,
+    eDepthReadOnlyStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_DEPTH_READ_ONLY_STENCIL_ATTACHMENT_OPTIMAL_KHR,
+    eReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_READ_ONLY_OPTIMAL_KHR,
+    eShadingRateOptimalNV = VK_IMAGE_LAYOUT_SHADING_RATE_OPTIMAL_NV,
+    eStencilAttachmentOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL_KHR,
+    eStencilReadOnlyOptimalKHR = VK_IMAGE_LAYOUT_STENCIL_READ_ONLY_OPTIMAL_KHR
+  };
+
+  enum class ComponentSwizzle
+  {
+    eIdentity = VK_COMPONENT_SWIZZLE_IDENTITY,
+    eZero = VK_COMPONENT_SWIZZLE_ZERO,
+    eOne = VK_COMPONENT_SWIZZLE_ONE,
+    eR = VK_COMPONENT_SWIZZLE_R,
+    eG = VK_COMPONENT_SWIZZLE_G,
+    eB = VK_COMPONENT_SWIZZLE_B,
+    eA = VK_COMPONENT_SWIZZLE_A
+  };
+
+  enum class ImageViewCreateFlagBits : VkImageViewCreateFlags
+  {
+    eFragmentDensityMapDynamicEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DYNAMIC_BIT_EXT,
+    eDescriptorBufferCaptureReplayEXT = VK_IMAGE_VIEW_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eFragmentDensityMapDeferredEXT = VK_IMAGE_VIEW_CREATE_FRAGMENT_DENSITY_MAP_DEFERRED_BIT_EXT
+  };
+
+  using ImageViewCreateFlags = Flags<ImageViewCreateFlagBits>;
+
+
+  template <> struct FlagTraits<ImageViewCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageViewCreateFlags allFlags = 
+          ImageViewCreateFlagBits::eFragmentDensityMapDynamicEXT
+        | ImageViewCreateFlagBits::eDescriptorBufferCaptureReplayEXT
+        | ImageViewCreateFlagBits::eFragmentDensityMapDeferredEXT;
+  };
+
+  enum class ImageViewType
+  {
+    e1D = VK_IMAGE_VIEW_TYPE_1D,
+    e2D = VK_IMAGE_VIEW_TYPE_2D,
+    e3D = VK_IMAGE_VIEW_TYPE_3D,
+    eCube = VK_IMAGE_VIEW_TYPE_CUBE,
+    e1DArray = VK_IMAGE_VIEW_TYPE_1D_ARRAY,
+    e2DArray = VK_IMAGE_VIEW_TYPE_2D_ARRAY,
+    eCubeArray = VK_IMAGE_VIEW_TYPE_CUBE_ARRAY
+  };
+
+  enum class ShaderModuleCreateFlagBits : VkShaderModuleCreateFlags
+  {};
+
+  using ShaderModuleCreateFlags = Flags<ShaderModuleCreateFlagBits>;
+
+
+  template <> struct FlagTraits<ShaderModuleCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderModuleCreateFlags allFlags =  {};
+  };
+
+  enum class BlendFactor
+  {
+    eZero = VK_BLEND_FACTOR_ZERO,
+    eOne = VK_BLEND_FACTOR_ONE,
+    eSrcColor = VK_BLEND_FACTOR_SRC_COLOR,
+    eOneMinusSrcColor = VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
+    eDstColor = VK_BLEND_FACTOR_DST_COLOR,
+    eOneMinusDstColor = VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
+    eSrcAlpha = VK_BLEND_FACTOR_SRC_ALPHA,
+    eOneMinusSrcAlpha = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
+    eDstAlpha = VK_BLEND_FACTOR_DST_ALPHA,
+    eOneMinusDstAlpha = VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
+    eConstantColor = VK_BLEND_FACTOR_CONSTANT_COLOR,
+    eOneMinusConstantColor = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
+    eConstantAlpha = VK_BLEND_FACTOR_CONSTANT_ALPHA,
+    eOneMinusConstantAlpha = VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
+    eSrcAlphaSaturate = VK_BLEND_FACTOR_SRC_ALPHA_SATURATE,
+    eSrc1Color = VK_BLEND_FACTOR_SRC1_COLOR,
+    eOneMinusSrc1Color = VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR,
+    eSrc1Alpha = VK_BLEND_FACTOR_SRC1_ALPHA,
+    eOneMinusSrc1Alpha = VK_BLEND_FACTOR_ONE_MINUS_SRC1_ALPHA
+  };
+
+  enum class BlendOp
+  {
+    eAdd = VK_BLEND_OP_ADD,
+    eSubtract = VK_BLEND_OP_SUBTRACT,
+    eReverseSubtract = VK_BLEND_OP_REVERSE_SUBTRACT,
+    eMin = VK_BLEND_OP_MIN,
+    eMax = VK_BLEND_OP_MAX,
+    eZeroEXT = VK_BLEND_OP_ZERO_EXT,
+    eSrcEXT = VK_BLEND_OP_SRC_EXT,
+    eDstEXT = VK_BLEND_OP_DST_EXT,
+    eSrcOverEXT = VK_BLEND_OP_SRC_OVER_EXT,
+    eDstOverEXT = VK_BLEND_OP_DST_OVER_EXT,
+    eSrcInEXT = VK_BLEND_OP_SRC_IN_EXT,
+    eDstInEXT = VK_BLEND_OP_DST_IN_EXT,
+    eSrcOutEXT = VK_BLEND_OP_SRC_OUT_EXT,
+    eDstOutEXT = VK_BLEND_OP_DST_OUT_EXT,
+    eSrcAtopEXT = VK_BLEND_OP_SRC_ATOP_EXT,
+    eDstAtopEXT = VK_BLEND_OP_DST_ATOP_EXT,
+    eXorEXT = VK_BLEND_OP_XOR_EXT,
+    eMultiplyEXT = VK_BLEND_OP_MULTIPLY_EXT,
+    eScreenEXT = VK_BLEND_OP_SCREEN_EXT,
+    eOverlayEXT = VK_BLEND_OP_OVERLAY_EXT,
+    eDarkenEXT = VK_BLEND_OP_DARKEN_EXT,
+    eLightenEXT = VK_BLEND_OP_LIGHTEN_EXT,
+    eColordodgeEXT = VK_BLEND_OP_COLORDODGE_EXT,
+    eColorburnEXT = VK_BLEND_OP_COLORBURN_EXT,
+    eHardlightEXT = VK_BLEND_OP_HARDLIGHT_EXT,
+    eSoftlightEXT = VK_BLEND_OP_SOFTLIGHT_EXT,
+    eDifferenceEXT = VK_BLEND_OP_DIFFERENCE_EXT,
+    eExclusionEXT = VK_BLEND_OP_EXCLUSION_EXT,
+    eInvertEXT = VK_BLEND_OP_INVERT_EXT,
+    eInvertRgbEXT = VK_BLEND_OP_INVERT_RGB_EXT,
+    eLineardodgeEXT = VK_BLEND_OP_LINEARDODGE_EXT,
+    eLinearburnEXT = VK_BLEND_OP_LINEARBURN_EXT,
+    eVividlightEXT = VK_BLEND_OP_VIVIDLIGHT_EXT,
+    eLinearlightEXT = VK_BLEND_OP_LINEARLIGHT_EXT,
+    ePinlightEXT = VK_BLEND_OP_PINLIGHT_EXT,
+    eHardmixEXT = VK_BLEND_OP_HARDMIX_EXT,
+    eHslHueEXT = VK_BLEND_OP_HSL_HUE_EXT,
+    eHslSaturationEXT = VK_BLEND_OP_HSL_SATURATION_EXT,
+    eHslColorEXT = VK_BLEND_OP_HSL_COLOR_EXT,
+    eHslLuminosityEXT = VK_BLEND_OP_HSL_LUMINOSITY_EXT,
+    ePlusEXT = VK_BLEND_OP_PLUS_EXT,
+    ePlusClampedEXT = VK_BLEND_OP_PLUS_CLAMPED_EXT,
+    ePlusClampedAlphaEXT = VK_BLEND_OP_PLUS_CLAMPED_ALPHA_EXT,
+    ePlusDarkerEXT = VK_BLEND_OP_PLUS_DARKER_EXT,
+    eMinusEXT = VK_BLEND_OP_MINUS_EXT,
+    eMinusClampedEXT = VK_BLEND_OP_MINUS_CLAMPED_EXT,
+    eContrastEXT = VK_BLEND_OP_CONTRAST_EXT,
+    eInvertOvgEXT = VK_BLEND_OP_INVERT_OVG_EXT,
+    eRedEXT = VK_BLEND_OP_RED_EXT,
+    eGreenEXT = VK_BLEND_OP_GREEN_EXT,
+    eBlueEXT = VK_BLEND_OP_BLUE_EXT
+  };
+
+  enum class ColorComponentFlagBits : VkColorComponentFlags
+  {
+    eR = VK_COLOR_COMPONENT_R_BIT,
+    eG = VK_COLOR_COMPONENT_G_BIT,
+    eB = VK_COLOR_COMPONENT_B_BIT,
+    eA = VK_COLOR_COMPONENT_A_BIT
+  };
+
+  using ColorComponentFlags = Flags<ColorComponentFlagBits>;
+
+
+  template <> struct FlagTraits<ColorComponentFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ColorComponentFlags allFlags = 
+          ColorComponentFlagBits::eR
+        | ColorComponentFlagBits::eG
+        | ColorComponentFlagBits::eB
+        | ColorComponentFlagBits::eA;
+  };
+
+  enum class CompareOp
+  {
+    eNever = VK_COMPARE_OP_NEVER,
+    eLess = VK_COMPARE_OP_LESS,
+    eEqual = VK_COMPARE_OP_EQUAL,
+    eLessOrEqual = VK_COMPARE_OP_LESS_OR_EQUAL,
+    eGreater = VK_COMPARE_OP_GREATER,
+    eNotEqual = VK_COMPARE_OP_NOT_EQUAL,
+    eGreaterOrEqual = VK_COMPARE_OP_GREATER_OR_EQUAL,
+    eAlways = VK_COMPARE_OP_ALWAYS
+  };
+
+  enum class CullModeFlagBits : VkCullModeFlags
+  {
+    eNone = VK_CULL_MODE_NONE,
+    eFront = VK_CULL_MODE_FRONT_BIT,
+    eBack = VK_CULL_MODE_BACK_BIT,
+    eFrontAndBack = VK_CULL_MODE_FRONT_AND_BACK
+  };
+
+  using CullModeFlags = Flags<CullModeFlagBits>;
+
+
+  template <> struct FlagTraits<CullModeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CullModeFlags allFlags = 
+          CullModeFlagBits::eNone
+        | CullModeFlagBits::eFront
+        | CullModeFlagBits::eBack
+        | CullModeFlagBits::eFrontAndBack;
+  };
+
+  enum class DynamicState
+  {
+    eViewport = VK_DYNAMIC_STATE_VIEWPORT,
+    eScissor = VK_DYNAMIC_STATE_SCISSOR,
+    eLineWidth = VK_DYNAMIC_STATE_LINE_WIDTH,
+    eDepthBias = VK_DYNAMIC_STATE_DEPTH_BIAS,
+    eBlendConstants = VK_DYNAMIC_STATE_BLEND_CONSTANTS,
+    eDepthBounds = VK_DYNAMIC_STATE_DEPTH_BOUNDS,
+    eStencilCompareMask = VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
+    eStencilWriteMask = VK_DYNAMIC_STATE_STENCIL_WRITE_MASK,
+    eStencilReference = VK_DYNAMIC_STATE_STENCIL_REFERENCE,
+    eCullMode = VK_DYNAMIC_STATE_CULL_MODE,
+    eFrontFace = VK_DYNAMIC_STATE_FRONT_FACE,
+    ePrimitiveTopology = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY,
+    eViewportWithCount = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT,
+    eScissorWithCount = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT,
+    eVertexInputBindingStride = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE,
+    eDepthTestEnable = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE,
+    eDepthWriteEnable = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE,
+    eDepthCompareOp = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP,
+    eDepthBoundsTestEnable = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE,
+    eStencilTestEnable = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE,
+    eStencilOp = VK_DYNAMIC_STATE_STENCIL_OP,
+    eRasterizerDiscardEnable = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE,
+    eDepthBiasEnable = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE,
+    ePrimitiveRestartEnable = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE,
+    eViewportWScalingNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_NV,
+    eDiscardRectangleEXT = VK_DYNAMIC_STATE_DISCARD_RECTANGLE_EXT,
+    eSampleLocationsEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_EXT,
+    eRayTracingPipelineStackSizeKHR = VK_DYNAMIC_STATE_RAY_TRACING_PIPELINE_STACK_SIZE_KHR,
+    eViewportShadingRatePaletteNV = VK_DYNAMIC_STATE_VIEWPORT_SHADING_RATE_PALETTE_NV,
+    eViewportCoarseSampleOrderNV = VK_DYNAMIC_STATE_VIEWPORT_COARSE_SAMPLE_ORDER_NV,
+    eExclusiveScissorNV = VK_DYNAMIC_STATE_EXCLUSIVE_SCISSOR_NV,
+    eFragmentShadingRateKHR = VK_DYNAMIC_STATE_FRAGMENT_SHADING_RATE_KHR,
+    eLineStippleEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_EXT,
+    eVertexInputEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_EXT,
+    ePatchControlPointsEXT = VK_DYNAMIC_STATE_PATCH_CONTROL_POINTS_EXT,
+    eLogicOpEXT = VK_DYNAMIC_STATE_LOGIC_OP_EXT,
+    eColorWriteEnableEXT = VK_DYNAMIC_STATE_COLOR_WRITE_ENABLE_EXT,
+    eTessellationDomainOriginEXT = VK_DYNAMIC_STATE_TESSELLATION_DOMAIN_ORIGIN_EXT,
+    eDepthClampEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLAMP_ENABLE_EXT,
+    ePolygonModeEXT = VK_DYNAMIC_STATE_POLYGON_MODE_EXT,
+    eRasterizationSamplesEXT = VK_DYNAMIC_STATE_RASTERIZATION_SAMPLES_EXT,
+    eSampleMaskEXT = VK_DYNAMIC_STATE_SAMPLE_MASK_EXT,
+    eAlphaToCoverageEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_COVERAGE_ENABLE_EXT,
+    eAlphaToOneEnableEXT = VK_DYNAMIC_STATE_ALPHA_TO_ONE_ENABLE_EXT,
+    eLogicOpEnableEXT = VK_DYNAMIC_STATE_LOGIC_OP_ENABLE_EXT,
+    eColorBlendEnableEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ENABLE_EXT,
+    eColorBlendEquationEXT = VK_DYNAMIC_STATE_COLOR_BLEND_EQUATION_EXT,
+    eColorWriteMaskEXT = VK_DYNAMIC_STATE_COLOR_WRITE_MASK_EXT,
+    eRasterizationStreamEXT = VK_DYNAMIC_STATE_RASTERIZATION_STREAM_EXT,
+    eConservativeRasterizationModeEXT = VK_DYNAMIC_STATE_CONSERVATIVE_RASTERIZATION_MODE_EXT,
+    eExtraPrimitiveOverestimationSizeEXT = VK_DYNAMIC_STATE_EXTRA_PRIMITIVE_OVERESTIMATION_SIZE_EXT,
+    eDepthClipEnableEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_ENABLE_EXT,
+    eSampleLocationsEnableEXT = VK_DYNAMIC_STATE_SAMPLE_LOCATIONS_ENABLE_EXT,
+    eColorBlendAdvancedEXT = VK_DYNAMIC_STATE_COLOR_BLEND_ADVANCED_EXT,
+    eProvokingVertexModeEXT = VK_DYNAMIC_STATE_PROVOKING_VERTEX_MODE_EXT,
+    eLineRasterizationModeEXT = VK_DYNAMIC_STATE_LINE_RASTERIZATION_MODE_EXT,
+    eLineStippleEnableEXT = VK_DYNAMIC_STATE_LINE_STIPPLE_ENABLE_EXT,
+    eDepthClipNegativeOneToOneEXT = VK_DYNAMIC_STATE_DEPTH_CLIP_NEGATIVE_ONE_TO_ONE_EXT,
+    eViewportWScalingEnableNV = VK_DYNAMIC_STATE_VIEWPORT_W_SCALING_ENABLE_NV,
+    eViewportSwizzleNV = VK_DYNAMIC_STATE_VIEWPORT_SWIZZLE_NV,
+    eCoverageToColorEnableNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_ENABLE_NV,
+    eCoverageToColorLocationNV = VK_DYNAMIC_STATE_COVERAGE_TO_COLOR_LOCATION_NV,
+    eCoverageModulationModeNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_MODE_NV,
+    eCoverageModulationTableEnableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_ENABLE_NV,
+    eCoverageModulationTableNV = VK_DYNAMIC_STATE_COVERAGE_MODULATION_TABLE_NV,
+    eShadingRateImageEnableNV = VK_DYNAMIC_STATE_SHADING_RATE_IMAGE_ENABLE_NV,
+    eRepresentativeFragmentTestEnableNV = VK_DYNAMIC_STATE_REPRESENTATIVE_FRAGMENT_TEST_ENABLE_NV,
+    eCoverageReductionModeNV = VK_DYNAMIC_STATE_COVERAGE_REDUCTION_MODE_NV,
+    eCullModeEXT = VK_DYNAMIC_STATE_CULL_MODE_EXT,
+    eDepthBiasEnableEXT = VK_DYNAMIC_STATE_DEPTH_BIAS_ENABLE_EXT,
+    eDepthBoundsTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
+    eDepthCompareOpEXT = VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
+    eDepthTestEnableEXT = VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
+    eDepthWriteEnableEXT = VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
+    eFrontFaceEXT = VK_DYNAMIC_STATE_FRONT_FACE_EXT,
+    ePrimitiveRestartEnableEXT = VK_DYNAMIC_STATE_PRIMITIVE_RESTART_ENABLE_EXT,
+    ePrimitiveTopologyEXT = VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
+    eRasterizerDiscardEnableEXT = VK_DYNAMIC_STATE_RASTERIZER_DISCARD_ENABLE_EXT,
+    eScissorWithCountEXT = VK_DYNAMIC_STATE_SCISSOR_WITH_COUNT_EXT,
+    eStencilOpEXT = VK_DYNAMIC_STATE_STENCIL_OP_EXT,
+    eStencilTestEnableEXT = VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
+    eVertexInputBindingStrideEXT = VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
+    eViewportWithCountEXT = VK_DYNAMIC_STATE_VIEWPORT_WITH_COUNT_EXT
+  };
+
+  enum class FrontFace
+  {
+    eCounterClockwise = VK_FRONT_FACE_COUNTER_CLOCKWISE,
+    eClockwise = VK_FRONT_FACE_CLOCKWISE
+  };
+
+  enum class LogicOp
+  {
+    eClear = VK_LOGIC_OP_CLEAR,
+    eAnd = VK_LOGIC_OP_AND,
+    eAndReverse = VK_LOGIC_OP_AND_REVERSE,
+    eCopy = VK_LOGIC_OP_COPY,
+    eAndInverted = VK_LOGIC_OP_AND_INVERTED,
+    eNoOp = VK_LOGIC_OP_NO_OP,
+    eXor = VK_LOGIC_OP_XOR,
+    eOr = VK_LOGIC_OP_OR,
+    eNor = VK_LOGIC_OP_NOR,
+    eEquivalent = VK_LOGIC_OP_EQUIVALENT,
+    eInvert = VK_LOGIC_OP_INVERT,
+    eOrReverse = VK_LOGIC_OP_OR_REVERSE,
+    eCopyInverted = VK_LOGIC_OP_COPY_INVERTED,
+    eOrInverted = VK_LOGIC_OP_OR_INVERTED,
+    eNand = VK_LOGIC_OP_NAND,
+    eSet = VK_LOGIC_OP_SET
+  };
+
+  enum class PipelineCreateFlagBits : VkPipelineCreateFlags
+  {
+    eDisableOptimization = VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT,
+    eAllowDerivatives = VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT,
+    eDerivative = VK_PIPELINE_CREATE_DERIVATIVE_BIT,
+    eViewIndexFromDeviceIndex = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT,
+    eDispatchBase = VK_PIPELINE_CREATE_DISPATCH_BASE_BIT,
+    eFailOnPipelineCompileRequired = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT,
+    eEarlyReturnOnFailure = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT,
+    eRenderingFragmentShadingRateAttachmentKHR = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eRenderingFragmentDensityMapAttachmentEXT = VK_PIPELINE_CREATE_RENDERING_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
+    eRayTracingNoNullAnyHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_ANY_HIT_SHADERS_BIT_KHR,
+    eRayTracingNoNullClosestHitShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_CLOSEST_HIT_SHADERS_BIT_KHR,
+    eRayTracingNoNullMissShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_MISS_SHADERS_BIT_KHR,
+    eRayTracingNoNullIntersectionShadersKHR = VK_PIPELINE_CREATE_RAY_TRACING_NO_NULL_INTERSECTION_SHADERS_BIT_KHR,
+    eRayTracingSkipTrianglesKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_TRIANGLES_BIT_KHR,
+    eRayTracingSkipAabbsKHR = VK_PIPELINE_CREATE_RAY_TRACING_SKIP_AABBS_BIT_KHR,
+    eRayTracingShaderGroupHandleCaptureReplayKHR = VK_PIPELINE_CREATE_RAY_TRACING_SHADER_GROUP_HANDLE_CAPTURE_REPLAY_BIT_KHR,
+    eDeferCompileNV = VK_PIPELINE_CREATE_DEFER_COMPILE_BIT_NV,
+    eCaptureStatisticsKHR = VK_PIPELINE_CREATE_CAPTURE_STATISTICS_BIT_KHR,
+    eCaptureInternalRepresentationsKHR = VK_PIPELINE_CREATE_CAPTURE_INTERNAL_REPRESENTATIONS_BIT_KHR,
+    eIndirectBindableNV = VK_PIPELINE_CREATE_INDIRECT_BINDABLE_BIT_NV,
+    eLibraryKHR = VK_PIPELINE_CREATE_LIBRARY_BIT_KHR,
+    eDescriptorBufferEXT = VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT,
+    eRetainLinkTimeOptimizationInfoEXT = VK_PIPELINE_CREATE_RETAIN_LINK_TIME_OPTIMIZATION_INFO_BIT_EXT,
+    eLinkTimeOptimizationEXT = VK_PIPELINE_CREATE_LINK_TIME_OPTIMIZATION_BIT_EXT,
+    eRayTracingAllowMotionNV = VK_PIPELINE_CREATE_RAY_TRACING_ALLOW_MOTION_BIT_NV,
+    eColorAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_COLOR_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+    eDepthStencilAttachmentFeedbackLoopEXT = VK_PIPELINE_CREATE_DEPTH_STENCIL_ATTACHMENT_FEEDBACK_LOOP_BIT_EXT,
+    eRayTracingOpacityMicromapEXT = VK_PIPELINE_CREATE_RAY_TRACING_OPACITY_MICROMAP_BIT_EXT,
+    eNoProtectedAccessEXT = VK_PIPELINE_CREATE_NO_PROTECTED_ACCESS_BIT_EXT,
+    eProtectedAccessOnlyEXT = VK_PIPELINE_CREATE_PROTECTED_ACCESS_ONLY_BIT_EXT,
+    eDispatchBaseKHR = VK_PIPELINE_CREATE_DISPATCH_BASE_KHR,
+    eEarlyReturnOnFailureEXT = VK_PIPELINE_CREATE_EARLY_RETURN_ON_FAILURE_BIT_EXT,
+    eFailOnPipelineCompileRequiredEXT = VK_PIPELINE_CREATE_FAIL_ON_PIPELINE_COMPILE_REQUIRED_BIT_EXT,
+    eViewIndexFromDeviceIndexKHR = VK_PIPELINE_CREATE_VIEW_INDEX_FROM_DEVICE_INDEX_BIT_KHR,
+    eVkPipelineRasterizationStateCreateFragmentDensityMapAttachmentEXT = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_DENSITY_MAP_ATTACHMENT_BIT_EXT,
+    eVkPipelineRasterizationStateCreateFragmentShadingRateAttachmentKHR = VK_PIPELINE_RASTERIZATION_STATE_CREATE_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR
+  };
+
+  using PipelineCreateFlags = Flags<PipelineCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreateFlags allFlags = 
+          PipelineCreateFlagBits::eDisableOptimization
+        | PipelineCreateFlagBits::eAllowDerivatives
+        | PipelineCreateFlagBits::eDerivative
+        | PipelineCreateFlagBits::eViewIndexFromDeviceIndex
+        | PipelineCreateFlagBits::eDispatchBase
+        | PipelineCreateFlagBits::eFailOnPipelineCompileRequired
+        | PipelineCreateFlagBits::eEarlyReturnOnFailure
+        | PipelineCreateFlagBits::eRenderingFragmentShadingRateAttachmentKHR
+        | PipelineCreateFlagBits::eRenderingFragmentDensityMapAttachmentEXT
+        | PipelineCreateFlagBits::eRayTracingNoNullAnyHitShadersKHR
+        | PipelineCreateFlagBits::eRayTracingNoNullClosestHitShadersKHR
+        | PipelineCreateFlagBits::eRayTracingNoNullMissShadersKHR
+        | PipelineCreateFlagBits::eRayTracingNoNullIntersectionShadersKHR
+        | PipelineCreateFlagBits::eRayTracingSkipTrianglesKHR
+        | PipelineCreateFlagBits::eRayTracingSkipAabbsKHR
+        | PipelineCreateFlagBits::eRayTracingShaderGroupHandleCaptureReplayKHR
+        | PipelineCreateFlagBits::eDeferCompileNV
+        | PipelineCreateFlagBits::eCaptureStatisticsKHR
+        | PipelineCreateFlagBits::eCaptureInternalRepresentationsKHR
+        | PipelineCreateFlagBits::eIndirectBindableNV
+        | PipelineCreateFlagBits::eLibraryKHR
+        | PipelineCreateFlagBits::eDescriptorBufferEXT
+        | PipelineCreateFlagBits::eRetainLinkTimeOptimizationInfoEXT
+        | PipelineCreateFlagBits::eLinkTimeOptimizationEXT
+        | PipelineCreateFlagBits::eRayTracingAllowMotionNV
+        | PipelineCreateFlagBits::eColorAttachmentFeedbackLoopEXT
+        | PipelineCreateFlagBits::eDepthStencilAttachmentFeedbackLoopEXT
+        | PipelineCreateFlagBits::eRayTracingOpacityMicromapEXT
+        | PipelineCreateFlagBits::eNoProtectedAccessEXT
+        | PipelineCreateFlagBits::eProtectedAccessOnlyEXT;
+  };
+
+  enum class PipelineShaderStageCreateFlagBits : VkPipelineShaderStageCreateFlags
+  {
+    eAllowVaryingSubgroupSize = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT,
+    eRequireFullSubgroups = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT,
+    eAllowVaryingSubgroupSizeEXT = VK_PIPELINE_SHADER_STAGE_CREATE_ALLOW_VARYING_SUBGROUP_SIZE_BIT_EXT,
+    eRequireFullSubgroupsEXT = VK_PIPELINE_SHADER_STAGE_CREATE_REQUIRE_FULL_SUBGROUPS_BIT_EXT
+  };
+
+  using PipelineShaderStageCreateFlags = Flags<PipelineShaderStageCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineShaderStageCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineShaderStageCreateFlags allFlags = 
+          PipelineShaderStageCreateFlagBits::eAllowVaryingSubgroupSize
+        | PipelineShaderStageCreateFlagBits::eRequireFullSubgroups;
+  };
+
+  enum class PolygonMode
+  {
+    eFill = VK_POLYGON_MODE_FILL,
+    eLine = VK_POLYGON_MODE_LINE,
+    ePoint = VK_POLYGON_MODE_POINT,
+    eFillRectangleNV = VK_POLYGON_MODE_FILL_RECTANGLE_NV
+  };
+
+  enum class PrimitiveTopology
+  {
+    ePointList = VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
+    eLineList = VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
+    eLineStrip = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
+    eTriangleList = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
+    eTriangleStrip = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
+    eTriangleFan = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN,
+    eLineListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_LIST_WITH_ADJACENCY,
+    eLineStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_LINE_STRIP_WITH_ADJACENCY,
+    eTriangleListWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST_WITH_ADJACENCY,
+    eTriangleStripWithAdjacency = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP_WITH_ADJACENCY,
+    ePatchList = VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
+  };
+
+  enum class ShaderStageFlagBits : VkShaderStageFlags
+  {
+    eVertex = VK_SHADER_STAGE_VERTEX_BIT,
+    eTessellationControl = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT,
+    eTessellationEvaluation = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT,
+    eGeometry = VK_SHADER_STAGE_GEOMETRY_BIT,
+    eFragment = VK_SHADER_STAGE_FRAGMENT_BIT,
+    eCompute = VK_SHADER_STAGE_COMPUTE_BIT,
+    eAllGraphics = VK_SHADER_STAGE_ALL_GRAPHICS,
+    eAll = VK_SHADER_STAGE_ALL,
+    eRaygenKHR = VK_SHADER_STAGE_RAYGEN_BIT_KHR,
+    eAnyHitKHR = VK_SHADER_STAGE_ANY_HIT_BIT_KHR,
+    eClosestHitKHR = VK_SHADER_STAGE_CLOSEST_HIT_BIT_KHR,
+    eMissKHR = VK_SHADER_STAGE_MISS_BIT_KHR,
+    eIntersectionKHR = VK_SHADER_STAGE_INTERSECTION_BIT_KHR,
+    eCallableKHR = VK_SHADER_STAGE_CALLABLE_BIT_KHR,
+    eTaskEXT = VK_SHADER_STAGE_TASK_BIT_EXT,
+    eMeshEXT = VK_SHADER_STAGE_MESH_BIT_EXT,
+    eSubpassShadingHUAWEI = VK_SHADER_STAGE_SUBPASS_SHADING_BIT_HUAWEI,
+    eAnyHitNV = VK_SHADER_STAGE_ANY_HIT_BIT_NV,
+    eCallableNV = VK_SHADER_STAGE_CALLABLE_BIT_NV,
+    eClosestHitNV = VK_SHADER_STAGE_CLOSEST_HIT_BIT_NV,
+    eIntersectionNV = VK_SHADER_STAGE_INTERSECTION_BIT_NV,
+    eMeshNV = VK_SHADER_STAGE_MESH_BIT_NV,
+    eMissNV = VK_SHADER_STAGE_MISS_BIT_NV,
+    eRaygenNV = VK_SHADER_STAGE_RAYGEN_BIT_NV,
+    eTaskNV = VK_SHADER_STAGE_TASK_BIT_NV
+  };
+
+  using ShaderStageFlags = Flags<ShaderStageFlagBits>;
+
+
+  template <> struct FlagTraits<ShaderStageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderStageFlags allFlags = 
+          ShaderStageFlagBits::eVertex
+        | ShaderStageFlagBits::eTessellationControl
+        | ShaderStageFlagBits::eTessellationEvaluation
+        | ShaderStageFlagBits::eGeometry
+        | ShaderStageFlagBits::eFragment
+        | ShaderStageFlagBits::eCompute
+        | ShaderStageFlagBits::eAllGraphics
+        | ShaderStageFlagBits::eAll
+        | ShaderStageFlagBits::eRaygenKHR
+        | ShaderStageFlagBits::eAnyHitKHR
+        | ShaderStageFlagBits::eClosestHitKHR
+        | ShaderStageFlagBits::eMissKHR
+        | ShaderStageFlagBits::eIntersectionKHR
+        | ShaderStageFlagBits::eCallableKHR
+        | ShaderStageFlagBits::eTaskEXT
+        | ShaderStageFlagBits::eMeshEXT
+        | ShaderStageFlagBits::eSubpassShadingHUAWEI;
+  };
+
+  enum class StencilOp
+  {
+    eKeep = VK_STENCIL_OP_KEEP,
+    eZero = VK_STENCIL_OP_ZERO,
+    eReplace = VK_STENCIL_OP_REPLACE,
+    eIncrementAndClamp = VK_STENCIL_OP_INCREMENT_AND_CLAMP,
+    eDecrementAndClamp = VK_STENCIL_OP_DECREMENT_AND_CLAMP,
+    eInvert = VK_STENCIL_OP_INVERT,
+    eIncrementAndWrap = VK_STENCIL_OP_INCREMENT_AND_WRAP,
+    eDecrementAndWrap = VK_STENCIL_OP_DECREMENT_AND_WRAP
+  };
+
+  enum class VertexInputRate
+  {
+    eVertex = VK_VERTEX_INPUT_RATE_VERTEX,
+    eInstance = VK_VERTEX_INPUT_RATE_INSTANCE
+  };
+
+  enum class PipelineDynamicStateCreateFlagBits : VkPipelineDynamicStateCreateFlags
+  {};
+
+  using PipelineDynamicStateCreateFlags = Flags<PipelineDynamicStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineDynamicStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDynamicStateCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineInputAssemblyStateCreateFlagBits : VkPipelineInputAssemblyStateCreateFlags
+  {};
+
+  using PipelineInputAssemblyStateCreateFlags = Flags<PipelineInputAssemblyStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineInputAssemblyStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineInputAssemblyStateCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineMultisampleStateCreateFlagBits : VkPipelineMultisampleStateCreateFlags
+  {};
+
+  using PipelineMultisampleStateCreateFlags = Flags<PipelineMultisampleStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineMultisampleStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineMultisampleStateCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineRasterizationStateCreateFlagBits : VkPipelineRasterizationStateCreateFlags
+  {};
+
+  using PipelineRasterizationStateCreateFlags = Flags<PipelineRasterizationStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineRasterizationStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationStateCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineTessellationStateCreateFlagBits : VkPipelineTessellationStateCreateFlags
+  {};
+
+  using PipelineTessellationStateCreateFlags = Flags<PipelineTessellationStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineTessellationStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineTessellationStateCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineVertexInputStateCreateFlagBits : VkPipelineVertexInputStateCreateFlags
+  {};
+
+  using PipelineVertexInputStateCreateFlags = Flags<PipelineVertexInputStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineVertexInputStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineVertexInputStateCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineViewportStateCreateFlagBits : VkPipelineViewportStateCreateFlags
+  {};
+
+  using PipelineViewportStateCreateFlags = Flags<PipelineViewportStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineViewportStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineViewportStateCreateFlags allFlags =  {};
+  };
+
+  enum class BorderColor
+  {
+    eFloatTransparentBlack = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
+    eIntTransparentBlack = VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
+    eFloatOpaqueBlack = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
+    eIntOpaqueBlack = VK_BORDER_COLOR_INT_OPAQUE_BLACK,
+    eFloatOpaqueWhite = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
+    eIntOpaqueWhite = VK_BORDER_COLOR_INT_OPAQUE_WHITE,
+    eFloatCustomEXT = VK_BORDER_COLOR_FLOAT_CUSTOM_EXT,
+    eIntCustomEXT = VK_BORDER_COLOR_INT_CUSTOM_EXT
+  };
+
+  enum class Filter
+  {
+    eNearest = VK_FILTER_NEAREST,
+    eLinear = VK_FILTER_LINEAR,
+    eCubicEXT = VK_FILTER_CUBIC_EXT,
+    eCubicIMG = VK_FILTER_CUBIC_IMG
+  };
+
+  enum class SamplerAddressMode
+  {
+    eRepeat = VK_SAMPLER_ADDRESS_MODE_REPEAT,
+    eMirroredRepeat = VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
+    eClampToEdge = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
+    eClampToBorder = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER,
+    eMirrorClampToEdge = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE,
+    eMirrorClampToEdgeKHR = VK_SAMPLER_ADDRESS_MODE_MIRROR_CLAMP_TO_EDGE_KHR
+  };
+
+  enum class SamplerCreateFlagBits : VkSamplerCreateFlags
+  {
+    eSubsampledEXT = VK_SAMPLER_CREATE_SUBSAMPLED_BIT_EXT,
+    eSubsampledCoarseReconstructionEXT = VK_SAMPLER_CREATE_SUBSAMPLED_COARSE_RECONSTRUCTION_BIT_EXT,
+    eDescriptorBufferCaptureReplayEXT = VK_SAMPLER_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eNonSeamlessCubeMapEXT = VK_SAMPLER_CREATE_NON_SEAMLESS_CUBE_MAP_BIT_EXT,
+    eImageProcessingQCOM = VK_SAMPLER_CREATE_IMAGE_PROCESSING_BIT_QCOM
+  };
+
+  using SamplerCreateFlags = Flags<SamplerCreateFlagBits>;
+
+
+  template <> struct FlagTraits<SamplerCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SamplerCreateFlags allFlags = 
+          SamplerCreateFlagBits::eSubsampledEXT
+        | SamplerCreateFlagBits::eSubsampledCoarseReconstructionEXT
+        | SamplerCreateFlagBits::eDescriptorBufferCaptureReplayEXT
+        | SamplerCreateFlagBits::eNonSeamlessCubeMapEXT
+        | SamplerCreateFlagBits::eImageProcessingQCOM;
+  };
+
+  enum class SamplerMipmapMode
+  {
+    eNearest = VK_SAMPLER_MIPMAP_MODE_NEAREST,
+    eLinear = VK_SAMPLER_MIPMAP_MODE_LINEAR
+  };
+
+  enum class DescriptorPoolCreateFlagBits : VkDescriptorPoolCreateFlags
+  {
+    eFreeDescriptorSet = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT,
+    eUpdateAfterBind = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT,
+    eHostOnlyEXT = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_EXT,
+    eHostOnlyVALVE = VK_DESCRIPTOR_POOL_CREATE_HOST_ONLY_BIT_VALVE,
+    eUpdateAfterBindEXT = VK_DESCRIPTOR_POOL_CREATE_UPDATE_AFTER_BIND_BIT_EXT
+  };
+
+  using DescriptorPoolCreateFlags = Flags<DescriptorPoolCreateFlagBits>;
+
+
+  template <> struct FlagTraits<DescriptorPoolCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorPoolCreateFlags allFlags = 
+          DescriptorPoolCreateFlagBits::eFreeDescriptorSet
+        | DescriptorPoolCreateFlagBits::eUpdateAfterBind
+        | DescriptorPoolCreateFlagBits::eHostOnlyEXT;
+  };
+
+  enum class DescriptorSetLayoutCreateFlagBits : VkDescriptorSetLayoutCreateFlags
+  {
+    eUpdateAfterBindPool = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT,
+    ePushDescriptorKHR = VK_DESCRIPTOR_SET_LAYOUT_CREATE_PUSH_DESCRIPTOR_BIT_KHR,
+    eDescriptorBufferEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT,
+    eEmbeddedImmutableSamplersEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_EMBEDDED_IMMUTABLE_SAMPLERS_BIT_EXT,
+    eHostOnlyPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_EXT,
+    eHostOnlyPoolVALVE = VK_DESCRIPTOR_SET_LAYOUT_CREATE_HOST_ONLY_POOL_BIT_VALVE,
+    eUpdateAfterBindPoolEXT = VK_DESCRIPTOR_SET_LAYOUT_CREATE_UPDATE_AFTER_BIND_POOL_BIT_EXT
+  };
+
+  using DescriptorSetLayoutCreateFlags = Flags<DescriptorSetLayoutCreateFlagBits>;
+
+
+  template <> struct FlagTraits<DescriptorSetLayoutCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorSetLayoutCreateFlags allFlags = 
+          DescriptorSetLayoutCreateFlagBits::eUpdateAfterBindPool
+        | DescriptorSetLayoutCreateFlagBits::ePushDescriptorKHR
+        | DescriptorSetLayoutCreateFlagBits::eDescriptorBufferEXT
+        | DescriptorSetLayoutCreateFlagBits::eEmbeddedImmutableSamplersEXT
+        | DescriptorSetLayoutCreateFlagBits::eHostOnlyPoolEXT;
+  };
+
+  enum class DescriptorType
+  {
+    eSampler = VK_DESCRIPTOR_TYPE_SAMPLER,
+    eCombinedImageSampler = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
+    eSampledImage = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE,
+    eStorageImage = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
+    eUniformTexelBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER,
+    eStorageTexelBuffer = VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER,
+    eUniformBuffer = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
+    eStorageBuffer = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
+    eUniformBufferDynamic = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
+    eStorageBufferDynamic = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC,
+    eInputAttachment = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT,
+    eInlineUniformBlock = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK,
+    eAccelerationStructureKHR = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR,
+    eAccelerationStructureNV = VK_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_NV,
+    eSampleWeightImageQCOM = VK_DESCRIPTOR_TYPE_SAMPLE_WEIGHT_IMAGE_QCOM,
+    eBlockMatchImageQCOM = VK_DESCRIPTOR_TYPE_BLOCK_MATCH_IMAGE_QCOM,
+    eMutableEXT = VK_DESCRIPTOR_TYPE_MUTABLE_EXT,
+    eInlineUniformBlockEXT = VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT,
+    eMutableVALVE = VK_DESCRIPTOR_TYPE_MUTABLE_VALVE
+  };
+
+  enum class DescriptorPoolResetFlagBits : VkDescriptorPoolResetFlags
+  {};
+
+  using DescriptorPoolResetFlags = Flags<DescriptorPoolResetFlagBits>;
+
+
+  template <> struct FlagTraits<DescriptorPoolResetFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorPoolResetFlags allFlags =  {};
+  };
+
+  enum class AccessFlagBits : VkAccessFlags
+  {
+    eIndirectCommandRead = VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
+    eIndexRead = VK_ACCESS_INDEX_READ_BIT,
+    eVertexAttributeRead = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
+    eUniformRead = VK_ACCESS_UNIFORM_READ_BIT,
+    eInputAttachmentRead = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
+    eShaderRead = VK_ACCESS_SHADER_READ_BIT,
+    eShaderWrite = VK_ACCESS_SHADER_WRITE_BIT,
+    eColorAttachmentRead = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
+    eColorAttachmentWrite = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
+    eDepthStencilAttachmentRead = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
+    eDepthStencilAttachmentWrite = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+    eTransferRead = VK_ACCESS_TRANSFER_READ_BIT,
+    eTransferWrite = VK_ACCESS_TRANSFER_WRITE_BIT,
+    eHostRead = VK_ACCESS_HOST_READ_BIT,
+    eHostWrite = VK_ACCESS_HOST_WRITE_BIT,
+    eMemoryRead = VK_ACCESS_MEMORY_READ_BIT,
+    eMemoryWrite = VK_ACCESS_MEMORY_WRITE_BIT,
+    eNone = VK_ACCESS_NONE,
+    eTransformFeedbackWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
+    eTransformFeedbackCounterReadEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
+    eTransformFeedbackCounterWriteEXT = VK_ACCESS_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
+    eConditionalRenderingReadEXT = VK_ACCESS_CONDITIONAL_RENDERING_READ_BIT_EXT,
+    eColorAttachmentReadNoncoherentEXT = VK_ACCESS_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+    eAccelerationStructureReadKHR = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+    eAccelerationStructureWriteKHR = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+    eFragmentDensityMapReadEXT = VK_ACCESS_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
+    eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
+    eCommandPreprocessReadNV = VK_ACCESS_COMMAND_PREPROCESS_READ_BIT_NV,
+    eCommandPreprocessWriteNV = VK_ACCESS_COMMAND_PREPROCESS_WRITE_BIT_NV,
+    eAccelerationStructureReadNV = VK_ACCESS_ACCELERATION_STRUCTURE_READ_BIT_NV,
+    eAccelerationStructureWriteNV = VK_ACCESS_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
+    eNoneKHR = VK_ACCESS_NONE_KHR,
+    eShadingRateImageReadNV = VK_ACCESS_SHADING_RATE_IMAGE_READ_BIT_NV
+  };
+
+  using AccessFlags = Flags<AccessFlagBits>;
+
+
+  template <> struct FlagTraits<AccessFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccessFlags allFlags = 
+          AccessFlagBits::eIndirectCommandRead
+        | AccessFlagBits::eIndexRead
+        | AccessFlagBits::eVertexAttributeRead
+        | AccessFlagBits::eUniformRead
+        | AccessFlagBits::eInputAttachmentRead
+        | AccessFlagBits::eShaderRead
+        | AccessFlagBits::eShaderWrite
+        | AccessFlagBits::eColorAttachmentRead
+        | AccessFlagBits::eColorAttachmentWrite
+        | AccessFlagBits::eDepthStencilAttachmentRead
+        | AccessFlagBits::eDepthStencilAttachmentWrite
+        | AccessFlagBits::eTransferRead
+        | AccessFlagBits::eTransferWrite
+        | AccessFlagBits::eHostRead
+        | AccessFlagBits::eHostWrite
+        | AccessFlagBits::eMemoryRead
+        | AccessFlagBits::eMemoryWrite
+        | AccessFlagBits::eNone
+        | AccessFlagBits::eTransformFeedbackWriteEXT
+        | AccessFlagBits::eTransformFeedbackCounterReadEXT
+        | AccessFlagBits::eTransformFeedbackCounterWriteEXT
+        | AccessFlagBits::eConditionalRenderingReadEXT
+        | AccessFlagBits::eColorAttachmentReadNoncoherentEXT
+        | AccessFlagBits::eAccelerationStructureReadKHR
+        | AccessFlagBits::eAccelerationStructureWriteKHR
+        | AccessFlagBits::eFragmentDensityMapReadEXT
+        | AccessFlagBits::eFragmentShadingRateAttachmentReadKHR
+        | AccessFlagBits::eCommandPreprocessReadNV
+        | AccessFlagBits::eCommandPreprocessWriteNV;
+  };
+
+  enum class AttachmentDescriptionFlagBits : VkAttachmentDescriptionFlags
+  {
+    eMayAlias = VK_ATTACHMENT_DESCRIPTION_MAY_ALIAS_BIT
+  };
+
+  using AttachmentDescriptionFlags = Flags<AttachmentDescriptionFlagBits>;
+
+
+  template <> struct FlagTraits<AttachmentDescriptionFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AttachmentDescriptionFlags allFlags = 
+          AttachmentDescriptionFlagBits::eMayAlias;
+  };
+
+  enum class AttachmentLoadOp
+  {
+    eLoad = VK_ATTACHMENT_LOAD_OP_LOAD,
+    eClear = VK_ATTACHMENT_LOAD_OP_CLEAR,
+    eDontCare = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
+    eNoneEXT = VK_ATTACHMENT_LOAD_OP_NONE_EXT
+  };
+
+  enum class AttachmentStoreOp
+  {
+    eStore = VK_ATTACHMENT_STORE_OP_STORE,
+    eDontCare = VK_ATTACHMENT_STORE_OP_DONT_CARE,
+    eNone = VK_ATTACHMENT_STORE_OP_NONE,
+    eNoneEXT = VK_ATTACHMENT_STORE_OP_NONE_EXT,
+    eNoneKHR = VK_ATTACHMENT_STORE_OP_NONE_KHR,
+    eNoneQCOM = VK_ATTACHMENT_STORE_OP_NONE_QCOM
+  };
+
+  enum class DependencyFlagBits : VkDependencyFlags
+  {
+    eByRegion = VK_DEPENDENCY_BY_REGION_BIT,
+    eDeviceGroup = VK_DEPENDENCY_DEVICE_GROUP_BIT,
+    eViewLocal = VK_DEPENDENCY_VIEW_LOCAL_BIT,
+    eFeedbackLoopEXT = VK_DEPENDENCY_FEEDBACK_LOOP_BIT_EXT,
+    eDeviceGroupKHR = VK_DEPENDENCY_DEVICE_GROUP_BIT_KHR,
+    eViewLocalKHR = VK_DEPENDENCY_VIEW_LOCAL_BIT_KHR
+  };
+
+  using DependencyFlags = Flags<DependencyFlagBits>;
+
+
+  template <> struct FlagTraits<DependencyFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DependencyFlags allFlags = 
+          DependencyFlagBits::eByRegion
+        | DependencyFlagBits::eDeviceGroup
+        | DependencyFlagBits::eViewLocal
+        | DependencyFlagBits::eFeedbackLoopEXT;
+  };
+
+  enum class FramebufferCreateFlagBits : VkFramebufferCreateFlags
+  {
+    eImageless = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT,
+    eImagelessKHR = VK_FRAMEBUFFER_CREATE_IMAGELESS_BIT_KHR
+  };
+
+  using FramebufferCreateFlags = Flags<FramebufferCreateFlagBits>;
+
+
+  template <> struct FlagTraits<FramebufferCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FramebufferCreateFlags allFlags = 
+          FramebufferCreateFlagBits::eImageless;
+  };
+
+  enum class PipelineBindPoint
+  {
+    eGraphics = VK_PIPELINE_BIND_POINT_GRAPHICS,
+    eCompute = VK_PIPELINE_BIND_POINT_COMPUTE,
+    eRayTracingKHR = VK_PIPELINE_BIND_POINT_RAY_TRACING_KHR,
+    eSubpassShadingHUAWEI = VK_PIPELINE_BIND_POINT_SUBPASS_SHADING_HUAWEI,
+    eRayTracingNV = VK_PIPELINE_BIND_POINT_RAY_TRACING_NV
+  };
+
+  enum class RenderPassCreateFlagBits : VkRenderPassCreateFlags
+  {
+    eTransformQCOM = VK_RENDER_PASS_CREATE_TRANSFORM_BIT_QCOM
+  };
+
+  using RenderPassCreateFlags = Flags<RenderPassCreateFlagBits>;
+
+
+  template <> struct FlagTraits<RenderPassCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR RenderPassCreateFlags allFlags = 
+          RenderPassCreateFlagBits::eTransformQCOM;
+  };
+
+  enum class SubpassDescriptionFlagBits : VkSubpassDescriptionFlags
+  {
+    ePerViewAttributesNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_ATTRIBUTES_BIT_NVX,
+    ePerViewPositionXOnlyNVX = VK_SUBPASS_DESCRIPTION_PER_VIEW_POSITION_X_ONLY_BIT_NVX,
+    eFragmentRegionQCOM = VK_SUBPASS_DESCRIPTION_FRAGMENT_REGION_BIT_QCOM,
+    eShaderResolveQCOM = VK_SUBPASS_DESCRIPTION_SHADER_RESOLVE_BIT_QCOM,
+    eRasterizationOrderAttachmentColorAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_EXT,
+    eRasterizationOrderAttachmentDepthAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
+    eRasterizationOrderAttachmentStencilAccessEXT = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
+    eEnableLegacyDitheringEXT = VK_SUBPASS_DESCRIPTION_ENABLE_LEGACY_DITHERING_BIT_EXT,
+    eRasterizationOrderAttachmentColorAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_COLOR_ACCESS_BIT_ARM,
+    eRasterizationOrderAttachmentDepthAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM,
+    eRasterizationOrderAttachmentStencilAccessARM = VK_SUBPASS_DESCRIPTION_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM
+  };
+
+  using SubpassDescriptionFlags = Flags<SubpassDescriptionFlagBits>;
+
+
+  template <> struct FlagTraits<SubpassDescriptionFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SubpassDescriptionFlags allFlags = 
+          SubpassDescriptionFlagBits::ePerViewAttributesNVX
+        | SubpassDescriptionFlagBits::ePerViewPositionXOnlyNVX
+        | SubpassDescriptionFlagBits::eFragmentRegionQCOM
+        | SubpassDescriptionFlagBits::eShaderResolveQCOM
+        | SubpassDescriptionFlagBits::eRasterizationOrderAttachmentColorAccessEXT
+        | SubpassDescriptionFlagBits::eRasterizationOrderAttachmentDepthAccessEXT
+        | SubpassDescriptionFlagBits::eRasterizationOrderAttachmentStencilAccessEXT
+        | SubpassDescriptionFlagBits::eEnableLegacyDitheringEXT;
+  };
+
+  enum class CommandPoolCreateFlagBits : VkCommandPoolCreateFlags
+  {
+    eTransient = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,
+    eResetCommandBuffer = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
+    eProtected = VK_COMMAND_POOL_CREATE_PROTECTED_BIT
+  };
+
+  using CommandPoolCreateFlags = Flags<CommandPoolCreateFlagBits>;
+
+
+  template <> struct FlagTraits<CommandPoolCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolCreateFlags allFlags = 
+          CommandPoolCreateFlagBits::eTransient
+        | CommandPoolCreateFlagBits::eResetCommandBuffer
+        | CommandPoolCreateFlagBits::eProtected;
+  };
+
+  enum class CommandPoolResetFlagBits : VkCommandPoolResetFlags
+  {
+    eReleaseResources = VK_COMMAND_POOL_RESET_RELEASE_RESOURCES_BIT
+  };
+
+  using CommandPoolResetFlags = Flags<CommandPoolResetFlagBits>;
+
+
+  template <> struct FlagTraits<CommandPoolResetFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolResetFlags allFlags = 
+          CommandPoolResetFlagBits::eReleaseResources;
+  };
+
+  enum class CommandBufferLevel
+  {
+    ePrimary = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
+    eSecondary = VK_COMMAND_BUFFER_LEVEL_SECONDARY
+  };
+
+  enum class CommandBufferResetFlagBits : VkCommandBufferResetFlags
+  {
+    eReleaseResources = VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
+  };
+
+  using CommandBufferResetFlags = Flags<CommandBufferResetFlagBits>;
+
+
+  template <> struct FlagTraits<CommandBufferResetFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CommandBufferResetFlags allFlags = 
+          CommandBufferResetFlagBits::eReleaseResources;
+  };
+
+  enum class CommandBufferUsageFlagBits : VkCommandBufferUsageFlags
+  {
+    eOneTimeSubmit = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
+    eRenderPassContinue = VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT,
+    eSimultaneousUse = VK_COMMAND_BUFFER_USAGE_SIMULTANEOUS_USE_BIT
+  };
+
+  using CommandBufferUsageFlags = Flags<CommandBufferUsageFlagBits>;
+
+
+  template <> struct FlagTraits<CommandBufferUsageFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CommandBufferUsageFlags allFlags = 
+          CommandBufferUsageFlagBits::eOneTimeSubmit
+        | CommandBufferUsageFlagBits::eRenderPassContinue
+        | CommandBufferUsageFlagBits::eSimultaneousUse;
+  };
+
+  enum class QueryControlFlagBits : VkQueryControlFlags
+  {
+    ePrecise = VK_QUERY_CONTROL_PRECISE_BIT
+  };
+
+  using QueryControlFlags = Flags<QueryControlFlagBits>;
+
+
+  template <> struct FlagTraits<QueryControlFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR QueryControlFlags allFlags = 
+          QueryControlFlagBits::ePrecise;
+  };
+
+  enum class IndexType
+  {
+    eUint16 = VK_INDEX_TYPE_UINT16,
+    eUint32 = VK_INDEX_TYPE_UINT32,
+    eNoneKHR = VK_INDEX_TYPE_NONE_KHR,
+    eUint8EXT = VK_INDEX_TYPE_UINT8_EXT,
+    eNoneNV = VK_INDEX_TYPE_NONE_NV
+  };
+
+  enum class StencilFaceFlagBits : VkStencilFaceFlags
+  {
+    eFront = VK_STENCIL_FACE_FRONT_BIT,
+    eBack = VK_STENCIL_FACE_BACK_BIT,
+    eFrontAndBack = VK_STENCIL_FACE_FRONT_AND_BACK,
+    eVkStencilFrontAndBack = VK_STENCIL_FRONT_AND_BACK
+  };
+
+  using StencilFaceFlags = Flags<StencilFaceFlagBits>;
+
+
+  template <> struct FlagTraits<StencilFaceFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StencilFaceFlags allFlags = 
+          StencilFaceFlagBits::eFront
+        | StencilFaceFlagBits::eBack
+        | StencilFaceFlagBits::eFrontAndBack;
+  };
+
+  enum class SubpassContents
+  {
+    eInline = VK_SUBPASS_CONTENTS_INLINE,
+    eSecondaryCommandBuffers = VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS
+  };
+
+  //=== VK_VERSION_1_1 ===
+
+  enum class SubgroupFeatureFlagBits : VkSubgroupFeatureFlags
+  {
+    eBasic = VK_SUBGROUP_FEATURE_BASIC_BIT,
+    eVote = VK_SUBGROUP_FEATURE_VOTE_BIT,
+    eArithmetic = VK_SUBGROUP_FEATURE_ARITHMETIC_BIT,
+    eBallot = VK_SUBGROUP_FEATURE_BALLOT_BIT,
+    eShuffle = VK_SUBGROUP_FEATURE_SHUFFLE_BIT,
+    eShuffleRelative = VK_SUBGROUP_FEATURE_SHUFFLE_RELATIVE_BIT,
+    eClustered = VK_SUBGROUP_FEATURE_CLUSTERED_BIT,
+    eQuad = VK_SUBGROUP_FEATURE_QUAD_BIT,
+    ePartitionedNV = VK_SUBGROUP_FEATURE_PARTITIONED_BIT_NV
+  };
+
+  using SubgroupFeatureFlags = Flags<SubgroupFeatureFlagBits>;
+
+
+  template <> struct FlagTraits<SubgroupFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SubgroupFeatureFlags allFlags = 
+          SubgroupFeatureFlagBits::eBasic
+        | SubgroupFeatureFlagBits::eVote
+        | SubgroupFeatureFlagBits::eArithmetic
+        | SubgroupFeatureFlagBits::eBallot
+        | SubgroupFeatureFlagBits::eShuffle
+        | SubgroupFeatureFlagBits::eShuffleRelative
+        | SubgroupFeatureFlagBits::eClustered
+        | SubgroupFeatureFlagBits::eQuad
+        | SubgroupFeatureFlagBits::ePartitionedNV;
+  };
+
+  enum class PeerMemoryFeatureFlagBits : VkPeerMemoryFeatureFlags
+  {
+    eCopySrc = VK_PEER_MEMORY_FEATURE_COPY_SRC_BIT,
+    eCopyDst = VK_PEER_MEMORY_FEATURE_COPY_DST_BIT,
+    eGenericSrc = VK_PEER_MEMORY_FEATURE_GENERIC_SRC_BIT,
+    eGenericDst = VK_PEER_MEMORY_FEATURE_GENERIC_DST_BIT
+  };
+  using PeerMemoryFeatureFlagBitsKHR = PeerMemoryFeatureFlagBits;
+
+  using PeerMemoryFeatureFlags = Flags<PeerMemoryFeatureFlagBits>;
+  using PeerMemoryFeatureFlagsKHR = PeerMemoryFeatureFlags;
+
+
+  template <> struct FlagTraits<PeerMemoryFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PeerMemoryFeatureFlags allFlags = 
+          PeerMemoryFeatureFlagBits::eCopySrc
+        | PeerMemoryFeatureFlagBits::eCopyDst
+        | PeerMemoryFeatureFlagBits::eGenericSrc
+        | PeerMemoryFeatureFlagBits::eGenericDst;
+  };
+
+  enum class MemoryAllocateFlagBits : VkMemoryAllocateFlags
+  {
+    eDeviceMask = VK_MEMORY_ALLOCATE_DEVICE_MASK_BIT,
+    eDeviceAddress = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT,
+    eDeviceAddressCaptureReplay = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT
+  };
+  using MemoryAllocateFlagBitsKHR = MemoryAllocateFlagBits;
+
+  using MemoryAllocateFlags = Flags<MemoryAllocateFlagBits>;
+  using MemoryAllocateFlagsKHR = MemoryAllocateFlags;
+
+
+  template <> struct FlagTraits<MemoryAllocateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryAllocateFlags allFlags = 
+          MemoryAllocateFlagBits::eDeviceMask
+        | MemoryAllocateFlagBits::eDeviceAddress
+        | MemoryAllocateFlagBits::eDeviceAddressCaptureReplay;
+  };
+
+  enum class CommandPoolTrimFlagBits : VkCommandPoolTrimFlags
+  {};
+
+  using CommandPoolTrimFlags = Flags<CommandPoolTrimFlagBits>;
+  using CommandPoolTrimFlagsKHR = CommandPoolTrimFlags;
+
+
+  template <> struct FlagTraits<CommandPoolTrimFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CommandPoolTrimFlags allFlags =  {};
+  };
+
+  enum class PointClippingBehavior
+  {
+    eAllClipPlanes = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES,
+    eUserClipPlanesOnly = VK_POINT_CLIPPING_BEHAVIOR_USER_CLIP_PLANES_ONLY
+  };
+  using PointClippingBehaviorKHR = PointClippingBehavior;
+
+  enum class TessellationDomainOrigin
+  {
+    eUpperLeft = VK_TESSELLATION_DOMAIN_ORIGIN_UPPER_LEFT,
+    eLowerLeft = VK_TESSELLATION_DOMAIN_ORIGIN_LOWER_LEFT
+  };
+  using TessellationDomainOriginKHR = TessellationDomainOrigin;
+
+  enum class DeviceQueueCreateFlagBits : VkDeviceQueueCreateFlags
+  {
+    eProtected = VK_DEVICE_QUEUE_CREATE_PROTECTED_BIT
+  };
+
+  using DeviceQueueCreateFlags = Flags<DeviceQueueCreateFlagBits>;
+
+
+  template <> struct FlagTraits<DeviceQueueCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceQueueCreateFlags allFlags = 
+          DeviceQueueCreateFlagBits::eProtected;
+  };
+
+  enum class SamplerYcbcrModelConversion
+  {
+    eRgbIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_RGB_IDENTITY,
+    eYcbcrIdentity = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_IDENTITY,
+    eYcbcr709 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_709,
+    eYcbcr601 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601,
+    eYcbcr2020 = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_2020
+  };
+  using SamplerYcbcrModelConversionKHR = SamplerYcbcrModelConversion;
+
+  enum class SamplerYcbcrRange
+  {
+    eItuFull = VK_SAMPLER_YCBCR_RANGE_ITU_FULL,
+    eItuNarrow = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW
+  };
+  using SamplerYcbcrRangeKHR = SamplerYcbcrRange;
+
+  enum class ChromaLocation
+  {
+    eCositedEven = VK_CHROMA_LOCATION_COSITED_EVEN,
+    eMidpoint = VK_CHROMA_LOCATION_MIDPOINT
+  };
+  using ChromaLocationKHR = ChromaLocation;
+
+  enum class DescriptorUpdateTemplateType
+  {
+    eDescriptorSet = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET,
+    ePushDescriptorsKHR = VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_PUSH_DESCRIPTORS_KHR
+  };
+  using DescriptorUpdateTemplateTypeKHR = DescriptorUpdateTemplateType;
+
+  enum class DescriptorUpdateTemplateCreateFlagBits : VkDescriptorUpdateTemplateCreateFlags
+  {};
+
+  using DescriptorUpdateTemplateCreateFlags = Flags<DescriptorUpdateTemplateCreateFlagBits>;
+  using DescriptorUpdateTemplateCreateFlagsKHR = DescriptorUpdateTemplateCreateFlags;
+
+
+  template <> struct FlagTraits<DescriptorUpdateTemplateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorUpdateTemplateCreateFlags allFlags =  {};
+  };
+
+  enum class ExternalMemoryHandleTypeFlagBits : VkExternalMemoryHandleTypeFlags
+  {
+    eOpaqueFd = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eD3D11Texture = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT,
+    eD3D11TextureKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_KMT_BIT,
+    eD3D12Heap = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_HEAP_BIT,
+    eD3D12Resource = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D12_RESOURCE_BIT,
+    eDmaBufEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+    eAndroidHardwareBufferANDROID = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID,
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+    eHostAllocationEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT,
+    eHostMappedForeignMemoryEXT = VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_MAPPED_FOREIGN_MEMORY_BIT_EXT,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eZirconVmoFUCHSIA = VK_EXTERNAL_MEMORY_HANDLE_TYPE_ZIRCON_VMO_BIT_FUCHSIA,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    eRdmaAddressNV = VK_EXTERNAL_MEMORY_HANDLE_TYPE_RDMA_ADDRESS_BIT_NV
+  };
+  using ExternalMemoryHandleTypeFlagBitsKHR = ExternalMemoryHandleTypeFlagBits;
+
+  using ExternalMemoryHandleTypeFlags = Flags<ExternalMemoryHandleTypeFlagBits>;
+  using ExternalMemoryHandleTypeFlagsKHR = ExternalMemoryHandleTypeFlags;
+
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryHandleTypeFlags allFlags = 
+          ExternalMemoryHandleTypeFlagBits::eOpaqueFd
+        | ExternalMemoryHandleTypeFlagBits::eOpaqueWin32
+        | ExternalMemoryHandleTypeFlagBits::eOpaqueWin32Kmt
+        | ExternalMemoryHandleTypeFlagBits::eD3D11Texture
+        | ExternalMemoryHandleTypeFlagBits::eD3D11TextureKmt
+        | ExternalMemoryHandleTypeFlagBits::eD3D12Heap
+        | ExternalMemoryHandleTypeFlagBits::eD3D12Resource
+        | ExternalMemoryHandleTypeFlagBits::eDmaBufEXT
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+        | ExternalMemoryHandleTypeFlagBits::eAndroidHardwareBufferANDROID
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+        | ExternalMemoryHandleTypeFlagBits::eHostAllocationEXT
+        | ExternalMemoryHandleTypeFlagBits::eHostMappedForeignMemoryEXT
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+        | ExternalMemoryHandleTypeFlagBits::eZirconVmoFUCHSIA
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+        | ExternalMemoryHandleTypeFlagBits::eRdmaAddressNV;
+  };
+
+  enum class ExternalMemoryFeatureFlagBits : VkExternalMemoryFeatureFlags
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT
+  };
+  using ExternalMemoryFeatureFlagBitsKHR = ExternalMemoryFeatureFlagBits;
+
+  using ExternalMemoryFeatureFlags = Flags<ExternalMemoryFeatureFlagBits>;
+  using ExternalMemoryFeatureFlagsKHR = ExternalMemoryFeatureFlags;
+
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryFeatureFlags allFlags = 
+          ExternalMemoryFeatureFlagBits::eDedicatedOnly
+        | ExternalMemoryFeatureFlagBits::eExportable
+        | ExternalMemoryFeatureFlagBits::eImportable;
+  };
+
+  enum class ExternalFenceHandleTypeFlagBits : VkExternalFenceHandleTypeFlags
+  {
+    eOpaqueFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_FENCE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eSyncFd = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT
+  };
+  using ExternalFenceHandleTypeFlagBitsKHR = ExternalFenceHandleTypeFlagBits;
+
+  using ExternalFenceHandleTypeFlags = Flags<ExternalFenceHandleTypeFlagBits>;
+  using ExternalFenceHandleTypeFlagsKHR = ExternalFenceHandleTypeFlags;
+
+
+  template <> struct FlagTraits<ExternalFenceHandleTypeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalFenceHandleTypeFlags allFlags = 
+          ExternalFenceHandleTypeFlagBits::eOpaqueFd
+        | ExternalFenceHandleTypeFlagBits::eOpaqueWin32
+        | ExternalFenceHandleTypeFlagBits::eOpaqueWin32Kmt
+        | ExternalFenceHandleTypeFlagBits::eSyncFd;
+  };
+
+  enum class ExternalFenceFeatureFlagBits : VkExternalFenceFeatureFlags
+  {
+    eExportable = VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT
+  };
+  using ExternalFenceFeatureFlagBitsKHR = ExternalFenceFeatureFlagBits;
+
+  using ExternalFenceFeatureFlags = Flags<ExternalFenceFeatureFlagBits>;
+  using ExternalFenceFeatureFlagsKHR = ExternalFenceFeatureFlags;
+
+
+  template <> struct FlagTraits<ExternalFenceFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalFenceFeatureFlags allFlags = 
+          ExternalFenceFeatureFlagBits::eExportable
+        | ExternalFenceFeatureFlagBits::eImportable;
+  };
+
+  enum class FenceImportFlagBits : VkFenceImportFlags
+  {
+    eTemporary = VK_FENCE_IMPORT_TEMPORARY_BIT
+  };
+  using FenceImportFlagBitsKHR = FenceImportFlagBits;
+
+  using FenceImportFlags = Flags<FenceImportFlagBits>;
+  using FenceImportFlagsKHR = FenceImportFlags;
+
+
+  template <> struct FlagTraits<FenceImportFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FenceImportFlags allFlags = 
+          FenceImportFlagBits::eTemporary;
+  };
+
+  enum class SemaphoreImportFlagBits : VkSemaphoreImportFlags
+  {
+    eTemporary = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT
+  };
+  using SemaphoreImportFlagBitsKHR = SemaphoreImportFlagBits;
+
+  using SemaphoreImportFlags = Flags<SemaphoreImportFlagBits>;
+  using SemaphoreImportFlagsKHR = SemaphoreImportFlags;
+
+
+  template <> struct FlagTraits<SemaphoreImportFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreImportFlags allFlags = 
+          SemaphoreImportFlagBits::eTemporary;
+  };
+
+  enum class ExternalSemaphoreHandleTypeFlagBits : VkExternalSemaphoreHandleTypeFlags
+  {
+    eOpaqueFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
+    eOpaqueWin32 = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT,
+    eOpaqueWin32Kmt = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
+    eD3D12Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D12_FENCE_BIT,
+    eSyncFd = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eZirconEventFUCHSIA = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_ZIRCON_EVENT_BIT_FUCHSIA,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    eD3D11Fence = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_D3D11_FENCE_BIT
+  };
+  using ExternalSemaphoreHandleTypeFlagBitsKHR = ExternalSemaphoreHandleTypeFlagBits;
+
+  using ExternalSemaphoreHandleTypeFlags = Flags<ExternalSemaphoreHandleTypeFlagBits>;
+  using ExternalSemaphoreHandleTypeFlagsKHR = ExternalSemaphoreHandleTypeFlags;
+
+
+  template <> struct FlagTraits<ExternalSemaphoreHandleTypeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalSemaphoreHandleTypeFlags allFlags = 
+          ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd
+        | ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32
+        | ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32Kmt
+        | ExternalSemaphoreHandleTypeFlagBits::eD3D12Fence
+        | ExternalSemaphoreHandleTypeFlagBits::eSyncFd
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+        | ExternalSemaphoreHandleTypeFlagBits::eZirconEventFUCHSIA
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+;
+  };
+
+  enum class ExternalSemaphoreFeatureFlagBits : VkExternalSemaphoreFeatureFlags
+  {
+    eExportable = VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT,
+    eImportable = VK_EXTERNAL_SEMAPHORE_FEATURE_IMPORTABLE_BIT
+  };
+  using ExternalSemaphoreFeatureFlagBitsKHR = ExternalSemaphoreFeatureFlagBits;
+
+  using ExternalSemaphoreFeatureFlags = Flags<ExternalSemaphoreFeatureFlagBits>;
+  using ExternalSemaphoreFeatureFlagsKHR = ExternalSemaphoreFeatureFlags;
+
+
+  template <> struct FlagTraits<ExternalSemaphoreFeatureFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalSemaphoreFeatureFlags allFlags = 
+          ExternalSemaphoreFeatureFlagBits::eExportable
+        | ExternalSemaphoreFeatureFlagBits::eImportable;
+  };
+
+  //=== VK_VERSION_1_2 ===
+
+  enum class DriverId
+  {
+    eAmdProprietary = VK_DRIVER_ID_AMD_PROPRIETARY,
+    eAmdOpenSource = VK_DRIVER_ID_AMD_OPEN_SOURCE,
+    eMesaRadv = VK_DRIVER_ID_MESA_RADV,
+    eNvidiaProprietary = VK_DRIVER_ID_NVIDIA_PROPRIETARY,
+    eIntelProprietaryWindows = VK_DRIVER_ID_INTEL_PROPRIETARY_WINDOWS,
+    eIntelOpenSourceMESA = VK_DRIVER_ID_INTEL_OPEN_SOURCE_MESA,
+    eImaginationProprietary = VK_DRIVER_ID_IMAGINATION_PROPRIETARY,
+    eQualcommProprietary = VK_DRIVER_ID_QUALCOMM_PROPRIETARY,
+    eArmProprietary = VK_DRIVER_ID_ARM_PROPRIETARY,
+    eGoogleSwiftshader = VK_DRIVER_ID_GOOGLE_SWIFTSHADER,
+    eGgpProprietary = VK_DRIVER_ID_GGP_PROPRIETARY,
+    eBroadcomProprietary = VK_DRIVER_ID_BROADCOM_PROPRIETARY,
+    eMesaLlvmpipe = VK_DRIVER_ID_MESA_LLVMPIPE,
+    eMoltenvk = VK_DRIVER_ID_MOLTENVK,
+    eCoreaviProprietary = VK_DRIVER_ID_COREAVI_PROPRIETARY,
+    eJuiceProprietary = VK_DRIVER_ID_JUICE_PROPRIETARY,
+    eVerisiliconProprietary = VK_DRIVER_ID_VERISILICON_PROPRIETARY,
+    eMesaTurnip = VK_DRIVER_ID_MESA_TURNIP,
+    eMesaV3Dv = VK_DRIVER_ID_MESA_V3DV,
+    eMesaPanvk = VK_DRIVER_ID_MESA_PANVK,
+    eSamsungProprietary = VK_DRIVER_ID_SAMSUNG_PROPRIETARY,
+    eMesaVenus = VK_DRIVER_ID_MESA_VENUS,
+    eMesaDozen = VK_DRIVER_ID_MESA_DOZEN,
+    eMesaNvk = VK_DRIVER_ID_MESA_NVK
+  };
+  using DriverIdKHR = DriverId;
+
+  enum class ShaderFloatControlsIndependence
+  {
+    e32BitOnly = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_32_BIT_ONLY,
+    eAll = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL,
+    eNone = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_NONE
+  };
+  using ShaderFloatControlsIndependenceKHR = ShaderFloatControlsIndependence;
+
+  enum class DescriptorBindingFlagBits : VkDescriptorBindingFlags
+  {
+    eUpdateAfterBind = VK_DESCRIPTOR_BINDING_UPDATE_AFTER_BIND_BIT,
+    eUpdateUnusedWhilePending = VK_DESCRIPTOR_BINDING_UPDATE_UNUSED_WHILE_PENDING_BIT,
+    ePartiallyBound = VK_DESCRIPTOR_BINDING_PARTIALLY_BOUND_BIT,
+    eVariableDescriptorCount = VK_DESCRIPTOR_BINDING_VARIABLE_DESCRIPTOR_COUNT_BIT
+  };
+  using DescriptorBindingFlagBitsEXT = DescriptorBindingFlagBits;
+
+  using DescriptorBindingFlags = Flags<DescriptorBindingFlagBits>;
+  using DescriptorBindingFlagsEXT = DescriptorBindingFlags;
+
+
+  template <> struct FlagTraits<DescriptorBindingFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DescriptorBindingFlags allFlags = 
+          DescriptorBindingFlagBits::eUpdateAfterBind
+        | DescriptorBindingFlagBits::eUpdateUnusedWhilePending
+        | DescriptorBindingFlagBits::ePartiallyBound
+        | DescriptorBindingFlagBits::eVariableDescriptorCount;
+  };
+
+  enum class ResolveModeFlagBits : VkResolveModeFlags
+  {
+    eNone = VK_RESOLVE_MODE_NONE,
+    eSampleZero = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT,
+    eAverage = VK_RESOLVE_MODE_AVERAGE_BIT,
+    eMin = VK_RESOLVE_MODE_MIN_BIT,
+    eMax = VK_RESOLVE_MODE_MAX_BIT
+  };
+  using ResolveModeFlagBitsKHR = ResolveModeFlagBits;
+
+  using ResolveModeFlags = Flags<ResolveModeFlagBits>;
+  using ResolveModeFlagsKHR = ResolveModeFlags;
+
+
+  template <> struct FlagTraits<ResolveModeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ResolveModeFlags allFlags = 
+          ResolveModeFlagBits::eNone
+        | ResolveModeFlagBits::eSampleZero
+        | ResolveModeFlagBits::eAverage
+        | ResolveModeFlagBits::eMin
+        | ResolveModeFlagBits::eMax;
+  };
+
+  enum class SamplerReductionMode
+  {
+    eWeightedAverage = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE,
+    eMin = VK_SAMPLER_REDUCTION_MODE_MIN,
+    eMax = VK_SAMPLER_REDUCTION_MODE_MAX
+  };
+  using SamplerReductionModeEXT = SamplerReductionMode;
+
+  enum class SemaphoreType
+  {
+    eBinary = VK_SEMAPHORE_TYPE_BINARY,
+    eTimeline = VK_SEMAPHORE_TYPE_TIMELINE
+  };
+  using SemaphoreTypeKHR = SemaphoreType;
+
+  enum class SemaphoreWaitFlagBits : VkSemaphoreWaitFlags
+  {
+    eAny = VK_SEMAPHORE_WAIT_ANY_BIT
+  };
+  using SemaphoreWaitFlagBitsKHR = SemaphoreWaitFlagBits;
+
+  using SemaphoreWaitFlags = Flags<SemaphoreWaitFlagBits>;
+  using SemaphoreWaitFlagsKHR = SemaphoreWaitFlags;
+
+
+  template <> struct FlagTraits<SemaphoreWaitFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SemaphoreWaitFlags allFlags = 
+          SemaphoreWaitFlagBits::eAny;
+  };
+
+  //=== VK_VERSION_1_3 ===
+
+  enum class PipelineCreationFeedbackFlagBits : VkPipelineCreationFeedbackFlags
+  {
+    eValid = VK_PIPELINE_CREATION_FEEDBACK_VALID_BIT,
+    eApplicationPipelineCacheHit = VK_PIPELINE_CREATION_FEEDBACK_APPLICATION_PIPELINE_CACHE_HIT_BIT,
+    eBasePipelineAcceleration = VK_PIPELINE_CREATION_FEEDBACK_BASE_PIPELINE_ACCELERATION_BIT
+  };
+  using PipelineCreationFeedbackFlagBitsEXT = PipelineCreationFeedbackFlagBits;
+
+  using PipelineCreationFeedbackFlags = Flags<PipelineCreationFeedbackFlagBits>;
+  using PipelineCreationFeedbackFlagsEXT = PipelineCreationFeedbackFlags;
+
+
+  template <> struct FlagTraits<PipelineCreationFeedbackFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCreationFeedbackFlags allFlags = 
+          PipelineCreationFeedbackFlagBits::eValid
+        | PipelineCreationFeedbackFlagBits::eApplicationPipelineCacheHit
+        | PipelineCreationFeedbackFlagBits::eBasePipelineAcceleration;
+  };
+
+  enum class ToolPurposeFlagBits : VkToolPurposeFlags
+  {
+    eValidation = VK_TOOL_PURPOSE_VALIDATION_BIT,
+    eProfiling = VK_TOOL_PURPOSE_PROFILING_BIT,
+    eTracing = VK_TOOL_PURPOSE_TRACING_BIT,
+    eAdditionalFeatures = VK_TOOL_PURPOSE_ADDITIONAL_FEATURES_BIT,
+    eModifyingFeatures = VK_TOOL_PURPOSE_MODIFYING_FEATURES_BIT,
+    eDebugReportingEXT = VK_TOOL_PURPOSE_DEBUG_REPORTING_BIT_EXT,
+    eDebugMarkersEXT = VK_TOOL_PURPOSE_DEBUG_MARKERS_BIT_EXT
+  };
+  using ToolPurposeFlagBitsEXT = ToolPurposeFlagBits;
+
+  using ToolPurposeFlags = Flags<ToolPurposeFlagBits>;
+  using ToolPurposeFlagsEXT = ToolPurposeFlags;
+
+
+  template <> struct FlagTraits<ToolPurposeFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ToolPurposeFlags allFlags = 
+          ToolPurposeFlagBits::eValidation
+        | ToolPurposeFlagBits::eProfiling
+        | ToolPurposeFlagBits::eTracing
+        | ToolPurposeFlagBits::eAdditionalFeatures
+        | ToolPurposeFlagBits::eModifyingFeatures
+        | ToolPurposeFlagBits::eDebugReportingEXT
+        | ToolPurposeFlagBits::eDebugMarkersEXT;
+  };
+
+  enum class PrivateDataSlotCreateFlagBits : VkPrivateDataSlotCreateFlags
+  {};
+  using PrivateDataSlotCreateFlagBitsEXT = PrivateDataSlotCreateFlagBits;
+
+  using PrivateDataSlotCreateFlags = Flags<PrivateDataSlotCreateFlagBits>;
+  using PrivateDataSlotCreateFlagsEXT = PrivateDataSlotCreateFlags;
+
+
+  template <> struct FlagTraits<PrivateDataSlotCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PrivateDataSlotCreateFlags allFlags =  {};
+  };
+
+  enum class PipelineStageFlagBits2 : VkPipelineStageFlags2
+  {
+    eNone = VK_PIPELINE_STAGE_2_NONE,
+    eTopOfPipe = VK_PIPELINE_STAGE_2_TOP_OF_PIPE_BIT,
+    eDrawIndirect = VK_PIPELINE_STAGE_2_DRAW_INDIRECT_BIT,
+    eVertexInput = VK_PIPELINE_STAGE_2_VERTEX_INPUT_BIT,
+    eVertexShader = VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT,
+    eTessellationControlShader = VK_PIPELINE_STAGE_2_TESSELLATION_CONTROL_SHADER_BIT,
+    eTessellationEvaluationShader = VK_PIPELINE_STAGE_2_TESSELLATION_EVALUATION_SHADER_BIT,
+    eGeometryShader = VK_PIPELINE_STAGE_2_GEOMETRY_SHADER_BIT,
+    eFragmentShader = VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT,
+    eEarlyFragmentTests = VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT,
+    eLateFragmentTests = VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT,
+    eColorAttachmentOutput = VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT,
+    eComputeShader = VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT,
+    eAllTransfer = VK_PIPELINE_STAGE_2_ALL_TRANSFER_BIT,
+    eBottomOfPipe = VK_PIPELINE_STAGE_2_BOTTOM_OF_PIPE_BIT,
+    eHost = VK_PIPELINE_STAGE_2_HOST_BIT,
+    eAllGraphics = VK_PIPELINE_STAGE_2_ALL_GRAPHICS_BIT,
+    eAllCommands = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
+    eCopy = VK_PIPELINE_STAGE_2_COPY_BIT,
+    eResolve = VK_PIPELINE_STAGE_2_RESOLVE_BIT,
+    eBlit = VK_PIPELINE_STAGE_2_BLIT_BIT,
+    eClear = VK_PIPELINE_STAGE_2_CLEAR_BIT,
+    eIndexInput = VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT,
+    eVertexAttributeInput = VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT,
+    ePreRasterizationShaders = VK_PIPELINE_STAGE_2_PRE_RASTERIZATION_SHADERS_BIT,
+    eVideoDecodeKHR = VK_PIPELINE_STAGE_2_VIDEO_DECODE_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeKHR = VK_PIPELINE_STAGE_2_VIDEO_ENCODE_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eTransformFeedbackEXT = VK_PIPELINE_STAGE_2_TRANSFORM_FEEDBACK_BIT_EXT,
+    eConditionalRenderingEXT = VK_PIPELINE_STAGE_2_CONDITIONAL_RENDERING_BIT_EXT,
+    eCommandPreprocessNV = VK_PIPELINE_STAGE_2_COMMAND_PREPROCESS_BIT_NV,
+    eFragmentShadingRateAttachmentKHR = VK_PIPELINE_STAGE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+    eAccelerationStructureBuildKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_KHR,
+    eRayTracingShaderKHR = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_KHR,
+    eFragmentDensityProcessEXT = VK_PIPELINE_STAGE_2_FRAGMENT_DENSITY_PROCESS_BIT_EXT,
+    eTaskShaderEXT = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_EXT,
+    eMeshShaderEXT = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_EXT,
+    eSubpassShadingHUAWEI = VK_PIPELINE_STAGE_2_SUBPASS_SHADING_BIT_HUAWEI,
+    eInvocationMaskHUAWEI = VK_PIPELINE_STAGE_2_INVOCATION_MASK_BIT_HUAWEI,
+    eAccelerationStructureCopyKHR = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_COPY_BIT_KHR,
+    eMicromapBuildEXT = VK_PIPELINE_STAGE_2_MICROMAP_BUILD_BIT_EXT,
+    eOpticalFlowNV = VK_PIPELINE_STAGE_2_OPTICAL_FLOW_BIT_NV,
+    eAccelerationStructureBuildNV = VK_PIPELINE_STAGE_2_ACCELERATION_STRUCTURE_BUILD_BIT_NV,
+    eMeshShaderNV = VK_PIPELINE_STAGE_2_MESH_SHADER_BIT_NV,
+    eRayTracingShaderNV = VK_PIPELINE_STAGE_2_RAY_TRACING_SHADER_BIT_NV,
+    eShadingRateImageNV = VK_PIPELINE_STAGE_2_SHADING_RATE_IMAGE_BIT_NV,
+    eTaskShaderNV = VK_PIPELINE_STAGE_2_TASK_SHADER_BIT_NV,
+    eTransfer = VK_PIPELINE_STAGE_2_TRANSFER_BIT
+  };
+  using PipelineStageFlagBits2KHR = PipelineStageFlagBits2;
+
+  using PipelineStageFlags2 = Flags<PipelineStageFlagBits2>;
+  using PipelineStageFlags2KHR = PipelineStageFlags2;
+
+
+  template <> struct FlagTraits<PipelineStageFlagBits2>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineStageFlags2 allFlags = 
+          PipelineStageFlagBits2::eNone
+        | PipelineStageFlagBits2::eTopOfPipe
+        | PipelineStageFlagBits2::eDrawIndirect
+        | PipelineStageFlagBits2::eVertexInput
+        | PipelineStageFlagBits2::eVertexShader
+        | PipelineStageFlagBits2::eTessellationControlShader
+        | PipelineStageFlagBits2::eTessellationEvaluationShader
+        | PipelineStageFlagBits2::eGeometryShader
+        | PipelineStageFlagBits2::eFragmentShader
+        | PipelineStageFlagBits2::eEarlyFragmentTests
+        | PipelineStageFlagBits2::eLateFragmentTests
+        | PipelineStageFlagBits2::eColorAttachmentOutput
+        | PipelineStageFlagBits2::eComputeShader
+        | PipelineStageFlagBits2::eAllTransfer
+        | PipelineStageFlagBits2::eBottomOfPipe
+        | PipelineStageFlagBits2::eHost
+        | PipelineStageFlagBits2::eAllGraphics
+        | PipelineStageFlagBits2::eAllCommands
+        | PipelineStageFlagBits2::eCopy
+        | PipelineStageFlagBits2::eResolve
+        | PipelineStageFlagBits2::eBlit
+        | PipelineStageFlagBits2::eClear
+        | PipelineStageFlagBits2::eIndexInput
+        | PipelineStageFlagBits2::eVertexAttributeInput
+        | PipelineStageFlagBits2::ePreRasterizationShaders
+        | PipelineStageFlagBits2::eVideoDecodeKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | PipelineStageFlagBits2::eVideoEncodeKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | PipelineStageFlagBits2::eTransformFeedbackEXT
+        | PipelineStageFlagBits2::eConditionalRenderingEXT
+        | PipelineStageFlagBits2::eCommandPreprocessNV
+        | PipelineStageFlagBits2::eFragmentShadingRateAttachmentKHR
+        | PipelineStageFlagBits2::eAccelerationStructureBuildKHR
+        | PipelineStageFlagBits2::eRayTracingShaderKHR
+        | PipelineStageFlagBits2::eFragmentDensityProcessEXT
+        | PipelineStageFlagBits2::eTaskShaderEXT
+        | PipelineStageFlagBits2::eMeshShaderEXT
+        | PipelineStageFlagBits2::eSubpassShadingHUAWEI
+        | PipelineStageFlagBits2::eInvocationMaskHUAWEI
+        | PipelineStageFlagBits2::eAccelerationStructureCopyKHR
+        | PipelineStageFlagBits2::eMicromapBuildEXT
+        | PipelineStageFlagBits2::eOpticalFlowNV;
+  };
+
+  enum class AccessFlagBits2 : VkAccessFlags2
+  {
+    eNone = VK_ACCESS_2_NONE,
+    eIndirectCommandRead = VK_ACCESS_2_INDIRECT_COMMAND_READ_BIT,
+    eIndexRead = VK_ACCESS_2_INDEX_READ_BIT,
+    eVertexAttributeRead = VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT,
+    eUniformRead = VK_ACCESS_2_UNIFORM_READ_BIT,
+    eInputAttachmentRead = VK_ACCESS_2_INPUT_ATTACHMENT_READ_BIT,
+    eShaderRead = VK_ACCESS_2_SHADER_READ_BIT,
+    eShaderWrite = VK_ACCESS_2_SHADER_WRITE_BIT,
+    eColorAttachmentRead = VK_ACCESS_2_COLOR_ATTACHMENT_READ_BIT,
+    eColorAttachmentWrite = VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT,
+    eDepthStencilAttachmentRead = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
+    eDepthStencilAttachmentWrite = VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
+    eTransferRead = VK_ACCESS_2_TRANSFER_READ_BIT,
+    eTransferWrite = VK_ACCESS_2_TRANSFER_WRITE_BIT,
+    eHostRead = VK_ACCESS_2_HOST_READ_BIT,
+    eHostWrite = VK_ACCESS_2_HOST_WRITE_BIT,
+    eMemoryRead = VK_ACCESS_2_MEMORY_READ_BIT,
+    eMemoryWrite = VK_ACCESS_2_MEMORY_WRITE_BIT,
+    eShaderSampledRead = VK_ACCESS_2_SHADER_SAMPLED_READ_BIT,
+    eShaderStorageRead = VK_ACCESS_2_SHADER_STORAGE_READ_BIT,
+    eShaderStorageWrite = VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT,
+    eVideoDecodeReadKHR = VK_ACCESS_2_VIDEO_DECODE_READ_BIT_KHR,
+    eVideoDecodeWriteKHR = VK_ACCESS_2_VIDEO_DECODE_WRITE_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeReadKHR = VK_ACCESS_2_VIDEO_ENCODE_READ_BIT_KHR,
+    eVideoEncodeWriteKHR = VK_ACCESS_2_VIDEO_ENCODE_WRITE_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eTransformFeedbackWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_WRITE_BIT_EXT,
+    eTransformFeedbackCounterReadEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_READ_BIT_EXT,
+    eTransformFeedbackCounterWriteEXT = VK_ACCESS_2_TRANSFORM_FEEDBACK_COUNTER_WRITE_BIT_EXT,
+    eConditionalRenderingReadEXT = VK_ACCESS_2_CONDITIONAL_RENDERING_READ_BIT_EXT,
+    eCommandPreprocessReadNV = VK_ACCESS_2_COMMAND_PREPROCESS_READ_BIT_NV,
+    eCommandPreprocessWriteNV = VK_ACCESS_2_COMMAND_PREPROCESS_WRITE_BIT_NV,
+    eFragmentShadingRateAttachmentReadKHR = VK_ACCESS_2_FRAGMENT_SHADING_RATE_ATTACHMENT_READ_BIT_KHR,
+    eAccelerationStructureReadKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_KHR,
+    eAccelerationStructureWriteKHR = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_KHR,
+    eFragmentDensityMapReadEXT = VK_ACCESS_2_FRAGMENT_DENSITY_MAP_READ_BIT_EXT,
+    eColorAttachmentReadNoncoherentEXT = VK_ACCESS_2_COLOR_ATTACHMENT_READ_NONCOHERENT_BIT_EXT,
+    eDescriptorBufferReadEXT = VK_ACCESS_2_DESCRIPTOR_BUFFER_READ_BIT_EXT,
+    eInvocationMaskReadHUAWEI = VK_ACCESS_2_INVOCATION_MASK_READ_BIT_HUAWEI,
+    eShaderBindingTableReadKHR = VK_ACCESS_2_SHADER_BINDING_TABLE_READ_BIT_KHR,
+    eMicromapReadEXT = VK_ACCESS_2_MICROMAP_READ_BIT_EXT,
+    eMicromapWriteEXT = VK_ACCESS_2_MICROMAP_WRITE_BIT_EXT,
+    eOpticalFlowReadNV = VK_ACCESS_2_OPTICAL_FLOW_READ_BIT_NV,
+    eOpticalFlowWriteNV = VK_ACCESS_2_OPTICAL_FLOW_WRITE_BIT_NV,
+    eAccelerationStructureReadNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_READ_BIT_NV,
+    eAccelerationStructureWriteNV = VK_ACCESS_2_ACCELERATION_STRUCTURE_WRITE_BIT_NV,
+    eShadingRateImageReadNV = VK_ACCESS_2_SHADING_RATE_IMAGE_READ_BIT_NV
+  };
+  using AccessFlagBits2KHR = AccessFlagBits2;
+
+  using AccessFlags2 = Flags<AccessFlagBits2>;
+  using AccessFlags2KHR = AccessFlags2;
+
+
+  template <> struct FlagTraits<AccessFlagBits2>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccessFlags2 allFlags = 
+          AccessFlagBits2::eNone
+        | AccessFlagBits2::eIndirectCommandRead
+        | AccessFlagBits2::eIndexRead
+        | AccessFlagBits2::eVertexAttributeRead
+        | AccessFlagBits2::eUniformRead
+        | AccessFlagBits2::eInputAttachmentRead
+        | AccessFlagBits2::eShaderRead
+        | AccessFlagBits2::eShaderWrite
+        | AccessFlagBits2::eColorAttachmentRead
+        | AccessFlagBits2::eColorAttachmentWrite
+        | AccessFlagBits2::eDepthStencilAttachmentRead
+        | AccessFlagBits2::eDepthStencilAttachmentWrite
+        | AccessFlagBits2::eTransferRead
+        | AccessFlagBits2::eTransferWrite
+        | AccessFlagBits2::eHostRead
+        | AccessFlagBits2::eHostWrite
+        | AccessFlagBits2::eMemoryRead
+        | AccessFlagBits2::eMemoryWrite
+        | AccessFlagBits2::eShaderSampledRead
+        | AccessFlagBits2::eShaderStorageRead
+        | AccessFlagBits2::eShaderStorageWrite
+        | AccessFlagBits2::eVideoDecodeReadKHR
+        | AccessFlagBits2::eVideoDecodeWriteKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | AccessFlagBits2::eVideoEncodeReadKHR
+        | AccessFlagBits2::eVideoEncodeWriteKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | AccessFlagBits2::eTransformFeedbackWriteEXT
+        | AccessFlagBits2::eTransformFeedbackCounterReadEXT
+        | AccessFlagBits2::eTransformFeedbackCounterWriteEXT
+        | AccessFlagBits2::eConditionalRenderingReadEXT
+        | AccessFlagBits2::eCommandPreprocessReadNV
+        | AccessFlagBits2::eCommandPreprocessWriteNV
+        | AccessFlagBits2::eFragmentShadingRateAttachmentReadKHR
+        | AccessFlagBits2::eAccelerationStructureReadKHR
+        | AccessFlagBits2::eAccelerationStructureWriteKHR
+        | AccessFlagBits2::eFragmentDensityMapReadEXT
+        | AccessFlagBits2::eColorAttachmentReadNoncoherentEXT
+        | AccessFlagBits2::eDescriptorBufferReadEXT
+        | AccessFlagBits2::eInvocationMaskReadHUAWEI
+        | AccessFlagBits2::eShaderBindingTableReadKHR
+        | AccessFlagBits2::eMicromapReadEXT
+        | AccessFlagBits2::eMicromapWriteEXT
+        | AccessFlagBits2::eOpticalFlowReadNV
+        | AccessFlagBits2::eOpticalFlowWriteNV;
+  };
+
+  enum class SubmitFlagBits : VkSubmitFlags
+  {
+    eProtected = VK_SUBMIT_PROTECTED_BIT
+  };
+  using SubmitFlagBitsKHR = SubmitFlagBits;
+
+  using SubmitFlags = Flags<SubmitFlagBits>;
+  using SubmitFlagsKHR = SubmitFlags;
+
+
+  template <> struct FlagTraits<SubmitFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SubmitFlags allFlags = 
+          SubmitFlagBits::eProtected;
+  };
+
+  enum class RenderingFlagBits : VkRenderingFlags
+  {
+    eContentsSecondaryCommandBuffers = VK_RENDERING_CONTENTS_SECONDARY_COMMAND_BUFFERS_BIT,
+    eSuspending = VK_RENDERING_SUSPENDING_BIT,
+    eResuming = VK_RENDERING_RESUMING_BIT,
+    eEnableLegacyDitheringEXT = VK_RENDERING_ENABLE_LEGACY_DITHERING_BIT_EXT
+  };
+  using RenderingFlagBitsKHR = RenderingFlagBits;
+
+  using RenderingFlags = Flags<RenderingFlagBits>;
+  using RenderingFlagsKHR = RenderingFlags;
+
+
+  template <> struct FlagTraits<RenderingFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR RenderingFlags allFlags = 
+          RenderingFlagBits::eContentsSecondaryCommandBuffers
+        | RenderingFlagBits::eSuspending
+        | RenderingFlagBits::eResuming
+        | RenderingFlagBits::eEnableLegacyDitheringEXT;
+  };
+
+  enum class FormatFeatureFlagBits2 : VkFormatFeatureFlags2
+  {
+    eSampledImage = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT,
+    eStorageImage = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT,
+    eStorageImageAtomic = VK_FORMAT_FEATURE_2_STORAGE_IMAGE_ATOMIC_BIT,
+    eUniformTexelBuffer = VK_FORMAT_FEATURE_2_UNIFORM_TEXEL_BUFFER_BIT,
+    eStorageTexelBuffer = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_BIT,
+    eStorageTexelBufferAtomic = VK_FORMAT_FEATURE_2_STORAGE_TEXEL_BUFFER_ATOMIC_BIT,
+    eVertexBuffer = VK_FORMAT_FEATURE_2_VERTEX_BUFFER_BIT,
+    eColorAttachment = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BIT,
+    eColorAttachmentBlend = VK_FORMAT_FEATURE_2_COLOR_ATTACHMENT_BLEND_BIT,
+    eDepthStencilAttachment = VK_FORMAT_FEATURE_2_DEPTH_STENCIL_ATTACHMENT_BIT,
+    eBlitSrc = VK_FORMAT_FEATURE_2_BLIT_SRC_BIT,
+    eBlitDst = VK_FORMAT_FEATURE_2_BLIT_DST_BIT,
+    eSampledImageFilterLinear = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_LINEAR_BIT,
+    eSampledImageFilterCubic = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT,
+    eTransferSrc = VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT,
+    eTransferDst = VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT,
+    eSampledImageFilterMinmax = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_MINMAX_BIT,
+    eMidpointChromaSamples = VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT,
+    eSampledImageYcbcrConversionLinearFilter = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_LINEAR_FILTER_BIT,
+    eSampledImageYcbcrConversionSeparateReconstructionFilter = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_SEPARATE_RECONSTRUCTION_FILTER_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicit = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_BIT,
+    eSampledImageYcbcrConversionChromaReconstructionExplicitForceable = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_YCBCR_CONVERSION_CHROMA_RECONSTRUCTION_EXPLICIT_FORCEABLE_BIT,
+    eDisjoint = VK_FORMAT_FEATURE_2_DISJOINT_BIT,
+    eCositedChromaSamples = VK_FORMAT_FEATURE_2_COSITED_CHROMA_SAMPLES_BIT,
+    eStorageReadWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_READ_WITHOUT_FORMAT_BIT,
+    eStorageWriteWithoutFormat = VK_FORMAT_FEATURE_2_STORAGE_WRITE_WITHOUT_FORMAT_BIT,
+    eSampledImageDepthComparison = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_DEPTH_COMPARISON_BIT,
+    eVideoDecodeOutputKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_OUTPUT_BIT_KHR,
+    eVideoDecodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_DECODE_DPB_BIT_KHR,
+    eAccelerationStructureVertexBufferKHR = VK_FORMAT_FEATURE_2_ACCELERATION_STRUCTURE_VERTEX_BUFFER_BIT_KHR,
+    eFragmentDensityMapEXT = VK_FORMAT_FEATURE_2_FRAGMENT_DENSITY_MAP_BIT_EXT,
+    eFragmentShadingRateAttachmentKHR = VK_FORMAT_FEATURE_2_FRAGMENT_SHADING_RATE_ATTACHMENT_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eVideoEncodeInputKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_INPUT_BIT_KHR,
+    eVideoEncodeDpbKHR = VK_FORMAT_FEATURE_2_VIDEO_ENCODE_DPB_BIT_KHR,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eLinearColorAttachmentNV = VK_FORMAT_FEATURE_2_LINEAR_COLOR_ATTACHMENT_BIT_NV,
+    eWeightImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_IMAGE_BIT_QCOM,
+    eWeightSampledImageQCOM = VK_FORMAT_FEATURE_2_WEIGHT_SAMPLED_IMAGE_BIT_QCOM,
+    eBlockMatchingQCOM = VK_FORMAT_FEATURE_2_BLOCK_MATCHING_BIT_QCOM,
+    eBoxFilterSampledQCOM = VK_FORMAT_FEATURE_2_BOX_FILTER_SAMPLED_BIT_QCOM,
+    eOpticalFlowImageNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_IMAGE_BIT_NV,
+    eOpticalFlowVectorNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_VECTOR_BIT_NV,
+    eOpticalFlowCostNV = VK_FORMAT_FEATURE_2_OPTICAL_FLOW_COST_BIT_NV,
+    eSampledImageFilterCubicEXT = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_FILTER_CUBIC_BIT_EXT
+  };
+  using FormatFeatureFlagBits2KHR = FormatFeatureFlagBits2;
+
+  using FormatFeatureFlags2 = Flags<FormatFeatureFlagBits2>;
+  using FormatFeatureFlags2KHR = FormatFeatureFlags2;
+
+
+  template <> struct FlagTraits<FormatFeatureFlagBits2>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR FormatFeatureFlags2 allFlags = 
+          FormatFeatureFlagBits2::eSampledImage
+        | FormatFeatureFlagBits2::eStorageImage
+        | FormatFeatureFlagBits2::eStorageImageAtomic
+        | FormatFeatureFlagBits2::eUniformTexelBuffer
+        | FormatFeatureFlagBits2::eStorageTexelBuffer
+        | FormatFeatureFlagBits2::eStorageTexelBufferAtomic
+        | FormatFeatureFlagBits2::eVertexBuffer
+        | FormatFeatureFlagBits2::eColorAttachment
+        | FormatFeatureFlagBits2::eColorAttachmentBlend
+        | FormatFeatureFlagBits2::eDepthStencilAttachment
+        | FormatFeatureFlagBits2::eBlitSrc
+        | FormatFeatureFlagBits2::eBlitDst
+        | FormatFeatureFlagBits2::eSampledImageFilterLinear
+        | FormatFeatureFlagBits2::eSampledImageFilterCubic
+        | FormatFeatureFlagBits2::eTransferSrc
+        | FormatFeatureFlagBits2::eTransferDst
+        | FormatFeatureFlagBits2::eSampledImageFilterMinmax
+        | FormatFeatureFlagBits2::eMidpointChromaSamples
+        | FormatFeatureFlagBits2::eSampledImageYcbcrConversionLinearFilter
+        | FormatFeatureFlagBits2::eSampledImageYcbcrConversionSeparateReconstructionFilter
+        | FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicit
+        | FormatFeatureFlagBits2::eSampledImageYcbcrConversionChromaReconstructionExplicitForceable
+        | FormatFeatureFlagBits2::eDisjoint
+        | FormatFeatureFlagBits2::eCositedChromaSamples
+        | FormatFeatureFlagBits2::eStorageReadWithoutFormat
+        | FormatFeatureFlagBits2::eStorageWriteWithoutFormat
+        | FormatFeatureFlagBits2::eSampledImageDepthComparison
+        | FormatFeatureFlagBits2::eVideoDecodeOutputKHR
+        | FormatFeatureFlagBits2::eVideoDecodeDpbKHR
+        | FormatFeatureFlagBits2::eAccelerationStructureVertexBufferKHR
+        | FormatFeatureFlagBits2::eFragmentDensityMapEXT
+        | FormatFeatureFlagBits2::eFragmentShadingRateAttachmentKHR
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | FormatFeatureFlagBits2::eVideoEncodeInputKHR
+        | FormatFeatureFlagBits2::eVideoEncodeDpbKHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | FormatFeatureFlagBits2::eLinearColorAttachmentNV
+        | FormatFeatureFlagBits2::eWeightImageQCOM
+        | FormatFeatureFlagBits2::eWeightSampledImageQCOM
+        | FormatFeatureFlagBits2::eBlockMatchingQCOM
+        | FormatFeatureFlagBits2::eBoxFilterSampledQCOM
+        | FormatFeatureFlagBits2::eOpticalFlowImageNV
+        | FormatFeatureFlagBits2::eOpticalFlowVectorNV
+        | FormatFeatureFlagBits2::eOpticalFlowCostNV;
+  };
+
+  //=== VK_KHR_surface ===
+
+  enum class SurfaceTransformFlagBitsKHR : VkSurfaceTransformFlagsKHR
+  {
+    eIdentity = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR,
+    eRotate90 = VK_SURFACE_TRANSFORM_ROTATE_90_BIT_KHR,
+    eRotate180 = VK_SURFACE_TRANSFORM_ROTATE_180_BIT_KHR,
+    eRotate270 = VK_SURFACE_TRANSFORM_ROTATE_270_BIT_KHR,
+    eHorizontalMirror = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_BIT_KHR,
+    eHorizontalMirrorRotate90 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_90_BIT_KHR,
+    eHorizontalMirrorRotate180 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_180_BIT_KHR,
+    eHorizontalMirrorRotate270 = VK_SURFACE_TRANSFORM_HORIZONTAL_MIRROR_ROTATE_270_BIT_KHR,
+    eInherit = VK_SURFACE_TRANSFORM_INHERIT_BIT_KHR
+  };
+
+  using SurfaceTransformFlagsKHR = Flags<SurfaceTransformFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<SurfaceTransformFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SurfaceTransformFlagsKHR allFlags = 
+          SurfaceTransformFlagBitsKHR::eIdentity
+        | SurfaceTransformFlagBitsKHR::eRotate90
+        | SurfaceTransformFlagBitsKHR::eRotate180
+        | SurfaceTransformFlagBitsKHR::eRotate270
+        | SurfaceTransformFlagBitsKHR::eHorizontalMirror
+        | SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate90
+        | SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate180
+        | SurfaceTransformFlagBitsKHR::eHorizontalMirrorRotate270
+        | SurfaceTransformFlagBitsKHR::eInherit;
+  };
+
+  enum class PresentModeKHR
+  {
+    eImmediate = VK_PRESENT_MODE_IMMEDIATE_KHR,
+    eMailbox = VK_PRESENT_MODE_MAILBOX_KHR,
+    eFifo = VK_PRESENT_MODE_FIFO_KHR,
+    eFifoRelaxed = VK_PRESENT_MODE_FIFO_RELAXED_KHR,
+    eSharedDemandRefresh = VK_PRESENT_MODE_SHARED_DEMAND_REFRESH_KHR,
+    eSharedContinuousRefresh = VK_PRESENT_MODE_SHARED_CONTINUOUS_REFRESH_KHR
+  };
+
+  enum class ColorSpaceKHR
+  {
+    eSrgbNonlinear = VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,
+    eDisplayP3NonlinearEXT = VK_COLOR_SPACE_DISPLAY_P3_NONLINEAR_EXT,
+    eExtendedSrgbLinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT,
+    eDisplayP3LinearEXT = VK_COLOR_SPACE_DISPLAY_P3_LINEAR_EXT,
+    eDciP3NonlinearEXT = VK_COLOR_SPACE_DCI_P3_NONLINEAR_EXT,
+    eBt709LinearEXT = VK_COLOR_SPACE_BT709_LINEAR_EXT,
+    eBt709NonlinearEXT = VK_COLOR_SPACE_BT709_NONLINEAR_EXT,
+    eBt2020LinearEXT = VK_COLOR_SPACE_BT2020_LINEAR_EXT,
+    eHdr10St2084EXT = VK_COLOR_SPACE_HDR10_ST2084_EXT,
+    eDolbyvisionEXT = VK_COLOR_SPACE_DOLBYVISION_EXT,
+    eHdr10HlgEXT = VK_COLOR_SPACE_HDR10_HLG_EXT,
+    eAdobergbLinearEXT = VK_COLOR_SPACE_ADOBERGB_LINEAR_EXT,
+    eAdobergbNonlinearEXT = VK_COLOR_SPACE_ADOBERGB_NONLINEAR_EXT,
+    ePassThroughEXT = VK_COLOR_SPACE_PASS_THROUGH_EXT,
+    eExtendedSrgbNonlinearEXT = VK_COLOR_SPACE_EXTENDED_SRGB_NONLINEAR_EXT,
+    eDisplayNativeAMD = VK_COLOR_SPACE_DISPLAY_NATIVE_AMD,
+    eVkColorspaceSrgbNonlinear = VK_COLORSPACE_SRGB_NONLINEAR_KHR,
+    eDciP3LinearEXT = VK_COLOR_SPACE_DCI_P3_LINEAR_EXT
+  };
+
+  enum class CompositeAlphaFlagBitsKHR : VkCompositeAlphaFlagsKHR
+  {
+    eOpaque = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
+    ePreMultiplied = VK_COMPOSITE_ALPHA_PRE_MULTIPLIED_BIT_KHR,
+    ePostMultiplied = VK_COMPOSITE_ALPHA_POST_MULTIPLIED_BIT_KHR,
+    eInherit = VK_COMPOSITE_ALPHA_INHERIT_BIT_KHR
+  };
+
+  using CompositeAlphaFlagsKHR = Flags<CompositeAlphaFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<CompositeAlphaFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR CompositeAlphaFlagsKHR allFlags = 
+          CompositeAlphaFlagBitsKHR::eOpaque
+        | CompositeAlphaFlagBitsKHR::ePreMultiplied
+        | CompositeAlphaFlagBitsKHR::ePostMultiplied
+        | CompositeAlphaFlagBitsKHR::eInherit;
+  };
+
+  //=== VK_KHR_swapchain ===
+
+  enum class SwapchainCreateFlagBitsKHR : VkSwapchainCreateFlagsKHR
+  {
+    eSplitInstanceBindRegions = VK_SWAPCHAIN_CREATE_SPLIT_INSTANCE_BIND_REGIONS_BIT_KHR,
+    eProtected = VK_SWAPCHAIN_CREATE_PROTECTED_BIT_KHR,
+    eMutableFormat = VK_SWAPCHAIN_CREATE_MUTABLE_FORMAT_BIT_KHR,
+    eDeferredMemoryAllocationEXT = VK_SWAPCHAIN_CREATE_DEFERRED_MEMORY_ALLOCATION_BIT_EXT
+  };
+
+  using SwapchainCreateFlagsKHR = Flags<SwapchainCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<SwapchainCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SwapchainCreateFlagsKHR allFlags = 
+          SwapchainCreateFlagBitsKHR::eSplitInstanceBindRegions
+        | SwapchainCreateFlagBitsKHR::eProtected
+        | SwapchainCreateFlagBitsKHR::eMutableFormat
+        | SwapchainCreateFlagBitsKHR::eDeferredMemoryAllocationEXT;
+  };
+
+  enum class DeviceGroupPresentModeFlagBitsKHR : VkDeviceGroupPresentModeFlagsKHR
+  {
+    eLocal = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_BIT_KHR,
+    eRemote = VK_DEVICE_GROUP_PRESENT_MODE_REMOTE_BIT_KHR,
+    eSum = VK_DEVICE_GROUP_PRESENT_MODE_SUM_BIT_KHR,
+    eLocalMultiDevice = VK_DEVICE_GROUP_PRESENT_MODE_LOCAL_MULTI_DEVICE_BIT_KHR
+  };
+
+  using DeviceGroupPresentModeFlagsKHR = Flags<DeviceGroupPresentModeFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<DeviceGroupPresentModeFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceGroupPresentModeFlagsKHR allFlags = 
+          DeviceGroupPresentModeFlagBitsKHR::eLocal
+        | DeviceGroupPresentModeFlagBitsKHR::eRemote
+        | DeviceGroupPresentModeFlagBitsKHR::eSum
+        | DeviceGroupPresentModeFlagBitsKHR::eLocalMultiDevice;
+  };
+
+  //=== VK_KHR_display ===
+
+  enum class DisplayPlaneAlphaFlagBitsKHR : VkDisplayPlaneAlphaFlagsKHR
+  {
+    eOpaque = VK_DISPLAY_PLANE_ALPHA_OPAQUE_BIT_KHR,
+    eGlobal = VK_DISPLAY_PLANE_ALPHA_GLOBAL_BIT_KHR,
+    ePerPixel = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_BIT_KHR,
+    ePerPixelPremultiplied = VK_DISPLAY_PLANE_ALPHA_PER_PIXEL_PREMULTIPLIED_BIT_KHR
+  };
+
+  using DisplayPlaneAlphaFlagsKHR = Flags<DisplayPlaneAlphaFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<DisplayPlaneAlphaFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DisplayPlaneAlphaFlagsKHR allFlags = 
+          DisplayPlaneAlphaFlagBitsKHR::eOpaque
+        | DisplayPlaneAlphaFlagBitsKHR::eGlobal
+        | DisplayPlaneAlphaFlagBitsKHR::ePerPixel
+        | DisplayPlaneAlphaFlagBitsKHR::ePerPixelPremultiplied;
+  };
+
+  enum class DisplayModeCreateFlagBitsKHR : VkDisplayModeCreateFlagsKHR
+  {};
+
+  using DisplayModeCreateFlagsKHR = Flags<DisplayModeCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<DisplayModeCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DisplayModeCreateFlagsKHR allFlags =  {};
+  };
+
+  enum class DisplaySurfaceCreateFlagBitsKHR : VkDisplaySurfaceCreateFlagsKHR
+  {};
+
+  using DisplaySurfaceCreateFlagsKHR = Flags<DisplaySurfaceCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<DisplaySurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DisplaySurfaceCreateFlagsKHR allFlags =  {};
+  };
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+
+  enum class XlibSurfaceCreateFlagBitsKHR : VkXlibSurfaceCreateFlagsKHR
+  {};
+
+  using XlibSurfaceCreateFlagsKHR = Flags<XlibSurfaceCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<XlibSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR XlibSurfaceCreateFlagsKHR allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+
+  enum class XcbSurfaceCreateFlagBitsKHR : VkXcbSurfaceCreateFlagsKHR
+  {};
+
+  using XcbSurfaceCreateFlagsKHR = Flags<XcbSurfaceCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<XcbSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR XcbSurfaceCreateFlagsKHR allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+
+  enum class WaylandSurfaceCreateFlagBitsKHR : VkWaylandSurfaceCreateFlagsKHR
+  {};
+
+  using WaylandSurfaceCreateFlagsKHR = Flags<WaylandSurfaceCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<WaylandSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR WaylandSurfaceCreateFlagsKHR allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+
+  enum class AndroidSurfaceCreateFlagBitsKHR : VkAndroidSurfaceCreateFlagsKHR
+  {};
+
+  using AndroidSurfaceCreateFlagsKHR = Flags<AndroidSurfaceCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<AndroidSurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AndroidSurfaceCreateFlagsKHR allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+
+  enum class Win32SurfaceCreateFlagBitsKHR : VkWin32SurfaceCreateFlagsKHR
+  {};
+
+  using Win32SurfaceCreateFlagsKHR = Flags<Win32SurfaceCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<Win32SurfaceCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR Win32SurfaceCreateFlagsKHR allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+
+  enum class DebugReportFlagBitsEXT : VkDebugReportFlagsEXT
+  {
+    eInformation = VK_DEBUG_REPORT_INFORMATION_BIT_EXT,
+    eWarning = VK_DEBUG_REPORT_WARNING_BIT_EXT,
+    ePerformanceWarning = VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT,
+    eError = VK_DEBUG_REPORT_ERROR_BIT_EXT,
+    eDebug = VK_DEBUG_REPORT_DEBUG_BIT_EXT
+  };
+
+  using DebugReportFlagsEXT = Flags<DebugReportFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DebugReportFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugReportFlagsEXT allFlags = 
+          DebugReportFlagBitsEXT::eInformation
+        | DebugReportFlagBitsEXT::eWarning
+        | DebugReportFlagBitsEXT::ePerformanceWarning
+        | DebugReportFlagBitsEXT::eError
+        | DebugReportFlagBitsEXT::eDebug;
+  };
+
+  enum class DebugReportObjectTypeEXT
+  {
+    eUnknown = VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,
+    eInstance = VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,
+    ePhysicalDevice = VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,
+    eDevice = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,
+    eQueue = VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,
+    eSemaphore = VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,
+    eCommandBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,
+    eFence = VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,
+    eDeviceMemory = VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,
+    eBuffer = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,
+    eImage = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,
+    eEvent = VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,
+    eQueryPool = VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,
+    eBufferView = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,
+    eImageView = VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,
+    eShaderModule = VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,
+    ePipelineCache = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,
+    ePipelineLayout = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,
+    eRenderPass = VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,
+    ePipeline = VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,
+    eDescriptorSetLayout = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,
+    eSampler = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,
+    eDescriptorPool = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,
+    eDescriptorSet = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,
+    eFramebuffer = VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,
+    eCommandPool = VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,
+    eSurfaceKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,
+    eSwapchainKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,
+    eDebugReportCallbackEXT = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,
+    eDisplayKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_KHR_EXT,
+    eDisplayModeKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DISPLAY_MODE_KHR_EXT,
+    eValidationCacheEXT = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT_EXT,
+    eSamplerYcbcrConversion = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_EXT,
+    eDescriptorUpdateTemplate = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_EXT,
+    eCuModuleNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_MODULE_NVX_EXT,
+    eCuFunctionNVX = VK_DEBUG_REPORT_OBJECT_TYPE_CU_FUNCTION_NVX_EXT,
+    eAccelerationStructureKHR = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_KHR_EXT,
+    eAccelerationStructureNV = VK_DEBUG_REPORT_OBJECT_TYPE_ACCELERATION_STRUCTURE_NV_EXT,
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+    eBufferCollectionFUCHSIA = VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_COLLECTION_FUCHSIA_EXT,
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+    eDebugReport = VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_EXT,
+    eDescriptorUpdateTemplateKHR = VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_UPDATE_TEMPLATE_KHR_EXT,
+    eSamplerYcbcrConversionKHR = VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION_KHR_EXT,
+    eValidationCache = VK_DEBUG_REPORT_OBJECT_TYPE_VALIDATION_CACHE_EXT
+  };
+
+  //=== VK_AMD_rasterization_order ===
+
+  enum class RasterizationOrderAMD
+  {
+    eStrict = VK_RASTERIZATION_ORDER_STRICT_AMD,
+    eRelaxed = VK_RASTERIZATION_ORDER_RELAXED_AMD
+  };
+
+  //=== VK_KHR_video_queue ===
+
+  enum class VideoCodecOperationFlagBitsKHR : VkVideoCodecOperationFlagsKHR
+  {
+    eNone = VK_VIDEO_CODEC_OPERATION_NONE_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eEncodeH264EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_EXT,
+    eEncodeH265EXT = VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_EXT,
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+    eDecodeH264 = VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR,
+    eDecodeH265 = VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR
+  };
+
+  using VideoCodecOperationFlagsKHR = Flags<VideoCodecOperationFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoCodecOperationFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCodecOperationFlagsKHR allFlags = 
+          VideoCodecOperationFlagBitsKHR::eNone
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | VideoCodecOperationFlagBitsKHR::eEncodeH264EXT
+        | VideoCodecOperationFlagBitsKHR::eEncodeH265EXT
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+        | VideoCodecOperationFlagBitsKHR::eDecodeH264
+        | VideoCodecOperationFlagBitsKHR::eDecodeH265;
+  };
+
+  enum class VideoChromaSubsamplingFlagBitsKHR : VkVideoChromaSubsamplingFlagsKHR
+  {
+    eInvalid = VK_VIDEO_CHROMA_SUBSAMPLING_INVALID_KHR,
+    eMonochrome = VK_VIDEO_CHROMA_SUBSAMPLING_MONOCHROME_BIT_KHR,
+    e420 = VK_VIDEO_CHROMA_SUBSAMPLING_420_BIT_KHR,
+    e422 = VK_VIDEO_CHROMA_SUBSAMPLING_422_BIT_KHR,
+    e444 = VK_VIDEO_CHROMA_SUBSAMPLING_444_BIT_KHR
+  };
+
+  using VideoChromaSubsamplingFlagsKHR = Flags<VideoChromaSubsamplingFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoChromaSubsamplingFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoChromaSubsamplingFlagsKHR allFlags = 
+          VideoChromaSubsamplingFlagBitsKHR::eInvalid
+        | VideoChromaSubsamplingFlagBitsKHR::eMonochrome
+        | VideoChromaSubsamplingFlagBitsKHR::e420
+        | VideoChromaSubsamplingFlagBitsKHR::e422
+        | VideoChromaSubsamplingFlagBitsKHR::e444;
+  };
+
+  enum class VideoComponentBitDepthFlagBitsKHR : VkVideoComponentBitDepthFlagsKHR
+  {
+    eInvalid = VK_VIDEO_COMPONENT_BIT_DEPTH_INVALID_KHR,
+    e8 = VK_VIDEO_COMPONENT_BIT_DEPTH_8_BIT_KHR,
+    e10 = VK_VIDEO_COMPONENT_BIT_DEPTH_10_BIT_KHR,
+    e12 = VK_VIDEO_COMPONENT_BIT_DEPTH_12_BIT_KHR
+  };
+
+  using VideoComponentBitDepthFlagsKHR = Flags<VideoComponentBitDepthFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoComponentBitDepthFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoComponentBitDepthFlagsKHR allFlags = 
+          VideoComponentBitDepthFlagBitsKHR::eInvalid
+        | VideoComponentBitDepthFlagBitsKHR::e8
+        | VideoComponentBitDepthFlagBitsKHR::e10
+        | VideoComponentBitDepthFlagBitsKHR::e12;
+  };
+
+  enum class VideoCapabilityFlagBitsKHR : VkVideoCapabilityFlagsKHR
+  {
+    eProtectedContent = VK_VIDEO_CAPABILITY_PROTECTED_CONTENT_BIT_KHR,
+    eSeparateReferenceImages = VK_VIDEO_CAPABILITY_SEPARATE_REFERENCE_IMAGES_BIT_KHR
+  };
+
+  using VideoCapabilityFlagsKHR = Flags<VideoCapabilityFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoCapabilityFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCapabilityFlagsKHR allFlags = 
+          VideoCapabilityFlagBitsKHR::eProtectedContent
+        | VideoCapabilityFlagBitsKHR::eSeparateReferenceImages;
+  };
+
+  enum class VideoSessionCreateFlagBitsKHR : VkVideoSessionCreateFlagsKHR
+  {
+    eProtectedContent = VK_VIDEO_SESSION_CREATE_PROTECTED_CONTENT_BIT_KHR
+  };
+
+  using VideoSessionCreateFlagsKHR = Flags<VideoSessionCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoSessionCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionCreateFlagsKHR allFlags = 
+          VideoSessionCreateFlagBitsKHR::eProtectedContent;
+  };
+
+  enum class VideoCodingControlFlagBitsKHR : VkVideoCodingControlFlagsKHR
+  {
+    eReset = VK_VIDEO_CODING_CONTROL_RESET_BIT_KHR,
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+    eEncodeRateControl = VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_BIT_KHR,
+    eEncodeRateControlLayer = VK_VIDEO_CODING_CONTROL_ENCODE_RATE_CONTROL_LAYER_BIT_KHR
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+  };
+
+  using VideoCodingControlFlagsKHR = Flags<VideoCodingControlFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoCodingControlFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoCodingControlFlagsKHR allFlags = 
+          VideoCodingControlFlagBitsKHR::eReset
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+        | VideoCodingControlFlagBitsKHR::eEncodeRateControl
+        | VideoCodingControlFlagBitsKHR::eEncodeRateControlLayer
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+;
+  };
+
+  enum class QueryResultStatusKHR
+  {
+    eError = VK_QUERY_RESULT_STATUS_ERROR_KHR,
+    eNotReady = VK_QUERY_RESULT_STATUS_NOT_READY_KHR,
+    eComplete = VK_QUERY_RESULT_STATUS_COMPLETE_KHR
+  };
+
+  enum class VideoSessionParametersCreateFlagBitsKHR : VkVideoSessionParametersCreateFlagsKHR
+  {};
+
+  using VideoSessionParametersCreateFlagsKHR = Flags<VideoSessionParametersCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoSessionParametersCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoSessionParametersCreateFlagsKHR allFlags =  {};
+  };
+
+  enum class VideoBeginCodingFlagBitsKHR : VkVideoBeginCodingFlagsKHR
+  {};
+
+  using VideoBeginCodingFlagsKHR = Flags<VideoBeginCodingFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoBeginCodingFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoBeginCodingFlagsKHR allFlags =  {};
+  };
+
+  enum class VideoEndCodingFlagBitsKHR : VkVideoEndCodingFlagsKHR
+  {};
+
+  using VideoEndCodingFlagsKHR = Flags<VideoEndCodingFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEndCodingFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEndCodingFlagsKHR allFlags =  {};
+  };
+
+  //=== VK_KHR_video_decode_queue ===
+
+  enum class VideoDecodeCapabilityFlagBitsKHR : VkVideoDecodeCapabilityFlagsKHR
+  {
+    eDpbAndOutputCoincide = VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_COINCIDE_BIT_KHR,
+    eDpbAndOutputDistinct = VK_VIDEO_DECODE_CAPABILITY_DPB_AND_OUTPUT_DISTINCT_BIT_KHR
+  };
+
+  using VideoDecodeCapabilityFlagsKHR = Flags<VideoDecodeCapabilityFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoDecodeCapabilityFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeCapabilityFlagsKHR allFlags = 
+          VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputCoincide
+        | VideoDecodeCapabilityFlagBitsKHR::eDpbAndOutputDistinct;
+  };
+
+  enum class VideoDecodeUsageFlagBitsKHR : VkVideoDecodeUsageFlagsKHR
+  {
+    eDefault = VK_VIDEO_DECODE_USAGE_DEFAULT_KHR,
+    eTranscoding = VK_VIDEO_DECODE_USAGE_TRANSCODING_BIT_KHR,
+    eOffline = VK_VIDEO_DECODE_USAGE_OFFLINE_BIT_KHR,
+    eStreaming = VK_VIDEO_DECODE_USAGE_STREAMING_BIT_KHR
+  };
+
+  using VideoDecodeUsageFlagsKHR = Flags<VideoDecodeUsageFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoDecodeUsageFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeUsageFlagsKHR allFlags = 
+          VideoDecodeUsageFlagBitsKHR::eDefault
+        | VideoDecodeUsageFlagBitsKHR::eTranscoding
+        | VideoDecodeUsageFlagBitsKHR::eOffline
+        | VideoDecodeUsageFlagBitsKHR::eStreaming;
+  };
+
+  enum class VideoDecodeFlagBitsKHR : VkVideoDecodeFlagsKHR
+  {};
+
+  using VideoDecodeFlagsKHR = Flags<VideoDecodeFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoDecodeFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeFlagsKHR allFlags =  {};
+  };
+
+  //=== VK_EXT_transform_feedback ===
+
+  enum class PipelineRasterizationStateStreamCreateFlagBitsEXT : VkPipelineRasterizationStateStreamCreateFlagsEXT
+  {};
+
+  using PipelineRasterizationStateStreamCreateFlagsEXT = Flags<PipelineRasterizationStateStreamCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<PipelineRasterizationStateStreamCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationStateStreamCreateFlagsEXT allFlags =  {};
+  };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h264 ===
+
+  enum class VideoEncodeH264CapabilityFlagBitsEXT : VkVideoEncodeH264CapabilityFlagsEXT
+  {
+    eDirect8X8InferenceEnabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_ENABLED_BIT_EXT,
+    eDirect8X8InferenceDisabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DIRECT_8X8_INFERENCE_DISABLED_BIT_EXT,
+    eSeparateColourPlane = VK_VIDEO_ENCODE_H264_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT,
+    eQpprimeYZeroTransformBypass = VK_VIDEO_ENCODE_H264_CAPABILITY_QPPRIME_Y_ZERO_TRANSFORM_BYPASS_BIT_EXT,
+    eScalingLists = VK_VIDEO_ENCODE_H264_CAPABILITY_SCALING_LISTS_BIT_EXT,
+    eHrdCompliance = VK_VIDEO_ENCODE_H264_CAPABILITY_HRD_COMPLIANCE_BIT_EXT,
+    eChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_CHROMA_QP_OFFSET_BIT_EXT,
+    eSecondChromaQpOffset = VK_VIDEO_ENCODE_H264_CAPABILITY_SECOND_CHROMA_QP_OFFSET_BIT_EXT,
+    ePicInitQpMinus26 = VK_VIDEO_ENCODE_H264_CAPABILITY_PIC_INIT_QP_MINUS26_BIT_EXT,
+    eWeightedPred = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_BIT_EXT,
+    eWeightedBipredExplicit = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_EXPLICIT_BIT_EXT,
+    eWeightedBipredImplicit = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_BIPRED_IMPLICIT_BIT_EXT,
+    eWeightedPredNoTable = VK_VIDEO_ENCODE_H264_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT,
+    eTransform8X8 = VK_VIDEO_ENCODE_H264_CAPABILITY_TRANSFORM_8X8_BIT_EXT,
+    eCabac = VK_VIDEO_ENCODE_H264_CAPABILITY_CABAC_BIT_EXT,
+    eCavlc = VK_VIDEO_ENCODE_H264_CAPABILITY_CAVLC_BIT_EXT,
+    eDeblockingFilterDisabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_DISABLED_BIT_EXT,
+    eDeblockingFilterEnabled = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_ENABLED_BIT_EXT,
+    eDeblockingFilterPartial = VK_VIDEO_ENCODE_H264_CAPABILITY_DEBLOCKING_FILTER_PARTIAL_BIT_EXT,
+    eDisableDirectSpatialMvPred = VK_VIDEO_ENCODE_H264_CAPABILITY_DISABLE_DIRECT_SPATIAL_MV_PRED_BIT_EXT,
+    eMultipleSlicePerFrame = VK_VIDEO_ENCODE_H264_CAPABILITY_MULTIPLE_SLICE_PER_FRAME_BIT_EXT,
+    eSliceMbCount = VK_VIDEO_ENCODE_H264_CAPABILITY_SLICE_MB_COUNT_BIT_EXT,
+    eRowUnalignedSlice = VK_VIDEO_ENCODE_H264_CAPABILITY_ROW_UNALIGNED_SLICE_BIT_EXT,
+    eDifferentSliceType = VK_VIDEO_ENCODE_H264_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT,
+    eBFrameInL1List = VK_VIDEO_ENCODE_H264_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT
+  };
+
+  using VideoEncodeH264CapabilityFlagsEXT = Flags<VideoEncodeH264CapabilityFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH264CapabilityFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264CapabilityFlagsEXT allFlags = 
+          VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceEnabled
+        | VideoEncodeH264CapabilityFlagBitsEXT::eDirect8X8InferenceDisabled
+        | VideoEncodeH264CapabilityFlagBitsEXT::eSeparateColourPlane
+        | VideoEncodeH264CapabilityFlagBitsEXT::eQpprimeYZeroTransformBypass
+        | VideoEncodeH264CapabilityFlagBitsEXT::eScalingLists
+        | VideoEncodeH264CapabilityFlagBitsEXT::eHrdCompliance
+        | VideoEncodeH264CapabilityFlagBitsEXT::eChromaQpOffset
+        | VideoEncodeH264CapabilityFlagBitsEXT::eSecondChromaQpOffset
+        | VideoEncodeH264CapabilityFlagBitsEXT::ePicInitQpMinus26
+        | VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPred
+        | VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredExplicit
+        | VideoEncodeH264CapabilityFlagBitsEXT::eWeightedBipredImplicit
+        | VideoEncodeH264CapabilityFlagBitsEXT::eWeightedPredNoTable
+        | VideoEncodeH264CapabilityFlagBitsEXT::eTransform8X8
+        | VideoEncodeH264CapabilityFlagBitsEXT::eCabac
+        | VideoEncodeH264CapabilityFlagBitsEXT::eCavlc
+        | VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterDisabled
+        | VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterEnabled
+        | VideoEncodeH264CapabilityFlagBitsEXT::eDeblockingFilterPartial
+        | VideoEncodeH264CapabilityFlagBitsEXT::eDisableDirectSpatialMvPred
+        | VideoEncodeH264CapabilityFlagBitsEXT::eMultipleSlicePerFrame
+        | VideoEncodeH264CapabilityFlagBitsEXT::eSliceMbCount
+        | VideoEncodeH264CapabilityFlagBitsEXT::eRowUnalignedSlice
+        | VideoEncodeH264CapabilityFlagBitsEXT::eDifferentSliceType
+        | VideoEncodeH264CapabilityFlagBitsEXT::eBFrameInL1List;
+  };
+
+  enum class VideoEncodeH264InputModeFlagBitsEXT : VkVideoEncodeH264InputModeFlagsEXT
+  {
+    eFrame = VK_VIDEO_ENCODE_H264_INPUT_MODE_FRAME_BIT_EXT,
+    eSlice = VK_VIDEO_ENCODE_H264_INPUT_MODE_SLICE_BIT_EXT,
+    eNonVcl = VK_VIDEO_ENCODE_H264_INPUT_MODE_NON_VCL_BIT_EXT
+  };
+
+  using VideoEncodeH264InputModeFlagsEXT = Flags<VideoEncodeH264InputModeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH264InputModeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264InputModeFlagsEXT allFlags = 
+          VideoEncodeH264InputModeFlagBitsEXT::eFrame
+        | VideoEncodeH264InputModeFlagBitsEXT::eSlice
+        | VideoEncodeH264InputModeFlagBitsEXT::eNonVcl;
+  };
+
+  enum class VideoEncodeH264OutputModeFlagBitsEXT : VkVideoEncodeH264OutputModeFlagsEXT
+  {
+    eFrame = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_FRAME_BIT_EXT,
+    eSlice = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_SLICE_BIT_EXT,
+    eNonVcl = VK_VIDEO_ENCODE_H264_OUTPUT_MODE_NON_VCL_BIT_EXT
+  };
+
+  using VideoEncodeH264OutputModeFlagsEXT = Flags<VideoEncodeH264OutputModeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH264OutputModeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH264OutputModeFlagsEXT allFlags = 
+          VideoEncodeH264OutputModeFlagBitsEXT::eFrame
+        | VideoEncodeH264OutputModeFlagBitsEXT::eSlice
+        | VideoEncodeH264OutputModeFlagBitsEXT::eNonVcl;
+  };
+
+  enum class VideoEncodeH264RateControlStructureEXT
+  {
+    eUnknown = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT,
+    eFlat = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_FLAT_EXT,
+    eDyadic = VK_VIDEO_ENCODE_H264_RATE_CONTROL_STRUCTURE_DYADIC_EXT
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h265 ===
+
+  enum class VideoEncodeH265CapabilityFlagBitsEXT : VkVideoEncodeH265CapabilityFlagsEXT
+  {
+    eSeparateColourPlane = VK_VIDEO_ENCODE_H265_CAPABILITY_SEPARATE_COLOUR_PLANE_BIT_EXT,
+    eScalingLists = VK_VIDEO_ENCODE_H265_CAPABILITY_SCALING_LISTS_BIT_EXT,
+    eSampleAdaptiveOffsetEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_SAMPLE_ADAPTIVE_OFFSET_ENABLED_BIT_EXT,
+    ePcmEnable = VK_VIDEO_ENCODE_H265_CAPABILITY_PCM_ENABLE_BIT_EXT,
+    eSpsTemporalMvpEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_SPS_TEMPORAL_MVP_ENABLED_BIT_EXT,
+    eHrdCompliance = VK_VIDEO_ENCODE_H265_CAPABILITY_HRD_COMPLIANCE_BIT_EXT,
+    eInitQpMinus26 = VK_VIDEO_ENCODE_H265_CAPABILITY_INIT_QP_MINUS26_BIT_EXT,
+    eLog2ParallelMergeLevelMinus2 = VK_VIDEO_ENCODE_H265_CAPABILITY_LOG2_PARALLEL_MERGE_LEVEL_MINUS2_BIT_EXT,
+    eSignDataHidingEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_SIGN_DATA_HIDING_ENABLED_BIT_EXT,
+    eTransformSkipEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_ENABLED_BIT_EXT,
+    eTransformSkipDisabled = VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSFORM_SKIP_DISABLED_BIT_EXT,
+    ePpsSliceChromaQpOffsetsPresent = VK_VIDEO_ENCODE_H265_CAPABILITY_PPS_SLICE_CHROMA_QP_OFFSETS_PRESENT_BIT_EXT,
+    eWeightedPred = VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_BIT_EXT,
+    eWeightedBipred = VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_BIPRED_BIT_EXT,
+    eWeightedPredNoTable = VK_VIDEO_ENCODE_H265_CAPABILITY_WEIGHTED_PRED_NO_TABLE_BIT_EXT,
+    eTransquantBypassEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_TRANSQUANT_BYPASS_ENABLED_BIT_EXT,
+    eEntropyCodingSyncEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_ENTROPY_CODING_SYNC_ENABLED_BIT_EXT,
+    eDeblockingFilterOverrideEnabled = VK_VIDEO_ENCODE_H265_CAPABILITY_DEBLOCKING_FILTER_OVERRIDE_ENABLED_BIT_EXT,
+    eMultipleTilePerFrame = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_FRAME_BIT_EXT,
+    eMultipleSlicePerTile = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_SLICE_PER_TILE_BIT_EXT,
+    eMultipleTilePerSlice = VK_VIDEO_ENCODE_H265_CAPABILITY_MULTIPLE_TILE_PER_SLICE_BIT_EXT,
+    eSliceSegmentCtbCount = VK_VIDEO_ENCODE_H265_CAPABILITY_SLICE_SEGMENT_CTB_COUNT_BIT_EXT,
+    eRowUnalignedSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_ROW_UNALIGNED_SLICE_SEGMENT_BIT_EXT,
+    eDependentSliceSegment = VK_VIDEO_ENCODE_H265_CAPABILITY_DEPENDENT_SLICE_SEGMENT_BIT_EXT,
+    eDifferentSliceType = VK_VIDEO_ENCODE_H265_CAPABILITY_DIFFERENT_SLICE_TYPE_BIT_EXT,
+    eBFrameInL1List = VK_VIDEO_ENCODE_H265_CAPABILITY_B_FRAME_IN_L1_LIST_BIT_EXT
+  };
+
+  using VideoEncodeH265CapabilityFlagsEXT = Flags<VideoEncodeH265CapabilityFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH265CapabilityFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265CapabilityFlagsEXT allFlags = 
+          VideoEncodeH265CapabilityFlagBitsEXT::eSeparateColourPlane
+        | VideoEncodeH265CapabilityFlagBitsEXT::eScalingLists
+        | VideoEncodeH265CapabilityFlagBitsEXT::eSampleAdaptiveOffsetEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::ePcmEnable
+        | VideoEncodeH265CapabilityFlagBitsEXT::eSpsTemporalMvpEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::eHrdCompliance
+        | VideoEncodeH265CapabilityFlagBitsEXT::eInitQpMinus26
+        | VideoEncodeH265CapabilityFlagBitsEXT::eLog2ParallelMergeLevelMinus2
+        | VideoEncodeH265CapabilityFlagBitsEXT::eSignDataHidingEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::eTransformSkipDisabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::ePpsSliceChromaQpOffsetsPresent
+        | VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPred
+        | VideoEncodeH265CapabilityFlagBitsEXT::eWeightedBipred
+        | VideoEncodeH265CapabilityFlagBitsEXT::eWeightedPredNoTable
+        | VideoEncodeH265CapabilityFlagBitsEXT::eTransquantBypassEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::eEntropyCodingSyncEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::eDeblockingFilterOverrideEnabled
+        | VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerFrame
+        | VideoEncodeH265CapabilityFlagBitsEXT::eMultipleSlicePerTile
+        | VideoEncodeH265CapabilityFlagBitsEXT::eMultipleTilePerSlice
+        | VideoEncodeH265CapabilityFlagBitsEXT::eSliceSegmentCtbCount
+        | VideoEncodeH265CapabilityFlagBitsEXT::eRowUnalignedSliceSegment
+        | VideoEncodeH265CapabilityFlagBitsEXT::eDependentSliceSegment
+        | VideoEncodeH265CapabilityFlagBitsEXT::eDifferentSliceType
+        | VideoEncodeH265CapabilityFlagBitsEXT::eBFrameInL1List;
+  };
+
+  enum class VideoEncodeH265InputModeFlagBitsEXT : VkVideoEncodeH265InputModeFlagsEXT
+  {
+    eFrame = VK_VIDEO_ENCODE_H265_INPUT_MODE_FRAME_BIT_EXT,
+    eSliceSegment = VK_VIDEO_ENCODE_H265_INPUT_MODE_SLICE_SEGMENT_BIT_EXT,
+    eNonVcl = VK_VIDEO_ENCODE_H265_INPUT_MODE_NON_VCL_BIT_EXT
+  };
+
+  using VideoEncodeH265InputModeFlagsEXT = Flags<VideoEncodeH265InputModeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH265InputModeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265InputModeFlagsEXT allFlags = 
+          VideoEncodeH265InputModeFlagBitsEXT::eFrame
+        | VideoEncodeH265InputModeFlagBitsEXT::eSliceSegment
+        | VideoEncodeH265InputModeFlagBitsEXT::eNonVcl;
+  };
+
+  enum class VideoEncodeH265OutputModeFlagBitsEXT : VkVideoEncodeH265OutputModeFlagsEXT
+  {
+    eFrame = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_FRAME_BIT_EXT,
+    eSliceSegment = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_SLICE_SEGMENT_BIT_EXT,
+    eNonVcl = VK_VIDEO_ENCODE_H265_OUTPUT_MODE_NON_VCL_BIT_EXT
+  };
+
+  using VideoEncodeH265OutputModeFlagsEXT = Flags<VideoEncodeH265OutputModeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH265OutputModeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265OutputModeFlagsEXT allFlags = 
+          VideoEncodeH265OutputModeFlagBitsEXT::eFrame
+        | VideoEncodeH265OutputModeFlagBitsEXT::eSliceSegment
+        | VideoEncodeH265OutputModeFlagBitsEXT::eNonVcl;
+  };
+
+  enum class VideoEncodeH265CtbSizeFlagBitsEXT : VkVideoEncodeH265CtbSizeFlagsEXT
+  {
+    e16 = VK_VIDEO_ENCODE_H265_CTB_SIZE_16_BIT_EXT,
+    e32 = VK_VIDEO_ENCODE_H265_CTB_SIZE_32_BIT_EXT,
+    e64 = VK_VIDEO_ENCODE_H265_CTB_SIZE_64_BIT_EXT
+  };
+
+  using VideoEncodeH265CtbSizeFlagsEXT = Flags<VideoEncodeH265CtbSizeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH265CtbSizeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265CtbSizeFlagsEXT allFlags = 
+          VideoEncodeH265CtbSizeFlagBitsEXT::e16
+        | VideoEncodeH265CtbSizeFlagBitsEXT::e32
+        | VideoEncodeH265CtbSizeFlagBitsEXT::e64;
+  };
+
+  enum class VideoEncodeH265TransformBlockSizeFlagBitsEXT : VkVideoEncodeH265TransformBlockSizeFlagsEXT
+  {
+    e4 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_4_BIT_EXT,
+    e8 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_8_BIT_EXT,
+    e16 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_16_BIT_EXT,
+    e32 = VK_VIDEO_ENCODE_H265_TRANSFORM_BLOCK_SIZE_32_BIT_EXT
+  };
+
+  using VideoEncodeH265TransformBlockSizeFlagsEXT = Flags<VideoEncodeH265TransformBlockSizeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<VideoEncodeH265TransformBlockSizeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeH265TransformBlockSizeFlagsEXT allFlags = 
+          VideoEncodeH265TransformBlockSizeFlagBitsEXT::e4
+        | VideoEncodeH265TransformBlockSizeFlagBitsEXT::e8
+        | VideoEncodeH265TransformBlockSizeFlagBitsEXT::e16
+        | VideoEncodeH265TransformBlockSizeFlagBitsEXT::e32;
+  };
+
+  enum class VideoEncodeH265RateControlStructureEXT
+  {
+    eUnknown = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_UNKNOWN_EXT,
+    eFlat = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_FLAT_EXT,
+    eDyadic = VK_VIDEO_ENCODE_H265_RATE_CONTROL_STRUCTURE_DYADIC_EXT
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_decode_h264 ===
+
+  enum class VideoDecodeH264PictureLayoutFlagBitsKHR : VkVideoDecodeH264PictureLayoutFlagsKHR
+  {
+    eProgressive = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_PROGRESSIVE_KHR,
+    eInterlacedInterleavedLines = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_INTERLEAVED_LINES_BIT_KHR,
+    eInterlacedSeparatePlanes = VK_VIDEO_DECODE_H264_PICTURE_LAYOUT_INTERLACED_SEPARATE_PLANES_BIT_KHR
+  };
+
+  using VideoDecodeH264PictureLayoutFlagsKHR = Flags<VideoDecodeH264PictureLayoutFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoDecodeH264PictureLayoutFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoDecodeH264PictureLayoutFlagsKHR allFlags = 
+          VideoDecodeH264PictureLayoutFlagBitsKHR::eProgressive
+        | VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedInterleavedLines
+        | VideoDecodeH264PictureLayoutFlagBitsKHR::eInterlacedSeparatePlanes;
+  };
+
+  //=== VK_AMD_shader_info ===
+
+  enum class ShaderInfoTypeAMD
+  {
+    eStatistics = VK_SHADER_INFO_TYPE_STATISTICS_AMD,
+    eBinary = VK_SHADER_INFO_TYPE_BINARY_AMD,
+    eDisassembly = VK_SHADER_INFO_TYPE_DISASSEMBLY_AMD
+  };
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+
+  enum class StreamDescriptorSurfaceCreateFlagBitsGGP : VkStreamDescriptorSurfaceCreateFlagsGGP
+  {};
+
+  using StreamDescriptorSurfaceCreateFlagsGGP = Flags<StreamDescriptorSurfaceCreateFlagBitsGGP>;
+
+
+  template <> struct FlagTraits<StreamDescriptorSurfaceCreateFlagBitsGGP>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR StreamDescriptorSurfaceCreateFlagsGGP allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+
+  enum class ExternalMemoryHandleTypeFlagBitsNV : VkExternalMemoryHandleTypeFlagsNV
+  {
+    eOpaqueWin32 = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT_NV,
+    eOpaqueWin32Kmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT_NV,
+    eD3D11Image = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_BIT_NV,
+    eD3D11ImageKmt = VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_IMAGE_KMT_BIT_NV
+  };
+
+  using ExternalMemoryHandleTypeFlagsNV = Flags<ExternalMemoryHandleTypeFlagBitsNV>;
+
+
+  template <> struct FlagTraits<ExternalMemoryHandleTypeFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryHandleTypeFlagsNV allFlags = 
+          ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32
+        | ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32Kmt
+        | ExternalMemoryHandleTypeFlagBitsNV::eD3D11Image
+        | ExternalMemoryHandleTypeFlagBitsNV::eD3D11ImageKmt;
+  };
+
+  enum class ExternalMemoryFeatureFlagBitsNV : VkExternalMemoryFeatureFlagsNV
+  {
+    eDedicatedOnly = VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT_NV,
+    eExportable = VK_EXTERNAL_MEMORY_FEATURE_EXPORTABLE_BIT_NV,
+    eImportable = VK_EXTERNAL_MEMORY_FEATURE_IMPORTABLE_BIT_NV
+  };
+
+  using ExternalMemoryFeatureFlagsNV = Flags<ExternalMemoryFeatureFlagBitsNV>;
+
+
+  template <> struct FlagTraits<ExternalMemoryFeatureFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExternalMemoryFeatureFlagsNV allFlags = 
+          ExternalMemoryFeatureFlagBitsNV::eDedicatedOnly
+        | ExternalMemoryFeatureFlagBitsNV::eExportable
+        | ExternalMemoryFeatureFlagBitsNV::eImportable;
+  };
+
+  //=== VK_EXT_validation_flags ===
+
+  enum class ValidationCheckEXT
+  {
+    eAll = VK_VALIDATION_CHECK_ALL_EXT,
+    eShaders = VK_VALIDATION_CHECK_SHADERS_EXT
+  };
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+
+  enum class ViSurfaceCreateFlagBitsNN : VkViSurfaceCreateFlagsNN
+  {};
+
+  using ViSurfaceCreateFlagsNN = Flags<ViSurfaceCreateFlagBitsNN>;
+
+
+  template <> struct FlagTraits<ViSurfaceCreateFlagBitsNN>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ViSurfaceCreateFlagsNN allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_EXT_pipeline_robustness ===
+
+  enum class PipelineRobustnessBufferBehaviorEXT
+  {
+    eDeviceDefault = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DEVICE_DEFAULT_EXT,
+    eDisabled = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_DISABLED_EXT,
+    eRobustBufferAccess = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_EXT,
+    eRobustBufferAccess2 = VK_PIPELINE_ROBUSTNESS_BUFFER_BEHAVIOR_ROBUST_BUFFER_ACCESS_2_EXT
+  };
+
+  enum class PipelineRobustnessImageBehaviorEXT
+  {
+    eDeviceDefault = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DEVICE_DEFAULT_EXT,
+    eDisabled = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_DISABLED_EXT,
+    eRobustImageAccess = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_EXT,
+    eRobustImageAccess2 = VK_PIPELINE_ROBUSTNESS_IMAGE_BEHAVIOR_ROBUST_IMAGE_ACCESS_2_EXT
+  };
+
+  //=== VK_EXT_conditional_rendering ===
+
+  enum class ConditionalRenderingFlagBitsEXT : VkConditionalRenderingFlagsEXT
+  {
+    eInverted = VK_CONDITIONAL_RENDERING_INVERTED_BIT_EXT
+  };
+
+  using ConditionalRenderingFlagsEXT = Flags<ConditionalRenderingFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<ConditionalRenderingFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ConditionalRenderingFlagsEXT allFlags = 
+          ConditionalRenderingFlagBitsEXT::eInverted;
+  };
+
+  //=== VK_EXT_display_surface_counter ===
+
+  enum class SurfaceCounterFlagBitsEXT : VkSurfaceCounterFlagsEXT
+  {
+    eVblank = VK_SURFACE_COUNTER_VBLANK_BIT_EXT
+  };
+
+  using SurfaceCounterFlagsEXT = Flags<SurfaceCounterFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<SurfaceCounterFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR SurfaceCounterFlagsEXT allFlags = 
+          SurfaceCounterFlagBitsEXT::eVblank;
+  };
+
+  //=== VK_EXT_display_control ===
+
+  enum class DisplayPowerStateEXT
+  {
+    eOff = VK_DISPLAY_POWER_STATE_OFF_EXT,
+    eSuspend = VK_DISPLAY_POWER_STATE_SUSPEND_EXT,
+    eOn = VK_DISPLAY_POWER_STATE_ON_EXT
+  };
+
+  enum class DeviceEventTypeEXT
+  {
+    eDisplayHotplug = VK_DEVICE_EVENT_TYPE_DISPLAY_HOTPLUG_EXT
+  };
+
+  enum class DisplayEventTypeEXT
+  {
+    eFirstPixelOut = VK_DISPLAY_EVENT_TYPE_FIRST_PIXEL_OUT_EXT
+  };
+
+  //=== VK_NV_viewport_swizzle ===
+
+  enum class ViewportCoordinateSwizzleNV
+  {
+    ePositiveX = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_X_NV,
+    eNegativeX = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_X_NV,
+    ePositiveY = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Y_NV,
+    eNegativeY = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Y_NV,
+    ePositiveZ = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_Z_NV,
+    eNegativeZ = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_Z_NV,
+    ePositiveW = VK_VIEWPORT_COORDINATE_SWIZZLE_POSITIVE_W_NV,
+    eNegativeW = VK_VIEWPORT_COORDINATE_SWIZZLE_NEGATIVE_W_NV
+  };
+
+  enum class PipelineViewportSwizzleStateCreateFlagBitsNV : VkPipelineViewportSwizzleStateCreateFlagsNV
+  {};
+
+  using PipelineViewportSwizzleStateCreateFlagsNV = Flags<PipelineViewportSwizzleStateCreateFlagBitsNV>;
+
+
+  template <> struct FlagTraits<PipelineViewportSwizzleStateCreateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineViewportSwizzleStateCreateFlagsNV allFlags =  {};
+  };
+
+  //=== VK_EXT_discard_rectangles ===
+
+  enum class DiscardRectangleModeEXT
+  {
+    eInclusive = VK_DISCARD_RECTANGLE_MODE_INCLUSIVE_EXT,
+    eExclusive = VK_DISCARD_RECTANGLE_MODE_EXCLUSIVE_EXT
+  };
+
+  enum class PipelineDiscardRectangleStateCreateFlagBitsEXT : VkPipelineDiscardRectangleStateCreateFlagsEXT
+  {};
+
+  using PipelineDiscardRectangleStateCreateFlagsEXT = Flags<PipelineDiscardRectangleStateCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<PipelineDiscardRectangleStateCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDiscardRectangleStateCreateFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_EXT_conservative_rasterization ===
+
+  enum class ConservativeRasterizationModeEXT
+  {
+    eDisabled = VK_CONSERVATIVE_RASTERIZATION_MODE_DISABLED_EXT,
+    eOverestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_OVERESTIMATE_EXT,
+    eUnderestimate = VK_CONSERVATIVE_RASTERIZATION_MODE_UNDERESTIMATE_EXT
+  };
+
+  enum class PipelineRasterizationConservativeStateCreateFlagBitsEXT : VkPipelineRasterizationConservativeStateCreateFlagsEXT
+  {};
+
+  using PipelineRasterizationConservativeStateCreateFlagsEXT = Flags<PipelineRasterizationConservativeStateCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<PipelineRasterizationConservativeStateCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationConservativeStateCreateFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_EXT_depth_clip_enable ===
+
+  enum class PipelineRasterizationDepthClipStateCreateFlagBitsEXT : VkPipelineRasterizationDepthClipStateCreateFlagsEXT
+  {};
+
+  using PipelineRasterizationDepthClipStateCreateFlagsEXT = Flags<PipelineRasterizationDepthClipStateCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<PipelineRasterizationDepthClipStateCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineRasterizationDepthClipStateCreateFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_KHR_performance_query ===
+
+  enum class PerformanceCounterDescriptionFlagBitsKHR : VkPerformanceCounterDescriptionFlagsKHR
+  {
+    ePerformanceImpacting = VK_PERFORMANCE_COUNTER_DESCRIPTION_PERFORMANCE_IMPACTING_BIT_KHR,
+    eConcurrentlyImpacted = VK_PERFORMANCE_COUNTER_DESCRIPTION_CONCURRENTLY_IMPACTED_BIT_KHR
+  };
+
+  using PerformanceCounterDescriptionFlagsKHR = Flags<PerformanceCounterDescriptionFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<PerformanceCounterDescriptionFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PerformanceCounterDescriptionFlagsKHR allFlags = 
+          PerformanceCounterDescriptionFlagBitsKHR::ePerformanceImpacting
+        | PerformanceCounterDescriptionFlagBitsKHR::eConcurrentlyImpacted;
+  };
+
+  enum class PerformanceCounterScopeKHR
+  {
+    eCommandBuffer = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_BUFFER_KHR,
+    eRenderPass = VK_PERFORMANCE_COUNTER_SCOPE_RENDER_PASS_KHR,
+    eCommand = VK_PERFORMANCE_COUNTER_SCOPE_COMMAND_KHR,
+    eVkQueryScopeCommandBuffer = VK_QUERY_SCOPE_COMMAND_BUFFER_KHR,
+    eVkQueryScopeCommand = VK_QUERY_SCOPE_COMMAND_KHR,
+    eVkQueryScopeRenderPass = VK_QUERY_SCOPE_RENDER_PASS_KHR
+  };
+
+  enum class PerformanceCounterStorageKHR
+  {
+    eInt32 = VK_PERFORMANCE_COUNTER_STORAGE_INT32_KHR,
+    eInt64 = VK_PERFORMANCE_COUNTER_STORAGE_INT64_KHR,
+    eUint32 = VK_PERFORMANCE_COUNTER_STORAGE_UINT32_KHR,
+    eUint64 = VK_PERFORMANCE_COUNTER_STORAGE_UINT64_KHR,
+    eFloat32 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT32_KHR,
+    eFloat64 = VK_PERFORMANCE_COUNTER_STORAGE_FLOAT64_KHR
+  };
+
+  enum class PerformanceCounterUnitKHR
+  {
+    eGeneric = VK_PERFORMANCE_COUNTER_UNIT_GENERIC_KHR,
+    ePercentage = VK_PERFORMANCE_COUNTER_UNIT_PERCENTAGE_KHR,
+    eNanoseconds = VK_PERFORMANCE_COUNTER_UNIT_NANOSECONDS_KHR,
+    eBytes = VK_PERFORMANCE_COUNTER_UNIT_BYTES_KHR,
+    eBytesPerSecond = VK_PERFORMANCE_COUNTER_UNIT_BYTES_PER_SECOND_KHR,
+    eKelvin = VK_PERFORMANCE_COUNTER_UNIT_KELVIN_KHR,
+    eWatts = VK_PERFORMANCE_COUNTER_UNIT_WATTS_KHR,
+    eVolts = VK_PERFORMANCE_COUNTER_UNIT_VOLTS_KHR,
+    eAmps = VK_PERFORMANCE_COUNTER_UNIT_AMPS_KHR,
+    eHertz = VK_PERFORMANCE_COUNTER_UNIT_HERTZ_KHR,
+    eCycles = VK_PERFORMANCE_COUNTER_UNIT_CYCLES_KHR
+  };
+
+  enum class AcquireProfilingLockFlagBitsKHR : VkAcquireProfilingLockFlagsKHR
+  {};
+
+  using AcquireProfilingLockFlagsKHR = Flags<AcquireProfilingLockFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<AcquireProfilingLockFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AcquireProfilingLockFlagsKHR allFlags =  {};
+  };
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+
+  enum class IOSSurfaceCreateFlagBitsMVK : VkIOSSurfaceCreateFlagsMVK
+  {};
+
+  using IOSSurfaceCreateFlagsMVK = Flags<IOSSurfaceCreateFlagBitsMVK>;
+
+
+  template <> struct FlagTraits<IOSSurfaceCreateFlagBitsMVK>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IOSSurfaceCreateFlagsMVK allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+
+  enum class MacOSSurfaceCreateFlagBitsMVK : VkMacOSSurfaceCreateFlagsMVK
+  {};
+
+  using MacOSSurfaceCreateFlagsMVK = Flags<MacOSSurfaceCreateFlagBitsMVK>;
+
+
+  template <> struct FlagTraits<MacOSSurfaceCreateFlagBitsMVK>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MacOSSurfaceCreateFlagsMVK allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+
+  enum class DebugUtilsMessageSeverityFlagBitsEXT : VkDebugUtilsMessageSeverityFlagsEXT
+  {
+    eVerbose = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT,
+    eInfo = VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT,
+    eWarning = VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT,
+    eError = VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT
+  };
+
+  using DebugUtilsMessageSeverityFlagsEXT = Flags<DebugUtilsMessageSeverityFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DebugUtilsMessageSeverityFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessageSeverityFlagsEXT allFlags = 
+          DebugUtilsMessageSeverityFlagBitsEXT::eVerbose
+        | DebugUtilsMessageSeverityFlagBitsEXT::eInfo
+        | DebugUtilsMessageSeverityFlagBitsEXT::eWarning
+        | DebugUtilsMessageSeverityFlagBitsEXT::eError;
+  };
+
+  enum class DebugUtilsMessageTypeFlagBitsEXT : VkDebugUtilsMessageTypeFlagsEXT
+  {
+    eGeneral = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT,
+    eValidation = VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT,
+    ePerformance = VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
+    eDeviceAddressBinding = VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT
+  };
+
+  using DebugUtilsMessageTypeFlagsEXT = Flags<DebugUtilsMessageTypeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DebugUtilsMessageTypeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessageTypeFlagsEXT allFlags = 
+          DebugUtilsMessageTypeFlagBitsEXT::eGeneral
+        | DebugUtilsMessageTypeFlagBitsEXT::eValidation
+        | DebugUtilsMessageTypeFlagBitsEXT::ePerformance
+        | DebugUtilsMessageTypeFlagBitsEXT::eDeviceAddressBinding;
+  };
+
+  enum class DebugUtilsMessengerCallbackDataFlagBitsEXT : VkDebugUtilsMessengerCallbackDataFlagsEXT
+  {};
+
+  using DebugUtilsMessengerCallbackDataFlagsEXT = Flags<DebugUtilsMessengerCallbackDataFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DebugUtilsMessengerCallbackDataFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessengerCallbackDataFlagsEXT allFlags =  {};
+  };
+
+  enum class DebugUtilsMessengerCreateFlagBitsEXT : VkDebugUtilsMessengerCreateFlagsEXT
+  {};
+
+  using DebugUtilsMessengerCreateFlagsEXT = Flags<DebugUtilsMessengerCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DebugUtilsMessengerCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DebugUtilsMessengerCreateFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_EXT_blend_operation_advanced ===
+
+  enum class BlendOverlapEXT
+  {
+    eUncorrelated = VK_BLEND_OVERLAP_UNCORRELATED_EXT,
+    eDisjoint = VK_BLEND_OVERLAP_DISJOINT_EXT,
+    eConjoint = VK_BLEND_OVERLAP_CONJOINT_EXT
+  };
+
+  //=== VK_NV_fragment_coverage_to_color ===
+
+  enum class PipelineCoverageToColorStateCreateFlagBitsNV : VkPipelineCoverageToColorStateCreateFlagsNV
+  {};
+
+  using PipelineCoverageToColorStateCreateFlagsNV = Flags<PipelineCoverageToColorStateCreateFlagBitsNV>;
+
+
+  template <> struct FlagTraits<PipelineCoverageToColorStateCreateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageToColorStateCreateFlagsNV allFlags =  {};
+  };
+
+  //=== VK_KHR_acceleration_structure ===
+
+  enum class AccelerationStructureTypeKHR
+  {
+    eTopLevel = VK_ACCELERATION_STRUCTURE_TYPE_TOP_LEVEL_KHR,
+    eBottomLevel = VK_ACCELERATION_STRUCTURE_TYPE_BOTTOM_LEVEL_KHR,
+    eGeneric = VK_ACCELERATION_STRUCTURE_TYPE_GENERIC_KHR
+  };
+  using AccelerationStructureTypeNV = AccelerationStructureTypeKHR;
+
+  enum class AccelerationStructureBuildTypeKHR
+  {
+    eHost = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_KHR,
+    eDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_DEVICE_KHR,
+    eHostOrDevice = VK_ACCELERATION_STRUCTURE_BUILD_TYPE_HOST_OR_DEVICE_KHR
+  };
+
+  enum class GeometryFlagBitsKHR : VkGeometryFlagsKHR
+  {
+    eOpaque = VK_GEOMETRY_OPAQUE_BIT_KHR,
+    eNoDuplicateAnyHitInvocation = VK_GEOMETRY_NO_DUPLICATE_ANY_HIT_INVOCATION_BIT_KHR
+  };
+  using GeometryFlagBitsNV = GeometryFlagBitsKHR;
+
+  using GeometryFlagsKHR = Flags<GeometryFlagBitsKHR>;
+  using GeometryFlagsNV = GeometryFlagsKHR;
+
+
+  template <> struct FlagTraits<GeometryFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR GeometryFlagsKHR allFlags = 
+          GeometryFlagBitsKHR::eOpaque
+        | GeometryFlagBitsKHR::eNoDuplicateAnyHitInvocation;
+  };
+
+  enum class GeometryInstanceFlagBitsKHR : VkGeometryInstanceFlagsKHR
+  {
+    eTriangleFacingCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_FACING_CULL_DISABLE_BIT_KHR,
+    eTriangleFlipFacing = VK_GEOMETRY_INSTANCE_TRIANGLE_FLIP_FACING_BIT_KHR,
+    eForceOpaque = VK_GEOMETRY_INSTANCE_FORCE_OPAQUE_BIT_KHR,
+    eForceNoOpaque = VK_GEOMETRY_INSTANCE_FORCE_NO_OPAQUE_BIT_KHR,
+    eForceOpacityMicromap2StateEXT = VK_GEOMETRY_INSTANCE_FORCE_OPACITY_MICROMAP_2_STATE_EXT,
+    eDisableOpacityMicromapsEXT = VK_GEOMETRY_INSTANCE_DISABLE_OPACITY_MICROMAPS_EXT,
+    eTriangleCullDisable = VK_GEOMETRY_INSTANCE_TRIANGLE_CULL_DISABLE_BIT_NV,
+    eTriangleFrontCounterclockwiseKHR = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_KHR,
+    eTriangleFrontCounterclockwise = VK_GEOMETRY_INSTANCE_TRIANGLE_FRONT_COUNTERCLOCKWISE_BIT_NV
+  };
+  using GeometryInstanceFlagBitsNV = GeometryInstanceFlagBitsKHR;
+
+  using GeometryInstanceFlagsKHR = Flags<GeometryInstanceFlagBitsKHR>;
+  using GeometryInstanceFlagsNV = GeometryInstanceFlagsKHR;
+
+
+  template <> struct FlagTraits<GeometryInstanceFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR GeometryInstanceFlagsKHR allFlags = 
+          GeometryInstanceFlagBitsKHR::eTriangleFacingCullDisable
+        | GeometryInstanceFlagBitsKHR::eTriangleFlipFacing
+        | GeometryInstanceFlagBitsKHR::eForceOpaque
+        | GeometryInstanceFlagBitsKHR::eForceNoOpaque
+        | GeometryInstanceFlagBitsKHR::eForceOpacityMicromap2StateEXT
+        | GeometryInstanceFlagBitsKHR::eDisableOpacityMicromapsEXT;
+  };
+
+  enum class BuildAccelerationStructureFlagBitsKHR : VkBuildAccelerationStructureFlagsKHR
+  {
+    eAllowUpdate = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_UPDATE_BIT_KHR,
+    eAllowCompaction = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_COMPACTION_BIT_KHR,
+    ePreferFastTrace = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_TRACE_BIT_KHR,
+    ePreferFastBuild = VK_BUILD_ACCELERATION_STRUCTURE_PREFER_FAST_BUILD_BIT_KHR,
+    eLowMemory = VK_BUILD_ACCELERATION_STRUCTURE_LOW_MEMORY_BIT_KHR,
+    eMotionNV = VK_BUILD_ACCELERATION_STRUCTURE_MOTION_BIT_NV,
+    eAllowOpacityMicromapUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_UPDATE_EXT,
+    eAllowDisableOpacityMicromapsEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_DISABLE_OPACITY_MICROMAPS_EXT,
+    eAllowOpacityMicromapDataUpdateEXT = VK_BUILD_ACCELERATION_STRUCTURE_ALLOW_OPACITY_MICROMAP_DATA_UPDATE_EXT
+  };
+  using BuildAccelerationStructureFlagBitsNV = BuildAccelerationStructureFlagBitsKHR;
+
+  using BuildAccelerationStructureFlagsKHR = Flags<BuildAccelerationStructureFlagBitsKHR>;
+  using BuildAccelerationStructureFlagsNV = BuildAccelerationStructureFlagsKHR;
+
+
+  template <> struct FlagTraits<BuildAccelerationStructureFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BuildAccelerationStructureFlagsKHR allFlags = 
+          BuildAccelerationStructureFlagBitsKHR::eAllowUpdate
+        | BuildAccelerationStructureFlagBitsKHR::eAllowCompaction
+        | BuildAccelerationStructureFlagBitsKHR::ePreferFastTrace
+        | BuildAccelerationStructureFlagBitsKHR::ePreferFastBuild
+        | BuildAccelerationStructureFlagBitsKHR::eLowMemory
+        | BuildAccelerationStructureFlagBitsKHR::eMotionNV
+        | BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapUpdateEXT
+        | BuildAccelerationStructureFlagBitsKHR::eAllowDisableOpacityMicromapsEXT
+        | BuildAccelerationStructureFlagBitsKHR::eAllowOpacityMicromapDataUpdateEXT;
+  };
+
+  enum class CopyAccelerationStructureModeKHR
+  {
+    eClone = VK_COPY_ACCELERATION_STRUCTURE_MODE_CLONE_KHR,
+    eCompact = VK_COPY_ACCELERATION_STRUCTURE_MODE_COMPACT_KHR,
+    eSerialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_SERIALIZE_KHR,
+    eDeserialize = VK_COPY_ACCELERATION_STRUCTURE_MODE_DESERIALIZE_KHR
+  };
+  using CopyAccelerationStructureModeNV = CopyAccelerationStructureModeKHR;
+
+  enum class GeometryTypeKHR
+  {
+    eTriangles = VK_GEOMETRY_TYPE_TRIANGLES_KHR,
+    eAabbs = VK_GEOMETRY_TYPE_AABBS_KHR,
+    eInstances = VK_GEOMETRY_TYPE_INSTANCES_KHR
+  };
+  using GeometryTypeNV = GeometryTypeKHR;
+
+  enum class AccelerationStructureCompatibilityKHR
+  {
+    eCompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_COMPATIBLE_KHR,
+    eIncompatible = VK_ACCELERATION_STRUCTURE_COMPATIBILITY_INCOMPATIBLE_KHR
+  };
+
+  enum class AccelerationStructureCreateFlagBitsKHR : VkAccelerationStructureCreateFlagsKHR
+  {
+    eDeviceAddressCaptureReplay = VK_ACCELERATION_STRUCTURE_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_KHR,
+    eDescriptorBufferCaptureReplayEXT = VK_ACCELERATION_STRUCTURE_CREATE_DESCRIPTOR_BUFFER_CAPTURE_REPLAY_BIT_EXT,
+    eMotionNV = VK_ACCELERATION_STRUCTURE_CREATE_MOTION_BIT_NV
+  };
+
+  using AccelerationStructureCreateFlagsKHR = Flags<AccelerationStructureCreateFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<AccelerationStructureCreateFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureCreateFlagsKHR allFlags = 
+          AccelerationStructureCreateFlagBitsKHR::eDeviceAddressCaptureReplay
+        | AccelerationStructureCreateFlagBitsKHR::eDescriptorBufferCaptureReplayEXT
+        | AccelerationStructureCreateFlagBitsKHR::eMotionNV;
+  };
+
+  enum class BuildAccelerationStructureModeKHR
+  {
+    eBuild = VK_BUILD_ACCELERATION_STRUCTURE_MODE_BUILD_KHR,
+    eUpdate = VK_BUILD_ACCELERATION_STRUCTURE_MODE_UPDATE_KHR
+  };
+
+  //=== VK_NV_framebuffer_mixed_samples ===
+
+  enum class CoverageModulationModeNV
+  {
+    eNone = VK_COVERAGE_MODULATION_MODE_NONE_NV,
+    eRgb = VK_COVERAGE_MODULATION_MODE_RGB_NV,
+    eAlpha = VK_COVERAGE_MODULATION_MODE_ALPHA_NV,
+    eRgba = VK_COVERAGE_MODULATION_MODE_RGBA_NV
+  };
+
+  enum class PipelineCoverageModulationStateCreateFlagBitsNV : VkPipelineCoverageModulationStateCreateFlagsNV
+  {};
+
+  using PipelineCoverageModulationStateCreateFlagsNV = Flags<PipelineCoverageModulationStateCreateFlagBitsNV>;
+
+
+  template <> struct FlagTraits<PipelineCoverageModulationStateCreateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageModulationStateCreateFlagsNV allFlags =  {};
+  };
+
+  //=== VK_EXT_validation_cache ===
+
+  enum class ValidationCacheHeaderVersionEXT
+  {
+    eOne = VK_VALIDATION_CACHE_HEADER_VERSION_ONE_EXT
+  };
+
+  enum class ValidationCacheCreateFlagBitsEXT : VkValidationCacheCreateFlagsEXT
+  {};
+
+  using ValidationCacheCreateFlagsEXT = Flags<ValidationCacheCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<ValidationCacheCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ValidationCacheCreateFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_NV_shading_rate_image ===
+
+  enum class ShadingRatePaletteEntryNV
+  {
+    eNoInvocations = VK_SHADING_RATE_PALETTE_ENTRY_NO_INVOCATIONS_NV,
+    e16InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_16_INVOCATIONS_PER_PIXEL_NV,
+    e8InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_8_INVOCATIONS_PER_PIXEL_NV,
+    e4InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_4_INVOCATIONS_PER_PIXEL_NV,
+    e2InvocationsPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_2_INVOCATIONS_PER_PIXEL_NV,
+    e1InvocationPerPixel = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_PIXEL_NV,
+    e1InvocationPer2X1Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X1_PIXELS_NV,
+    e1InvocationPer1X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_1X2_PIXELS_NV,
+    e1InvocationPer2X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X2_PIXELS_NV,
+    e1InvocationPer4X2Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X2_PIXELS_NV,
+    e1InvocationPer2X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_2X4_PIXELS_NV,
+    e1InvocationPer4X4Pixels = VK_SHADING_RATE_PALETTE_ENTRY_1_INVOCATION_PER_4X4_PIXELS_NV
+  };
+
+  enum class CoarseSampleOrderTypeNV
+  {
+    eDefault = VK_COARSE_SAMPLE_ORDER_TYPE_DEFAULT_NV,
+    eCustom = VK_COARSE_SAMPLE_ORDER_TYPE_CUSTOM_NV,
+    ePixelMajor = VK_COARSE_SAMPLE_ORDER_TYPE_PIXEL_MAJOR_NV,
+    eSampleMajor = VK_COARSE_SAMPLE_ORDER_TYPE_SAMPLE_MAJOR_NV
+  };
+
+  //=== VK_NV_ray_tracing ===
+
+  enum class AccelerationStructureMemoryRequirementsTypeNV
+  {
+    eObject = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_OBJECT_NV,
+    eBuildScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_BUILD_SCRATCH_NV,
+    eUpdateScratch = VK_ACCELERATION_STRUCTURE_MEMORY_REQUIREMENTS_TYPE_UPDATE_SCRATCH_NV
+  };
+
+  //=== VK_AMD_pipeline_compiler_control ===
+
+  enum class PipelineCompilerControlFlagBitsAMD : VkPipelineCompilerControlFlagsAMD
+  {};
+
+  using PipelineCompilerControlFlagsAMD = Flags<PipelineCompilerControlFlagBitsAMD>;
+
+
+  template <> struct FlagTraits<PipelineCompilerControlFlagBitsAMD>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCompilerControlFlagsAMD allFlags =  {};
+  };
+
+  //=== VK_EXT_calibrated_timestamps ===
+
+  enum class TimeDomainEXT
+  {
+    eDevice = VK_TIME_DOMAIN_DEVICE_EXT,
+    eClockMonotonic = VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT,
+    eClockMonotonicRaw = VK_TIME_DOMAIN_CLOCK_MONOTONIC_RAW_EXT,
+    eQueryPerformanceCounter = VK_TIME_DOMAIN_QUERY_PERFORMANCE_COUNTER_EXT
+  };
+
+  //=== VK_KHR_global_priority ===
+
+  enum class QueueGlobalPriorityKHR
+  {
+    eLow = VK_QUEUE_GLOBAL_PRIORITY_LOW_KHR,
+    eMedium = VK_QUEUE_GLOBAL_PRIORITY_MEDIUM_KHR,
+    eHigh = VK_QUEUE_GLOBAL_PRIORITY_HIGH_KHR,
+    eRealtime = VK_QUEUE_GLOBAL_PRIORITY_REALTIME_KHR
+  };
+  using QueueGlobalPriorityEXT = QueueGlobalPriorityKHR;
+
+  //=== VK_AMD_memory_overallocation_behavior ===
+
+  enum class MemoryOverallocationBehaviorAMD
+  {
+    eDefault = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DEFAULT_AMD,
+    eAllowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_ALLOWED_AMD,
+    eDisallowed = VK_MEMORY_OVERALLOCATION_BEHAVIOR_DISALLOWED_AMD
+  };
+
+  //=== VK_INTEL_performance_query ===
+
+  enum class PerformanceConfigurationTypeINTEL
+  {
+    eCommandQueueMetricsDiscoveryActivated = VK_PERFORMANCE_CONFIGURATION_TYPE_COMMAND_QUEUE_METRICS_DISCOVERY_ACTIVATED_INTEL
+  };
+
+  enum class QueryPoolSamplingModeINTEL
+  {
+    eManual = VK_QUERY_POOL_SAMPLING_MODE_MANUAL_INTEL
+  };
+
+  enum class PerformanceOverrideTypeINTEL
+  {
+    eNullHardware = VK_PERFORMANCE_OVERRIDE_TYPE_NULL_HARDWARE_INTEL,
+    eFlushGpuCaches = VK_PERFORMANCE_OVERRIDE_TYPE_FLUSH_GPU_CACHES_INTEL
+  };
+
+  enum class PerformanceParameterTypeINTEL
+  {
+    eHwCountersSupported = VK_PERFORMANCE_PARAMETER_TYPE_HW_COUNTERS_SUPPORTED_INTEL,
+    eStreamMarkerValidBits = VK_PERFORMANCE_PARAMETER_TYPE_STREAM_MARKER_VALID_BITS_INTEL
+  };
+
+  enum class PerformanceValueTypeINTEL
+  {
+    eUint32 = VK_PERFORMANCE_VALUE_TYPE_UINT32_INTEL,
+    eUint64 = VK_PERFORMANCE_VALUE_TYPE_UINT64_INTEL,
+    eFloat = VK_PERFORMANCE_VALUE_TYPE_FLOAT_INTEL,
+    eBool = VK_PERFORMANCE_VALUE_TYPE_BOOL_INTEL,
+    eString = VK_PERFORMANCE_VALUE_TYPE_STRING_INTEL
+  };
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+
+  enum class ImagePipeSurfaceCreateFlagBitsFUCHSIA : VkImagePipeSurfaceCreateFlagsFUCHSIA
+  {};
+
+  using ImagePipeSurfaceCreateFlagsFUCHSIA = Flags<ImagePipeSurfaceCreateFlagBitsFUCHSIA>;
+
+
+  template <> struct FlagTraits<ImagePipeSurfaceCreateFlagBitsFUCHSIA>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImagePipeSurfaceCreateFlagsFUCHSIA allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+
+  enum class MetalSurfaceCreateFlagBitsEXT : VkMetalSurfaceCreateFlagsEXT
+  {};
+
+  using MetalSurfaceCreateFlagsEXT = Flags<MetalSurfaceCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<MetalSurfaceCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MetalSurfaceCreateFlagsEXT allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+
+  enum class FragmentShadingRateCombinerOpKHR
+  {
+    eKeep = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_KEEP_KHR,
+    eReplace = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_REPLACE_KHR,
+    eMin = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MIN_KHR,
+    eMax = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MAX_KHR,
+    eMul = VK_FRAGMENT_SHADING_RATE_COMBINER_OP_MUL_KHR
+  };
+
+  //=== VK_AMD_shader_core_properties2 ===
+
+  enum class ShaderCorePropertiesFlagBitsAMD : VkShaderCorePropertiesFlagsAMD
+  {};
+
+  using ShaderCorePropertiesFlagsAMD = Flags<ShaderCorePropertiesFlagBitsAMD>;
+
+
+  template <> struct FlagTraits<ShaderCorePropertiesFlagBitsAMD>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ShaderCorePropertiesFlagsAMD allFlags =  {};
+  };
+
+  //=== VK_EXT_validation_features ===
+
+  enum class ValidationFeatureEnableEXT
+  {
+    eGpuAssisted = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,
+    eGpuAssistedReserveBindingSlot = VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT,
+    eBestPractices = VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT,
+    eDebugPrintf = VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT,
+    eSynchronizationValidation = VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT
+  };
+
+  enum class ValidationFeatureDisableEXT
+  {
+    eAll = VK_VALIDATION_FEATURE_DISABLE_ALL_EXT,
+    eShaders = VK_VALIDATION_FEATURE_DISABLE_SHADERS_EXT,
+    eThreadSafety = VK_VALIDATION_FEATURE_DISABLE_THREAD_SAFETY_EXT,
+    eApiParameters = VK_VALIDATION_FEATURE_DISABLE_API_PARAMETERS_EXT,
+    eObjectLifetimes = VK_VALIDATION_FEATURE_DISABLE_OBJECT_LIFETIMES_EXT,
+    eCoreChecks = VK_VALIDATION_FEATURE_DISABLE_CORE_CHECKS_EXT,
+    eUniqueHandles = VK_VALIDATION_FEATURE_DISABLE_UNIQUE_HANDLES_EXT,
+    eShaderValidationCache = VK_VALIDATION_FEATURE_DISABLE_SHADER_VALIDATION_CACHE_EXT
+  };
+
+  //=== VK_NV_cooperative_matrix ===
+
+  enum class ScopeNV
+  {
+    eDevice = VK_SCOPE_DEVICE_NV,
+    eWorkgroup = VK_SCOPE_WORKGROUP_NV,
+    eSubgroup = VK_SCOPE_SUBGROUP_NV,
+    eQueueFamily = VK_SCOPE_QUEUE_FAMILY_NV
+  };
+
+  enum class ComponentTypeNV
+  {
+    eFloat16 = VK_COMPONENT_TYPE_FLOAT16_NV,
+    eFloat32 = VK_COMPONENT_TYPE_FLOAT32_NV,
+    eFloat64 = VK_COMPONENT_TYPE_FLOAT64_NV,
+    eSint8 = VK_COMPONENT_TYPE_SINT8_NV,
+    eSint16 = VK_COMPONENT_TYPE_SINT16_NV,
+    eSint32 = VK_COMPONENT_TYPE_SINT32_NV,
+    eSint64 = VK_COMPONENT_TYPE_SINT64_NV,
+    eUint8 = VK_COMPONENT_TYPE_UINT8_NV,
+    eUint16 = VK_COMPONENT_TYPE_UINT16_NV,
+    eUint32 = VK_COMPONENT_TYPE_UINT32_NV,
+    eUint64 = VK_COMPONENT_TYPE_UINT64_NV
+  };
+
+  //=== VK_NV_coverage_reduction_mode ===
+
+  enum class CoverageReductionModeNV
+  {
+    eMerge = VK_COVERAGE_REDUCTION_MODE_MERGE_NV,
+    eTruncate = VK_COVERAGE_REDUCTION_MODE_TRUNCATE_NV
+  };
+
+  enum class PipelineCoverageReductionStateCreateFlagBitsNV : VkPipelineCoverageReductionStateCreateFlagsNV
+  {};
+
+  using PipelineCoverageReductionStateCreateFlagsNV = Flags<PipelineCoverageReductionStateCreateFlagBitsNV>;
+
+
+  template <> struct FlagTraits<PipelineCoverageReductionStateCreateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCoverageReductionStateCreateFlagsNV allFlags =  {};
+  };
+
+  //=== VK_EXT_provoking_vertex ===
+
+  enum class ProvokingVertexModeEXT
+  {
+    eFirstVertex = VK_PROVOKING_VERTEX_MODE_FIRST_VERTEX_EXT,
+    eLastVertex = VK_PROVOKING_VERTEX_MODE_LAST_VERTEX_EXT
+  };
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+
+  enum class FullScreenExclusiveEXT
+  {
+    eDefault = VK_FULL_SCREEN_EXCLUSIVE_DEFAULT_EXT,
+    eAllowed = VK_FULL_SCREEN_EXCLUSIVE_ALLOWED_EXT,
+    eDisallowed = VK_FULL_SCREEN_EXCLUSIVE_DISALLOWED_EXT,
+    eApplicationControlled = VK_FULL_SCREEN_EXCLUSIVE_APPLICATION_CONTROLLED_EXT
+  };
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+
+  enum class HeadlessSurfaceCreateFlagBitsEXT : VkHeadlessSurfaceCreateFlagsEXT
+  {};
+
+  using HeadlessSurfaceCreateFlagsEXT = Flags<HeadlessSurfaceCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<HeadlessSurfaceCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR HeadlessSurfaceCreateFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_EXT_line_rasterization ===
+
+  enum class LineRasterizationModeEXT
+  {
+    eDefault = VK_LINE_RASTERIZATION_MODE_DEFAULT_EXT,
+    eRectangular = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_EXT,
+    eBresenham = VK_LINE_RASTERIZATION_MODE_BRESENHAM_EXT,
+    eRectangularSmooth = VK_LINE_RASTERIZATION_MODE_RECTANGULAR_SMOOTH_EXT
+  };
+
+  //=== VK_KHR_pipeline_executable_properties ===
+
+  enum class PipelineExecutableStatisticFormatKHR
+  {
+    eBool32 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_BOOL32_KHR,
+    eInt64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_INT64_KHR,
+    eUint64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_UINT64_KHR,
+    eFloat64 = VK_PIPELINE_EXECUTABLE_STATISTIC_FORMAT_FLOAT64_KHR
+  };
+
+  //=== VK_EXT_surface_maintenance1 ===
+
+  enum class PresentScalingFlagBitsEXT : VkPresentScalingFlagsEXT
+  {
+    eOneToOne = VK_PRESENT_SCALING_ONE_TO_ONE_BIT_EXT,
+    eAspectRatioStretch = VK_PRESENT_SCALING_ASPECT_RATIO_STRETCH_BIT_EXT,
+    eStretch = VK_PRESENT_SCALING_STRETCH_BIT_EXT
+  };
+
+  using PresentScalingFlagsEXT = Flags<PresentScalingFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<PresentScalingFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PresentScalingFlagsEXT allFlags = 
+          PresentScalingFlagBitsEXT::eOneToOne
+        | PresentScalingFlagBitsEXT::eAspectRatioStretch
+        | PresentScalingFlagBitsEXT::eStretch;
+  };
+
+  enum class PresentGravityFlagBitsEXT : VkPresentGravityFlagsEXT
+  {
+    eMin = VK_PRESENT_GRAVITY_MIN_BIT_EXT,
+    eMax = VK_PRESENT_GRAVITY_MAX_BIT_EXT,
+    eCentered = VK_PRESENT_GRAVITY_CENTERED_BIT_EXT
+  };
+
+  using PresentGravityFlagsEXT = Flags<PresentGravityFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<PresentGravityFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PresentGravityFlagsEXT allFlags = 
+          PresentGravityFlagBitsEXT::eMin
+        | PresentGravityFlagBitsEXT::eMax
+        | PresentGravityFlagBitsEXT::eCentered;
+  };
+
+  //=== VK_NV_device_generated_commands ===
+
+  enum class IndirectStateFlagBitsNV : VkIndirectStateFlagsNV
+  {
+    eFlagFrontface = VK_INDIRECT_STATE_FLAG_FRONTFACE_BIT_NV
+  };
+
+  using IndirectStateFlagsNV = Flags<IndirectStateFlagBitsNV>;
+
+
+  template <> struct FlagTraits<IndirectStateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectStateFlagsNV allFlags = 
+          IndirectStateFlagBitsNV::eFlagFrontface;
+  };
+
+  enum class IndirectCommandsTokenTypeNV
+  {
+    eShaderGroup = VK_INDIRECT_COMMANDS_TOKEN_TYPE_SHADER_GROUP_NV,
+    eStateFlags = VK_INDIRECT_COMMANDS_TOKEN_TYPE_STATE_FLAGS_NV,
+    eIndexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_INDEX_BUFFER_NV,
+    eVertexBuffer = VK_INDIRECT_COMMANDS_TOKEN_TYPE_VERTEX_BUFFER_NV,
+    ePushConstant = VK_INDIRECT_COMMANDS_TOKEN_TYPE_PUSH_CONSTANT_NV,
+    eDrawIndexed = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_INDEXED_NV,
+    eDraw = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_NV,
+    eDrawTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_TASKS_NV,
+    eDrawMeshTasks = VK_INDIRECT_COMMANDS_TOKEN_TYPE_DRAW_MESH_TASKS_NV
+  };
+
+  enum class IndirectCommandsLayoutUsageFlagBitsNV : VkIndirectCommandsLayoutUsageFlagsNV
+  {
+    eExplicitPreprocess = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_EXPLICIT_PREPROCESS_BIT_NV,
+    eIndexedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_INDEXED_SEQUENCES_BIT_NV,
+    eUnorderedSequences = VK_INDIRECT_COMMANDS_LAYOUT_USAGE_UNORDERED_SEQUENCES_BIT_NV
+  };
+
+  using IndirectCommandsLayoutUsageFlagsNV = Flags<IndirectCommandsLayoutUsageFlagBitsNV>;
+
+
+  template <> struct FlagTraits<IndirectCommandsLayoutUsageFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndirectCommandsLayoutUsageFlagsNV allFlags = 
+          IndirectCommandsLayoutUsageFlagBitsNV::eExplicitPreprocess
+        | IndirectCommandsLayoutUsageFlagBitsNV::eIndexedSequences
+        | IndirectCommandsLayoutUsageFlagBitsNV::eUnorderedSequences;
+  };
+
+  //=== VK_EXT_device_memory_report ===
+
+  enum class DeviceMemoryReportEventTypeEXT
+  {
+    eAllocate = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATE_EXT,
+    eFree = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_FREE_EXT,
+    eImport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_IMPORT_EXT,
+    eUnimport = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_UNIMPORT_EXT,
+    eAllocationFailed = VK_DEVICE_MEMORY_REPORT_EVENT_TYPE_ALLOCATION_FAILED_EXT
+  };
+
+  enum class DeviceMemoryReportFlagBitsEXT : VkDeviceMemoryReportFlagsEXT
+  {};
+
+  using DeviceMemoryReportFlagsEXT = Flags<DeviceMemoryReportFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DeviceMemoryReportFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceMemoryReportFlagsEXT allFlags =  {};
+  };
+
+  //=== VK_EXT_pipeline_creation_cache_control ===
+
+  enum class PipelineCacheCreateFlagBits : VkPipelineCacheCreateFlags
+  {
+    eExternallySynchronized = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT,
+    eExternallySynchronizedEXT = VK_PIPELINE_CACHE_CREATE_EXTERNALLY_SYNCHRONIZED_BIT_EXT
+  };
+
+  using PipelineCacheCreateFlags = Flags<PipelineCacheCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineCacheCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineCacheCreateFlags allFlags = 
+          PipelineCacheCreateFlagBits::eExternallySynchronized;
+  };
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+
+  enum class VideoEncodeCapabilityFlagBitsKHR : VkVideoEncodeCapabilityFlagsKHR
+  {
+    ePrecedingExternallyEncodedBytes = VK_VIDEO_ENCODE_CAPABILITY_PRECEDING_EXTERNALLY_ENCODED_BYTES_BIT_KHR
+  };
+
+  using VideoEncodeCapabilityFlagsKHR = Flags<VideoEncodeCapabilityFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEncodeCapabilityFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeCapabilityFlagsKHR allFlags = 
+          VideoEncodeCapabilityFlagBitsKHR::ePrecedingExternallyEncodedBytes;
+  };
+
+  enum class VideoEncodeUsageFlagBitsKHR : VkVideoEncodeUsageFlagsKHR
+  {
+    eDefault = VK_VIDEO_ENCODE_USAGE_DEFAULT_KHR,
+    eTranscoding = VK_VIDEO_ENCODE_USAGE_TRANSCODING_BIT_KHR,
+    eStreaming = VK_VIDEO_ENCODE_USAGE_STREAMING_BIT_KHR,
+    eRecording = VK_VIDEO_ENCODE_USAGE_RECORDING_BIT_KHR,
+    eConferencing = VK_VIDEO_ENCODE_USAGE_CONFERENCING_BIT_KHR
+  };
+
+  using VideoEncodeUsageFlagsKHR = Flags<VideoEncodeUsageFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEncodeUsageFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeUsageFlagsKHR allFlags = 
+          VideoEncodeUsageFlagBitsKHR::eDefault
+        | VideoEncodeUsageFlagBitsKHR::eTranscoding
+        | VideoEncodeUsageFlagBitsKHR::eStreaming
+        | VideoEncodeUsageFlagBitsKHR::eRecording
+        | VideoEncodeUsageFlagBitsKHR::eConferencing;
+  };
+
+  enum class VideoEncodeContentFlagBitsKHR : VkVideoEncodeContentFlagsKHR
+  {
+    eDefault = VK_VIDEO_ENCODE_CONTENT_DEFAULT_KHR,
+    eCamera = VK_VIDEO_ENCODE_CONTENT_CAMERA_BIT_KHR,
+    eDesktop = VK_VIDEO_ENCODE_CONTENT_DESKTOP_BIT_KHR,
+    eRendered = VK_VIDEO_ENCODE_CONTENT_RENDERED_BIT_KHR
+  };
+
+  using VideoEncodeContentFlagsKHR = Flags<VideoEncodeContentFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEncodeContentFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeContentFlagsKHR allFlags = 
+          VideoEncodeContentFlagBitsKHR::eDefault
+        | VideoEncodeContentFlagBitsKHR::eCamera
+        | VideoEncodeContentFlagBitsKHR::eDesktop
+        | VideoEncodeContentFlagBitsKHR::eRendered;
+  };
+
+  enum class VideoEncodeTuningModeKHR
+  {
+    eDefault = VK_VIDEO_ENCODE_TUNING_MODE_DEFAULT_KHR,
+    eHighQuality = VK_VIDEO_ENCODE_TUNING_MODE_HIGH_QUALITY_KHR,
+    eLowLatency = VK_VIDEO_ENCODE_TUNING_MODE_LOW_LATENCY_KHR,
+    eUltraLowLatency = VK_VIDEO_ENCODE_TUNING_MODE_ULTRA_LOW_LATENCY_KHR,
+    eLossless = VK_VIDEO_ENCODE_TUNING_MODE_LOSSLESS_KHR
+  };
+
+  enum class VideoEncodeRateControlModeFlagBitsKHR : VkVideoEncodeRateControlModeFlagsKHR
+  {
+    eNone = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_NONE_BIT_KHR,
+    eCbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_CBR_BIT_KHR,
+    eVbr = VK_VIDEO_ENCODE_RATE_CONTROL_MODE_VBR_BIT_KHR
+  };
+
+  using VideoEncodeRateControlModeFlagsKHR = Flags<VideoEncodeRateControlModeFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEncodeRateControlModeFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeRateControlModeFlagsKHR allFlags = 
+          VideoEncodeRateControlModeFlagBitsKHR::eNone
+        | VideoEncodeRateControlModeFlagBitsKHR::eCbr
+        | VideoEncodeRateControlModeFlagBitsKHR::eVbr;
+  };
+
+  enum class VideoEncodeFlagBitsKHR : VkVideoEncodeFlagsKHR
+  {};
+
+  using VideoEncodeFlagsKHR = Flags<VideoEncodeFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEncodeFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeFlagsKHR allFlags =  {};
+  };
+
+  enum class VideoEncodeRateControlFlagBitsKHR : VkVideoEncodeRateControlFlagsKHR
+  {};
+
+  using VideoEncodeRateControlFlagsKHR = Flags<VideoEncodeRateControlFlagBitsKHR>;
+
+
+  template <> struct FlagTraits<VideoEncodeRateControlFlagBitsKHR>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR VideoEncodeRateControlFlagsKHR allFlags =  {};
+  };
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_device_diagnostics_config ===
+
+  enum class DeviceDiagnosticsConfigFlagBitsNV : VkDeviceDiagnosticsConfigFlagsNV
+  {
+    eEnableShaderDebugInfo = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_DEBUG_INFO_BIT_NV,
+    eEnableResourceTracking = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_RESOURCE_TRACKING_BIT_NV,
+    eEnableAutomaticCheckpoints = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_AUTOMATIC_CHECKPOINTS_BIT_NV,
+    eEnableShaderErrorReporting = VK_DEVICE_DIAGNOSTICS_CONFIG_ENABLE_SHADER_ERROR_REPORTING_BIT_NV
+  };
+
+  using DeviceDiagnosticsConfigFlagsNV = Flags<DeviceDiagnosticsConfigFlagBitsNV>;
+
+
+  template <> struct FlagTraits<DeviceDiagnosticsConfigFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceDiagnosticsConfigFlagsNV allFlags = 
+          DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderDebugInfo
+        | DeviceDiagnosticsConfigFlagBitsNV::eEnableResourceTracking
+        | DeviceDiagnosticsConfigFlagBitsNV::eEnableAutomaticCheckpoints
+        | DeviceDiagnosticsConfigFlagBitsNV::eEnableShaderErrorReporting;
+  };
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+
+  enum class ExportMetalObjectTypeFlagBitsEXT : VkExportMetalObjectTypeFlagsEXT
+  {
+    eMetalDevice = VK_EXPORT_METAL_OBJECT_TYPE_METAL_DEVICE_BIT_EXT,
+    eMetalCommandQueue = VK_EXPORT_METAL_OBJECT_TYPE_METAL_COMMAND_QUEUE_BIT_EXT,
+    eMetalBuffer = VK_EXPORT_METAL_OBJECT_TYPE_METAL_BUFFER_BIT_EXT,
+    eMetalTexture = VK_EXPORT_METAL_OBJECT_TYPE_METAL_TEXTURE_BIT_EXT,
+    eMetalIosurface = VK_EXPORT_METAL_OBJECT_TYPE_METAL_IOSURFACE_BIT_EXT,
+    eMetalSharedEvent = VK_EXPORT_METAL_OBJECT_TYPE_METAL_SHARED_EVENT_BIT_EXT
+  };
+
+  using ExportMetalObjectTypeFlagsEXT = Flags<ExportMetalObjectTypeFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<ExportMetalObjectTypeFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ExportMetalObjectTypeFlagsEXT allFlags = 
+          ExportMetalObjectTypeFlagBitsEXT::eMetalDevice
+        | ExportMetalObjectTypeFlagBitsEXT::eMetalCommandQueue
+        | ExportMetalObjectTypeFlagBitsEXT::eMetalBuffer
+        | ExportMetalObjectTypeFlagBitsEXT::eMetalTexture
+        | ExportMetalObjectTypeFlagBitsEXT::eMetalIosurface
+        | ExportMetalObjectTypeFlagBitsEXT::eMetalSharedEvent;
+  };
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_graphics_pipeline_library ===
+
+  enum class GraphicsPipelineLibraryFlagBitsEXT : VkGraphicsPipelineLibraryFlagsEXT
+  {
+    eVertexInputInterface = VK_GRAPHICS_PIPELINE_LIBRARY_VERTEX_INPUT_INTERFACE_BIT_EXT,
+    ePreRasterizationShaders = VK_GRAPHICS_PIPELINE_LIBRARY_PRE_RASTERIZATION_SHADERS_BIT_EXT,
+    eFragmentShader = VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_SHADER_BIT_EXT,
+    eFragmentOutputInterface = VK_GRAPHICS_PIPELINE_LIBRARY_FRAGMENT_OUTPUT_INTERFACE_BIT_EXT
+  };
+
+  using GraphicsPipelineLibraryFlagsEXT = Flags<GraphicsPipelineLibraryFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<GraphicsPipelineLibraryFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR GraphicsPipelineLibraryFlagsEXT allFlags = 
+          GraphicsPipelineLibraryFlagBitsEXT::eVertexInputInterface
+        | GraphicsPipelineLibraryFlagBitsEXT::ePreRasterizationShaders
+        | GraphicsPipelineLibraryFlagBitsEXT::eFragmentShader
+        | GraphicsPipelineLibraryFlagBitsEXT::eFragmentOutputInterface;
+  };
+
+  enum class PipelineLayoutCreateFlagBits : VkPipelineLayoutCreateFlags
+  {
+    eIndependentSetsEXT = VK_PIPELINE_LAYOUT_CREATE_INDEPENDENT_SETS_BIT_EXT
+  };
+
+  using PipelineLayoutCreateFlags = Flags<PipelineLayoutCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineLayoutCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineLayoutCreateFlags allFlags = 
+          PipelineLayoutCreateFlagBits::eIndependentSetsEXT;
+  };
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+
+  enum class FragmentShadingRateNV
+  {
+    e1InvocationPerPixel = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_PIXEL_NV,
+    e1InvocationPer1X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_1X2_PIXELS_NV,
+    e1InvocationPer2X1Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X1_PIXELS_NV,
+    e1InvocationPer2X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X2_PIXELS_NV,
+    e1InvocationPer2X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_2X4_PIXELS_NV,
+    e1InvocationPer4X2Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X2_PIXELS_NV,
+    e1InvocationPer4X4Pixels = VK_FRAGMENT_SHADING_RATE_1_INVOCATION_PER_4X4_PIXELS_NV,
+    e2InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_2_INVOCATIONS_PER_PIXEL_NV,
+    e4InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_4_INVOCATIONS_PER_PIXEL_NV,
+    e8InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_8_INVOCATIONS_PER_PIXEL_NV,
+    e16InvocationsPerPixel = VK_FRAGMENT_SHADING_RATE_16_INVOCATIONS_PER_PIXEL_NV,
+    eNoInvocations = VK_FRAGMENT_SHADING_RATE_NO_INVOCATIONS_NV
+  };
+
+  enum class FragmentShadingRateTypeNV
+  {
+    eFragmentSize = VK_FRAGMENT_SHADING_RATE_TYPE_FRAGMENT_SIZE_NV,
+    eEnums = VK_FRAGMENT_SHADING_RATE_TYPE_ENUMS_NV
+  };
+
+  //=== VK_NV_ray_tracing_motion_blur ===
+
+  enum class AccelerationStructureMotionInstanceTypeNV
+  {
+    eStatic = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_STATIC_NV,
+    eMatrixMotion = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_MATRIX_MOTION_NV,
+    eSrtMotion = VK_ACCELERATION_STRUCTURE_MOTION_INSTANCE_TYPE_SRT_MOTION_NV
+  };
+
+  enum class AccelerationStructureMotionInfoFlagBitsNV : VkAccelerationStructureMotionInfoFlagsNV
+  {};
+
+  using AccelerationStructureMotionInfoFlagsNV = Flags<AccelerationStructureMotionInfoFlagBitsNV>;
+
+
+  template <> struct FlagTraits<AccelerationStructureMotionInfoFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureMotionInfoFlagsNV allFlags =  {};
+  };
+
+  enum class AccelerationStructureMotionInstanceFlagBitsNV : VkAccelerationStructureMotionInstanceFlagsNV
+  {};
+
+  using AccelerationStructureMotionInstanceFlagsNV = Flags<AccelerationStructureMotionInstanceFlagBitsNV>;
+
+
+  template <> struct FlagTraits<AccelerationStructureMotionInstanceFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR AccelerationStructureMotionInstanceFlagsNV allFlags =  {};
+  };
+
+  //=== VK_EXT_image_compression_control ===
+
+  enum class ImageCompressionFlagBitsEXT : VkImageCompressionFlagsEXT
+  {
+    eDefault = VK_IMAGE_COMPRESSION_DEFAULT_EXT,
+    eFixedRateDefault = VK_IMAGE_COMPRESSION_FIXED_RATE_DEFAULT_EXT,
+    eFixedRateExplicit = VK_IMAGE_COMPRESSION_FIXED_RATE_EXPLICIT_EXT,
+    eDisabled = VK_IMAGE_COMPRESSION_DISABLED_EXT
+  };
+
+  using ImageCompressionFlagsEXT = Flags<ImageCompressionFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<ImageCompressionFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCompressionFlagsEXT allFlags = 
+          ImageCompressionFlagBitsEXT::eDefault
+        | ImageCompressionFlagBitsEXT::eFixedRateDefault
+        | ImageCompressionFlagBitsEXT::eFixedRateExplicit
+        | ImageCompressionFlagBitsEXT::eDisabled;
+  };
+
+  enum class ImageCompressionFixedRateFlagBitsEXT : VkImageCompressionFixedRateFlagsEXT
+  {
+    eNone = VK_IMAGE_COMPRESSION_FIXED_RATE_NONE_EXT,
+    e1Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_1BPC_BIT_EXT,
+    e2Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_2BPC_BIT_EXT,
+    e3Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_3BPC_BIT_EXT,
+    e4Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_4BPC_BIT_EXT,
+    e5Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_5BPC_BIT_EXT,
+    e6Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_6BPC_BIT_EXT,
+    e7Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_7BPC_BIT_EXT,
+    e8Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_8BPC_BIT_EXT,
+    e9Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_9BPC_BIT_EXT,
+    e10Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_10BPC_BIT_EXT,
+    e11Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_11BPC_BIT_EXT,
+    e12Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_12BPC_BIT_EXT,
+    e13Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_13BPC_BIT_EXT,
+    e14Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_14BPC_BIT_EXT,
+    e15Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_15BPC_BIT_EXT,
+    e16Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_16BPC_BIT_EXT,
+    e17Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_17BPC_BIT_EXT,
+    e18Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_18BPC_BIT_EXT,
+    e19Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_19BPC_BIT_EXT,
+    e20Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_20BPC_BIT_EXT,
+    e21Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_21BPC_BIT_EXT,
+    e22Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_22BPC_BIT_EXT,
+    e23Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_23BPC_BIT_EXT,
+    e24Bpc = VK_IMAGE_COMPRESSION_FIXED_RATE_24BPC_BIT_EXT
+  };
+
+  using ImageCompressionFixedRateFlagsEXT = Flags<ImageCompressionFixedRateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<ImageCompressionFixedRateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageCompressionFixedRateFlagsEXT allFlags = 
+          ImageCompressionFixedRateFlagBitsEXT::eNone
+        | ImageCompressionFixedRateFlagBitsEXT::e1Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e2Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e3Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e4Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e5Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e6Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e7Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e8Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e9Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e10Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e11Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e12Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e13Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e14Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e15Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e16Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e17Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e18Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e19Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e20Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e21Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e22Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e23Bpc
+        | ImageCompressionFixedRateFlagBitsEXT::e24Bpc;
+  };
+
+  //=== VK_EXT_device_fault ===
+
+  enum class DeviceFaultAddressTypeEXT
+  {
+    eNone = VK_DEVICE_FAULT_ADDRESS_TYPE_NONE_EXT,
+    eReadInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_READ_INVALID_EXT,
+    eWriteInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_WRITE_INVALID_EXT,
+    eExecuteInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_EXECUTE_INVALID_EXT,
+    eInstructionPointerUnknown = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_UNKNOWN_EXT,
+    eInstructionPointerInvalid = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_INVALID_EXT,
+    eInstructionPointerFault = VK_DEVICE_FAULT_ADDRESS_TYPE_INSTRUCTION_POINTER_FAULT_EXT
+  };
+
+  enum class DeviceFaultVendorBinaryHeaderVersionEXT
+  {
+    eOne = VK_DEVICE_FAULT_VENDOR_BINARY_HEADER_VERSION_ONE_EXT
+  };
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+
+  enum class DirectFBSurfaceCreateFlagBitsEXT : VkDirectFBSurfaceCreateFlagsEXT
+  {};
+
+  using DirectFBSurfaceCreateFlagsEXT = Flags<DirectFBSurfaceCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DirectFBSurfaceCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DirectFBSurfaceCreateFlagsEXT allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+
+  enum class RayTracingShaderGroupTypeKHR
+  {
+    eGeneral = VK_RAY_TRACING_SHADER_GROUP_TYPE_GENERAL_KHR,
+    eTrianglesHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_TRIANGLES_HIT_GROUP_KHR,
+    eProceduralHitGroup = VK_RAY_TRACING_SHADER_GROUP_TYPE_PROCEDURAL_HIT_GROUP_KHR
+  };
+  using RayTracingShaderGroupTypeNV = RayTracingShaderGroupTypeKHR;
+
+  enum class ShaderGroupShaderKHR
+  {
+    eGeneral = VK_SHADER_GROUP_SHADER_GENERAL_KHR,
+    eClosestHit = VK_SHADER_GROUP_SHADER_CLOSEST_HIT_KHR,
+    eAnyHit = VK_SHADER_GROUP_SHADER_ANY_HIT_KHR,
+    eIntersection = VK_SHADER_GROUP_SHADER_INTERSECTION_KHR
+  };
+
+  //=== VK_EXT_device_address_binding_report ===
+
+  enum class DeviceAddressBindingFlagBitsEXT : VkDeviceAddressBindingFlagsEXT
+  {
+    eInternalObject = VK_DEVICE_ADDRESS_BINDING_INTERNAL_OBJECT_BIT_EXT
+  };
+
+  using DeviceAddressBindingFlagsEXT = Flags<DeviceAddressBindingFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<DeviceAddressBindingFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DeviceAddressBindingFlagsEXT allFlags = 
+          DeviceAddressBindingFlagBitsEXT::eInternalObject;
+  };
+
+  enum class DeviceAddressBindingTypeEXT
+  {
+    eBind = VK_DEVICE_ADDRESS_BINDING_TYPE_BIND_EXT,
+    eUnbind = VK_DEVICE_ADDRESS_BINDING_TYPE_UNBIND_EXT
+  };
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+
+  enum class ImageConstraintsInfoFlagBitsFUCHSIA : VkImageConstraintsInfoFlagsFUCHSIA
+  {
+    eCpuReadRarely = VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA,
+    eCpuReadOften = VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA,
+    eCpuWriteRarely = VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA,
+    eCpuWriteOften = VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA,
+    eProtectedOptional = VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA
+  };
+
+  using ImageConstraintsInfoFlagsFUCHSIA = Flags<ImageConstraintsInfoFlagBitsFUCHSIA>;
+
+
+  template <> struct FlagTraits<ImageConstraintsInfoFlagBitsFUCHSIA>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageConstraintsInfoFlagsFUCHSIA allFlags = 
+          ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadRarely
+        | ImageConstraintsInfoFlagBitsFUCHSIA::eCpuReadOften
+        | ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteRarely
+        | ImageConstraintsInfoFlagBitsFUCHSIA::eCpuWriteOften
+        | ImageConstraintsInfoFlagBitsFUCHSIA::eProtectedOptional;
+  };
+
+  enum class ImageFormatConstraintsFlagBitsFUCHSIA : VkImageFormatConstraintsFlagsFUCHSIA
+  {};
+
+  using ImageFormatConstraintsFlagsFUCHSIA = Flags<ImageFormatConstraintsFlagBitsFUCHSIA>;
+
+
+  template <> struct FlagTraits<ImageFormatConstraintsFlagBitsFUCHSIA>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ImageFormatConstraintsFlagsFUCHSIA allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+
+  enum class ScreenSurfaceCreateFlagBitsQNX : VkScreenSurfaceCreateFlagsQNX
+  {};
+
+  using ScreenSurfaceCreateFlagsQNX = Flags<ScreenSurfaceCreateFlagBitsQNX>;
+
+
+  template <> struct FlagTraits<ScreenSurfaceCreateFlagBitsQNX>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR ScreenSurfaceCreateFlagsQNX allFlags =  {};
+  };
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_opacity_micromap ===
+
+  enum class MicromapTypeEXT
+  {
+    eOpacityMicromap = VK_MICROMAP_TYPE_OPACITY_MICROMAP_EXT
+  };
+
+  enum class BuildMicromapFlagBitsEXT : VkBuildMicromapFlagsEXT
+  {
+    ePreferFastTrace = VK_BUILD_MICROMAP_PREFER_FAST_TRACE_BIT_EXT,
+    ePreferFastBuild = VK_BUILD_MICROMAP_PREFER_FAST_BUILD_BIT_EXT,
+    eAllowCompaction = VK_BUILD_MICROMAP_ALLOW_COMPACTION_BIT_EXT
+  };
+
+  using BuildMicromapFlagsEXT = Flags<BuildMicromapFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<BuildMicromapFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR BuildMicromapFlagsEXT allFlags = 
+          BuildMicromapFlagBitsEXT::ePreferFastTrace
+        | BuildMicromapFlagBitsEXT::ePreferFastBuild
+        | BuildMicromapFlagBitsEXT::eAllowCompaction;
+  };
+
+  enum class CopyMicromapModeEXT
+  {
+    eClone = VK_COPY_MICROMAP_MODE_CLONE_EXT,
+    eSerialize = VK_COPY_MICROMAP_MODE_SERIALIZE_EXT,
+    eDeserialize = VK_COPY_MICROMAP_MODE_DESERIALIZE_EXT,
+    eCompact = VK_COPY_MICROMAP_MODE_COMPACT_EXT
+  };
+
+  enum class MicromapCreateFlagBitsEXT : VkMicromapCreateFlagsEXT
+  {
+    eDeviceAddressCaptureReplay = VK_MICROMAP_CREATE_DEVICE_ADDRESS_CAPTURE_REPLAY_BIT_EXT
+  };
+
+  using MicromapCreateFlagsEXT = Flags<MicromapCreateFlagBitsEXT>;
+
+
+  template <> struct FlagTraits<MicromapCreateFlagBitsEXT>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MicromapCreateFlagsEXT allFlags = 
+          MicromapCreateFlagBitsEXT::eDeviceAddressCaptureReplay;
+  };
+
+  enum class BuildMicromapModeEXT
+  {
+    eBuild = VK_BUILD_MICROMAP_MODE_BUILD_EXT
+  };
+
+  enum class OpacityMicromapFormatEXT
+  {
+    e2State = VK_OPACITY_MICROMAP_FORMAT_2_STATE_EXT,
+    e4State = VK_OPACITY_MICROMAP_FORMAT_4_STATE_EXT
+  };
+
+  enum class OpacityMicromapSpecialIndexEXT
+  {
+    eFullyTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_TRANSPARENT_EXT,
+    eFullyOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_OPAQUE_EXT,
+    eFullyUnknownTransparent = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_TRANSPARENT_EXT,
+    eFullyUnknownOpaque = VK_OPACITY_MICROMAP_SPECIAL_INDEX_FULLY_UNKNOWN_OPAQUE_EXT
+  };
+
+  //=== VK_NV_memory_decompression ===
+
+  enum class MemoryDecompressionMethodFlagBitsNV : VkMemoryDecompressionMethodFlagsNV
+  {
+    eGdeflate10 = VK_MEMORY_DECOMPRESSION_METHOD_GDEFLATE_1_0_BIT_NV
+  };
+
+  using MemoryDecompressionMethodFlagsNV = Flags<MemoryDecompressionMethodFlagBitsNV>;
+
+
+  template <> struct FlagTraits<MemoryDecompressionMethodFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR MemoryDecompressionMethodFlagsNV allFlags = 
+          MemoryDecompressionMethodFlagBitsNV::eGdeflate10;
+  };
+
+  //=== VK_EXT_subpass_merge_feedback ===
+
+  enum class SubpassMergeStatusEXT
+  {
+    eMerged = VK_SUBPASS_MERGE_STATUS_MERGED_EXT,
+    eDisallowed = VK_SUBPASS_MERGE_STATUS_DISALLOWED_EXT,
+    eNotMergedSideEffects = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SIDE_EFFECTS_EXT,
+    eNotMergedSamplesMismatch = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SAMPLES_MISMATCH_EXT,
+    eNotMergedViewsMismatch = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_VIEWS_MISMATCH_EXT,
+    eNotMergedAliasing = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_ALIASING_EXT,
+    eNotMergedDependencies = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPENDENCIES_EXT,
+    eNotMergedIncompatibleInputAttachment = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INCOMPATIBLE_INPUT_ATTACHMENT_EXT,
+    eNotMergedTooManyAttachments = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_TOO_MANY_ATTACHMENTS_EXT,
+    eNotMergedInsufficientStorage = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_INSUFFICIENT_STORAGE_EXT,
+    eNotMergedDepthStencilCount = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_DEPTH_STENCIL_COUNT_EXT,
+    eNotMergedResolveAttachmentReuse = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_RESOLVE_ATTACHMENT_REUSE_EXT,
+    eNotMergedSingleSubpass = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_SINGLE_SUBPASS_EXT,
+    eNotMergedUnspecified = VK_SUBPASS_MERGE_STATUS_NOT_MERGED_UNSPECIFIED_EXT
+  };
+
+  //=== VK_LUNARG_direct_driver_loading ===
+
+  enum class DirectDriverLoadingModeLUNARG
+  {
+    eExclusive = VK_DIRECT_DRIVER_LOADING_MODE_EXCLUSIVE_LUNARG,
+    eInclusive = VK_DIRECT_DRIVER_LOADING_MODE_INCLUSIVE_LUNARG
+  };
+
+  enum class DirectDriverLoadingFlagBitsLUNARG : VkDirectDriverLoadingFlagsLUNARG
+  {};
+
+  using DirectDriverLoadingFlagsLUNARG = Flags<DirectDriverLoadingFlagBitsLUNARG>;
+
+
+  template <> struct FlagTraits<DirectDriverLoadingFlagBitsLUNARG>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR DirectDriverLoadingFlagsLUNARG allFlags =  {};
+  };
+
+  //=== VK_EXT_rasterization_order_attachment_access ===
+
+  enum class PipelineColorBlendStateCreateFlagBits : VkPipelineColorBlendStateCreateFlags
+  {
+    eRasterizationOrderAttachmentAccessEXT = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_EXT,
+    eRasterizationOrderAttachmentAccessARM = VK_PIPELINE_COLOR_BLEND_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_ACCESS_BIT_ARM
+  };
+
+  using PipelineColorBlendStateCreateFlags = Flags<PipelineColorBlendStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineColorBlendStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineColorBlendStateCreateFlags allFlags = 
+          PipelineColorBlendStateCreateFlagBits::eRasterizationOrderAttachmentAccessEXT;
+  };
+
+  enum class PipelineDepthStencilStateCreateFlagBits : VkPipelineDepthStencilStateCreateFlags
+  {
+    eRasterizationOrderAttachmentDepthAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_EXT,
+    eRasterizationOrderAttachmentStencilAccessEXT = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_EXT,
+    eRasterizationOrderAttachmentDepthAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_DEPTH_ACCESS_BIT_ARM,
+    eRasterizationOrderAttachmentStencilAccessARM = VK_PIPELINE_DEPTH_STENCIL_STATE_CREATE_RASTERIZATION_ORDER_ATTACHMENT_STENCIL_ACCESS_BIT_ARM
+  };
+
+  using PipelineDepthStencilStateCreateFlags = Flags<PipelineDepthStencilStateCreateFlagBits>;
+
+
+  template <> struct FlagTraits<PipelineDepthStencilStateCreateFlagBits>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR PipelineDepthStencilStateCreateFlags allFlags = 
+          PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentDepthAccessEXT
+        | PipelineDepthStencilStateCreateFlagBits::eRasterizationOrderAttachmentStencilAccessEXT;
+  };
+
+  //=== VK_NV_optical_flow ===
+
+  enum class OpticalFlowUsageFlagBitsNV : VkOpticalFlowUsageFlagsNV
+  {
+    eUnknown = VK_OPTICAL_FLOW_USAGE_UNKNOWN_NV,
+    eInput = VK_OPTICAL_FLOW_USAGE_INPUT_BIT_NV,
+    eOutput = VK_OPTICAL_FLOW_USAGE_OUTPUT_BIT_NV,
+    eHint = VK_OPTICAL_FLOW_USAGE_HINT_BIT_NV,
+    eCost = VK_OPTICAL_FLOW_USAGE_COST_BIT_NV,
+    eGlobalFlow = VK_OPTICAL_FLOW_USAGE_GLOBAL_FLOW_BIT_NV
+  };
+
+  using OpticalFlowUsageFlagsNV = Flags<OpticalFlowUsageFlagBitsNV>;
+
+
+  template <> struct FlagTraits<OpticalFlowUsageFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowUsageFlagsNV allFlags = 
+          OpticalFlowUsageFlagBitsNV::eUnknown
+        | OpticalFlowUsageFlagBitsNV::eInput
+        | OpticalFlowUsageFlagBitsNV::eOutput
+        | OpticalFlowUsageFlagBitsNV::eHint
+        | OpticalFlowUsageFlagBitsNV::eCost
+        | OpticalFlowUsageFlagBitsNV::eGlobalFlow;
+  };
+
+  enum class OpticalFlowGridSizeFlagBitsNV : VkOpticalFlowGridSizeFlagsNV
+  {
+    eUnknown = VK_OPTICAL_FLOW_GRID_SIZE_UNKNOWN_NV,
+    e1X1 = VK_OPTICAL_FLOW_GRID_SIZE_1X1_BIT_NV,
+    e2X2 = VK_OPTICAL_FLOW_GRID_SIZE_2X2_BIT_NV,
+    e4X4 = VK_OPTICAL_FLOW_GRID_SIZE_4X4_BIT_NV,
+    e8X8 = VK_OPTICAL_FLOW_GRID_SIZE_8X8_BIT_NV
+  };
+
+  using OpticalFlowGridSizeFlagsNV = Flags<OpticalFlowGridSizeFlagBitsNV>;
+
+
+  template <> struct FlagTraits<OpticalFlowGridSizeFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowGridSizeFlagsNV allFlags = 
+          OpticalFlowGridSizeFlagBitsNV::eUnknown
+        | OpticalFlowGridSizeFlagBitsNV::e1X1
+        | OpticalFlowGridSizeFlagBitsNV::e2X2
+        | OpticalFlowGridSizeFlagBitsNV::e4X4
+        | OpticalFlowGridSizeFlagBitsNV::e8X8;
+  };
+
+  enum class OpticalFlowPerformanceLevelNV
+  {
+    eUnknown = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_UNKNOWN_NV,
+    eSlow = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_SLOW_NV,
+    eMedium = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_MEDIUM_NV,
+    eFast = VK_OPTICAL_FLOW_PERFORMANCE_LEVEL_FAST_NV
+  };
+
+  enum class OpticalFlowSessionBindingPointNV
+  {
+    eUnknown = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_UNKNOWN_NV,
+    eInput = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_INPUT_NV,
+    eReference = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_REFERENCE_NV,
+    eHint = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_HINT_NV,
+    eFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_FLOW_VECTOR_NV,
+    eBackwardFlowVector = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_FLOW_VECTOR_NV,
+    eCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_COST_NV,
+    eBackwardCost = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_BACKWARD_COST_NV,
+    eGlobalFlow = VK_OPTICAL_FLOW_SESSION_BINDING_POINT_GLOBAL_FLOW_NV
+  };
+
+  enum class OpticalFlowSessionCreateFlagBitsNV : VkOpticalFlowSessionCreateFlagsNV
+  {
+    eEnableHint = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_HINT_BIT_NV,
+    eEnableCost = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_COST_BIT_NV,
+    eEnableGlobalFlow = VK_OPTICAL_FLOW_SESSION_CREATE_ENABLE_GLOBAL_FLOW_BIT_NV,
+    eAllowRegions = VK_OPTICAL_FLOW_SESSION_CREATE_ALLOW_REGIONS_BIT_NV,
+    eBothDirections = VK_OPTICAL_FLOW_SESSION_CREATE_BOTH_DIRECTIONS_BIT_NV
+  };
+
+  using OpticalFlowSessionCreateFlagsNV = Flags<OpticalFlowSessionCreateFlagBitsNV>;
+
+
+  template <> struct FlagTraits<OpticalFlowSessionCreateFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowSessionCreateFlagsNV allFlags = 
+          OpticalFlowSessionCreateFlagBitsNV::eEnableHint
+        | OpticalFlowSessionCreateFlagBitsNV::eEnableCost
+        | OpticalFlowSessionCreateFlagBitsNV::eEnableGlobalFlow
+        | OpticalFlowSessionCreateFlagBitsNV::eAllowRegions
+        | OpticalFlowSessionCreateFlagBitsNV::eBothDirections;
+  };
+
+  enum class OpticalFlowExecuteFlagBitsNV : VkOpticalFlowExecuteFlagsNV
+  {
+    eDisableTemporalHints = VK_OPTICAL_FLOW_EXECUTE_DISABLE_TEMPORAL_HINTS_BIT_NV
+  };
+
+  using OpticalFlowExecuteFlagsNV = Flags<OpticalFlowExecuteFlagBitsNV>;
+
+
+  template <> struct FlagTraits<OpticalFlowExecuteFlagBitsNV>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR bool isBitmask = true;
+    static VULKAN_HPP_CONST_OR_CONSTEXPR OpticalFlowExecuteFlagsNV allFlags = 
+          OpticalFlowExecuteFlagBitsNV::eDisableTemporalHints;
+  };
+
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+
+  enum class RayTracingInvocationReorderModeNV
+  {
+    eNone = VK_RAY_TRACING_INVOCATION_REORDER_MODE_NONE_NV,
+    eReorder = VK_RAY_TRACING_INVOCATION_REORDER_MODE_REORDER_NV
+  };
+
+
+
+  //=========================
+  //=== Index Type Traits ===
+  //=========================
+
+  template<typename T>
+  struct IndexTypeValue
+  {};
+
+
+  template <>
+  struct IndexTypeValue<uint16_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint16;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint16>
+  {
+    using Type = uint16_t;
+  };
+
+  template <>
+  struct IndexTypeValue<uint32_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint32;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint32>
+  {
+    using Type = uint32_t;
+  };
+
+  template <>
+  struct IndexTypeValue<uint8_t>
+  {
+    static VULKAN_HPP_CONST_OR_CONSTEXPR IndexType value = IndexType::eUint8EXT;
+  };
+
+  template <>
+  struct CppType<IndexType, IndexType::eUint8EXT>
+  {
+    using Type = uint8_t;
+  };
+
+
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
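The FlagTraits specializations and index-type traits added above are what drive vulkan.hpp's type-safe bitmask operators and index-type deduction. A minimal usage sketch follows; it is not part of the generated header, and it assumes the default `vk` value of VULKAN_HPP_NAMESPACE and a compiler where VULKAN_HPP_CONST_OR_CONSTEXPR expands to constexpr (so the trait members can feed static_assert):

    #include <cstdint>
    #include <type_traits>
    #include <vulkan/vulkan.hpp>

    int main()
    {
      // IndexTypeValue maps a C++ index type to its vk::IndexType enumerant,
      // and CppType provides the inverse mapping.
      static_assert( vk::IndexTypeValue<uint16_t>::value == vk::IndexType::eUint16, "" );
      static_assert( std::is_same<vk::CppType<vk::IndexType, vk::IndexType::eUint32>::Type, uint32_t>::value, "" );

      // FlagTraits<...>::isBitmask enables the bitwise operators on the wrapped
      // FlagBits enums, e.g. for the optical-flow usage flags declared above.
      vk::OpticalFlowUsageFlagsNV usage =
        vk::OpticalFlowUsageFlagBitsNV::eInput | vk::OpticalFlowUsageFlagBitsNV::eOutput;
      (void)usage;  // silence unused-variable warnings
      return 0;
    }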
diff --git a/include/vulkan/vulkan_format_traits.hpp b/include/vulkan/vulkan_format_traits.hpp
new file mode 100644
index 0000000..07f4bd7
--- /dev/null
+++ b/include/vulkan/vulkan_format_traits.hpp
@@ -0,0 +1,7617 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_FORMAT_TRAITS_HPP
+#  define VULKAN_FORMAT_TRAITS_HPP
+
+#include <vulkan/vulkan.hpp>
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+  //=====================
+  //=== Format Traits ===
+  //=====================
+
+  // The three-dimensional extent of a texel block.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 std::array<uint8_t, 3> blockExtent( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return {{ 5, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return {{ 5, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return {{ 5, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return {{ 5, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return {{ 6, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return {{ 6, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return {{ 6, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return {{ 6, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return {{ 8, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return {{ 8, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return {{ 8, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return {{ 8, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return {{ 8, 8, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return {{ 8, 8, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return {{ 10, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return {{ 10, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return {{ 10, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return {{ 10, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return {{ 10, 8, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return {{ 10, 8, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return {{ 10, 10, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return {{ 10, 10, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return {{ 12, 10, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return {{ 12, 10, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return {{ 12, 12, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return {{ 12, 12, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return {{ 2, 1, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return {{ 5, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return {{ 5, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return {{ 6, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return {{ 6, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return {{ 8, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return {{ 8, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return {{ 8, 8, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return {{ 10, 5, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return {{ 10, 6, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return {{ 10, 8, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return {{ 10, 10, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return {{ 12, 10, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return {{ 12, 12, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return {{ 8, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return {{ 8, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return {{ 8, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return {{ 4, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return {{ 8, 4, 1 }};
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return {{ 4, 4, 1 }};
+
+      default: return {{ 1, 1, 1 }};
+    }
+  }
+
+  // The texel block size in bytes.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t blockSize( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 12;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 12;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 12;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 24;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 24;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 24;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 5;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 6;
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 4;
+
+      default: VULKAN_HPP_ASSERT( false ); return 0;
+    }
+  }
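As a usage sketch (editorial, not part of the generated header): blockExtent() and blockSize() together are enough to compute the byte size of one mip level of a single-plane or block-compressed image. The helper name levelByteSize below is hypothetical, and the default `vk` namespace is assumed:

    #include <array>
    #include <cstdint>
    #include <vulkan/vulkan.hpp>
    #include <vulkan/vulkan_format_traits.hpp>

    // Rounds each dimension up to whole texel blocks, then multiplies by the
    // per-block byte size. Valid for single-plane formats only.
    uint64_t levelByteSize( vk::Format format, uint32_t width, uint32_t height, uint32_t depth )
    {
      std::array<uint8_t, 3> extent = vk::blockExtent( format );
      uint64_t blocksX = ( width + extent[0] - 1 ) / extent[0];
      uint64_t blocksY = ( height + extent[1] - 1 ) / extent[1];
      uint64_t blocksZ = ( depth + extent[2] - 1 ) / extent[2];
      return blocksX * blocksY * blocksZ * vk::blockSize( format );
    }

    // e.g. a 256x256x1 eBc1RgbaUnormBlock image: 64 * 64 blocks * 8 bytes each = 32768 bytes.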
+
+  // The compatibility class of the format (it can't just be named "class"!)
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compatibilityClass( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return "8-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return "24-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return "48-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return "96-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return "96-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return "96-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return "128-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return "128-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return "128-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return "64-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return "128-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return "128-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return "128-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return "192-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return "192-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return "192-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return "256-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return "256-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return "256-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return "D16";
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return "D24";
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return "D32";
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return "S8";
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return "D16S8";
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return "D24S8";
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return "D32S8";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC1_RGB";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC1_RGB";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC1_RGBA";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC1_RGBA";
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC2";
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC2";
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC3";
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC3";
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC4";
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC4";
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC5";
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC5";
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC6H";
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC6H";
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC7";
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC7";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2_RGB";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2_RGB";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2_RGBA";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2_RGBA";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2_EAC_RGBA";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2_EAC_RGBA";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC_R";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC_R";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC_RG";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC_RG";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC_4x4";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC_4x4";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC_5x4";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC_5x4";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC_5x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC_5x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC_6x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC_6x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC_6x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC_6x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC_8x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC_8x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC_8x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC_8x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC_8x8";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC_8x8";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC_10x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC_10x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC_10x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC_10x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC_10x8";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC_10x8";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC_10x10";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC_10x10";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC_12x10";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC_12x10";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC_12x12";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC_12x12";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return "32-bit G8B8G8R8";
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return "32-bit B8G8R8G8";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return "8-bit 3-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return "8-bit 2-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return "8-bit 3-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return "8-bit 2-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return "8-bit 3-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return "64-bit R10G10B10A10";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return "64-bit G10B10G10R10";
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return "64-bit B10G10R10G10";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return "10-bit 3-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return "10-bit 2-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return "10-bit 3-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return "10-bit 2-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return "10-bit 3-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return "32-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return "64-bit R12G12B12A12";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return "64-bit G12B12G12R12";
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return "64-bit B12G12R12G12";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return "12-bit 3-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return "12-bit 2-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return "12-bit 3-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return "12-bit 2-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return "12-bit 3-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return "64-bit G16B16G16R16";
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return "64-bit B16G16R16G16";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return "16-bit 3-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return "16-bit 2-plane 420";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return "16-bit 3-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return "16-bit 2-plane 422";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return "16-bit 3-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return "8-bit 2-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return "10-bit 2-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return "12-bit 2-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return "16-bit 2-plane 444";
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return "16-bit";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC_4x4";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC_5x4";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC_5x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC_6x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC_6x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC_8x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC_8x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC_8x8";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC_10x5";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC_10x6";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC_10x8";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC_10x10";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC_12x10";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC_12x12";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC1_2BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC1_4BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC2_2BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC2_4BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC1_2BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC1_4BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC2_2BPP";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC2_4BPP";
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return "32-bit";
+
+      default: VULKAN_HPP_ASSERT( false ); return "";
+    }
+  }
+
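A brief, hedged sketch of how the string classes above can be consumed (illustration only, not part of the generated header or of this change). It assumes the switch above belongs to the compatibilityClass() free function and that, as elsewhere in vulkan_format_traits.hpp, it lives in VULKAN_HPP_NAMESPACE (vk:: by default); the helper name and the include path are hypothetical. Because each class is returned as a string literal ("64-bit", "BC1_RGB", "D24S8", ...), two formats sit in the same format compatibility class exactly when the returned strings compare equal.

  #include <cstring>
  #include <vulkan/vulkan_format_traits.hpp>  // assumed include path for the header shown in this diff

  // Hypothetical helper, for illustration only.
  inline bool sameCompatibilityClass( vk::Format a, vk::Format b )
  {
    // compatibilityClass() returns a per-class string literal, so comparing contents is sufficient.
    return std::strcmp( vk::compatibilityClass( a ), vk::compatibilityClass( b ) ) == 0;
  }

  // e.g. sameCompatibilityClass( vk::Format::eR32Sfloat, vk::Format::eB10G11R11UfloatPack32 )
  // is true: both report the "32-bit" class above.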
+  // The number of bits in this component, if not compressed, otherwise 0.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentBits( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8:
+        switch( component )
+        {
+          case 0: return 4;
+          case 1: return 4;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16:
+        switch( component )
+        {
+          case 0: return 4;
+          case 1: return 4;
+          case 2: return 4;
+          case 3: return 4;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16:
+        switch( component )
+        {
+          case 0: return 4;
+          case 1: return 4;
+          case 2: return 4;
+          case 3: return 4;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16:
+        switch( component )
+        {
+          case 0: return 5;
+          case 1: return 6;
+          case 2: return 5;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16:
+        switch( component )
+        {
+          case 0: return 5;
+          case 1: return 6;
+          case 2: return 5;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16:
+        switch( component )
+        {
+          case 0: return 5;
+          case 1: return 5;
+          case 2: return 5;
+          case 3: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16:
+        switch( component )
+        {
+          case 0: return 5;
+          case 1: return 5;
+          case 2: return 5;
+          case 3: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16:
+        switch( component )
+        {
+          case 0: return 1;
+          case 1: return 5;
+          case 2: return 5;
+          case 3: return 5;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32:
+        switch( component )
+        {
+          case 0: return 2;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint:
+        switch( component )
+        {
+          case 0: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint:
+        switch( component )
+        {
+          case 0: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat:
+        switch( component )
+        {
+          case 0: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          case 2: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          case 2: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          case 2: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          case 2: return 32;
+          case 3: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          case 2: return 32;
+          case 3: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 32;
+          case 2: return 32;
+          case 3: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint:
+        switch( component )
+        {
+          case 0: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint:
+        switch( component )
+        {
+          case 0: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat:
+        switch( component )
+        {
+          case 0: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          case 2: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          case 2: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          case 2: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          case 2: return 64;
+          case 3: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          case 2: return 64;
+          case 3: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat:
+        switch( component )
+        {
+          case 0: return 64;
+          case 1: return 64;
+          case 2: return 64;
+          case 3: return 64;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 11;
+          case 2: return 11;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32:
+        switch( component )
+        {
+          case 0: return 9;
+          case 1: return 9;
+          case 2: return 9;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32:
+        switch( component )
+        {
+          case 0: return 24;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat:
+        switch( component )
+        {
+          case 0: return 32;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint:
+        switch( component )
+        {
+          case 0: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint:
+        switch( component )
+        {
+          case 0: return 24;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint:
+        switch( component )
+        {
+          case 0: return 32;
+          case 1: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock:
+        switch( component )
+        {
+          case 0: return 11;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock:
+        switch( component )
+        {
+          case 0: return 11;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock:
+        switch( component )
+        {
+          case 0: return 11;
+          case 1: return 11;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock:
+        switch( component )
+        {
+          case 0: return 11;
+          case 1: return 11;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          case 3: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16:
+        switch( component )
+        {
+          case 0: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          case 3: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16:
+        switch( component )
+        {
+          case 0: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          case 3: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          case 3: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          case 3: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          case 3: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 8;
+          case 1: return 8;
+          case 2: return 8;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 10;
+          case 1: return 10;
+          case 2: return 10;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 12;
+          case 1: return 12;
+          case 2: return 12;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          case 2: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16:
+        switch( component )
+        {
+          case 0: return 4;
+          case 1: return 4;
+          case 2: return 4;
+          case 3: return 4;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16:
+        switch( component )
+        {
+          case 0: return 4;
+          case 1: return 4;
+          case 2: return 4;
+          case 3: return 4;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+        switch( component )
+        {
+          case 0: return 16;
+          case 1: return 16;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+
+      default: return 0;
+    }
+  }
+
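A short, hedged usage sketch (illustration only, not part of the generated header or of this change). Assuming componentBits() above and componentCount() below are exposed as free functions in VULKAN_HPP_NAMESPACE (vk:: by default), the per-texel bit count of an uncompressed format can be accumulated component by component; since componentBits() reports 0 per component for block-compressed formats, the helper simply yields 0 for them. The helper name and include path are hypothetical.

  #include <cstdint>
  #include <vulkan/vulkan_format_traits.hpp>  // assumed include path for the header shown in this diff

  // Hypothetical helper, for illustration only.
  inline uint32_t uncompressedTexelBits( vk::Format format )
  {
    uint32_t bits = 0;
    for ( uint8_t c = 0; c < vk::componentCount( format ); ++c )
    {
      bits += vk::componentBits( format, c );  // 0 for block-compressed formats
    }
    return bits;
  }

  // e.g. uncompressedTexelBits( vk::Format::eR32G32B32A32Sfloat ) == 128 (four 32-bit components).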
+  // The number of components of this format.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentCount( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 4;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 2;
+
+      default: return 0;
+    }
+  }
+
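Similarly, a small sketch (again not part of this patch) combining componentCount with componentBits and the componentName helper defined just below; the include path and the example format are assumptions for illustration only.

    // Illustrative only; assumes <vulkan/vulkan.hpp> exposes the format-trait helpers.
    #include <vulkan/vulkan.hpp>
    #include <iostream>

    int main()
    {
        constexpr vk::Format format = vk::Format::eA2R10G10B10UnormPack32;  // example format
        for ( uint8_t i = 0; i < vk::componentCount( format ); ++i )
        {
            // For the example format this prints: A:2 R:10 G:10 B:10
            std::cout << vk::componentName( format, i ) << ':'
                      << unsigned( vk::componentBits( format, i ) ) << ' ';
        }
        std::cout << '\n';
        return 0;
    }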
+  // The name of the component
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * componentName( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm:
+        switch( component )
+        {
+          case 0: return "D";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32:
+        switch( component )
+        {
+          case 0: return "D";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat:
+        switch( component )
+        {
+          case 0: return "D";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint:
+        switch( component )
+        {
+          case 0: return "S";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint:
+        switch( component )
+        {
+          case 0: return "D";
+          case 1: return "S";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint:
+        switch( component )
+        {
+          case 0: return "D";
+          case 1: return "S";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint:
+        switch( component )
+        {
+          case 0: return "D";
+          case 1: return "S";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16:
+        switch( component )
+        {
+          case 0: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm:
+        switch( component )
+        {
+          case 0: return "B";
+          case 1: return "G";
+          case 2: return "R";
+          case 3: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "G";
+          case 1: return "B";
+          case 2: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "R";
+          case 2: return "G";
+          case 3: return "B";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16:
+        switch( component )
+        {
+          case 0: return "A";
+          case 1: return "B";
+          case 2: return "G";
+          case 3: return "R";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          case 2: return "B";
+          case 3: return "A";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+        switch( component )
+        {
+          case 0: return "R";
+          case 1: return "G";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+
+      default: return "";
+    }
+  }
+
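+  // Illustrative usage sketch (editorial note, not part of the generated
+  // header): the per-component name helper ending above and
+  // componentNumericFormat() below can be combined to print a per-component
+  // description of a format. The componentName() and componentCount() helpers
+  // referenced here are assumed to follow this header's generated naming
+  // scheme; <iostream> would be needed for the output.
+  //
+  //   void describeFormat( VULKAN_HPP_NAMESPACE::Format format )
+  //   {
+  //     // Walk each component of the format and print its name and numeric format.
+  //     for ( uint8_t c = 0; c < VULKAN_HPP_NAMESPACE::componentCount( format ); ++c )
+  //     {
+  //       std::cout << VULKAN_HPP_NAMESPACE::componentName( format, c ) << " : "
+  //                 << VULKAN_HPP_NAMESPACE::componentNumericFormat( format, c ) << '\n';
+  //     }
+  //   }
+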
+  // The numeric format of the component
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * componentNumericFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          case 3: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          case 3: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          case 3: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          case 3: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          case 3: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          case 3: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          case 3: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          case 3: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          case 3: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          case 3: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          case 3: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          case 3: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          case 3: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          case 3: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          case 3: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          case 2: return "SNORM";
+          case 3: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled:
+        switch( component )
+        {
+          case 0: return "USCALED";
+          case 1: return "USCALED";
+          case 2: return "USCALED";
+          case 3: return "USCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled:
+        switch( component )
+        {
+          case 0: return "SSCALED";
+          case 1: return "SSCALED";
+          case 2: return "SSCALED";
+          case 3: return "SSCALED";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          case 1: return "UINT";
+          case 2: return "UINT";
+          case 3: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          case 2: return "SINT";
+          case 3: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32:
+        switch( component )
+        {
+          case 0: return "UFLOAT";
+          case 1: return "UFLOAT";
+          case 2: return "UFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32:
+        switch( component )
+        {
+          case 0: return "UFLOAT";
+          case 1: return "UFLOAT";
+          case 2: return "UFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint:
+        switch( component )
+        {
+          case 0: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "UINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock:
+        switch( component )
+        {
+          case 0: return "UFLOAT";
+          case 1: return "UFLOAT";
+          case 2: return "UFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock:
+        switch( component )
+        {
+          case 0: return "SNORM";
+          case 1: return "SNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock:
+        switch( component )
+        {
+          case 0: return "SFLOAT";
+          case 1: return "SFLOAT";
+          case 2: return "SFLOAT";
+          case 3: return "SFLOAT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG:
+        switch( component )
+        {
+          case 0: return "UNORM";
+          case 1: return "UNORM";
+          case 2: return "UNORM";
+          case 3: return "UNORM";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG:
+        switch( component )
+        {
+          case 0: return "SRGB";
+          case 1: return "SRGB";
+          case 2: return "SRGB";
+          case 3: return "SRGB";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV:
+        switch( component )
+        {
+          case 0: return "SINT";
+          case 1: return "SINT";
+          default: VULKAN_HPP_ASSERT( false ); return "";
+        }
+
+      default: return "";
+    }
+  }
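+
+  // Illustrative usage sketch, not part of the generated header: assuming the function above is
+  // componentNumericFormat and that componentCount is defined earlier in this header (both are
+  // assumptions, since only the switch body is visible in this hunk), the per-component numeric
+  // formats of a combined depth/stencil format could be printed like this:
+  //
+  //   constexpr auto fmt = VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint;
+  //   for ( uint8_t c = 0; c < VULKAN_HPP_NAMESPACE::componentCount( fmt ); ++c )
+  //   {
+  //     std::cout << VULKAN_HPP_NAMESPACE::componentNumericFormat( fmt, c ) << "\n";  // "UNORM", then "UINT"
+  //   }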
+
+  // The plane this component lies in.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t componentPlaneIndex( VULKAN_HPP_NAMESPACE::Format format, uint8_t component )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( component )
+        {
+          case 0: return 0;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 0;
+        }
+
+      default: return 0;
+    }
+  }
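+
+  // Illustrative usage sketch, not part of the generated header: for a 2-plane format such as
+  // eG8B8R82Plane420Unorm, component 0 (G) lives in plane 0 while components 1 (B) and 2 (R)
+  // share plane 1. With a C++14 (or newer) compiler, where VULKAN_HPP_CONSTEXPR_14 expands to
+  // constexpr, this can be checked at compile time:
+  //
+  //   constexpr auto fmt = VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm;
+  //   static_assert( VULKAN_HPP_NAMESPACE::componentPlaneIndex( fmt, 0 ) == 0, "G is in plane 0" );
+  //   static_assert( VULKAN_HPP_NAMESPACE::componentPlaneIndex( fmt, 2 ) == 1, "R shares plane 1 with B" );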
+
+  // True, if the components of this format are compressed, otherwise false.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool componentsAreCompressed( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG:
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG:
+
+        return true;
+      default: return false;
+    }
+  }
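+
+  // Illustrative usage sketch, not part of the generated header: formats whose components carry no
+  // individual bit count (the BC, ETC2, ASTC and PVRTC block formats listed above) return true;
+  // anything not listed, including the plain color formats, falls through to false. Assuming
+  // VULKAN_HPP_CONSTEXPR_14 expands to constexpr (C++14 or newer), this can be verified with:
+  //
+  //   static_assert( VULKAN_HPP_NAMESPACE::componentsAreCompressed( VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock ), "" );
+  //   static_assert( !VULKAN_HPP_NAMESPACE::componentsAreCompressed( VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat ), "" );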
+
+  // A textual description of the compression scheme, or an empty string if it is not compressed
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 char const * compressionScheme( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return "BC";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return "ETC2";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return "ETC2";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return "ETC2";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return "ETC2";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return "ETC2";
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return "ETC2";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return "EAC";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return "EAC";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return "EAC";
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return "EAC";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return "ASTC LDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return "ASTC HDR";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return "PVRTC";
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return "PVRTC";
+
+      default: return "";
+    }
+  }
+
+  // True, if this format is a compressed one.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 bool isCompressed( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    return ( *VULKAN_HPP_NAMESPACE::compressionScheme( format ) != 0 );
+  }
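+
+  // Illustrative usage sketch, not part of the generated header: isCompressed is just a check for a
+  // non-empty compressionScheme string, so the two functions always agree, e.g.
+  //
+  //   constexpr auto fmt = VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock;
+  //   // compressionScheme( fmt ) yields "ETC2", hence isCompressed( fmt ) yields true;
+  //   // for an uncompressed format such as eR32G32Sfloat they yield "" and false respectively.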
+
+  // The number of bits into which the format is packed. A single image element in this format
+  // can be stored in the same space as a scalar type of this bit width.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t packed( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 8;
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 32;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 16;
+
+      default: return 0;
+    }
+  }
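Editorial note (not part of this diff): packed() reports the bit width of the container a packed-format texel occupies, so it doubles as a compile-time sanity check when staging such data in scalar arrays. A small sketch, assuming C++14 or newer so that VULKAN_HPP_CONSTEXPR_14 expands to constexpr, and the header location assumed above:

    // A texel of an A2B10G10R10 packed format fits exactly in one 32-bit word.
    #include <cstdint>
    #include <vulkan/vulkan_format_traits.hpp>

    static_assert( VULKAN_HPP_NAMESPACE::packed( VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32 ) ==
                     8 * sizeof( uint32_t ),
                   "A2B10G10R10 texels pack into a single uint32_t" );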
+
+  // The single-plane format that this plane is compatible with.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 VULKAN_HPP_NAMESPACE::Format planeCompatibleFormat( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 2: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR8Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return VULKAN_HPP_NAMESPACE::Format::eR16Unorm;
+          case 1: return VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm;
+          default: VULKAN_HPP_ASSERT( false ); return VULKAN_HPP_NAMESPACE::Format::eUndefined;
+        }
+
+      default: VULKAN_HPP_ASSERT( plane == 0 ); return format;
+    }
+  }
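Editorial note (not part of this diff): each plane of a multi-planar format is viewed or bound through the single-plane format returned here; combined with planeCount() just below, that is enough to enumerate per-plane view formats. A sketch, with the header location assumed as above:

    // Lists the plane-compatible view format of every plane of a multi-planar format,
    // e.g. eG8B8R82Plane420Unorm: plane 0 -> eR8Unorm, plane 1 -> eR8G8Unorm.
    #include <cstdint>
    #include <cstdio>
    #include <vulkan/vulkan_format_traits.hpp>

    void listPlaneViewFormats( VULKAN_HPP_NAMESPACE::Format format )
    {
      for ( uint8_t plane = 0; plane < VULKAN_HPP_NAMESPACE::planeCount( format ); ++plane )
      {
        auto viewFormat = VULKAN_HPP_NAMESPACE::planeCompatibleFormat( format, plane );
        std::printf( "plane %d -> VkFormat %d\n", static_cast<int>( plane ), static_cast<int>( viewFormat ) );
      }
    }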
+
+  // The number of image planes of this format.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeCount( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 3;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 2;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 2;
+
+      default: return 1;
+    }
+  }
+
+  // The relative height of this plane. A value of k means that this plane is 1/k the height of the overall format.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeHeightDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+
+      default: VULKAN_HPP_ASSERT( plane == 0 ); return 1;
+    }
+  }
+
+  // The relative width of this plane. A value of k means that this plane is 1/k the width of the overall format.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t planeWidthDivisor( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          case 2: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 2;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          case 2: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm:
+        switch( plane )
+        {
+          case 0: return 1;
+          case 1: return 1;
+          default: VULKAN_HPP_ASSERT( false ); return 1;
+        }
+
+      default: VULKAN_HPP_ASSERT( plane == 0 ); return 1;
+    }
+  }
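Editorial note (not part of this diff): together, planeWidthDivisor() and planeHeightDivisor() encode a format's chroma subsampling: 4:2:0 chroma planes are half width and half height, 4:2:2 planes are half width only, and 4:4:4 planes are full size. A sketch deriving per-plane extents, with the header location assumed as above:

    // Computes the extent of one plane of a multi-planar image from the full-image extent.
    #include <cstdint>
    #include <vulkan/vulkan_format_traits.hpp>

    struct PlaneExtent
    {
      uint32_t width;
      uint32_t height;
    };

    PlaneExtent planeExtent( VULKAN_HPP_NAMESPACE::Format format, uint8_t plane, uint32_t width, uint32_t height )
    {
      // e.g. plane 1 of eG8B8R82Plane420Unorm covers width/2 x height/2 texels (4:2:0 subsampling)
      return { width / VULKAN_HPP_NAMESPACE::planeWidthDivisor( format, plane ),
               height / VULKAN_HPP_NAMESPACE::planeHeightDivisor( format, plane ) };
    }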
+
+  // The number of texels in a texel block.
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_14 uint8_t texelsPerBlock( VULKAN_HPP_NAMESPACE::Format format )
+  {
+    switch( format )
+    {
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4UnormPack8: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR4G4B4A4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB4G4R4A4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G6B5UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G6R5UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR5G5B5A1UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB5G5R5A1UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA1R5G5B5UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8A8Srgb: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UscaledPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SscaledPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8UintPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SintPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA8B8G8R8SrgbPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UscaledPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SscaledPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10UintPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2R10G10B10SintPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UscaledPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SscaledPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10UintPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA2B10G10R10SintPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Snorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sscaled: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16B16A16Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR32G32B32A32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR64G64B64A64Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB10G11R11UfloatPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eE5B9G9R9UfloatPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD16Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eX8D24UnormPack32: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD32Sfloat: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD16UnormS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD24UnormS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eD32SfloatS8Uint: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbUnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbSrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaUnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc1RgbaSrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc2SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc3SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc4SnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc5SnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HUfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc6HSfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eBc7SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A1SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEtc2R8G8B8A8SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11SnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eEacR11G11SnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4UnormBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SrgbBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4UnormBlock: return 20;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SrgbBlock: return 20;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5UnormBlock: return 25;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SrgbBlock: return 25;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5UnormBlock: return 30;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SrgbBlock: return 30;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6UnormBlock: return 36;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SrgbBlock: return 36;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5UnormBlock: return 40;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SrgbBlock: return 40;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6UnormBlock: return 48;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SrgbBlock: return 48;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8UnormBlock: return 64;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SrgbBlock: return 64;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5UnormBlock: return 50;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SrgbBlock: return 50;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6UnormBlock: return 60;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SrgbBlock: return 60;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8UnormBlock: return 80;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SrgbBlock: return 80;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10UnormBlock: return 100;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SrgbBlock: return 100;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10UnormBlock: return 120;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SrgbBlock: return 120;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12UnormBlock: return 144;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock: return 144;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8G8R8422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB8G8R8G8422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane420Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane420Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R83Plane444Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6Unorm2Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR10X6G10X6B10X6A10X6Unorm4Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6G10X6R10X6422Unorm4Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB10X6G10X6R10X6G10X6422Unorm4Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane420Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane420Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane422Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane422Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X63Plane444Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4Unorm2Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR12X4G12X4B12X4A12X4Unorm4Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4G12X4R12X4422Unorm4Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB12X4G12X4R12X4G12X4422Unorm4Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane420Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane420Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane422Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane422Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X43Plane444Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16G16R16422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eB16G16R16G16422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane420Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane420Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane422Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R163Plane444Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG8B8R82Plane444Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG10X6B10X6R10X62Plane444Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG12X4B12X4R12X42Plane444Unorm3Pack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eG16B16R162Plane444Unorm: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA4R4G4B4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eA4B4G4R4UnormPack16: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc4x4SfloatBlock: return 16;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x4SfloatBlock: return 20;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc5x5SfloatBlock: return 25;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x5SfloatBlock: return 30;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc6x6SfloatBlock: return 36;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x5SfloatBlock: return 40;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x6SfloatBlock: return 48;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc8x8SfloatBlock: return 64;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x5SfloatBlock: return 50;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x6SfloatBlock: return 60;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x8SfloatBlock: return 80;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc10x10SfloatBlock: return 100;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x10SfloatBlock: return 120;
+      case VULKAN_HPP_NAMESPACE::Format::eAstc12x12SfloatBlock: return 144;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppUnormBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppUnormBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppUnormBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppUnormBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc12BppSrgbBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc14BppSrgbBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc22BppSrgbBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::ePvrtc24BppSrgbBlockIMG: return 1;
+      case VULKAN_HPP_NAMESPACE::Format::eR16G16S105NV: return 1;
+
+      default: VULKAN_HPP_ASSERT( false ); return 0;
+    }
+  }
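Editorial note (not part of this diff): texelsPerBlock() gives the texel footprint of one texel block, 1 for uncompressed and multi-planar formats and block width times block height for most block-compressed families, as the switch above shows. A compile-time sketch, again assuming C++14 or newer and the header location assumed above:

    #include <vulkan/vulkan_format_traits.hpp>

    // Values taken straight from the switch above.
    static_assert( VULKAN_HPP_NAMESPACE::texelsPerBlock( VULKAN_HPP_NAMESPACE::Format::eAstc12x12SrgbBlock ) == 144,
                   "a 12x12 ASTC block covers 144 texels" );
    static_assert( VULKAN_HPP_NAMESPACE::texelsPerBlock( VULKAN_HPP_NAMESPACE::Format::eR8G8B8A8Unorm ) == 1,
                   "uncompressed formats are one texel per block" );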
+
+}   // namespace VULKAN_HPP_NAMESPACE
+#endif
diff --git a/include/vulkan/vulkan_fuchsia.h b/include/vulkan/vulkan_fuchsia.h
new file mode 100644
index 0000000..61774ff
--- /dev/null
+++ b/include/vulkan/vulkan_fuchsia.h
@@ -0,0 +1,258 @@
+#ifndef VULKAN_FUCHSIA_H_
+#define VULKAN_FUCHSIA_H_ 1
+
+/*
+** Copyright 2015-2022 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+#define VK_FUCHSIA_imagepipe_surface 1
+#define VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION 1
+#define VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME "VK_FUCHSIA_imagepipe_surface"
+typedef VkFlags VkImagePipeSurfaceCreateFlagsFUCHSIA;
+typedef struct VkImagePipeSurfaceCreateInfoFUCHSIA {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkImagePipeSurfaceCreateFlagsFUCHSIA    flags;
+    zx_handle_t                             imagePipeHandle;
+} VkImagePipeSurfaceCreateInfoFUCHSIA;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateImagePipeSurfaceFUCHSIA)(VkInstance instance, const VkImagePipeSurfaceCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA(
+    VkInstance                                  instance,
+    const VkImagePipeSurfaceCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkSurfaceKHR*                               pSurface);
+#endif
+
+
+#define VK_FUCHSIA_external_memory 1
+#define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1
+#define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory"
+typedef struct VkImportMemoryZirconHandleInfoFUCHSIA {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+    zx_handle_t                           handle;
+} VkImportMemoryZirconHandleInfoFUCHSIA;
+
+typedef struct VkMemoryZirconHandlePropertiesFUCHSIA {
+    VkStructureType    sType;
+    void*              pNext;
+    uint32_t           memoryTypeBits;
+} VkMemoryZirconHandlePropertiesFUCHSIA;
+
+typedef struct VkMemoryGetZirconHandleInfoFUCHSIA {
+    VkStructureType                       sType;
+    const void*                           pNext;
+    VkDeviceMemory                        memory;
+    VkExternalMemoryHandleTypeFlagBits    handleType;
+} VkMemoryGetZirconHandleInfoFUCHSIA;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandleFUCHSIA)(VkDevice device, const VkMemoryGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle);
+typedef VkResult (VKAPI_PTR *PFN_vkGetMemoryZirconHandlePropertiesFUCHSIA)(VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, zx_handle_t zirconHandle, VkMemoryZirconHandlePropertiesFUCHSIA* pMemoryZirconHandleProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandleFUCHSIA(
+    VkDevice                                    device,
+    const VkMemoryGetZirconHandleInfoFUCHSIA*   pGetZirconHandleInfo,
+    zx_handle_t*                                pZirconHandle);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA(
+    VkDevice                                    device,
+    VkExternalMemoryHandleTypeFlagBits          handleType,
+    zx_handle_t                                 zirconHandle,
+    VkMemoryZirconHandlePropertiesFUCHSIA*      pMemoryZirconHandleProperties);
+#endif
+
+
+#define VK_FUCHSIA_external_semaphore 1
+#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1
+#define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore"
+typedef struct VkImportSemaphoreZirconHandleInfoFUCHSIA {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkSemaphoreImportFlags                   flags;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+    zx_handle_t                              zirconHandle;
+} VkImportSemaphoreZirconHandleInfoFUCHSIA;
+
+typedef struct VkSemaphoreGetZirconHandleInfoFUCHSIA {
+    VkStructureType                          sType;
+    const void*                              pNext;
+    VkSemaphore                              semaphore;
+    VkExternalSemaphoreHandleTypeFlagBits    handleType;
+} VkSemaphoreGetZirconHandleInfoFUCHSIA;
+
+typedef VkResult (VKAPI_PTR *PFN_vkImportSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkGetSemaphoreZirconHandleFUCHSIA)(VkDevice device, const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo, zx_handle_t* pZirconHandle);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkImportSemaphoreZirconHandleFUCHSIA(
+    VkDevice                                    device,
+    const VkImportSemaphoreZirconHandleInfoFUCHSIA* pImportSemaphoreZirconHandleInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA(
+    VkDevice                                    device,
+    const VkSemaphoreGetZirconHandleInfoFUCHSIA* pGetZirconHandleInfo,
+    zx_handle_t*                                pZirconHandle);
+#endif
+
+
+#define VK_FUCHSIA_buffer_collection 1
+VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferCollectionFUCHSIA)
+#define VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION 2
+#define VK_FUCHSIA_BUFFER_COLLECTION_EXTENSION_NAME "VK_FUCHSIA_buffer_collection"
+typedef VkFlags VkImageFormatConstraintsFlagsFUCHSIA;
+
+typedef enum VkImageConstraintsInfoFlagBitsFUCHSIA {
+    VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_RARELY_FUCHSIA = 0x00000001,
+    VK_IMAGE_CONSTRAINTS_INFO_CPU_READ_OFTEN_FUCHSIA = 0x00000002,
+    VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_RARELY_FUCHSIA = 0x00000004,
+    VK_IMAGE_CONSTRAINTS_INFO_CPU_WRITE_OFTEN_FUCHSIA = 0x00000008,
+    VK_IMAGE_CONSTRAINTS_INFO_PROTECTED_OPTIONAL_FUCHSIA = 0x00000010,
+    VK_IMAGE_CONSTRAINTS_INFO_FLAG_BITS_MAX_ENUM_FUCHSIA = 0x7FFFFFFF
+} VkImageConstraintsInfoFlagBitsFUCHSIA;
+typedef VkFlags VkImageConstraintsInfoFlagsFUCHSIA;
+typedef struct VkBufferCollectionCreateInfoFUCHSIA {
+    VkStructureType    sType;
+    const void*        pNext;
+    zx_handle_t        collectionToken;
+} VkBufferCollectionCreateInfoFUCHSIA;
+
+typedef struct VkImportMemoryBufferCollectionFUCHSIA {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkBufferCollectionFUCHSIA    collection;
+    uint32_t                     index;
+} VkImportMemoryBufferCollectionFUCHSIA;
+
+typedef struct VkBufferCollectionImageCreateInfoFUCHSIA {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkBufferCollectionFUCHSIA    collection;
+    uint32_t                     index;
+} VkBufferCollectionImageCreateInfoFUCHSIA;
+
+typedef struct VkBufferCollectionConstraintsInfoFUCHSIA {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           minBufferCount;
+    uint32_t           maxBufferCount;
+    uint32_t           minBufferCountForCamping;
+    uint32_t           minBufferCountForDedicatedSlack;
+    uint32_t           minBufferCountForSharedSlack;
+} VkBufferCollectionConstraintsInfoFUCHSIA;
+
+typedef struct VkBufferConstraintsInfoFUCHSIA {
+    VkStructureType                             sType;
+    const void*                                 pNext;
+    VkBufferCreateInfo                          createInfo;
+    VkFormatFeatureFlags                        requiredFormatFeatures;
+    VkBufferCollectionConstraintsInfoFUCHSIA    bufferCollectionConstraints;
+} VkBufferConstraintsInfoFUCHSIA;
+
+typedef struct VkBufferCollectionBufferCreateInfoFUCHSIA {
+    VkStructureType              sType;
+    const void*                  pNext;
+    VkBufferCollectionFUCHSIA    collection;
+    uint32_t                     index;
+} VkBufferCollectionBufferCreateInfoFUCHSIA;
+
+typedef struct VkSysmemColorSpaceFUCHSIA {
+    VkStructureType    sType;
+    const void*        pNext;
+    uint32_t           colorSpace;
+} VkSysmemColorSpaceFUCHSIA;
+
+typedef struct VkBufferCollectionPropertiesFUCHSIA {
+    VkStructureType                  sType;
+    void*                            pNext;
+    uint32_t                         memoryTypeBits;
+    uint32_t                         bufferCount;
+    uint32_t                         createInfoIndex;
+    uint64_t                         sysmemPixelFormat;
+    VkFormatFeatureFlags             formatFeatures;
+    VkSysmemColorSpaceFUCHSIA        sysmemColorSpaceIndex;
+    VkComponentMapping               samplerYcbcrConversionComponents;
+    VkSamplerYcbcrModelConversion    suggestedYcbcrModel;
+    VkSamplerYcbcrRange              suggestedYcbcrRange;
+    VkChromaLocation                 suggestedXChromaOffset;
+    VkChromaLocation                 suggestedYChromaOffset;
+} VkBufferCollectionPropertiesFUCHSIA;
+
+typedef struct VkImageFormatConstraintsInfoFUCHSIA {
+    VkStructureType                         sType;
+    const void*                             pNext;
+    VkImageCreateInfo                       imageCreateInfo;
+    VkFormatFeatureFlags                    requiredFormatFeatures;
+    VkImageFormatConstraintsFlagsFUCHSIA    flags;
+    uint64_t                                sysmemPixelFormat;
+    uint32_t                                colorSpaceCount;
+    const VkSysmemColorSpaceFUCHSIA*        pColorSpaces;
+} VkImageFormatConstraintsInfoFUCHSIA;
+
+typedef struct VkImageConstraintsInfoFUCHSIA {
+    VkStructureType                               sType;
+    const void*                                   pNext;
+    uint32_t                                      formatConstraintsCount;
+    const VkImageFormatConstraintsInfoFUCHSIA*    pFormatConstraints;
+    VkBufferCollectionConstraintsInfoFUCHSIA      bufferCollectionConstraints;
+    VkImageConstraintsInfoFlagsFUCHSIA            flags;
+} VkImageConstraintsInfoFUCHSIA;
+
+typedef VkResult (VKAPI_PTR *PFN_vkCreateBufferCollectionFUCHSIA)(VkDevice device, const VkBufferCollectionCreateInfoFUCHSIA* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferCollectionFUCHSIA* pCollection);
+typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionImageConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkImageConstraintsInfoFUCHSIA* pImageConstraintsInfo);
+typedef VkResult (VKAPI_PTR *PFN_vkSetBufferCollectionBufferConstraintsFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo);
+typedef void (VKAPI_PTR *PFN_vkDestroyBufferCollectionFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, const VkAllocationCallbacks* pAllocator);
+typedef VkResult (VKAPI_PTR *PFN_vkGetBufferCollectionPropertiesFUCHSIA)(VkDevice device, VkBufferCollectionFUCHSIA collection, VkBufferCollectionPropertiesFUCHSIA* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkCreateBufferCollectionFUCHSIA(
+    VkDevice                                    device,
+    const VkBufferCollectionCreateInfoFUCHSIA*  pCreateInfo,
+    const VkAllocationCallbacks*                pAllocator,
+    VkBufferCollectionFUCHSIA*                  pCollection);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionImageConstraintsFUCHSIA(
+    VkDevice                                    device,
+    VkBufferCollectionFUCHSIA                   collection,
+    const VkImageConstraintsInfoFUCHSIA*        pImageConstraintsInfo);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkSetBufferCollectionBufferConstraintsFUCHSIA(
+    VkDevice                                    device,
+    VkBufferCollectionFUCHSIA                   collection,
+    const VkBufferConstraintsInfoFUCHSIA*       pBufferConstraintsInfo);
+
+VKAPI_ATTR void VKAPI_CALL vkDestroyBufferCollectionFUCHSIA(
+    VkDevice                                    device,
+    VkBufferCollectionFUCHSIA                   collection,
+    const VkAllocationCallbacks*                pAllocator);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetBufferCollectionPropertiesFUCHSIA(
+    VkDevice                                    device,
+    VkBufferCollectionFUCHSIA                   collection,
+    VkBufferCollectionPropertiesFUCHSIA*        pProperties);
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
diff --git a/include/vulkan/vulkan_funcs.hpp b/include/vulkan/vulkan_funcs.hpp
new file mode 100644
index 0000000..1a8aa70
--- /dev/null
+++ b/include/vulkan/vulkan_funcs.hpp
@@ -0,0 +1,17873 @@
+// Copyright 2015-2022 The Khronos Group Inc.
+// 
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_FUNCS_HPP
+#  define VULKAN_FUNCS_HPP
+
+namespace VULKAN_HPP_NAMESPACE
+{
+
+  //===========================
+  //=== COMMAND Definitions ===
+  //===========================
+
+
+  //=== VK_VERSION_1_0 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Instance * pInstance, Dispatch const & d  ) VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkInstance *>( pInstance ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Instance instance;
+    VkResult result = d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkInstance *>( &instance ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstance" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), instance );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Instance instance;
+    VkResult result = d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkInstance *>( &instance ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( instance, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PhysicalDeviceAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type Instance::enumeratePhysicalDevices( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices;
+    uint32_t physicalDeviceCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
+      {
+        physicalDevices.resize( physicalDeviceCount );
+        result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+    VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+    if ( physicalDeviceCount < physicalDevices.size() )
+    {
+      physicalDevices.resize( physicalDeviceCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
+  }
+
+  template <typename PhysicalDeviceAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDevice>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator );
+    uint32_t physicalDeviceCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceCount )
+      {
+        physicalDevices.resize( physicalDeviceCount );
+        result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" );
+    VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() );
+    if ( physicalDeviceCount < physicalDevices.size() )
+    {
+      physicalDevices.resize( physicalDeviceCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features;
+    d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) );
+    
+    
+    return features;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::FormatProperties formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) );
+    
+    
+    return formatProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties;
+    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties;
+    d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) );
+    
+    
+    return properties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename QueueFamilyPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  template <typename QueueFamilyPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) );
+    
+    
+    return memoryProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetInstanceProcAddr( m_instance, pName );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() );
+    
+    
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetDeviceProcAddr( m_device, pName );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() );
+    
+    
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Device * pDevice, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDevice *>( pDevice ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Device device;
+    VkResult result = d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDevice *>( &device ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), device );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Device device;
+    VkResult result = d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDevice *>( &device ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>( device, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d  ) VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename ExtensionPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename ExtensionPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename ExtensionPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename ExtensionPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d  ) VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename LayerPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename LayerPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename LayerPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+
+  template <typename LayerPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator );
+    uint32_t propertyCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && propertyCount )
+      {
+        properties.resize( propertyCount );
+        result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" );
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Queue queue;
+    d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) );
+    
+    
+    return queue;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkQueueWaitIdle( m_queue );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkDeviceWaitIdle( m_device );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeviceMemory *>( pMemory ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VkResult result = d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDeviceMemory *>( &memory ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memory );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceMemory memory;
+    VkResult result = d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDeviceMemory *>( &memory ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( memory, ObjectFree<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void ** ppData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), ppData ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    void * pData;
+    VkResult result = d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), &pData );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pData );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes;
+    d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) );
+    
+    
+    return committedMemoryInBytes;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
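+  // Illustrative usage sketch, not part of the generated header: with enhanced mode and exceptions
+  // enabled, the void-returning overloads throw on failure, so a typical allocate-and-bind sequence is
+  //
+  //   vk::MemoryRequirements reqs = device.getBufferMemoryRequirements( buffer );
+  //   vk::DeviceMemory memory     = device.allocateMemory( vk::MemoryAllocateInfo{ reqs.size, memoryTypeIndex } );
+  //   device.bindBufferMemory( buffer, memory, 0 );
+  //
+  // 'device', 'buffer' and 'memoryTypeIndex' are assumed to exist in the caller's scope.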
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements;
+    d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements( sparseImageMemoryRequirementsAllocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageFormatPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  template <typename SparseImageFormatPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFence *>( pFence ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VkResult result = d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Fence fence;
+    VkResult result = d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
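+  // Illustrative usage sketch, not part of the generated header: assuming exceptions and the default
+  // dispatcher, the unique-handle overload yields a fence that is destroyed automatically on scope exit:
+  //
+  //   vk::UniqueFence fence = device.createFenceUnique( vk::FenceCreateInfo{ vk::FenceCreateFlagBits::eSignaled } );
+  //
+  // 'device' is assumed to be a valid vk::Device created elsewhere.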
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence * pFences, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence * pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
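+  // Illustrative usage sketch, not part of the generated header: waitForFences reports eTimeout via its
+  // vk::Result return value instead of throwing, so callers commonly poll in a loop, e.g.
+  //
+  //   while ( device.waitForFences( fence, VK_TRUE, timeoutNs ) == vk::Result::eTimeout )
+  //     ;  // still pending
+  //
+  // 'fence' and 'timeoutNs' are assumed to exist in the caller's scope.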
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSemaphore *>( pSemaphore ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    VkResult result = d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSemaphore *>( &semaphore ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), semaphore );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Semaphore semaphore;
+    VkResult result = d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSemaphore *>( &semaphore ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( semaphore, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Event * pEvent, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkEvent *>( pEvent ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Event event;
+    VkResult result = d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkEvent *>( &event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), event );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Event event;
+    VkResult result = d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkEvent *>( &event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>( event, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkSetEvent( m_device, static_cast<VkEvent>( event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkResetEvent( m_device, static_cast<VkEvent>( event ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
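+  // Illustrative usage sketch, not part of the generated header: getEventStatus reports state through
+  // vk::Result (eEventSet / eEventReset) rather than throwing, while setEvent / resetEvent are checked:
+  //
+  //   device.setEvent( event );
+  //   bool signaled = ( device.getEventStatus( event ) == vk::Result::eEventSet );
+  //
+  // 'event' is assumed to be a valid vk::Event created with createEvent above.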
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkQueryPool *>( pQueryPool ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    VkResult result = d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkQueryPool *>( &queryPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), queryPool );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::QueryPool queryPool;
+    VkResult result = d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkQueryPool *>( &queryPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( queryPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void * pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename DataTypeAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 );
+    std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) );
+    VkResult result = d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+    
+    return ResultValue<std::vector<DataType, DataTypeAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    DataType data;
+    VkResult result = d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, sizeof( DataType ), reinterpret_cast<void *>( &data ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } );
+    
+    return ResultValue<DataType>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
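+  // Illustrative usage sketch, not part of the generated header: the vector overload derives the element
+  // count from dataSize, so fetching 64-bit results for 'queryCount' queries could look like
+  //
+  //   vk::ResultValue<std::vector<uint64_t>> rv = device.getQueryPoolResults<uint64_t>(
+  //       queryPool, 0, queryCount, queryCount * sizeof( uint64_t ), sizeof( uint64_t ),
+  //       vk::QueryResultFlagBits::e64 | vk::QueryResultFlagBits::eWait );
+  //
+  // rv.result is eSuccess (or possibly eNotReady without eWait) and rv.value holds the data; 'queryPool'
+  // and 'queryCount' are assumed to exist in the caller's scope.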
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Buffer * pBuffer, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkBuffer *>( pBuffer ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VkResult result = d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBuffer *>( &buffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Buffer buffer;
+    VkResult result = d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBuffer *>( &buffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( buffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
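+  // Illustrative usage sketch, not part of the generated header: with exceptions enabled, createBuffer
+  // returns the handle directly and throws a vk::SystemError-derived exception on failure:
+  //
+  //   vk::Buffer staging = device.createBuffer(
+  //       vk::BufferCreateInfo{ {}, 1024, vk::BufferUsageFlagBits::eTransferSrc } );
+  //
+  // the size (1024) and usage are placeholder values chosen for the example.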
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::BufferView * pView, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkBufferView *>( pView ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::BufferView view;
+    VkResult result = d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBufferView *>( &view ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::BufferView view;
+    VkResult result = d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBufferView *>( &view ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Image * pImage, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkImage *>( pImage ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Image image;
+    VkResult result = d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImage *>( &image ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), image );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Image image;
+    VkResult result = d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImage *>( &image ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>( image, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource *>( pSubresource ), reinterpret_cast<VkSubresourceLayout *>( pLayout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SubresourceLayout layout;
+    d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource *>( &subresource ), reinterpret_cast<VkSubresourceLayout *>( &layout ) );
+    
+    
+    return layout;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ImageView * pView, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkImageView *>( pView ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageView view;
+    VkResult result = d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImageView *>( &view ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageView view;
+    VkResult result = d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImageView *>( &view ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkShaderModule *>( pShaderModule ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+    VkResult result = d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkShaderModule *>( &shaderModule ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaderModule );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ShaderModule shaderModule;
+    VkResult result = d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkShaderModule *>( &shaderModule ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( shaderModule, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+    VkResult result = d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineCache );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache;
+    VkResult result = d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineCache *>( &pipelineCache ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( pipelineCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t * pDataSize, void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) );
+  }
+
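+  // The enhanced overloads below use the usual Vulkan two-call idiom: query the required size
+  // with a null data pointer, resize the vector, then fetch the actual data, retrying while the
+  // driver reports VK_INCOMPLETE (the cache contents may grow between the two calls).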
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Uint8_tAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<uint8_t, Uint8_tAllocator> data;
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+
+  template <typename Uint8_tAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator );
+    size_t dataSize;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr );
+      if ( ( result == VK_SUCCESS ) && dataSize )
+      {
+        data.resize( dataSize );
+        result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" );
+    VULKAN_HPP_ASSERT( dataSize <= data.size() );
+    if ( dataSize < data.size() )
+    {
+      data.resize( dataSize );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+  }
+
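+  // Pipeline creation can legitimately return VK_PIPELINE_COMPILE_REQUIRED_EXT, which is a
+  // success code rather than an error. The enhanced overloads below therefore return a
+  // ResultValue<...> that keeps the Result alongside the created pipelines, and resultCheck()
+  // accepts both eSuccess and ePipelineCompileRequiredEXT.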
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PipelineAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  template <typename PipelineAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+    VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename PipelineAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  template <typename Dispatch, typename PipelineAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
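+  // Illustrative usage sketch (not part of the generated interface); assumes a valid vk::Device
+  // "device", a vk::PipelineCache "cache" and a filled-in GraphicsPipelineCreateInfo "createInfo"
+  // exist in the calling code:
+  //
+  //   auto rv = device.createGraphicsPipelineUnique( cache, createInfo );
+  //   // rv.result may be eSuccess or ePipelineCompileRequiredEXT; rv.value owns the pipeline.
+  //   vk::UniquePipeline pipeline = std::move( rv.value );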
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipeline *>( pPipelines ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PipelineAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  template <typename PipelineAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator );
+    VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename PipelineAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines;
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  template <typename Dispatch, typename PipelineAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() );
+    VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator );
+    uniquePipelines.reserve( createInfos.size() );
+    ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d );
+    for ( auto const & pipeline : pipelines )
+    {
+      uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) );
+    }
+    return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) );
+  }
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Pipeline pipeline;
+    VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } );
+    
+    return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    VkResult result = d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineLayout );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout;
+    VkResult result = d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Sampler * pSampler, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSampler *>( pSampler ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    VkResult result = d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSampler *>( &sampler ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), sampler );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Sampler sampler;
+    VkResult result = d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSampler *>( &sampler ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( sampler, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
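+
+  // [Editorial usage sketch; illustrative only, not part of the generated header.]
+  // Assuming a valid device handle `device` in the default `vk` namespace and
+  // the default configuration (enhanced mode on, exceptions enabled), the
+  // wrappers above are used like this: createSampler returns the handle
+  // directly and throws a vk::SystemError on failure, while createSamplerUnique
+  // wraps the handle in a UniqueHandle whose ObjectDestroy deleter calls
+  // destroySampler automatically.
+  //
+  //   vk::SamplerCreateInfo samplerInfo;
+  //   samplerInfo.magFilter = vk::Filter::eLinear;
+  //   samplerInfo.minFilter = vk::Filter::eLinear;
+  //
+  //   vk::Sampler sampler = device.createSampler( samplerInfo );
+  //   // ... use the sampler ...
+  //   device.destroySampler( sampler );
+  //
+  //   vk::UniqueSampler uniqueSampler = device.createSamplerUnique( samplerInfo );
+  //   // destroyed automatically when uniqueSampler goes out of scope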
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    VkResult result = d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), setLayout );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout;
+    VkResult result = d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( setLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+    VkResult result = d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorPool );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool;
+    VkResult result = d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( descriptorPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
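+
+  // [Editorial usage sketch; illustrative only, not part of the generated header.]
+  // Assuming a valid `device` (default `vk` namespace, exceptions enabled): a
+  // descriptor pool is created from a list of pool sizes, and resetting it
+  // recycles every set allocated from it. The enhanced-mode resetDescriptorPool
+  // above returns void and simply discards the VkResult.
+  //
+  //   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 16 );
+  //   vk::DescriptorPoolCreateInfo poolInfo( {}, /*maxSets=*/16, 1, &poolSize );
+  //   vk::DescriptorPool pool = device.createDescriptorPool( poolInfo );
+  //   // ... allocate and use descriptor sets ...
+  //   device.resetDescriptorPool( pool, {} );
+  //   device.destroyDescriptorPool( pool );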
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DescriptorSetAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount );
+    VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
+  }
+
+  template <typename DescriptorSetAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, DescriptorSet>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator );
+    VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename DescriptorSetAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+    VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets;
+    uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
+    PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+    for ( auto const & descriptorSet : descriptorSets )
+    {
+      uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
+  }
+
+  template <typename Dispatch, typename DescriptorSetAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount );
+    VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator );
+    uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount );
+    PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d );
+    for ( auto const & descriptorSet : descriptorSets )
+    {
+      uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
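+
+  // [Editorial usage sketch; illustrative only, not part of the generated header.]
+  // allocateDescriptorSets returns a std::vector sized from
+  // allocateInfo.descriptorSetCount; allocateDescriptorSetsUnique additionally
+  // attaches the PoolFree deleter seen above, so each set is returned to its
+  // pool when the UniqueHandle is destroyed (the pool must have been created
+  // with vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet for that).
+  // Assuming valid `device`, `pool` and `layout` handles:
+  //
+  //   vk::DescriptorSetAllocateInfo allocInfo( pool, 1, &layout );
+  //   std::vector<vk::DescriptorSet> sets = device.allocateDescriptorSets( allocInfo );
+  //
+  //   auto uniqueSets = device.allocateDescriptorSetsUnique( allocInfo );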
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE Result ( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkUpdateDescriptorSets( m_device, descriptorWrites.size(), reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ), descriptorCopies.size(), reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
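+
+  // [Editorial usage sketch; illustrative only, not part of the generated header.]
+  // The ArrayProxy overload above lets descriptor writes and copies be passed
+  // as single elements, containers, or initializer lists; the counts are taken
+  // from the proxies. Assuming `set` is a valid vk::DescriptorSet and
+  // `bufferInfo` a filled-in vk::DescriptorBufferInfo:
+  //
+  //   vk::WriteDescriptorSet write( set, /*dstBinding=*/0, /*dstArrayElement=*/0,
+  //                                 /*descriptorCount=*/1,
+  //                                 vk::DescriptorType::eUniformBuffer,
+  //                                 nullptr, &bufferInfo );
+  //   device.updateDescriptorSets( write, nullptr );   // one write, no copies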
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    VkResult result = d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), framebuffer );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Framebuffer framebuffer;
+    VkResult result = d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFramebuffer *>( &framebuffer ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Extent2D granularity;
+    d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) );
+    return granularity;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
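+
+  // [Editorial usage sketch; illustrative only, not part of the generated header.]
+  // The enhanced-mode overload above is a simple by-value query; assuming a
+  // valid `device` and vk::RenderPass `renderPass`:
+  //
+  //   vk::Extent2D granularity = device.getRenderAreaGranularity( renderPass );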
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkCommandPool *>( pCommandPool ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    VkResult result = d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCommandPool *>( &commandPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandPool );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::CommandPool commandPool;
+    VkResult result = d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCommandPool *>( &commandPool ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename CommandBufferAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount );
+    VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
+  }
+
+  template <typename CommandBufferAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, CommandBuffer>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator );
+    VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch, typename CommandBufferAllocator>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+    VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers;
+    uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
+    PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
+    for ( auto const & commandBuffer : commandBuffers )
+    {
+      uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
+  }
+
+  template <typename Dispatch, typename CommandBufferAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount );
+    VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" );
+    std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator );
+    uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount );
+    PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d );
+    for ( auto const & commandBuffer : commandBuffers )
+    {
+      uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
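+
+  // [Editorial usage sketch; illustrative only, not part of the generated header.]
+  // Command buffers are allocated from a command pool in bulk; the Unique
+  // variant attaches the PoolFree deleter seen above so each buffer is freed
+  // back to its pool automatically. Assuming valid `device` and
+  // vk::CommandPool `pool` handles:
+  //
+  //   vk::CommandBufferAllocateInfo allocInfo( pool, vk::CommandBufferLevel::ePrimary, 2 );
+  //   std::vector<vk::CommandBuffer> buffers = device.allocateCommandBuffers( allocInfo );
+  //   // ... record and submit ...
+  //   device.freeCommandBuffers( pool, buffers );   // ArrayProxy overload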
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkEndCommandBuffer( m_commandBuffer );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+
+#ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) );
+  }
+#else
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
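+
+  // [Editorial usage sketch; illustrative only, not part of the generated header.]
+  // A typical recording cycle with the enhanced-mode wrappers above; with
+  // exceptions enabled each call throws a vk::SystemError on failure instead
+  // of returning a Result. Assuming `cmd` is a vk::CommandBuffer allocated as
+  // shown earlier:
+  //
+  //   vk::CommandBufferBeginInfo beginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit );
+  //   cmd.begin( beginInfo );
+  //   // ... record commands ...
+  //   cmd.end();
+  //
+  //   // Resetting an individual buffer requires the pool to have been created
+  //   // with vk::CommandPoolCreateFlagBits::eResetCommandBuffer:
+  //   cmd.reset( {} );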
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetLineWidth( m_commandBuffer, lineWidth );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t * pDynamicOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ), dynamicOffsets.size(), dynamicOffsets.data() );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
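+  // Example (illustrative): binding a single descriptor set without dynamic offsets,
+  // assuming a vk::PipelineLayout `pipelineLayout` and a vk::DescriptorSet `descriptorSet`.
+  //   cb.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, descriptorSet, nullptr );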
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+#ifdef VULKAN_HPP_NO_EXCEPTIONS
+    VULKAN_HPP_ASSERT( buffers.size() == offsets.size() );
+#else
+    if ( buffers.size() != offsets.size() )
+    {
+      throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" );
+    }
+#endif  /*VULKAN_HPP_NO_EXCEPTIONS*/
+    
+    
+    d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
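+  // Example (illustrative): the enhanced overload expects one offset per buffer, e.g.
+  //   cb.bindVertexBuffers( 0, { vertexBuffer }, { 0 } );
+  // A size mismatch between the two proxies asserts (or throws LogicError) as shown above.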
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferCopy *>( regions.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageCopy *>( regions.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit *>( pRegions ), static_cast<VkFilter>( filter ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageBlit *>( regions.data() ), static_cast<VkFilter>( filter ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( DataType ), reinterpret_cast<const void *>( data.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
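+  // Example (illustrative): the templated overload computes the byte size as
+  // data.size() * sizeof( DataType ), e.g. for a std::vector<uint32_t> `values`:
+  //   cb.updateBuffer( dstBuffer, 0, values );   // writes values.size() * 4 bytes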
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue & color, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( &color ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect * pRects, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment *>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect *>( pRects ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdClearAttachments( m_commandBuffer, attachments.size(), reinterpret_cast<const VkClearAttachment *>( attachments.data() ), rects.size(), reinterpret_cast<const VkClearRect *>( rects.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve *>( pRegions ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageResolve *>( regions.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdWaitEvents( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void * pValues, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename ValuesType, typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( ValuesType ), reinterpret_cast<const void *>( values.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
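+  // Example (illustrative): pushing four floats to the vertex stage, assuming a
+  // std::array<float, 4> `constants` and a vk::PipelineLayout `pipelineLayout`:
+  //   cb.pushConstants<float>( pipelineLayout, vk::ShaderStageFlagBits::eVertex, 0, constants );
+  // The byte size passed to vkCmdPushConstants is constants.size() * sizeof( float ).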
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndRenderPass( m_commandBuffer );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_1 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d  ) VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d )
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    uint32_t apiVersion;
+    VkResult result = d.vkEnumerateInstanceVersion( &apiVersion );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), apiVersion );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
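+  // Example (illustrative): with exceptions enabled the enhanced overload returns the
+  // version directly (and throws on failure), assuming the default vk namespace:
+  //   uint32_t apiVersion = vk::enumerateInstanceVersion();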
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
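+  // Example (illustrative): binding several buffers in one call, assuming a
+  // std::vector<vk::BindBufferMemoryInfo> `bindInfos` and a vk::Device `device`:
+  //   device.bindBufferMemory2( bindInfos );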
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    VkResult result = d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures;
+    d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) );
+    
+    
+    return peerMemoryFeatures;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties;
+    uint32_t physicalDeviceGroupCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
+  }
+
+  template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( physicalDeviceGroupPropertiesAllocator );
+    uint32_t physicalDeviceGroupCount;
+    VkResult result;
+    do
+    {
+      result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount )
+      {
+        physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+        result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" );
+    VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() );
+    if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() )
+    {
+      physicalDeviceGroupProperties.resize( physicalDeviceGroupCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
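+  // Example (illustrative): the enhanced overload wraps the usual two-call enumeration
+  // (query the count, then fill) and retries while the driver reports VK_INCOMPLETE:
+  //   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups();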
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
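+  // Example (illustrative): the StructureChain overload lets extension structures be
+  // queried in the same call; vk::MemoryDedicatedRequirements is assumed here:
+  //   auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( vk::ImageMemoryRequirementsInfo2( image ) );
+  //   vk::MemoryRequirements reqs = chain.get<vk::MemoryRequirements2>().memoryRequirements;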
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements;
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return memoryRequirements;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>();
+    d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements;
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+
+  template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator );
+    uint32_t sparseMemoryRequirementCount;
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr );
+    sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) );
+    
+    VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() );
+    if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() )
+    {
+      sparseMemoryRequirements.resize( sparseMemoryRequirementCount );
+    }
+    return sparseMemoryRequirements;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features;
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+    
+    
+    return features;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>();
+    d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
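+  // Example (illustrative): querying core features, or chaining an extension features
+  // structure (vk::PhysicalDeviceVulkan11Features is assumed available here):
+  //   vk::PhysicalDeviceFeatures2 features = physicalDevice.getFeatures2();
+  //   auto chain = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan11Features>();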
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties;
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+    
+    
+    return properties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>();
+    d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties;
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+    
+    
+    return formatProperties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>();
+    d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties;
+    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties );
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>();
+    VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
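+
+  // Illustrative usage sketch (assumes the default configuration with exceptions
+  // enabled and a populated vk::PhysicalDeviceImageFormatInfo2 `formatInfo`):
+  //   try
+  //   {
+  //     vk::ImageFormatProperties2 props = physicalDevice.getImageFormatProperties2( formatInfo );
+  //   }
+  //   catch ( vk::SystemError const & )  // e.g. eErrorFormatNotSupported
+  //   {
+  //   }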
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename QueueFamilyProperties2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  template <typename QueueFamilyProperties2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator );
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      queueFamilyProperties.resize( queueFamilyPropertyCount );
+    }
+    return queueFamilyProperties;
+  }
+
+  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<StructureChain, StructureChainAllocator> structureChains;
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    structureChains.resize( queueFamilyPropertyCount );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      structureChains.resize( queueFamilyPropertyCount );
+    }
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+    }
+    return structureChains;
+  }
+
+  template <typename StructureChain, typename StructureChainAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator );
+    std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties;
+    uint32_t queueFamilyPropertyCount;
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr );
+    structureChains.resize( queueFamilyPropertyCount );
+    queueFamilyProperties.resize( queueFamilyPropertyCount );
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext;
+    }
+    d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) );
+    
+    VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() );
+    if ( queueFamilyPropertyCount < queueFamilyProperties.size() )
+    {
+      structureChains.resize( queueFamilyPropertyCount );
+    }
+    for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ )
+    {
+      structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i];
+    }
+    return structureChains;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
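+
+  // Illustrative usage sketch (assumes a valid vk::PhysicalDevice `physicalDevice`):
+  //   std::vector<vk::QueueFamilyProperties2> families = physicalDevice.getQueueFamilyProperties2();
+  //   uint32_t graphicsFamily = 0;
+  //   for ( uint32_t i = 0; i < families.size(); ++i )
+  //     if ( families[i].queueFamilyProperties.queueFlags & vk::QueueFlagBits::eGraphics )
+  //       graphicsFamily = i;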
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties;
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    
+    
+    return memoryProperties;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>();
+    d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties;
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+
+  template <typename SparseImageFormatProperties2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator );
+    uint32_t propertyCount;
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr );
+    properties.resize( propertyCount );
+    d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) );
+    
+    VULKAN_HPP_ASSERT( propertyCount <= properties.size() );
+    if ( propertyCount < properties.size() )
+    {
+      properties.resize( propertyCount );
+    }
+    return properties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
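+
+  // Illustrative usage sketch (assumes a populated vk::PhysicalDeviceSparseImageFormatInfo2
+  // `formatInfo`); the enhanced overload performs the two-call count/fill enumeration internally:
+  //   std::vector<vk::SparseImageFormatProperties2> sparseProps =
+  //     physicalDevice.getSparseImageFormatProperties2( formatInfo );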
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::Queue queue;
+    d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) );
+    
+    
+    return queue;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
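+
+  // Illustrative usage sketch (assumes a valid vk::Device `device` and a queue family
+  // index `queueFamilyIndex` with at least one queue):
+  //   vk::DeviceQueueInfo2 queueInfo( {}, queueFamilyIndex, 0 );
+  //   vk::Queue queue = device.getQueue2( queueInfo );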
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    VkResult result = d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion;
+    VkResult result = d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
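+
+  // Illustrative usage sketch (assumes exceptions and smart handles are enabled, the
+  // defaults, and a populated vk::SamplerYcbcrConversionCreateInfo `conversionInfo`):
+  //   vk::UniqueSamplerYcbcrConversion conversion =
+  //     device.createSamplerYcbcrConversionUnique( conversionInfo );
+  //   // destroyed automatically when `conversion` goes out of scope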
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    VkResult result = d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate;
+    VkResult result = d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename DataType, typename Dispatch>
+  VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const void *>( &data ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
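+
+  // Illustrative usage sketch (assumes `updateTemplate` was created with a single
+  // vk::DescriptorUpdateTemplateEntry whose offset is 0, so the address of `bufferInfo`
+  // is a valid data pointer for it):
+  //   vk::DescriptorBufferInfo bufferInfo( buffer, 0, VK_WHOLE_SIZE );
+  //   device.updateDescriptorSetWithTemplate( descriptorSet, updateTemplate, bufferInfo );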
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties;
+    d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) );
+    
+    
+    return externalBufferProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties;
+    d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) );
+    
+    
+    return externalFenceProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties;
+    d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) );
+    
+    
+    return externalSemaphoreProperties;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support;
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+    
+    
+    return support;
+  }
+
+  template <typename X, typename Y, typename... Z, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    StructureChain<X, Y, Z...> structureChain;
+    VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>();
+    d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) );
+    
+    
+    return structureChain;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
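+
+  // Illustrative usage sketch (assumes a populated vk::DescriptorSetLayoutCreateInfo
+  // `layoutInfo`; the chained form additionally assumes descriptor-indexing support):
+  //   vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( layoutInfo );
+  //   auto chain = device.getDescriptorSetLayoutSupport<
+  //     vk::DescriptorSetLayoutSupport,
+  //     vk::DescriptorSetVariableDescriptorCountLayoutSupport>( layoutInfo );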
+
+  //=== VK_VERSION_1_2 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass );
+  }
+
+#  ifndef VULKAN_HPP_NO_SMART_HANDLE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    VULKAN_HPP_NAMESPACE::RenderPass renderPass;
+    VkResult result = d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) );
+  }
+#  endif /* VULKAN_HPP_NO_SMART_HANDLE */
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) );
+    
+    
+    
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
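+
+  // Illustrative usage sketch of the renderpass2 entry points (assumes a recording
+  // vk::CommandBuffer `cmd` and a populated vk::RenderPassBeginInfo `renderPassBegin`):
+  //   cmd.beginRenderPass2( renderPassBegin, vk::SubpassBeginInfo( vk::SubpassContents::eInline ) );
+  //   // ... draw calls ...
+  //   cmd.endRenderPass2( vk::SubpassEndInfo() );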
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount );
+  }
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    uint64_t value;
+    VkResult result = d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, uint64_t timeout, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } );
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::Result>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
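+
+  // Illustrative usage sketch (assumes a timeline vk::Semaphore `semaphore` and a target
+  // counter value `value`); note that eTimeout is returned rather than thrown:
+  //   vk::SemaphoreWaitInfo waitInfo( {}, 1, &semaphore, &value );
+  //   if ( device.waitSemaphores( waitInfo, /*timeout in ns*/ 1000000 ) == vk::Result::eTimeout )
+  //   {
+  //     // not signaled yet
+  //   }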
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkResult result = d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" );
+    
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+    
+    
+    return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result );
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
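+
+  // Illustrative usage sketch (assumes `buffer` was created with
+  // vk::BufferUsageFlagBits::eShaderDeviceAddress and the bufferDeviceAddress feature enabled):
+  //   vk::DeviceAddress address = device.getBufferAddress( vk::BufferDeviceAddressInfo( buffer ) );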
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) );
+    
+    
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename Dispatch>
+  VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    
+    uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) );
+    
+    
+    return result;
+  }
+#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
+
+  //=== VK_VERSION_1_3 ===
+
+
+  template <typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, Dispatch const & d  ) const VULKAN_HPP_NOEXCEPT
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+    return static_cast<Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) );
+  }
+
+#ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE
+  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type PhysicalDevice::getToolProperties( Dispatch const & d ) const
+  {
+    VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION );
+
+    
+    std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties;
+    uint32_t toolCount;
+    VkResult result;
+    do
+    {
+      result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr );
+      if ( ( result == VK_SUCCESS ) && toolCount )
+      {
+        toolProperties.resize( toolCount );
+        result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) );
+      }
+    } while ( result == VK_INCOMPLETE );
+    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" );
+    VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() );
+    if ( toolCount < toolProperties.size() )
+    {
+      toolProperties.resize( toolCount );
+    }
+    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties );
+  }
+
+  template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type>
+  VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const &