// Copyright 2015-2022 The Khronos Group Inc.
//
// SPDX-License-Identifier: Apache-2.0 OR MIT
//
// This header is generated from the Khronos Vulkan XML API Registry.
#ifndef VULKAN_HPP
#define VULKAN_HPP
#if defined(_MSVC_LANG)
# define VULKAN_HPP_CPLUSPLUS _MSVC_LANG
#else
# define VULKAN_HPP_CPLUSPLUS __cplusplus
#endif
#if 201703L < VULKAN_HPP_CPLUSPLUS
# define VULKAN_HPP_CPP_VERSION 20
#elif 201402L < VULKAN_HPP_CPLUSPLUS
# define VULKAN_HPP_CPP_VERSION 17
#elif 201103L < VULKAN_HPP_CPLUSPLUS
# define VULKAN_HPP_CPP_VERSION 14
#elif 199711L < VULKAN_HPP_CPLUSPLUS
# define VULKAN_HPP_CPP_VERSION 11
#else
# error "vulkan.hpp needs at least C++ standard version 11"
#endif
#include <vulkan/vulkan.h>
#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <functional>
#include <initializer_list>
#include <sstream>
#include <string>
#include <system_error>
#include <tuple>
#include <type_traits>
#if 17 <= VULKAN_HPP_CPP_VERSION
# include <string_view>
#endif
#if defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
# if !defined(VULKAN_HPP_NO_SMART_HANDLE)
# define VULKAN_HPP_NO_SMART_HANDLE
# endif
#else
# include <memory>
# include <vector>
#endif
#if defined(VULKAN_HPP_NO_CONSTRUCTORS)
# if !defined(VULKAN_HPP_NO_STRUCT_CONSTRUCTORS)
# define VULKAN_HPP_NO_STRUCT_CONSTRUCTORS
# endif
# if !defined(VULKAN_HPP_NO_UNION_CONSTRUCTORS)
# define VULKAN_HPP_NO_UNION_CONSTRUCTORS
# endif
#endif
#if defined(VULKAN_HPP_NO_SETTERS)
# if !defined(VULKAN_HPP_NO_STRUCT_SETTERS)
# define VULKAN_HPP_NO_STRUCT_SETTERS
# endif
# if !defined(VULKAN_HPP_NO_UNION_SETTERS)
# define VULKAN_HPP_NO_UNION_SETTERS
# endif
#endif
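// A hedged configuration sketch (not part of the generated header): defining the umbrella
// macros before the first include of this header disables the corresponding members on
// both structs and unions, e.g.
//
//   #define VULKAN_HPP_NO_CONSTRUCTORS   // implies VULKAN_HPP_NO_STRUCT_CONSTRUCTORS
//                                        // and VULKAN_HPP_NO_UNION_CONSTRUCTORS
//   #define VULKAN_HPP_NO_SETTERS        // implies the struct and union setter macros
//   #include <vulkan/vulkan.hpp>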
#if !defined(VULKAN_HPP_ASSERT)
# include <cassert>
# define VULKAN_HPP_ASSERT assert
#endif
#if !defined(VULKAN_HPP_ASSERT_ON_RESULT)
# define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT
#endif
#if !defined(VULKAN_HPP_STATIC_ASSERT)
# define VULKAN_HPP_STATIC_ASSERT static_assert
#endif
#if !defined(VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL)
# define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
#endif
#if VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL == 1
# if defined(__unix__) || defined(__APPLE__) || defined(__QNXNTO__) || defined(__Fuchsia__)
# include <dlfcn.h>
# elif defined(_WIN32)
typedef struct HINSTANCE__ *HINSTANCE;
# if defined(_WIN64)
typedef int64_t(__stdcall *FARPROC)();
# else
typedef int(__stdcall *FARPROC)();
# endif
extern "C" __declspec(dllimport) HINSTANCE __stdcall LoadLibraryA(char const *lpLibFileName);
extern "C" __declspec(dllimport) int __stdcall FreeLibrary(HINSTANCE hLibModule);
extern "C" __declspec(dllimport) FARPROC __stdcall GetProcAddress(HINSTANCE hModule, const char *lpProcName);
# endif
#endif
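// Usage sketch (assumption: the application links the Vulkan loader directly): the dynamic
// loader tool defaults to enabled, which is why the dlfcn.h / LoadLibraryA declarations are
// pulled in above; it can be switched off before inclusion:
//
//   #define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 0
//   #include <vulkan/vulkan.hpp>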
#if !defined(__has_include)
# define __has_include(x) false
#endif
#if(201711 <= __cpp_impl_three_way_comparison) && __has_include(<compare>) && !defined(VULKAN_HPP_NO_SPACESHIP_OPERATOR)
# define VULKAN_HPP_HAS_SPACESHIP_OPERATOR
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
# include <compare>
#endif
#if(201803 <= __cpp_lib_span)
# define VULKAN_HPP_SUPPORT_SPAN
# include <span>
#endif
static_assert(VK_HEADER_VERSION == 211, "Wrong VK_HEADER_VERSION!");
// 32-bit Vulkan is not typesafe for handles, so don't allow copy constructors on this platform by default.
// To enable this feature on 32-bit platforms, please define VULKAN_HPP_TYPESAFE_CONVERSION
#if(VK_USE_64_BIT_PTR_DEFINES == 1)
# if !defined(VULKAN_HPP_TYPESAFE_CONVERSION)
# define VULKAN_HPP_TYPESAFE_CONVERSION
# endif
#endif
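// Illustrative note: VULKAN_HPP_TYPESAFE_CONVERSION controls VULKAN_HPP_TYPESAFE_EXPLICIT
// below, i.e. whether conversions between the C++ handle wrappers and the underlying C
// handles are implicit or must be explicit. Opting in on a 32-bit platform is deliberate:
//
//   #define VULKAN_HPP_TYPESAFE_CONVERSION
//   #include <vulkan/vulkan.hpp>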
// <tuple> includes <sys/sysmacros.h> through some other header;
// this results in major(x) being resolved to gnu_dev_major(x),
// which breaks its use as an expression in a constructor initializer list.
#if defined(major)
# undef major
#endif
#if defined(minor)
# undef minor
#endif
// Windows defines MemoryBarrier which is deprecated and collides
// with the VULKAN_HPP_NAMESPACE::MemoryBarrier struct.
#if defined(MemoryBarrier)
# undef MemoryBarrier
#endif
#if !defined(VULKAN_HPP_HAS_UNRESTRICTED_UNIONS)
# if defined(__clang__)
# if __has_feature(cxx_unrestricted_unions)
# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
# endif
# elif defined(__GNUC__)
# define GCC_VERSION (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__)
# if 40600 <= GCC_VERSION
# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
# endif
# elif defined(_MSC_VER)
# if 1900 <= _MSC_VER
# define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
# endif
# endif
#endif
#if !defined(VULKAN_HPP_INLINE)
# if defined(__clang__)
# if __has_attribute(always_inline)
# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
# else
# define VULKAN_HPP_INLINE inline
# endif
# elif defined(__GNUC__)
# define VULKAN_HPP_INLINE __attribute__((always_inline)) __inline__
# elif defined(_MSC_VER)
# define VULKAN_HPP_INLINE inline
# else
# define VULKAN_HPP_INLINE inline
# endif
#endif
#if defined(VULKAN_HPP_TYPESAFE_CONVERSION)
# define VULKAN_HPP_TYPESAFE_EXPLICIT
#else
# define VULKAN_HPP_TYPESAFE_EXPLICIT explicit
#endif
#if defined(__cpp_constexpr)
# define VULKAN_HPP_CONSTEXPR constexpr
# if __cpp_constexpr >= 201304
# define VULKAN_HPP_CONSTEXPR_14 constexpr
# else
# define VULKAN_HPP_CONSTEXPR_14
# endif
# define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr
#else
# define VULKAN_HPP_CONSTEXPR
# define VULKAN_HPP_CONSTEXPR_14
# define VULKAN_HPP_CONST_OR_CONSTEXPR const
#endif
#if !defined(VULKAN_HPP_NOEXCEPT)
# if defined(_MSC_VER) && (_MSC_VER <= 1800)
# define VULKAN_HPP_NOEXCEPT
# else
# define VULKAN_HPP_NOEXCEPT noexcept
# define VULKAN_HPP_HAS_NOEXCEPT 1
# if defined(VULKAN_HPP_NO_EXCEPTIONS)
# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept
# else
# define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
# endif
# endif
#endif
#if 14 <= VULKAN_HPP_CPP_VERSION
# define VULKAN_HPP_DEPRECATED(msg) [[deprecated(msg)]]
#else
# define VULKAN_HPP_DEPRECATED(msg)
#endif
#if(17 <= VULKAN_HPP_CPP_VERSION) && !defined(VULKAN_HPP_NO_NODISCARD_WARNINGS)
# define VULKAN_HPP_NODISCARD [[nodiscard]]
# if defined(VULKAN_HPP_NO_EXCEPTIONS)
# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]]
# else
# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
# endif
#else
# define VULKAN_HPP_NODISCARD
# define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
#endif
#if !defined(VULKAN_HPP_NAMESPACE)
# define VULKAN_HPP_NAMESPACE vk
#endif
#define VULKAN_HPP_STRINGIFY2(text) #text
#define VULKAN_HPP_STRINGIFY(text) VULKAN_HPP_STRINGIFY2(text)
#define VULKAN_HPP_NAMESPACE_STRING VULKAN_HPP_STRINGIFY(VULKAN_HPP_NAMESPACE)
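// A minimal sketch of the namespace hook (the replacement name is hypothetical): projects
// that already have a conflicting `vk` namespace can rename this one before inclusion:
//
//   #define VULKAN_HPP_NAMESPACE my_vk
//   #include <vulkan/vulkan.hpp>
//   // VULKAN_HPP_NAMESPACE_STRING then expands to "my_vk"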
namespace VULKAN_HPP_NAMESPACE {
#if !defined(VULKAN_HPP_DISABLE_ENHANCED_MODE)
template<typename T>
class ArrayProxy
{
public:
VULKAN_HPP_CONSTEXPR ArrayProxy() VULKAN_HPP_NOEXCEPT
: m_count(0),
m_ptr(nullptr)
{
}
VULKAN_HPP_CONSTEXPR ArrayProxy(std::nullptr_t) VULKAN_HPP_NOEXCEPT
: m_count(0),
m_ptr(nullptr)
{
}
ArrayProxy(T &value) VULKAN_HPP_NOEXCEPT
: m_count(1),
m_ptr(&value)
{
}
template<typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
ArrayProxy(typename std::remove_const<T>::type &value) VULKAN_HPP_NOEXCEPT
: m_count(1),
m_ptr(&value)
{
}
ArrayProxy(uint32_t count, T *ptr) VULKAN_HPP_NOEXCEPT
: m_count(count),
m_ptr(ptr)
{
}
template<typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
ArrayProxy(uint32_t count, typename std::remove_const<T>::type *ptr) VULKAN_HPP_NOEXCEPT
: m_count(count),
m_ptr(ptr)
{
}
# if __GNUC__ >= 9
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Winit-list-lifetime"
# endif
ArrayProxy(std::initializer_list<T> const &list) VULKAN_HPP_NOEXCEPT
: m_count(static_cast<uint32_t>(list.size())),
m_ptr(list.begin())
{
}
template<typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
ArrayProxy(std::initializer_list<typename std::remove_const<T>::type> const &list) VULKAN_HPP_NOEXCEPT
: m_count(static_cast<uint32_t>(list.size())),
m_ptr(list.begin())
{
}
ArrayProxy(std::initializer_list<T> &list) VULKAN_HPP_NOEXCEPT
: m_count(static_cast<uint32_t>(list.size())),
m_ptr(list.begin())
{
}
template<typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
ArrayProxy(std::initializer_list<typename std::remove_const<T>::type> &list) VULKAN_HPP_NOEXCEPT
: m_count(static_cast<uint32_t>(list.size())),
m_ptr(list.begin())
{
}
# if __GNUC__ >= 9
# pragma GCC diagnostic pop
# endif
// Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly
// convertible to size_t. The const version can capture temporaries, whose lifetime ends at the end of the statement.
template<typename V,
typename std::enable_if<std::is_convertible<decltype(std::declval<V>().data()), T *>::value &&
std::is_convertible<decltype(std::declval<V>().size()), std::size_t>::value>::type * = nullptr>
ArrayProxy(V const &v) VULKAN_HPP_NOEXCEPT
: m_count(static_cast<uint32_t>(v.size())),
m_ptr(v.data())
{
}
template<typename V,
typename std::enable_if<std::is_convertible<decltype(std::declval<V>().data()), T *>::value &&
std::is_convertible<decltype(std::declval<V>().size()), std::size_t>::value>::type * = nullptr>
ArrayProxy(V &v) VULKAN_HPP_NOEXCEPT
: m_count(static_cast<uint32_t>(v.size())),
m_ptr(v.data())
{
}
const T *begin() const VULKAN_HPP_NOEXCEPT
{
return m_ptr;
}
const T *end() const VULKAN_HPP_NOEXCEPT
{
return m_ptr + m_count;
}
const T &front() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(m_count && m_ptr);
return *m_ptr;
}
const T &back() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(m_count && m_ptr);
return *(m_ptr + m_count - 1);
}
bool empty() const VULKAN_HPP_NOEXCEPT
{
return (m_count == 0);
}
uint32_t size() const VULKAN_HPP_NOEXCEPT
{
return m_count;
}
T *data() const VULKAN_HPP_NOEXCEPT
{
return m_ptr;
}
private:
uint32_t m_count;
T *m_ptr;
};
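// Usage sketch for ArrayProxy (the function and variable names are assumptions, and the
// default VULKAN_HPP_NAMESPACE of vk is assumed): a parameter of type
// ArrayProxy<const T> accepts a single value, a braced list, or any contiguous container
// exposing data()/size(), all without copying the elements:
//
//   void submitRegions(vk::ArrayProxy<const VkBufferCopy> regions);  // hypothetical API
//
//   VkBufferCopy one{};
//   std::vector<VkBufferCopy> many(3);
//   submitRegions(one);           // count == 1, points at `one`
//   submitRegions({ one, one });  // initializer list, valid until the end of the statement
//   submitRegions(many);          // via many.data() / many.size()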
template<typename T>
class ArrayProxyNoTemporaries
{
public:
VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries() VULKAN_HPP_NOEXCEPT
: m_count(0),
m_ptr(nullptr)
{
}
VULKAN_HPP_CONSTEXPR ArrayProxyNoTemporaries(std::nullptr_t) VULKAN_HPP_NOEXCEPT
: m_count(0),
m_ptr(nullptr)
{
}
ArrayProxyNoTemporaries(T &value) VULKAN_HPP_NOEXCEPT
: m_count(1),
m_ptr(&value)
{
}
template<typename V>
ArrayProxyNoTemporaries(V &&value) = delete;
template<typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
ArrayProxyNoTemporaries(typename std::remove_const<T>::type &value) VULKAN_HPP_NOEXCEPT
: m_count(1),
m_ptr(&value)
{
}
template<typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
ArrayProxyNoTemporaries(typename std::remove_const<T>::type &&value) = delete;
ArrayProxyNoTemporaries(uint32_t count, T *ptr) VULKAN_HPP_NOEXCEPT
: m_count(count),
m_ptr(ptr)
{
}
template<typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
ArrayProxyNoTemporaries(uint32_t count, typename std::remove_const<T>::type *ptr) VULKAN_HPP_NOEXCEPT
: m_count(count),
m_ptr(ptr)
{
}
ArrayProxyNoTemporaries(std::initializer_list<T> const &list) VULKAN_HPP_NOEXCEPT
: m_count(static_cast<uint32_t>(list.size())),
m_ptr(list.begin())
{
}
ArrayProxyNoTemporaries(std::initializer_list<T> const &&list) = delete;
template<typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
ArrayProxyNoTemporaries(std::initializer_list<typename std::remove_const<T>::type> const &list) VULKAN_HPP_NOEXCEPT
: m_count(static_cast<uint32_t>(list.size())),
m_ptr(list.begin())
{
}
template<typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
ArrayProxyNoTemporaries(std::initializer_list<typename std::remove_const<T>::type> const &&list) = delete;
ArrayProxyNoTemporaries(std::initializer_list<T> &list) VULKAN_HPP_NOEXCEPT
: m_count(static_cast<uint32_t>(list.size())),
m_ptr(list.begin())
{
}
ArrayProxyNoTemporaries(std::initializer_list<T> &&list) = delete;
template<typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
ArrayProxyNoTemporaries(std::initializer_list<typename std::remove_const<T>::type> &list) VULKAN_HPP_NOEXCEPT
: m_count(static_cast<uint32_t>(list.size())),
m_ptr(list.begin())
{
}
template<typename B = T, typename std::enable_if<std::is_const<B>::value, int>::type = 0>
ArrayProxyNoTemporaries(std::initializer_list<typename std::remove_const<T>::type> &&list) = delete;
// Any type with a .data() return type implicitly convertible to T*, and a .size() return type implicitly
// convertible to size_t.
template<typename V,
typename std::enable_if<std::is_convertible<decltype(std::declval<V>().data()), T *>::value &&
std::is_convertible<decltype(std::declval<V>().size()), std::size_t>::value>::type * = nullptr>
ArrayProxyNoTemporaries(V &v) VULKAN_HPP_NOEXCEPT
: m_count(static_cast<uint32_t>(v.size())),
m_ptr(v.data())
{
}
const T *begin() const VULKAN_HPP_NOEXCEPT
{
return m_ptr;
}
const T *end() const VULKAN_HPP_NOEXCEPT
{
return m_ptr + m_count;
}
const T &front() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(m_count && m_ptr);
return *m_ptr;
}
const T &back() const VULKAN_HPP_NOEXCEPT
{
VULKAN_HPP_ASSERT(m_count && m_ptr);
return *(m_ptr + m_count - 1);
}
bool empty() const VULKAN_HPP_NOEXCEPT
{
return (m_count == 0);
}
uint32_t size() const VULKAN_HPP_NOEXCEPT
{
return m_count;
}
T *data() const VULKAN_HPP_NOEXCEPT
{
return m_ptr;
}
private:
uint32_t m_count;
T *m_ptr;
};
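// Sketch of the difference from ArrayProxy (the function name is an assumption): the
// deleted rvalue overloads make binding a temporary a compile error, so the referenced
// storage is guaranteed to outlive the call:
//
//   void record(vk::ArrayProxyNoTemporaries<const int> values);  // hypothetical API
//
//   std::vector<int> v{ 1, 2, 3 };
//   record(v);                          // fine: lvalue container
//   // record(std::vector<int>{ 1 });   // ill-formed: matches the deleted V&& constructor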
#endif
template<typename T, size_t N>
class ArrayWrapper1D : public std::array<T, N>
{
public:
VULKAN_HPP_CONSTEXPR ArrayWrapper1D() VULKAN_HPP_NOEXCEPT : std::array<T, N>() {}
VULKAN_HPP_CONSTEXPR ArrayWrapper1D(std::array<T, N> const &data) VULKAN_HPP_NOEXCEPT : std::array<T, N>(data) {}
#if(VK_USE_64_BIT_PTR_DEFINES == 0)
// on 32-bit builds, overloads taking an int index are needed to resolve ambiguities
VULKAN_HPP_CONSTEXPR T const &operator[](int index) const VULKAN_HPP_NOEXCEPT
{
return std::array<T, N>::operator[](index);
}
T &operator[](int index) VULKAN_HPP_NOEXCEPT
{
return std::array<T, N>::operator[](index);
}
#endif
operator T const *() const VULKAN_HPP_NOEXCEPT
{
return this->data();
}
operator T *() VULKAN_HPP_NOEXCEPT
{
return this->data();
}
template<typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
operator std::string() const
{
return std::string(this->data());
}
#if 17 <= VULKAN_HPP_CPP_VERSION
template<typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
operator std::string_view() const
{
return std::string_view(this->data());
}
#endif
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
template<typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
std::strong_ordering operator<=>(ArrayWrapper1D<char, N> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return *static_cast<std::array<char, N> const *>(this) <=> *static_cast<std::array<char, N> const *>(&rhs);
}
#else
template<typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
bool operator<(ArrayWrapper1D<char, N> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return *static_cast<std::array<char, N> const *>(this) < *static_cast<std::array<char, N> const *>(&rhs);
}
template<typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
bool operator<=(ArrayWrapper1D<char, N> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return *static_cast<std::array<char, N> const *>(this) <= *static_cast<std::array<char, N> const *>(&rhs);
}
template<typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
bool operator>(ArrayWrapper1D<char, N> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return *static_cast<std::array<char, N> const *>(this) > *static_cast<std::array<char, N> const *>(&rhs);
}
template<typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
bool operator>=(ArrayWrapper1D<char, N> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return *static_cast<std::array<char, N> const *>(this) >= *static_cast<std::array<char, N> const *>(&rhs);
}
#endif
template<typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
bool operator==(ArrayWrapper1D<char, N> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return *static_cast<std::array<char, N> const *>(this) == *static_cast<std::array<char, N> const *>(&rhs);
}
template<typename B = T, typename std::enable_if<std::is_same<B, char>::value, int>::type = 0>
bool operator!=(ArrayWrapper1D<char, N> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return *static_cast<std::array<char, N> const *>(this) != *static_cast<std::array<char, N> const *>(&rhs);
}
};
// overloads of relational operators between std::string and arrays of chars
template<size_t N>
bool operator<(std::string const &lhs, ArrayWrapper1D<char, N> const &rhs) VULKAN_HPP_NOEXCEPT
{
return lhs < rhs.data();
}
template<size_t N>
bool operator<=(std::string const &lhs, ArrayWrapper1D<char, N> const &rhs) VULKAN_HPP_NOEXCEPT
{
return lhs <= rhs.data();
}
template<size_t N>
bool operator>(std::string const &lhs, ArrayWrapper1D<char, N> const &rhs) VULKAN_HPP_NOEXCEPT
{
return lhs > rhs.data();
}
template<size_t N>
bool operator>=(std::string const &lhs, ArrayWrapper1D<char, N> const &rhs) VULKAN_HPP_NOEXCEPT
{
return lhs >= rhs.data();
}
template<size_t N>
bool operator==(std::string const &lhs, ArrayWrapper1D<char, N> const &rhs) VULKAN_HPP_NOEXCEPT
{
return lhs == rhs.data();
}
template<size_t N>
bool operator!=(std::string const &lhs, ArrayWrapper1D<char, N> const &rhs) VULKAN_HPP_NOEXCEPT
{
return lhs != rhs.data();
}
template<typename T, size_t N, size_t M>
class ArrayWrapper2D : public std::array<ArrayWrapper1D<T, M>, N>
{
public:
VULKAN_HPP_CONSTEXPR ArrayWrapper2D() VULKAN_HPP_NOEXCEPT : std::array<ArrayWrapper1D<T, M>, N>() {}
VULKAN_HPP_CONSTEXPR ArrayWrapper2D(std::array<std::array<T, M>, N> const &data) VULKAN_HPP_NOEXCEPT
: std::array<ArrayWrapper1D<T, M>, N>(*reinterpret_cast<std::array<ArrayWrapper1D<T, M>, N> const *>(&data))
{
}
};
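// Usage sketch (assuming the usual struct members defined later in this header, e.g.
// PhysicalDeviceProperties::deviceName is an ArrayWrapper1D<char, N>): the wrapper adds
// string conversions and comparisons on top of std::array:
//
//   vk::ArrayWrapper1D<char, 16> name{};
//   std::string s = name;                        // operator std::string()
//   bool same = (std::string("gpu0") == name);   // operator== against std::string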
template<typename FlagBitsType>
struct FlagTraits
{
};
template<typename BitType>
class Flags
{
public:
using MaskType = typename std::underlying_type<BitType>::type;
// constructors
VULKAN_HPP_CONSTEXPR Flags() VULKAN_HPP_NOEXCEPT : m_mask(0) {}
VULKAN_HPP_CONSTEXPR Flags(BitType bit) VULKAN_HPP_NOEXCEPT : m_mask(static_cast<MaskType>(bit)) {}
VULKAN_HPP_CONSTEXPR Flags(Flags<BitType> const &rhs) VULKAN_HPP_NOEXCEPT = default;
VULKAN_HPP_CONSTEXPR explicit Flags(MaskType flags) VULKAN_HPP_NOEXCEPT : m_mask(flags) {}
// relational operators
#if defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
auto operator<=>(Flags<BitType> const &) const = default;
#else
VULKAN_HPP_CONSTEXPR bool operator<(Flags<BitType> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return m_mask < rhs.m_mask;
}
VULKAN_HPP_CONSTEXPR bool operator<=(Flags<BitType> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return m_mask <= rhs.m_mask;
}
VULKAN_HPP_CONSTEXPR bool operator>(Flags<BitType> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return m_mask > rhs.m_mask;
}
VULKAN_HPP_CONSTEXPR bool operator>=(Flags<BitType> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return m_mask >= rhs.m_mask;
}
VULKAN_HPP_CONSTEXPR bool operator==(Flags<BitType> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return m_mask == rhs.m_mask;
}
VULKAN_HPP_CONSTEXPR bool operator!=(Flags<BitType> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return m_mask != rhs.m_mask;
}
#endif
// logical operator
VULKAN_HPP_CONSTEXPR bool operator!() const VULKAN_HPP_NOEXCEPT
{
return !m_mask;
}
// bitwise operators
VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(Flags<BitType> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return Flags<BitType>(m_mask & rhs.m_mask);
}
VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(Flags<BitType> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return Flags<BitType>(m_mask | rhs.m_mask);
}
VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(Flags<BitType> const &rhs) const VULKAN_HPP_NOEXCEPT
{
return Flags<BitType>(m_mask ^ rhs.m_mask);
}
VULKAN_HPP_CONSTEXPR Flags<BitType> operator~() const VULKAN_HPP_NOEXCEPT
{
return Flags<BitType>(m_mask ^ FlagTraits<BitType>::allFlags);
}
// assignment operators
VULKAN_HPP_CONSTEXPR_14 Flags<BitType> &operator=(Flags<BitType> const &rhs) VULKAN_HPP_NOEXCEPT = default;
VULKAN_HPP_CONSTEXPR_14 Flags<BitType> &operator|=(Flags<BitType> const &rhs) VULKAN_HPP_NOEXCEPT
{
m_mask |= rhs.m_mask;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Flags<BitType> &operator&=(Flags<BitType> const &rhs) VULKAN_HPP_NOEXCEPT
{
m_mask &= rhs.m_mask;
return *this;
}
VULKAN_HPP_CONSTEXPR_14 Flags<BitType> &operator^=(Flags<BitType> const &rhs) VULKAN_HPP_NOEXCEPT
{
m_mask ^= rhs.m_mask;
return *this;
}
// cast operators
explicit VULKAN_HPP_CONSTEXPR operator bool() const VULKAN_HPP_NOEXCEPT
{
return !!m_mask;
}
explicit VULKAN_HPP_CONSTEXPR operator MaskType() const VULKAN_HPP_NOEXCEPT
{
return m_mask;
}
#if defined(VULKAN_HPP_FLAGS_MASK_TYPE_AS_PUBLIC)
public:
#else
private:
#endif
MaskType m_mask;
};
#if !defined(VULKAN_HPP_HAS_SPACESHIP_OPERATOR)
// relational operators are only needed pre-C++20
template<typename BitType>
VULKAN_HPP_CONSTEXPR bool operator<(BitType bit, Flags<BitType> const &flags) VULKAN_HPP_NOEXCEPT
{
return flags.operator>(bit);
}
template<typename BitType>
VULKAN_HPP_CONSTEXPR bool operator<=(BitType bit, Flags<BitType> const &flags) VULKAN_HPP_NOEXCEPT
{
return flags.operator>=(bit);
}
template<typename BitType>
VULKAN_HPP_CONSTEXPR bool operator>(BitType bit, Flags<BitType> const &flags) VULKAN_HPP_NOEXCEPT
{
return flags.operator<(bit);
}
template<typename BitType>
VULKAN_HPP_CONSTEXPR bool operator>=(BitType bit, Flags<BitType> const &flags) VULKAN_HPP_NOEXCEPT
{
return flags.operator<=(bit);
}
template<typename BitType>
VULKAN_HPP_CONSTEXPR bool operator==(BitType bit, Flags<BitType> const &flags) VULKAN_HPP_NOEXCEPT
{
return flags.operator==(bit);
}
template<typename BitType>
VULKAN_HPP_CONSTEXPR bool operator!=(BitType bit, Flags<BitType> const &flags) VULKAN_HPP_NOEXCEPT
{
return flags.operator!=(bit);
}
#endif
// bitwise operators
template<typename BitType>
VULKAN_HPP_CONSTEXPR Flags<BitType> operator&(BitType bit, Flags<BitType> const &flags) VULKAN_HPP_NOEXCEPT
{
return flags.operator&(bit);
}
template<typename BitType>
VULKAN_HPP_CONSTEXPR Flags<BitType> operator|(BitType bit, Flags<BitType> const &flags) VULKAN_HPP_NOEXCEPT
{
return flags.operator|(bit);
}
template<typename BitType>
VULKAN_HPP_CONSTEXPR Flags<BitType> operator^(BitType bit, Flags<BitType> const &flags) VULKAN_HPP_NOEXCEPT
{
return flags.operator^(bit);
}
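// A minimal sketch of Flags usage (the flag bits are real Vulkan enums defined later in
// this header; the variable name is an assumption): Flags<BitType> stores the underlying
// mask and supports bitwise algebra with either operand order:
//
//   vk::BufferUsageFlags usage = vk::BufferUsageFlagBits::eTransferSrc |
//                                vk::BufferUsageFlagBits::eVertexBuffer;
//   if (usage & vk::BufferUsageFlagBits::eVertexBuffer) { /* bit is set */ }
//   usage &= ~vk::BufferUsageFlags(vk::BufferUsageFlagBits::eTransferSrc);  // clear one bit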
template<typename RefType>
class Optional
{
public:
Optional(RefType &reference) VULKAN_HPP_NOEXCEPT
{
m_ptr = &reference;
}
Optional(RefType *ptr) VULKAN_HPP_NOEXCEPT
{
m_ptr = ptr;
}
Optional(std::nullptr_t) VULKAN_HPP_NOEXCEPT
{
m_ptr = nullptr;
}
operator RefType *() const VULKAN_HPP_NOEXCEPT
{
return m_ptr;
}
RefType const *operator->() const VULKAN_HPP_NOEXCEPT
{
return m_ptr;
}
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
return !!m_ptr;
}
private:
RefType *m_ptr;
};
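// Usage sketch (the function is hypothetical; AllocationCallbacks is defined later in this
// header): Optional<RefType> lets one parameter accept a reference, a pointer, or nullptr:
//
//   void destroyWith(vk::Optional<const vk::AllocationCallbacks> allocator = nullptr);
//
//   vk::AllocationCallbacks cb = /* ... */;
//   destroyWith(cb);        // from a reference
//   destroyWith(&cb);       // from a pointer
//   destroyWith(nullptr);   // explicitly "no allocator"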
template<typename X, typename Y>
struct StructExtends
{
enum
{
value = false
};
};
template<typename Type, class...>
struct IsPartOfStructureChain
{
static const bool valid = false;
};
template<typename Type, typename Head, typename... Tail>
struct IsPartOfStructureChain<Type, Head, Tail...>
{
static const bool valid = std::is_same<Type, Head>::value || IsPartOfStructureChain<Type, Tail...>::valid;
};
template<size_t Index, typename T, typename... ChainElements>
struct StructureChainContains
{
static const bool value = std::is_same<T, typename std::tuple_element<Index, std::tuple<ChainElements...>>::type>::value ||
StructureChainContains<Index - 1, T, ChainElements...>::value;
};
template<typename T, typename... ChainElements>
struct StructureChainContains<0, T, ChainElements...>
{
static const bool value = std::is_same<T, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value;
};
template<size_t Index, typename... ChainElements>
struct StructureChainValidation
{
using TestType = typename std::tuple_element<Index, std::tuple<ChainElements...>>::type;
static const bool valid = StructExtends<TestType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value &&
(TestType::allowDuplicate || !StructureChainContains<Index - 1, TestType, ChainElements...>::value) &&
StructureChainValidation<Index - 1, ChainElements...>::valid;
};
template<typename... ChainElements>
struct StructureChainValidation<0, ChainElements...>
{
static const bool valid = true;
};
template<typename... ChainElements>
class StructureChain : public std::tuple<ChainElements...>
{
public:
StructureChain() VULKAN_HPP_NOEXCEPT
{
static_assert(StructureChainValidation<sizeof...(ChainElements) - 1, ChainElements...>::valid, "The structure chain is not valid!");
link<sizeof...(ChainElements) - 1>();
}
StructureChain(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>(rhs)
{
static_assert(StructureChainValidation<sizeof...(ChainElements) - 1, ChainElements...>::valid, "The structure chain is not valid!");
link(&std::get<0>(*this),
&std::get<0>(rhs),
reinterpret_cast<VkBaseOutStructure *>(&std::get<0>(*this)),
reinterpret_cast<VkBaseInStructure const *>(&std::get<0>(rhs)));
}
StructureChain(StructureChain &&rhs) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>(std::forward<std::tuple<ChainElements...>>(rhs))
{
static_assert(StructureChainValidation<sizeof...(ChainElements) - 1, ChainElements...>::valid, "The structure chain is not valid!");
link(&std::get<0>(*this),
&std::get<0>(rhs),
reinterpret_cast<VkBaseOutStructure *>(&std::get<0>(*this)),
reinterpret_cast<VkBaseInStructure const *>(&std::get<0>(rhs)));
}
StructureChain(ChainElements const &... elems) VULKAN_HPP_NOEXCEPT : std::tuple<ChainElements...>(elems...)
{
static_assert(StructureChainValidation<sizeof...(ChainElements) - 1, ChainElements...>::valid, "The structure chain is not valid!");
link<sizeof...(ChainElements) - 1>();
}
StructureChain &operator=(StructureChain const &rhs) VULKAN_HPP_NOEXCEPT
{
std::tuple<ChainElements...>::operator=(rhs);
link(&std::get<0>(*this),
&std::get<0>(rhs),
reinterpret_cast<VkBaseOutStructure *>(&std::get<0>(*this)),
reinterpret_cast<VkBaseInStructure const *>(&std::get<0>(rhs)));
return *this;
}
StructureChain &operator=(StructureChain &&rhs) = delete;
template<typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
T &get() VULKAN_HPP_NOEXCEPT
{
return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>(static_cast<std::tuple<ChainElements...> &>(*this));
}
template<typename T = typename std::tuple_element<0, std::tuple<ChainElements...>>::type, size_t Which = 0>
T const &get() const VULKAN_HPP_NOEXCEPT
{
return std::get<ChainElementIndex<0, T, Which, void, ChainElements...>::value>(static_cast<std::tuple<ChainElements...> const &>(*this));
}
template<typename T0, typename T1, typename... Ts>
std::tuple<T0 &, T1 &, Ts &...> get() VULKAN_HPP_NOEXCEPT
{
return std::tie(get<T0>(), get<T1>(), get<Ts>()...);
}
template<typename T0, typename T1, typename... Ts>
std::tuple<T0 const &, T1 const &, Ts const &...> get() const VULKAN_HPP_NOEXCEPT
{
return std::tie(get<T0>(), get<T1>(), get<Ts>()...);
}
template<typename ClassType, size_t Which = 0>
typename std::enable_if<std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value && (Which == 0), bool>::type
isLinked() const VULKAN_HPP_NOEXCEPT
{
return true;
}
template<typename ClassType, size_t Which = 0>
typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || (Which != 0), bool>::type
isLinked() const VULKAN_HPP_NOEXCEPT
{
static_assert(IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't check linkage of a Structure that's not part of this StructureChain!");
return isLinked(reinterpret_cast<VkBaseInStructure const *>(&get<ClassType, Which>()));
}
template<typename ClassType, size_t Which = 0>
typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || (Which != 0), void>::type
relink() VULKAN_HPP_NOEXCEPT
{
static_assert(IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't relink Structure that's not part of this StructureChain!");
auto pNext = reinterpret_cast<VkBaseInStructure *>(&get<ClassType, Which>());
VULKAN_HPP_ASSERT(!isLinked(pNext));
auto &headElement = std::get<0>(static_cast<std::tuple<ChainElements...> &>(*this));
pNext->pNext = reinterpret_cast<VkBaseInStructure const *>(headElement.pNext);
headElement.pNext = pNext;
}
template<typename ClassType, size_t Which = 0>
typename std::enable_if<!std::is_same<ClassType, typename std::tuple_element<0, std::tuple<ChainElements...>>::type>::value || (Which != 0), void>::type
unlink() VULKAN_HPP_NOEXCEPT
{
static_assert(IsPartOfStructureChain<ClassType, ChainElements...>::valid, "Can't unlink Structure that's not part of this StructureChain!");
unlink(reinterpret_cast<VkBaseOutStructure const *>(&get<ClassType, Which>()));
}
private:
template<int Index, typename T, int Which, typename, class First, class... Types>
struct ChainElementIndex : ChainElementIndex<Index + 1, T, Which, void, Types...>
{
};
template<int Index, typename T, int Which, class First, class... Types>
struct ChainElementIndex<Index, T, Which, typename std::enable_if<!std::is_same<T, First>::value, void>::type, First, Types...>
: ChainElementIndex<Index + 1, T, Which, void, Types...>
{
};
template<int Index, typename T, int Which, class First, class... Types>
struct ChainElementIndex<Index, T, Which, typename std::enable_if<std::is_same<T, First>::value, void>::type, First, Types...>
: ChainElementIndex<Index + 1, T, Which - 1, void, Types...>
{
};
template<int Index, typename T, class First, class... Types>
struct ChainElementIndex<Index, T, 0, typename std::enable_if<std::is_same<T, First>::value, void>::type, First, Types...>
: std::integral_constant<int, Index>
{
};
bool isLinked(VkBaseInStructure const *pNext) const VULKAN_HPP_NOEXCEPT
{
VkBaseInStructure const *elementPtr =
reinterpret_cast<VkBaseInStructure const *>(&std::get<0>(static_cast<std::tuple<ChainElements...> const &>(*this)));
while(elementPtr)
{
if(elementPtr->pNext == pNext)
{
return true;
}
elementPtr = elementPtr->pNext;
}
return false;
}
template<size_t Index>
typename std::enable_if<Index != 0, void>::type link() VULKAN_HPP_NOEXCEPT
{
auto &x = std::get<Index - 1>(static_cast<std::tuple<ChainElements...> &>(*this));
x.pNext = &std::get<Index>(static_cast<std::tuple<ChainElements...> &>(*this));
link<Index - 1>();
}
template<size_t Index>
typename std::enable_if<Index == 0, void>::type link() VULKAN_HPP_NOEXCEPT
{
}
void link(void *dstBase, void const *srcBase, VkBaseOutStructure *dst, VkBaseInStructure const *src)
{
while(src->pNext)
{
std::ptrdiff_t offset = reinterpret_cast<char const *>(src->pNext) - reinterpret_cast<char const *>(srcBase);
dst->pNext = reinterpret_cast<VkBaseOutStructure *>(reinterpret_cast<char *>(dstBase) + offset);
dst = dst->pNext;
src = src->pNext;
}
dst->pNext = nullptr;
}
void unlink(VkBaseOutStructure const *pNext) VULKAN_HPP_NOEXCEPT
{
VkBaseOutStructure *elementPtr = reinterpret_cast<VkBaseOutStructure *>(&std::get<0>(static_cast<std::tuple<ChainElements...> &>(*this)));
while(elementPtr && (elementPtr->pNext != pNext))
{
elementPtr = elementPtr->pNext;
}
if(elementPtr)
{
elementPtr->pNext = pNext->pNext;
}
else
{
VULKAN_HPP_ASSERT(false); // fires if the ClassType member has already been unlinked!
}
}
};
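// Usage sketch for StructureChain (the structure types are real Vulkan structs defined
// later in this header, assuming the usual StructExtends specializations): the
// constructor links the pNext members head-to-tail, get<T>() retrieves an element, and
// unlink/relink toggle an element's membership in the pNext chain:
//
//   vk::StructureChain<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features> chain;
//   auto & features12 = chain.get<vk::PhysicalDeviceVulkan12Features>();
//   chain.unlink<vk::PhysicalDeviceVulkan12Features>();  // remove from the pNext chain
//   chain.relink<vk::PhysicalDeviceVulkan12Features>();  // hook it back in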
#if !defined(VULKAN_HPP_NO_SMART_HANDLE)
template<typename Type, typename Dispatch>
class UniqueHandleTraits;
template<typename Type, typename Dispatch>
class UniqueHandle : public UniqueHandleTraits<Type, Dispatch>::deleter
{
private:
using Deleter = typename UniqueHandleTraits<Type, Dispatch>::deleter;
public:
using element_type = Type;
UniqueHandle()
: Deleter()
, m_value()
{}
explicit UniqueHandle(Type const &value, Deleter const &deleter = Deleter()) VULKAN_HPP_NOEXCEPT
: Deleter(deleter),
m_value(value)
{
}
UniqueHandle(UniqueHandle const &) = delete;
UniqueHandle(UniqueHandle &&other) VULKAN_HPP_NOEXCEPT
: Deleter(std::move(static_cast<Deleter &>(other))),
m_value(other.release())
{
}
~UniqueHandle() VULKAN_HPP_NOEXCEPT
{
if(m_value)
{
this->destroy(m_value);
}
}
UniqueHandle &operator=(UniqueHandle const &) = delete;
UniqueHandle &operator=(UniqueHandle &&other) VULKAN_HPP_NOEXCEPT
{
reset(other.release());
*static_cast<Deleter *>(this) = std::move(static_cast<Deleter &>(other));
return *this;
}
explicit operator bool() const VULKAN_HPP_NOEXCEPT
{
return m_value.operator bool();
}
Type const *operator->() const VULKAN_HPP_NOEXCEPT
{
return &m_value;
}
Type *operator->() VULKAN_HPP_NOEXCEPT
{
return &m_value;
}
Type const &operator*() const VULKAN_HPP_NOEXCEPT
{
return m_value;
}
Type &operator*() VULKAN_HPP_NOEXCEPT
{
return m_value;
}
const Type &get() const VULKAN_HPP_NOEXCEPT
{
return m_value;
}
Type &get() VULKAN_HPP_NOEXCEPT
{
return m_value;
}
void reset(Type const &value = Type()) VULKAN_HPP_NOEXCEPT
{
if(m_value != value)
{
if(m_value)
{
this->destroy(m_value);
}
m_value = value;
}
}
Type release() VULKAN_HPP_NOEXCEPT
{
Type value = m_value;
m_value = nullptr;
return value;
}
void swap(UniqueHandle<Type, Dispatch> &rhs) VULKAN_HPP_NOEXCEPT
{
std::swap(m_value, rhs.m_value);
std::swap(static_cast<Deleter &>(*this), static_cast<Deleter &>(rhs));
}
private:
Type m_value;
};
template<typename UniqueType>
VULKAN_HPP_INLINE std::vector<typename UniqueType::element_type> uniqueToRaw(std::vector<UniqueType> const &handles)
{
std::vector<typename UniqueType::element_type> newBuffer(handles.size());
std::transform(handles.begin(), handles.end(), newBuffer.begin(), [](UniqueType const &handle) { return handle.get(); });
return newBuffer;
}
template<typename Type, typename Dispatch>
VULKAN_HPP_INLINE void swap(UniqueHandle<Type, Dispatch> &lhs, UniqueHandle<Type, Dispatch> &rhs) VULKAN_HPP_NOEXCEPT
{
lhs.swap(rhs);
}
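// Usage sketch (assuming enhanced mode and the createXxxUnique factory functions defined
// later in this header): UniqueHandle owns a Vulkan handle the way std::unique_ptr owns a
// pointer, destroying it through its Deleter base on scope exit:
//
//   vk::UniqueBuffer buffer = device.createBufferUnique(createInfo);  // hypothetical call site
//   vk::Buffer raw = buffer.get();  // observe without taking ownership
//   buffer.reset();                 // destroy now; otherwise ~UniqueHandle does it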
#endif
class DispatchLoaderBase
{
public:
DispatchLoaderBase() = default;
DispatchLoaderBase(std::nullptr_t)
#if !defined(NDEBUG)
: m_valid(false)
#endif
{
}
#if !defined(NDEBUG)
size_t getVkHeaderVersion() const
{
VULKAN_HPP_ASSERT(m_valid);
return vkHeaderVersion;
}
private:
size_t vkHeaderVersion = VK_HEADER_VERSION;
bool m_valid = true;
#endif
};
#if !defined(VK_NO_PROTOTYPES)
class DispatchLoaderStatic : public DispatchLoaderBase
{
public:
//=== VK_VERSION_1_0 ===
VkResult
vkCreateInstance(const VkInstanceCreateInfo *pCreateInfo, const VkAllocationCallbacks *pAllocator, VkInstance *pInstance) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateInstance(pCreateInfo, pAllocator, pInstance);
}
void vkDestroyInstance(VkInstance instance, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyInstance(instance, pAllocator);
}
VkResult vkEnumeratePhysicalDevices(VkInstance instance, uint32_t *pPhysicalDeviceCount, VkPhysicalDevice *pPhysicalDevices) const VULKAN_HPP_NOEXCEPT
{
return ::vkEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices);
}
void vkGetPhysicalDeviceFeatures(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures *pFeatures) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceFeatures(physicalDevice, pFeatures);
}
void
vkGetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties *pFormatProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties);
}
VkResult vkGetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice,
VkFormat format,
VkImageType type,
VkImageTiling tiling,
VkImageUsageFlags usage,
VkImageCreateFlags flags,
VkImageFormatProperties *pImageFormatProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties);
}
void vkGetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties *pProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceProperties(physicalDevice, pProperties);
}
void vkGetPhysicalDeviceQueueFamilyProperties(VkPhysicalDevice physicalDevice,
uint32_t *pQueueFamilyPropertyCount,
VkQueueFamilyProperties *pQueueFamilyProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
}
void vkGetPhysicalDeviceMemoryProperties(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties *pMemoryProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties);
}
PFN_vkVoidFunction vkGetInstanceProcAddr(VkInstance instance, const char *pName) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetInstanceProcAddr(instance, pName);
}
PFN_vkVoidFunction vkGetDeviceProcAddr(VkDevice device, const char *pName) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceProcAddr(device, pName);
}
VkResult vkCreateDevice(VkPhysicalDevice physicalDevice,
const VkDeviceCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkDevice *pDevice) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice);
}
void vkDestroyDevice(VkDevice device, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyDevice(device, pAllocator);
}
VkResult vkEnumerateInstanceExtensionProperties(const char *pLayerName,
uint32_t *pPropertyCount,
VkExtensionProperties *pProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkEnumerateInstanceExtensionProperties(pLayerName, pPropertyCount, pProperties);
}
VkResult vkEnumerateDeviceExtensionProperties(VkPhysicalDevice physicalDevice,
const char *pLayerName,
uint32_t *pPropertyCount,
VkExtensionProperties *pProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName, pPropertyCount, pProperties);
}
VkResult vkEnumerateInstanceLayerProperties(uint32_t *pPropertyCount, VkLayerProperties *pProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkEnumerateInstanceLayerProperties(pPropertyCount, pProperties);
}
VkResult
vkEnumerateDeviceLayerProperties(VkPhysicalDevice physicalDevice, uint32_t *pPropertyCount, VkLayerProperties *pProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkEnumerateDeviceLayerProperties(physicalDevice, pPropertyCount, pProperties);
}
void vkGetDeviceQueue(VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue *pQueue) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue);
}
VkResult vkQueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo *pSubmits, VkFence fence) const VULKAN_HPP_NOEXCEPT
{
return ::vkQueueSubmit(queue, submitCount, pSubmits, fence);
}
VkResult vkQueueWaitIdle(VkQueue queue) const VULKAN_HPP_NOEXCEPT
{
return ::vkQueueWaitIdle(queue);
}
VkResult vkDeviceWaitIdle(VkDevice device) const VULKAN_HPP_NOEXCEPT
{
return ::vkDeviceWaitIdle(device);
}
VkResult vkAllocateMemory(VkDevice device,
const VkMemoryAllocateInfo *pAllocateInfo,
const VkAllocationCallbacks *pAllocator,
VkDeviceMemory *pMemory) const VULKAN_HPP_NOEXCEPT
{
return ::vkAllocateMemory(device, pAllocateInfo, pAllocator, pMemory);
}
void vkFreeMemory(VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkFreeMemory(device, memory, pAllocator);
}
VkResult vkMapMemory(VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void **ppData) const
VULKAN_HPP_NOEXCEPT
{
return ::vkMapMemory(device, memory, offset, size, flags, ppData);
}
void vkUnmapMemory(VkDevice device, VkDeviceMemory memory) const VULKAN_HPP_NOEXCEPT
{
return ::vkUnmapMemory(device, memory);
}
VkResult vkFlushMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges) const VULKAN_HPP_NOEXCEPT
{
return ::vkFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
}
VkResult vkInvalidateMappedMemoryRanges(VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange *pMemoryRanges) const VULKAN_HPP_NOEXCEPT
{
return ::vkInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges);
}
void vkGetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize *pCommittedMemoryInBytes) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes);
}
VkResult vkBindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) const VULKAN_HPP_NOEXCEPT
{
return ::vkBindBufferMemory(device, buffer, memory, memoryOffset);
}
VkResult vkBindImageMemory(VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset) const VULKAN_HPP_NOEXCEPT
{
return ::vkBindImageMemory(device, image, memory, memoryOffset);
}
void vkGetBufferMemoryRequirements(VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferMemoryRequirements(device, buffer, pMemoryRequirements);
}
void vkGetImageMemoryRequirements(VkDevice device, VkImage image, VkMemoryRequirements *pMemoryRequirements) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageMemoryRequirements(device, image, pMemoryRequirements);
}
void vkGetImageSparseMemoryRequirements(VkDevice device,
VkImage image,
uint32_t *pSparseMemoryRequirementCount,
VkSparseImageMemoryRequirements *pSparseMemoryRequirements) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
}
void vkGetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice,
VkFormat format,
VkImageType type,
VkSampleCountFlagBits samples,
VkImageUsageFlags usage,
VkImageTiling tiling,
uint32_t *pPropertyCount,
VkSparseImageFormatProperties *pProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties);
}
VkResult vkQueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo *pBindInfo, VkFence fence) const VULKAN_HPP_NOEXCEPT
{
return ::vkQueueBindSparse(queue, bindInfoCount, pBindInfo, fence);
}
VkResult vkCreateFence(VkDevice device,
const VkFenceCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkFence *pFence) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateFence(device, pCreateInfo, pAllocator, pFence);
}
void vkDestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyFence(device, fence, pAllocator);
}
VkResult vkResetFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetFences(device, fenceCount, pFences);
}
VkResult vkGetFenceStatus(VkDevice device, VkFence fence) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetFenceStatus(device, fence);
}
VkResult vkWaitForFences(VkDevice device, uint32_t fenceCount, const VkFence *pFences, VkBool32 waitAll, uint64_t timeout) const VULKAN_HPP_NOEXCEPT
{
return ::vkWaitForFences(device, fenceCount, pFences, waitAll, timeout);
}
VkResult vkCreateSemaphore(VkDevice device,
const VkSemaphoreCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkSemaphore *pSemaphore) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore);
}
void vkDestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroySemaphore(device, semaphore, pAllocator);
}
VkResult vkCreateEvent(VkDevice device,
const VkEventCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkEvent *pEvent) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateEvent(device, pCreateInfo, pAllocator, pEvent);
}
void vkDestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyEvent(device, event, pAllocator);
}
VkResult vkGetEventStatus(VkDevice device, VkEvent event) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetEventStatus(device, event);
}
VkResult vkSetEvent(VkDevice device, VkEvent event) const VULKAN_HPP_NOEXCEPT
{
return ::vkSetEvent(device, event);
}
VkResult vkResetEvent(VkDevice device, VkEvent event) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetEvent(device, event);
}
VkResult vkCreateQueryPool(VkDevice device,
const VkQueryPoolCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkQueryPool *pQueryPool) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool);
}
void vkDestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyQueryPool(device, queryPool, pAllocator);
}
VkResult vkGetQueryPoolResults(VkDevice device,
VkQueryPool queryPool,
uint32_t firstQuery,
uint32_t queryCount,
size_t dataSize,
void *pData,
VkDeviceSize stride,
VkQueryResultFlags flags) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags);
}
VkResult vkCreateBuffer(VkDevice device,
const VkBufferCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkBuffer *pBuffer) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
}
void vkDestroyBuffer(VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyBuffer(device, buffer, pAllocator);
}
VkResult vkCreateBufferView(VkDevice device,
const VkBufferViewCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkBufferView *pView) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateBufferView(device, pCreateInfo, pAllocator, pView);
}
void vkDestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyBufferView(device, bufferView, pAllocator);
}
VkResult vkCreateImage(VkDevice device,
const VkImageCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkImage *pImage) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateImage(device, pCreateInfo, pAllocator, pImage);
}
void vkDestroyImage(VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyImage(device, image, pAllocator);
}
void vkGetImageSubresourceLayout(VkDevice device,
VkImage image,
const VkImageSubresource *pSubresource,
VkSubresourceLayout *pLayout) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageSubresourceLayout(device, image, pSubresource, pLayout);
}
VkResult vkCreateImageView(VkDevice device,
const VkImageViewCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkImageView *pView) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateImageView(device, pCreateInfo, pAllocator, pView);
}
void vkDestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyImageView(device, imageView, pAllocator);
}
VkResult vkCreateShaderModule(VkDevice device,
const VkShaderModuleCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkShaderModule *pShaderModule) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule);
}
void vkDestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyShaderModule(device, shaderModule, pAllocator);
}
VkResult vkCreatePipelineCache(VkDevice device,
const VkPipelineCacheCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkPipelineCache *pPipelineCache) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache);
}
void vkDestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyPipelineCache(device, pipelineCache, pAllocator);
}
VkResult vkGetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t *pDataSize, void *pData) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPipelineCacheData(device, pipelineCache, pDataSize, pData);
}
VkResult
vkMergePipelineCaches(VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache *pSrcCaches) const VULKAN_HPP_NOEXCEPT
{
return ::vkMergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches);
}
VkResult vkCreateGraphicsPipelines(VkDevice device,
VkPipelineCache pipelineCache,
uint32_t createInfoCount,
const VkGraphicsPipelineCreateInfo *pCreateInfos,
const VkAllocationCallbacks *pAllocator,
VkPipeline *pPipelines) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
}
VkResult vkCreateComputePipelines(VkDevice device,
VkPipelineCache pipelineCache,
uint32_t createInfoCount,
const VkComputePipelineCreateInfo *pCreateInfos,
const VkAllocationCallbacks *pAllocator,
VkPipeline *pPipelines) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines);
}
void vkDestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyPipeline(device, pipeline, pAllocator);
}
VkResult vkCreatePipelineLayout(VkDevice device,
const VkPipelineLayoutCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkPipelineLayout *pPipelineLayout) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout);
}
void vkDestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyPipelineLayout(device, pipelineLayout, pAllocator);
}
VkResult vkCreateSampler(VkDevice device,
const VkSamplerCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkSampler *pSampler) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateSampler(device, pCreateInfo, pAllocator, pSampler);
}
void vkDestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroySampler(device, sampler, pAllocator);
}
VkResult vkCreateDescriptorSetLayout(VkDevice device,
const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkDescriptorSetLayout *pSetLayout) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout);
}
void vkDestroyDescriptorSetLayout(VkDevice device,
VkDescriptorSetLayout descriptorSetLayout,
const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator);
}
VkResult vkCreateDescriptorPool(VkDevice device,
const VkDescriptorPoolCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkDescriptorPool *pDescriptorPool) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool);
}
void vkDestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyDescriptorPool(device, descriptorPool, pAllocator);
}
VkResult vkResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetDescriptorPool(device, descriptorPool, flags);
}
VkResult vkAllocateDescriptorSets(VkDevice device,
const VkDescriptorSetAllocateInfo *pAllocateInfo,
VkDescriptorSet *pDescriptorSets) const VULKAN_HPP_NOEXCEPT
{
return ::vkAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
}
VkResult vkFreeDescriptorSets(VkDevice device,
VkDescriptorPool descriptorPool,
uint32_t descriptorSetCount,
const VkDescriptorSet *pDescriptorSets) const VULKAN_HPP_NOEXCEPT
{
return ::vkFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets);
}
void vkUpdateDescriptorSets(VkDevice device,
uint32_t descriptorWriteCount,
const VkWriteDescriptorSet *pDescriptorWrites,
uint32_t descriptorCopyCount,
const VkCopyDescriptorSet *pDescriptorCopies) const VULKAN_HPP_NOEXCEPT
{
return ::vkUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
}
VkResult vkCreateFramebuffer(VkDevice device,
const VkFramebufferCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkFramebuffer *pFramebuffer) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer);
}
void vkDestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyFramebuffer(device, framebuffer, pAllocator);
}
VkResult vkCreateRenderPass(VkDevice device,
const VkRenderPassCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkRenderPass *pRenderPass) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass);
}
void vkDestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyRenderPass(device, renderPass, pAllocator);
}
void vkGetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D *pGranularity) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetRenderAreaGranularity(device, renderPass, pGranularity);
}
VkResult vkCreateCommandPool(VkDevice device,
const VkCommandPoolCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkCommandPool *pCommandPool) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool);
}
void vkDestroyCommandPool(VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyCommandPool(device, commandPool, pAllocator);
}
VkResult vkResetCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetCommandPool(device, commandPool, flags);
}
VkResult vkAllocateCommandBuffers(VkDevice device,
const VkCommandBufferAllocateInfo *pAllocateInfo,
VkCommandBuffer *pCommandBuffers) const VULKAN_HPP_NOEXCEPT
{
return ::vkAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers);
}
void vkFreeCommandBuffers(VkDevice device,
VkCommandPool commandPool,
uint32_t commandBufferCount,
const VkCommandBuffer *pCommandBuffers) const VULKAN_HPP_NOEXCEPT
{
return ::vkFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers);
}
VkResult vkBeginCommandBuffer(VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo *pBeginInfo) const VULKAN_HPP_NOEXCEPT
{
return ::vkBeginCommandBuffer(commandBuffer, pBeginInfo);
}
VkResult vkEndCommandBuffer(VkCommandBuffer commandBuffer) const VULKAN_HPP_NOEXCEPT
{
return ::vkEndCommandBuffer(commandBuffer);
}
VkResult vkResetCommandBuffer(VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetCommandBuffer(commandBuffer, flags);
}
void vkCmdBindPipeline(VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
}
void
vkCmdSetViewport(VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport *pViewports) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports);
}
void vkCmdSetScissor(VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D *pScissors) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors);
}
void vkCmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetLineWidth(commandBuffer, lineWidth);
}
void vkCmdSetDepthBias(VkCommandBuffer commandBuffer,
float depthBiasConstantFactor,
float depthBiasClamp,
float depthBiasSlopeFactor) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor);
}
void vkCmdSetBlendConstants(VkCommandBuffer commandBuffer, const float blendConstants[4]) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetBlendConstants(commandBuffer, blendConstants);
}
void vkCmdSetDepthBounds(VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds);
}
void vkCmdSetStencilCompareMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask);
}
void vkCmdSetStencilWriteMask(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
}
void vkCmdSetStencilReference(VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetStencilReference(commandBuffer, faceMask, reference);
}
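// The vkCmdSet* calls above only take effect for state declared dynamic through
// VkPipelineDynamicStateCreateInfo at pipeline creation; otherwise the value baked into
// the pipeline wins. A hypothetical viewport/scissor example, with `cmd` assumed:
//
//   VkViewport viewport{ 0.0f, 0.0f, 1280.0f, 720.0f, 0.0f, 1.0f };
//   VkRect2D   scissor{ { 0, 0 }, { 1280, 720 } };
//   vkCmdSetViewport(cmd, 0, 1, &viewport);
//   vkCmdSetScissor(cmd, 0, 1, &scissor);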
void vkCmdBindDescriptorSets(VkCommandBuffer commandBuffer,
VkPipelineBindPoint pipelineBindPoint,
VkPipelineLayout layout,
uint32_t firstSet,
uint32_t descriptorSetCount,
const VkDescriptorSet *pDescriptorSets,
uint32_t dynamicOffsetCount,
const uint32_t *pDynamicOffsets) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBindDescriptorSets(
commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
}
void vkCmdBindIndexBuffer(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType);
}
void vkCmdBindVertexBuffers(VkCommandBuffer commandBuffer,
uint32_t firstBinding,
uint32_t bindingCount,
const VkBuffer *pBuffers,
const VkDeviceSize *pOffsets) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
}
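// Illustrative recording order before a draw (all handles assumed): bind the pipeline,
// then descriptor sets, then vertex/index buffers.
//
//   vkCmdBindPipeline(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline);
//   vkCmdBindDescriptorSets(cmd, VK_PIPELINE_BIND_POINT_GRAPHICS, layout, 0, 1, &set, 0, nullptr);
//   VkDeviceSize offset = 0;
//   vkCmdBindVertexBuffers(cmd, 0, 1, &vertexBuffer, &offset);
//   vkCmdBindIndexBuffer(cmd, indexBuffer, 0, VK_INDEX_TYPE_UINT32);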
void vkCmdDraw(VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) const
VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance);
}
void vkCmdDrawIndexed(VkCommandBuffer commandBuffer,
uint32_t indexCount,
uint32_t instanceCount,
uint32_t firstIndex,
int32_t vertexOffset,
uint32_t firstInstance) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance);
}
void vkCmdDrawIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride);
}
void vkCmdDrawIndexedIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride) const
VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride);
}
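// For the two indirect draws above, the buffer must hold `drawCount` records separated
// by `stride` bytes, each laid out as VkDrawIndirectCommand (or
// VkDrawIndexedIndirectCommand for the indexed variant):
//
//   struct VkDrawIndirectCommand {
//     uint32_t vertexCount;
//     uint32_t instanceCount;
//     uint32_t firstVertex;
//     uint32_t firstInstance;
//   };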
void vkCmdDispatch(VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ);
}
void vkCmdDispatchIndirect(VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDispatchIndirect(commandBuffer, buffer, offset);
}
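// Dispatch counts are workgroups, not invocations: a compute shader with
// local_size_x = 64 covering N items is launched as, for example,
//
//   vkCmdDispatch(cmd, (N + 63) / 64, 1, 1);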
void vkCmdCopyBuffer(VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy *pRegions) const
VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions);
}
void vkCmdCopyImage(VkCommandBuffer commandBuffer,
VkImage srcImage,
VkImageLayout srcImageLayout,
VkImage dstImage,
VkImageLayout dstImageLayout,
uint32_t regionCount,
const VkImageCopy *pRegions) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
}
void vkCmdBlitImage(VkCommandBuffer commandBuffer,
VkImage srcImage,
VkImageLayout srcImageLayout,
VkImage dstImage,
VkImageLayout dstImageLayout,
uint32_t regionCount,
const VkImageBlit *pRegions,
VkFilter filter) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter);
}
void vkCmdCopyBufferToImage(VkCommandBuffer commandBuffer,
VkBuffer srcBuffer,
VkImage dstImage,
VkImageLayout dstImageLayout,
uint32_t regionCount,
const VkBufferImageCopy *pRegions) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions);
}
void vkCmdCopyImageToBuffer(VkCommandBuffer commandBuffer,
VkImage srcImage,
VkImageLayout srcImageLayout,
VkBuffer dstBuffer,
uint32_t regionCount,
const VkBufferImageCopy *pRegions) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions);
}
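// Illustrative sketch: uploading pixels with vkCmdCopyBufferToImage. `stagingBuffer`,
// `image`, `width` and `height` are assumptions; the image must already be in
// VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL (or GENERAL).
//
//   VkBufferImageCopy region{};  // bufferRowLength == 0 means tightly packed rows
//   region.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
//   region.imageSubresource.layerCount = 1;
//   region.imageExtent                 = { width, height, 1 };
//   vkCmdCopyBufferToImage(cmd, stagingBuffer, image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &region);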
void vkCmdUpdateBuffer(VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void *pData) const
VULKAN_HPP_NOEXCEPT
{
return ::vkCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData);
}
void vkCmdFillBuffer(VkCommandBuffer commandBuffer,
VkBuffer dstBuffer,
VkDeviceSize dstOffset,
VkDeviceSize size,
uint32_t data) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data);
}
void vkCmdClearColorImage(VkCommandBuffer commandBuffer,
VkImage image,
VkImageLayout imageLayout,
const VkClearColorValue *pColor,
uint32_t rangeCount,
const VkImageSubresourceRange *pRanges) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
}
void vkCmdClearDepthStencilImage(VkCommandBuffer commandBuffer,
VkImage image,
VkImageLayout imageLayout,
const VkClearDepthStencilValue *pDepthStencil,
uint32_t rangeCount,
const VkImageSubresourceRange *pRanges) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
}
void vkCmdClearAttachments(VkCommandBuffer commandBuffer,
uint32_t attachmentCount,
const VkClearAttachment *pAttachments,
uint32_t rectCount,
const VkClearRect *pRects) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
}
void vkCmdResolveImage(VkCommandBuffer commandBuffer,
VkImage srcImage,
VkImageLayout srcImageLayout,
VkImage dstImage,
VkImageLayout dstImageLayout,
uint32_t regionCount,
const VkImageResolve *pRegions) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions);
}
void vkCmdSetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetEvent(commandBuffer, event, stageMask);
}
void vkCmdResetEvent(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdResetEvent(commandBuffer, event, stageMask);
}
void vkCmdWaitEvents(VkCommandBuffer commandBuffer,
uint32_t eventCount,
const VkEvent *pEvents,
VkPipelineStageFlags srcStageMask,
VkPipelineStageFlags dstStageMask,
uint32_t memoryBarrierCount,
const VkMemoryBarrier *pMemoryBarriers,
uint32_t bufferMemoryBarrierCount,
const VkBufferMemoryBarrier *pBufferMemoryBarriers,
uint32_t imageMemoryBarrierCount,
const VkImageMemoryBarrier *pImageMemoryBarriers) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdWaitEvents(commandBuffer,
eventCount,
pEvents,
srcStageMask,
dstStageMask,
memoryBarrierCount,
pMemoryBarriers,
bufferMemoryBarrierCount,
pBufferMemoryBarriers,
imageMemoryBarrierCount,
pImageMemoryBarriers);
}
void vkCmdPipelineBarrier(VkCommandBuffer commandBuffer,
VkPipelineStageFlags srcStageMask,
VkPipelineStageFlags dstStageMask,
VkDependencyFlags dependencyFlags,
uint32_t memoryBarrierCount,
const VkMemoryBarrier *pMemoryBarriers,
uint32_t bufferMemoryBarrierCount,
const VkBufferMemoryBarrier *pBufferMemoryBarriers,
uint32_t imageMemoryBarrierCount,
const VkImageMemoryBarrier *pImageMemoryBarriers) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdPipelineBarrier(commandBuffer,
srcStageMask,
dstStageMask,
dependencyFlags,
memoryBarrierCount,
pMemoryBarriers,
bufferMemoryBarrierCount,
pBufferMemoryBarriers,
imageMemoryBarrierCount,
pImageMemoryBarriers);
}
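// Illustrative sketch: the common image layout transition expressed as a pipeline
// barrier. The handles and the stage/access pairing are assumptions chosen for a
// transition that precedes a transfer write.
//
//   VkImageMemoryBarrier barrier{};
//   barrier.sType               = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
//   barrier.srcAccessMask       = 0;
//   barrier.dstAccessMask       = VK_ACCESS_TRANSFER_WRITE_BIT;
//   barrier.oldLayout           = VK_IMAGE_LAYOUT_UNDEFINED;
//   barrier.newLayout           = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
//   barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
//   barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
//   barrier.image               = image;
//   barrier.subresourceRange    = { VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 };
//   vkCmdPipelineBarrier(cmd, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
//                        0, nullptr, 0, nullptr, 1, &barrier);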
void vkCmdBeginQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBeginQuery(commandBuffer, queryPool, query, flags);
}
void vkCmdEndQuery(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdEndQuery(commandBuffer, queryPool, query);
}
void vkCmdResetQueryPool(VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount);
}
void vkCmdWriteTimestamp(VkCommandBuffer commandBuffer,
VkPipelineStageFlagBits pipelineStage,
VkQueryPool queryPool,
uint32_t query) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query);
}
void vkCmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,
VkQueryPool queryPool,
uint32_t firstQuery,
uint32_t queryCount,
VkBuffer dstBuffer,
VkDeviceSize dstOffset,
VkDeviceSize stride,
VkQueryResultFlags flags) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags);
}
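// Illustrative sketch: bracketing work with timestamps in an assumed `queryPool`.
// Results are in ticks; the tick-to-nanosecond scale is
// VkPhysicalDeviceLimits::timestampPeriod.
//
//   vkCmdResetQueryPool(cmd, queryPool, 0, 2);
//   vkCmdWriteTimestamp(cmd, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, queryPool, 0);
//   // ... work to be measured ...
//   vkCmdWriteTimestamp(cmd, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, queryPool, 1);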
void vkCmdPushConstants(VkCommandBuffer commandBuffer,
VkPipelineLayout layout,
VkShaderStageFlags stageFlags,
uint32_t offset,
uint32_t size,
const void *pValues) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues);
}
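// Push constants are the cheapest way to hand a few bytes to shaders; the offset/size
// must fall inside a VkPushConstantRange declared in `layout`. Hypothetical example
// pushing a 4x4 float matrix (`matrix` assumed) to the vertex stage:
//
//   vkCmdPushConstants(cmd, layout, VK_SHADER_STAGE_VERTEX_BIT, 0, sizeof(float) * 16, matrix);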
void vkCmdBeginRenderPass(VkCommandBuffer commandBuffer,
const VkRenderPassBeginInfo *pRenderPassBegin,
VkSubpassContents contents) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents);
}
void vkCmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdNextSubpass(commandBuffer, contents);
}
void vkCmdEndRenderPass(VkCommandBuffer commandBuffer) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdEndRenderPass(commandBuffer);
}
void vkCmdExecuteCommands(VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer *pCommandBuffers) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers);
}
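// Taken together, a minimal single-subpass frame records as follows (illustrative,
// handles assumed; replaying secondary buffers via vkCmdExecuteCommands instead requires
// VK_SUBPASS_CONTENTS_SECONDARY_COMMAND_BUFFERS):
//
//   vkCmdBeginRenderPass(cmd, &renderPassBegin, VK_SUBPASS_CONTENTS_INLINE);
//   // ... bind and draw ...
//   vkCmdEndRenderPass(cmd);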
//=== VK_VERSION_1_1 ===
VkResult vkEnumerateInstanceVersion(uint32_t *pApiVersion) const VULKAN_HPP_NOEXCEPT
{
return ::vkEnumerateInstanceVersion(pApiVersion);
}
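// The version written by vkEnumerateInstanceVersion is packed; decode it with the
// standard macros, e.g.
//
//   uint32_t v = 0;
//   vkEnumerateInstanceVersion(&v);
//   uint32_t major = VK_API_VERSION_MAJOR(v);
//   uint32_t minor = VK_API_VERSION_MINOR(v);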
VkResult vkBindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) const VULKAN_HPP_NOEXCEPT
{
return ::vkBindBufferMemory2(device, bindInfoCount, pBindInfos);
}
VkResult vkBindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo *pBindInfos) const VULKAN_HPP_NOEXCEPT
{
return ::vkBindImageMemory2(device, bindInfoCount, pBindInfos);
}
void vkGetDeviceGroupPeerMemoryFeatures(VkDevice device,
uint32_t heapIndex,
uint32_t localDeviceIndex,
uint32_t remoteDeviceIndex,
VkPeerMemoryFeatureFlags *pPeerMemoryFeatures) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures);
}
void vkCmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetDeviceMask(commandBuffer, deviceMask);
}
void vkCmdDispatchBase(VkCommandBuffer commandBuffer,
uint32_t baseGroupX,
uint32_t baseGroupY,
uint32_t baseGroupZ,
uint32_t groupCountX,
uint32_t groupCountY,
uint32_t groupCountZ) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ);
}
VkResult vkEnumeratePhysicalDeviceGroups(VkInstance instance,
uint32_t *pPhysicalDeviceGroupCount,
VkPhysicalDeviceGroupProperties *pPhysicalDeviceGroupProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkEnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties);
}
void vkGetImageMemoryRequirements2(VkDevice device,
const VkImageMemoryRequirementsInfo2 *pInfo,
VkMemoryRequirements2 *pMemoryRequirements) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements);
}
void vkGetBufferMemoryRequirements2(VkDevice device,
const VkBufferMemoryRequirementsInfo2 *pInfo,
VkMemoryRequirements2 *pMemoryRequirements) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
}
void vkGetImageSparseMemoryRequirements2(VkDevice device,
const VkImageSparseMemoryRequirementsInfo2 *pInfo,
uint32_t *pSparseMemoryRequirementCount,
VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements);
}
void vkGetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2 *pFeatures) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceFeatures2(physicalDevice, pFeatures);
}
void vkGetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2 *pProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceProperties2(physicalDevice, pProperties);
}
void vkGetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice,
VkFormat format,
VkFormatProperties2 *pFormatProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties);
}
VkResult vkGetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceImageFormatInfo2 *pImageFormatInfo,
VkImageFormatProperties2 *pImageFormatProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties);
}
void vkGetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice,
uint32_t *pQueueFamilyPropertyCount,
VkQueueFamilyProperties2 *pQueueFamilyProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties);
}
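// This follows the usual two-call enumeration idiom: query the count first, then fill a
// caller-sized array whose sType fields are pre-initialized. An illustrative sketch:
//
//   uint32_t count = 0;
//   vkGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, &count, nullptr);
//   std::vector<VkQueueFamilyProperties2> props(count, { VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2 });
//   vkGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, &count, props.data());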
void vkGetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice,
VkPhysicalDeviceMemoryProperties2 *pMemoryProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties);
}
void vkGetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceSparseImageFormatInfo2 *pFormatInfo,
uint32_t *pPropertyCount,
VkSparseImageFormatProperties2 *pProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties);
}
void vkTrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) const VULKAN_HPP_NOEXCEPT
{
return ::vkTrimCommandPool(device, commandPool, flags);
}
void vkGetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2 *pQueueInfo, VkQueue *pQueue) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceQueue2(device, pQueueInfo, pQueue);
}
VkResult vkCreateSamplerYcbcrConversion(VkDevice device,
const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkSamplerYcbcrConversion *pYcbcrConversion) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion);
}
void vkDestroySamplerYcbcrConversion(VkDevice device,
VkSamplerYcbcrConversion ycbcrConversion,
const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
}
VkResult vkCreateDescriptorUpdateTemplate(VkDevice device,
const VkDescriptorUpdateTemplateCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkDescriptorUpdateTemplate *pDescriptorUpdateTemplate) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
}
void vkDestroyDescriptorUpdateTemplate(VkDevice device,
VkDescriptorUpdateTemplate descriptorUpdateTemplate,
const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator);
}
void vkUpdateDescriptorSetWithTemplate(VkDevice device,
VkDescriptorSet descriptorSet,
VkDescriptorUpdateTemplate descriptorUpdateTemplate,
const void *pData) const VULKAN_HPP_NOEXCEPT
{
return ::vkUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData);
}
void vkGetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceExternalBufferInfo *pExternalBufferInfo,
VkExternalBufferProperties *pExternalBufferProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties);
}
void vkGetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceExternalFenceInfo *pExternalFenceInfo,
VkExternalFenceProperties *pExternalFenceProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
}
void vkGetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice,
const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo,
VkExternalSemaphoreProperties *pExternalSemaphoreProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties);
}
void vkGetDescriptorSetLayoutSupport(VkDevice device,
const VkDescriptorSetLayoutCreateInfo *pCreateInfo,
VkDescriptorSetLayoutSupport *pSupport) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport);
}
//=== VK_VERSION_1_2 ===
void vkCmdDrawIndirectCount(VkCommandBuffer commandBuffer,
VkBuffer buffer,
VkDeviceSize offset,
VkBuffer countBuffer,
VkDeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}
void vkCmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer,
VkBuffer buffer,
VkDeviceSize offset,
VkBuffer countBuffer,
VkDeviceSize countBufferOffset,
uint32_t maxDrawCount,
uint32_t stride) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride);
}
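// For both *Count variants the device executes
// min(maxDrawCount, the uint32_t read from countBuffer at countBufferOffset)
// draws, so the GPU itself can decide how many records are live.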
VkResult vkCreateRenderPass2(VkDevice device,
const VkRenderPassCreateInfo2 *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkRenderPass *pRenderPass) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass);
}
void vkCmdBeginRenderPass2(VkCommandBuffer commandBuffer,
const VkRenderPassBeginInfo *pRenderPassBegin,
const VkSubpassBeginInfo *pSubpassBeginInfo) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo);
}
void vkCmdNextSubpass2(VkCommandBuffer commandBuffer,
const VkSubpassBeginInfo *pSubpassBeginInfo,
const VkSubpassEndInfo *pSubpassEndInfo) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo);
}
void vkCmdEndRenderPass2(VkCommandBuffer commandBuffer, const VkSubpassEndInfo *pSubpassEndInfo) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
}
void vkResetQueryPool(VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount) const VULKAN_HPP_NOEXCEPT
{
return ::vkResetQueryPool(device, queryPool, firstQuery, queryCount);
}
VkResult vkGetSemaphoreCounterValue(VkDevice device, VkSemaphore semaphore, uint64_t *pValue) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetSemaphoreCounterValue(device, semaphore, pValue);
}
VkResult vkWaitSemaphores(VkDevice device, const VkSemaphoreWaitInfo *pWaitInfo, uint64_t timeout) const VULKAN_HPP_NOEXCEPT
{
return ::vkWaitSemaphores(device, pWaitInfo, timeout);
}
VkResult vkSignalSemaphore(VkDevice device, const VkSemaphoreSignalInfo *pSignalInfo) const VULKAN_HPP_NOEXCEPT
{
return ::vkSignalSemaphore(device, pSignalInfo);
}
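// Illustrative sketch: blocking on a timeline semaphore (assumed to be created with
// VkSemaphoreTypeCreateInfo::semaphoreType == VK_SEMAPHORE_TYPE_TIMELINE) until it
// reaches a hypothetical target value.
//
//   uint64_t            value = 42;
//   VkSemaphoreWaitInfo waitInfo{};
//   waitInfo.sType          = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO;
//   waitInfo.semaphoreCount = 1;
//   waitInfo.pSemaphores    = &semaphore;
//   waitInfo.pValues        = &value;
//   vkWaitSemaphores(device, &waitInfo, UINT64_MAX);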
VkDeviceAddress vkGetBufferDeviceAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferDeviceAddress(device, pInfo);
}
uint64_t vkGetBufferOpaqueCaptureAddress(VkDevice device, const VkBufferDeviceAddressInfo *pInfo) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetBufferOpaqueCaptureAddress(device, pInfo);
}
uint64_t vkGetDeviceMemoryOpaqueCaptureAddress(VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo *pInfo) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetDeviceMemoryOpaqueCaptureAddress(device, pInfo);
}
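// Illustrative sketch: fetching a GPU-visible address for a buffer that was created with
// VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT (`buffer` assumed).
//
//   VkBufferDeviceAddressInfo addressInfo{};
//   addressInfo.sType  = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO;
//   addressInfo.buffer = buffer;
//   VkDeviceAddress addr = vkGetBufferDeviceAddress(device, &addressInfo);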
//=== VK_VERSION_1_3 ===
VkResult vkGetPhysicalDeviceToolProperties(VkPhysicalDevice physicalDevice,
uint32_t *pToolCount,
VkPhysicalDeviceToolProperties *pToolProperties) const VULKAN_HPP_NOEXCEPT
{
return ::vkGetPhysicalDeviceToolProperties(physicalDevice, pToolCount, pToolProperties);
}
VkResult vkCreatePrivateDataSlot(VkDevice device,
const VkPrivateDataSlotCreateInfo *pCreateInfo,
const VkAllocationCallbacks *pAllocator,
VkPrivateDataSlot *pPrivateDataSlot) const VULKAN_HPP_NOEXCEPT
{
return ::vkCreatePrivateDataSlot(device, pCreateInfo, pAllocator, pPrivateDataSlot);
}
void vkDestroyPrivateDataSlot(VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks *pAllocator) const VULKAN_HPP_NOEXCEPT
{
return ::vkDestroyPrivateDataSlot(device, privateDataSlot, pAllocator);
}
VkResult vkSetPrivateData(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data) const
VULKAN_HPP_NOEXCEPT
{
return ::vkSetPrivateData(device, objectType, objectHandle, privateDataSlot, data);
}
void vkGetPrivateData(VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t *pData) const
VULKAN_HPP_NOEXCEPT
{
return ::vkGetPrivateData(device, objectType, objectHandle, privateDataSlot, pData);
}
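// Illustrative sketch: tagging any handle with a 64-bit value through an assumed private
// data `slot`; the handle is passed as a uint64_t.
//
//   vkSetPrivateData(device, VK_OBJECT_TYPE_IMAGE, (uint64_t)image, slot, 0xC0FFEE);
//   uint64_t stored = 0;
//   vkGetPrivateData(device, VK_OBJECT_TYPE_IMAGE, (uint64_t)image, slot, &stored);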
void vkCmdSetEvent2(VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo *pDependencyInfo) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdSetEvent2(commandBuffer, event, pDependencyInfo);
}
void vkCmdResetEvent2(VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask) const VULKAN_HPP_NOEXCEPT
{
return ::vkCmdResetEvent2(commandBuffer,