| // Copyright 2015-2022 The Khronos Group Inc. |
| // |
| // SPDX-License-Identifier: Apache-2.0 OR MIT |
| // |
| |
| // This header is generated from the Khronos Vulkan XML API Registry. |
| |
| #ifndef VULKAN_FUNCS_HPP |
| # define VULKAN_FUNCS_HPP |
| |
| namespace VULKAN_HPP_NAMESPACE |
| { |
| |
| //=========================== |
| //=== COMMAND Definitions === |
| //=========================== |
| |
| |
| //=== VK_VERSION_1_0 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Instance * pInstance, Dispatch const & d ) VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkInstance *>( pInstance ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Instance>::type createInstance( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Instance instance; |
| VkResult result = d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkInstance *>( &instance ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstance" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), instance ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>>::type createInstanceUnique( const VULKAN_HPP_NAMESPACE::InstanceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Instance instance; |
| VkResult result = d.vkCreateInstance( reinterpret_cast<const VkInstanceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkInstance *>( &instance ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::createInstanceUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Instance, Dispatch>( instance, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
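
  // Usage sketch for the createInstance wrappers above (assuming <vulkan/vulkan.hpp> is
  // included, exceptions are enabled, and the default dispatcher is used; all names below
  // are illustrative):
  //
  //   vk::ApplicationInfo    appInfo( "MyApp", 1, "MyEngine", 1, VK_API_VERSION_1_0 );
  //   vk::InstanceCreateInfo createInfo( {}, &appInfo );
  //   vk::Instance           instance = vk::createInstance( createInfo );   // throws vk::SystemError on failure
  //   // or, letting a UniqueHandle call vkDestroyInstance automatically:
  //   vk::UniqueInstance     uniqueInstance = vk::createInstanceUnique( createInfo );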
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Instance::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Instance::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyInstance( m_instance, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDevices( uint32_t * pPhysicalDeviceCount, VULKAN_HPP_NAMESPACE::PhysicalDevice * pPhysicalDevices, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkEnumeratePhysicalDevices( m_instance, pPhysicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( pPhysicalDevices ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename PhysicalDeviceAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type Instance::enumeratePhysicalDevices( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices; |
| uint32_t physicalDeviceCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && physicalDeviceCount ) |
| { |
| physicalDevices.resize( physicalDeviceCount ); |
| result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); |
| VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); |
| if ( physicalDeviceCount < physicalDevices.size() ) |
| { |
| physicalDevices.resize( physicalDeviceCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices ); |
| } |
| |
| template <typename PhysicalDeviceAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDevice>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator>>::type Instance::enumeratePhysicalDevices( PhysicalDeviceAllocator & physicalDeviceAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::PhysicalDevice, PhysicalDeviceAllocator> physicalDevices( physicalDeviceAllocator ); |
| uint32_t physicalDeviceCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && physicalDeviceCount ) |
| { |
| physicalDevices.resize( physicalDeviceCount ); |
| result = d.vkEnumeratePhysicalDevices( m_instance, &physicalDeviceCount, reinterpret_cast<VkPhysicalDevice *>( physicalDevices.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDevices" ); |
| VULKAN_HPP_ASSERT( physicalDeviceCount <= physicalDevices.size() ); |
| if ( physicalDeviceCount < physicalDevices.size() ) |
| { |
| physicalDevices.resize( physicalDeviceCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDevices ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
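
  // The enhanced enumeratePhysicalDevices overloads above follow the standard Vulkan
  // two-call idiom: query the count with a null pointer, resize the vector, fetch the
  // data, and retry while the driver reports VK_INCOMPLETE. A minimal call site
  // (assuming a valid vk::Instance named instance, as in the sketch above):
  //
  //   std::vector<vk::PhysicalDevice> physicalDevices = instance.enumeratePhysicalDevices();
  //   if ( physicalDevices.empty() )
  //   {
  //     // no Vulkan-capable device available
  //   }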
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getFeatures( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( pFeatures ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures PhysicalDevice::getFeatures( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures features; |
| d.vkGetPhysicalDeviceFeatures( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures *>( &features ) ); |
| |
| |
| return features; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties * pFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( pFormatProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties PhysicalDevice::getFormatProperties( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::FormatProperties formatProperties; |
| d.vkGetPhysicalDeviceFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties *>( &formatProperties ) ); |
| |
| |
| return formatProperties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ImageFormatProperties * pImageFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties *>( pImageFormatProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::type PhysicalDevice::getImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ImageFormatProperties imageFormatProperties; |
| VkResult result = d.vkGetPhysicalDeviceImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), reinterpret_cast<VkImageFormatProperties *>( &imageFormatProperties ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( pProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties PhysicalDevice::getProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties properties; |
| d.vkGetPhysicalDeviceProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties *>( &properties ) ); |
| |
| |
| return properties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
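
  // getFeatures, getFormatProperties, getImageFormatProperties and getProperties wrap the
  // corresponding vkGetPhysicalDevice* entry points; in enhanced mode they return the
  // queried structure by value instead of filling an output pointer. A small sketch
  // (assuming a valid vk::PhysicalDevice named physicalDevice; the suitability test is
  // purely illustrative):
  //
  //   vk::PhysicalDeviceProperties props    = physicalDevice.getProperties();
  //   vk::PhysicalDeviceFeatures   features = physicalDevice.getFeatures();
  //   if ( features.samplerAnisotropy && ( props.limits.maxSamplerAnisotropy >= 8.0f ) )
  //   {
  //     // device meets this application's (hypothetical) requirements
  //   }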
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties * pQueueFamilyProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( pQueueFamilyProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename QueueFamilyPropertiesAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> PhysicalDevice::getQueueFamilyProperties( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties; |
| uint32_t queueFamilyPropertyCount; |
| d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); |
| if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) |
| { |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| } |
| return queueFamilyProperties; |
| } |
| |
| template <typename QueueFamilyPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> PhysicalDevice::getQueueFamilyProperties( QueueFamilyPropertiesAllocator & queueFamilyPropertiesAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties, QueueFamilyPropertiesAllocator> queueFamilyProperties( queueFamilyPropertiesAllocator ); |
| uint32_t queueFamilyPropertyCount; |
| d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| d.vkGetPhysicalDeviceQueueFamilyProperties( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties *>( queueFamilyProperties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); |
| if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) |
| { |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| } |
| return queueFamilyProperties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
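
  // A typical use of getQueueFamilyProperties is to pick a queue family index before
  // creating a device. A minimal sketch (assuming a valid vk::PhysicalDevice named
  // physicalDevice; the variable names are illustrative):
  //
  //   std::vector<vk::QueueFamilyProperties> families = physicalDevice.getQueueFamilyProperties();
  //   uint32_t graphicsQueueFamilyIndex = 0;
  //   for ( uint32_t i = 0; i < families.size(); ++i )
  //   {
  //     if ( families[i].queueFlags & vk::QueueFlagBits::eGraphics )
  //     {
  //       graphicsQueueFamilyIndex = i;
  //       break;
  //     }
  //   }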
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties * pMemoryProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( pMemoryProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties PhysicalDevice::getMemoryProperties( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties memoryProperties; |
| d.vkGetPhysicalDeviceMemoryProperties( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties *>( &memoryProperties ) ); |
| |
| |
| return memoryProperties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return d.vkGetInstanceProcAddr( m_instance, pName ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE PFN_vkVoidFunction Instance::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| PFN_vkVoidFunction result = d.vkGetInstanceProcAddr( m_instance, name.c_str() ); |
| |
| |
| return result; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const char * pName, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return d.vkGetDeviceProcAddr( m_device, pName ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE PFN_vkVoidFunction Device::getProcAddr( const std::string & name, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| PFN_vkVoidFunction result = d.vkGetDeviceProcAddr( m_device, name.c_str() ); |
| |
| |
| return result; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
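
  // getProcAddr wraps vkGetInstanceProcAddr / vkGetDeviceProcAddr and is mostly useful for
  // loading extension entry points or filling a dynamic dispatcher. A sketch (assuming a
  // valid vk::Instance and that VK_EXT_debug_utils was enabled on it):
  //
  //   auto pfnCreateMessenger = reinterpret_cast<PFN_vkCreateDebugUtilsMessengerEXT>(
  //     instance.getProcAddr( "vkCreateDebugUtilsMessengerEXT" ) );
  //   if ( !pfnCreateMessenger )
  //   {
  //     // entry point not available
  //   }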
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Device * pDevice, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDevice *>( pDevice ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Device>::type PhysicalDevice::createDevice( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Device device; |
| VkResult result = d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDevice *>( &device ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDevice" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), device ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>>::type PhysicalDevice::createDeviceUnique( const VULKAN_HPP_NAMESPACE::DeviceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Device device; |
| VkResult result = d.vkCreateDevice( m_physicalDevice, reinterpret_cast<const VkDeviceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDevice *>( &device ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDeviceUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Device, Dispatch>( device, ObjectDestroy<NoParent, Dispatch>( allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
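
  // Device creation usually combines a queue family index (see the getQueueFamilyProperties
  // sketch above) with one or more DeviceQueueCreateInfo entries. A minimal sketch
  // (assuming physicalDevice and graphicsQueueFamilyIndex from the earlier sketches):
  //
  //   float queuePriority = 1.0f;
  //   vk::DeviceQueueCreateInfo queueInfo( {}, graphicsQueueFamilyIndex, 1, &queuePriority );
  //   vk::DeviceCreateInfo      deviceInfo( {}, 1, &queueInfo );
  //   vk::Device                device = physicalDevice.createDevice( deviceInfo );
  //   // or: vk::UniqueDevice uniqueDevice = physicalDevice.createDeviceUnique( deviceInfo );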
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyDevice( m_device, reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceExtensionProperties( const char * pLayerName, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d ) VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkEnumerateInstanceExtensionProperties( pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename ExtensionPropertiesAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties; |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| |
| template <typename ExtensionPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type enumerateInstanceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d ) |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator ); |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkEnumerateInstanceExtensionProperties( layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceExtensionProperties" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
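
  // enumerateInstanceExtensionProperties (and the device-level and layer enumeration
  // variants below) use the same VK_INCOMPLETE retry loop as enumeratePhysicalDevices.
  // A minimal sketch (needs <cstring> for strcmp; the layer name argument is optional):
  //
  //   std::vector<vk::ExtensionProperties> extensions = vk::enumerateInstanceExtensionProperties();
  //   bool hasSurface = false;
  //   for ( auto const & ext : extensions )
  //   {
  //     if ( strcmp( ext.extensionName.data(), VK_KHR_SURFACE_EXTENSION_NAME ) == 0 )
  //     {
  //       hasSurface = true;
  //     }
  //   }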
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceExtensionProperties( const char * pLayerName, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::ExtensionProperties * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, pLayerName, pPropertyCount, reinterpret_cast<VkExtensionProperties *>( pProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename ExtensionPropertiesAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties; |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| |
| template <typename ExtensionPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, ExtensionProperties>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceExtensionProperties( Optional<const std::string> layerName, ExtensionPropertiesAllocator & extensionPropertiesAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::ExtensionProperties, ExtensionPropertiesAllocator> properties( extensionPropertiesAllocator ); |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkEnumerateDeviceExtensionProperties( m_physicalDevice, layerName ? layerName->c_str() : nullptr, &propertyCount, reinterpret_cast<VkExtensionProperties *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceExtensionProperties" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceLayerProperties( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d ) VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkEnumerateInstanceLayerProperties( pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename LayerPropertiesAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( Dispatch const & d ) |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties; |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| |
| template <typename LayerPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type enumerateInstanceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator ); |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumerateInstanceLayerProperties( &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkEnumerateInstanceLayerProperties( &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceLayerProperties" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateDeviceLayerProperties( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::LayerProperties * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkEnumerateDeviceLayerProperties( m_physicalDevice, pPropertyCount, reinterpret_cast<VkLayerProperties *>( pProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename LayerPropertiesAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceLayerProperties( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties; |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| |
| template <typename LayerPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, LayerProperties>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator>>::type PhysicalDevice::enumerateDeviceLayerProperties( LayerPropertiesAllocator & layerPropertiesAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::LayerProperties, LayerPropertiesAllocator> properties( layerPropertiesAllocator ); |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkEnumerateDeviceLayerProperties( m_physicalDevice, &propertyCount, reinterpret_cast<VkLayerProperties *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateDeviceLayerProperties" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( pQueue ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue( uint32_t queueFamilyIndex, uint32_t queueIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Queue queue; |
| d.vkGetDeviceQueue( m_device, queueFamilyIndex, queueIndex, reinterpret_cast<VkQueue *>( &queue ) ); |
| |
| |
| return queue; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
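
  // Queues are retrieved from the device after creation; the enhanced overload returns the
  // vk::Queue by value. Sketch (assuming device and graphicsQueueFamilyIndex from the
  // sketches above):
  //
  //   vk::Queue graphicsQueue = device.getQueue( graphicsQueueFamilyIndex, 0 );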
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkQueueSubmit( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo *>( pSubmits ), static_cast<VkFence>( fence ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkQueueSubmit( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo *>( submits.data() ), static_cast<VkFence>( fence ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
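
  // Queue::submit takes an ArrayProxy, so a single SubmitInfo, a std::vector, or an
  // initializer list can be passed directly. Sketch (assuming a recorded vk::CommandBuffer
  // named commandBuffer, an optional vk::Fence named fence, and graphicsQueue from above):
  //
  //   vk::SubmitInfo submitInfo( 0, nullptr, nullptr, 1, &commandBuffer );
  //   graphicsQueue.submit( submitInfo, fence );   // fence may be a null handle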
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkQueueWaitIdle( m_queue ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::waitIdle( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkQueueWaitIdle( m_queue ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::waitIdle" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ |
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitIdle( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkDeviceWaitIdle( m_device ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::waitIdle( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkDeviceWaitIdle( m_device ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitIdle" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo * pAllocateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DeviceMemory * pMemory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( pAllocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDeviceMemory *>( pMemory ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceMemory>::type Device::allocateMemory( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DeviceMemory memory; |
| VkResult result = d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDeviceMemory *>( &memory ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemory" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memory ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>>::type Device::allocateMemoryUnique( const VULKAN_HPP_NAMESPACE::MemoryAllocateInfo & allocateInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DeviceMemory memory; |
| VkResult result = d.vkAllocateMemory( m_device, reinterpret_cast<const VkMemoryAllocateInfo *>( &allocateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDeviceMemory *>( &memory ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateMemoryUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DeviceMemory, Dispatch>( memory, ObjectFree<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
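
  // allocateMemory returns a vk::DeviceMemory (or a UniqueHandle that frees the allocation
  // on destruction). The allocation size and memory type index normally come from a
  // MemoryRequirements query; see the combined sketch after getImageMemoryRequirements
  // below. Minimal form (assuming allocationSize and memoryTypeIndex have been determined):
  //
  //   vk::MemoryAllocateInfo allocInfo( allocationSize, memoryTypeIndex );
  //   vk::DeviceMemory       memory = device.allocateMemory( allocInfo );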
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::freeMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkFreeMemory( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, void ** ppData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), ppData ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<void *>::type Device::mapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::DeviceSize size, VULKAN_HPP_NAMESPACE::MemoryMapFlags flags, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| void * pData; |
| VkResult result = d.vkMapMemory( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( offset ), static_cast<VkDeviceSize>( size ), static_cast<VkMemoryMapFlags>( flags ), &pData ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mapMemory" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pData ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
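
  // mapMemory returns the mapped pointer directly in enhanced mode. Host-visible but
  // non-coherent memory additionally needs flushMappedMemoryRanges / invalidateMappedMemoryRanges
  // (below) around CPU writes and reads. Sketch (assuming memory and allocationSize from the
  // allocation sketch above, a host-visible memory type, and an illustrative host buffer srcData):
  //
  //   void * data = device.mapMemory( memory, 0, allocationSize );
  //   memcpy( data, srcData, static_cast<size_t>( allocationSize ) );
  //   device.unmapMemory( memory );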
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::unmapMemory( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkUnmapMemory( m_device, static_cast<VkDeviceMemory>( memory ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::flushMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkFlushMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::flushMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkFlushMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::flushMappedMemoryRanges" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::invalidateMappedMemoryRanges( uint32_t memoryRangeCount, const VULKAN_HPP_NAMESPACE::MappedMemoryRange * pMemoryRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkInvalidateMappedMemoryRanges( m_device, memoryRangeCount, reinterpret_cast<const VkMappedMemoryRange *>( pMemoryRanges ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::invalidateMappedMemoryRanges( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MappedMemoryRange> const & memoryRanges, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkInvalidateMappedMemoryRanges( m_device, memoryRanges.size(), reinterpret_cast<const VkMappedMemoryRange *>( memoryRanges.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::invalidateMappedMemoryRanges" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize * pCommittedMemoryInBytes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( pCommittedMemoryInBytes ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize Device::getMemoryCommitment( VULKAN_HPP_NAMESPACE::DeviceMemory memory, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DeviceSize committedMemoryInBytes; |
| d.vkGetDeviceMemoryCommitment( m_device, static_cast<VkDeviceMemory>( memory ), reinterpret_cast<VkDeviceSize *>( &committedMemoryInBytes ) ); |
| |
| |
| return committedMemoryInBytes; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkBindBufferMemory( m_device, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ |
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::DeviceSize memoryOffset, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkBindImageMemory( m_device, static_cast<VkImage>( image ), static_cast<VkDeviceMemory>( memory ), static_cast<VkDeviceSize>( memoryOffset ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getBufferMemoryRequirements( VULKAN_HPP_NAMESPACE::Buffer buffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; |
| d.vkGetBufferMemoryRequirements( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); |
| |
| |
| return memoryRequirements; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::MemoryRequirements * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( pMemoryRequirements ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements Device::getImageMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::MemoryRequirements memoryRequirements; |
| d.vkGetImageMemoryRequirements( m_device, static_cast<VkImage>( image ), reinterpret_cast<VkMemoryRequirements *>( &memoryRequirements ) ); |
| |
| |
| return memoryRequirements; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
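

  // Illustrative sketch, not part of the generated API (the guard macro and the helper name are hypothetical):
  // the memoryTypeBits field of the MemoryRequirements returned by the overloads above is typically matched
  // against the physical-device memory types like this before calling allocateMemory.
#if defined( VULKAN_HPP_FUNCS_USAGE_EXAMPLES )
  VULKAN_HPP_INLINE uint32_t exampleFindMemoryTypeIndex( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties const & memoryProperties, uint32_t memoryTypeBits, VULKAN_HPP_NAMESPACE::MemoryPropertyFlags requiredFlags )
  {
    for ( uint32_t i = 0; i < memoryProperties.memoryTypeCount; ++i )
    {
      // the type must be allowed by the resource and provide all required property flags
      if ( ( memoryTypeBits & ( 1u << i ) ) && ( ( memoryProperties.memoryTypes[i].propertyFlags & requiredFlags ) == requiredFlags ) )
      {
        return i;
      }
    }
    return VK_MAX_MEMORY_TYPES;  // sentinel: no compatible memory type found
  }
#endif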
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements * pSparseMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( pSparseMemoryRequirements ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements; |
| uint32_t sparseMemoryRequirementCount; |
| d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); |
| sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); |
| d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); |
| |
| VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); |
| if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) |
| { |
| sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); |
| } |
| return sparseMemoryRequirements; |
| } |
| |
| template <typename SparseImageMemoryRequirementsAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> Device::getImageSparseMemoryRequirements( VULKAN_HPP_NAMESPACE::Image image, SparseImageMemoryRequirementsAllocator & sparseImageMemoryRequirementsAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements, SparseImageMemoryRequirementsAllocator> sparseMemoryRequirements( sparseImageMemoryRequirementsAllocator ); |
| uint32_t sparseMemoryRequirementCount; |
| d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, nullptr ); |
| sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); |
| d.vkGetImageSparseMemoryRequirements( m_device, static_cast<VkImage>( image ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements *>( sparseMemoryRequirements.data() ) ); |
| |
| VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); |
| if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) |
| { |
| sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); |
| } |
| return sparseMemoryRequirements; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
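

  // Illustrative sketch, not part of the generated API (guard macro and helper name are hypothetical):
  // the vector-returning overload above can be consumed directly, e.g. to sum the mip-tail sizes
  // reported for a sparse image.
#if defined( VULKAN_HPP_FUNCS_USAGE_EXAMPLES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceSize exampleTotalMipTailSize( VULKAN_HPP_NAMESPACE::Device device, VULKAN_HPP_NAMESPACE::Image image )
  {
    VULKAN_HPP_NAMESPACE::DeviceSize total = 0;
    for ( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements const & requirements : device.getImageSparseMemoryRequirements( image ) )
    {
      total += requirements.imageMipTailSize;
    }
    return total;
  }
#endif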
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( pProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename SparseImageFormatPropertiesAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties; |
| uint32_t propertyCount; |
| d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr ); |
| properties.resize( propertyCount ); |
| d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return properties; |
| } |
| |
| template <typename SparseImageFormatPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> PhysicalDevice::getSparseImageFormatProperties( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::SampleCountFlagBits samples, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageTiling tiling, SparseImageFormatPropertiesAllocator & sparseImageFormatPropertiesAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties, SparseImageFormatPropertiesAllocator> properties( sparseImageFormatPropertiesAllocator ); |
| uint32_t propertyCount; |
| d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, nullptr ); |
| properties.resize( propertyCount ); |
| d.vkGetPhysicalDeviceSparseImageFormatProperties( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkSampleCountFlagBits>( samples ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageTiling>( tiling ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties *>( properties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return properties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
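

  // Illustrative sketch, not part of the generated API (guard macro and helper name are hypothetical):
  // if the queried format/type/samples/usage/tiling combination is not supported for sparse residency,
  // the vector-returning overload above comes back empty.
#if defined( VULKAN_HPP_FUNCS_USAGE_EXAMPLES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  VULKAN_HPP_INLINE bool exampleSupportsSparse2DImage( VULKAN_HPP_NAMESPACE::PhysicalDevice physicalDevice, VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage )
  {
    return !physicalDevice.getSparseImageFormatProperties( format, VULKAN_HPP_NAMESPACE::ImageType::e2D, VULKAN_HPP_NAMESPACE::SampleCountFlagBits::e1, usage, VULKAN_HPP_NAMESPACE::ImageTiling::eOptimal ).empty();
  }
#endif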
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::bindSparse( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindSparseInfo * pBindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkQueueBindSparse( m_queue, bindInfoCount, reinterpret_cast<const VkBindSparseInfo *>( pBindInfo ), static_cast<VkFence>( fence ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::bindSparse( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindSparseInfo> const & bindInfo, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkQueueBindSparse( m_queue, bindInfo.size(), reinterpret_cast<const VkBindSparseInfo *>( bindInfo.data() ), static_cast<VkFence>( fence ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::bindSparse" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFence *>( pFence ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::createFence( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Fence fence; |
| VkResult result = d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFence" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::createFenceUnique( const VULKAN_HPP_NAMESPACE::FenceCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Fence fence; |
| VkResult result = d.vkCreateFence( m_device, reinterpret_cast<const VkFenceCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFenceUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
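

  // Illustrative sketch, not part of the generated API (guard macro and helper name are hypothetical):
  // create a fence that starts out signaled and let the unique handle returned by createFenceUnique
  // destroy it automatically. Assumes the default configuration with exceptions, smart handles,
  // struct constructors and the default dispatcher.
#if defined( VULKAN_HPP_FUNCS_USAGE_EXAMPLES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !defined( VULKAN_HPP_NO_SMART_HANDLE ) && !defined( VULKAN_HPP_NO_EXCEPTIONS )
  VULKAN_HPP_INLINE UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, VULKAN_HPP_DEFAULT_DISPATCHER_TYPE> exampleCreateSignaledFence( VULKAN_HPP_NAMESPACE::Device device )
  {
    return device.createFenceUnique( VULKAN_HPP_NAMESPACE::FenceCreateInfo( VULKAN_HPP_NAMESPACE::FenceCreateFlagBits::eSignaled ) );
  }
#endif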
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyFence( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Fence fence, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyFence( m_device, static_cast<VkFence>( fence ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence * pFences, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkResetFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkResetFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetFences" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getFenceStatus( VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkGetFenceStatus( m_device, static_cast<VkFence>( fence ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceStatus", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); |
| |
| return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); |
| } |
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitForFences( uint32_t fenceCount, const VULKAN_HPP_NAMESPACE::Fence * pFences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkWaitForFences( m_device, fenceCount, reinterpret_cast<const VkFence *>( pFences ), static_cast<VkBool32>( waitAll ), timeout ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitForFences( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Fence> const & fences, VULKAN_HPP_NAMESPACE::Bool32 waitAll, uint64_t timeout, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkWaitForFences( m_device, fences.size(), reinterpret_cast<const VkFence *>( fences.data() ), static_cast<VkBool32>( waitAll ), timeout ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitForFences", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); |
| |
| return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
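

  // Illustrative sketch, not part of the generated API (guard macro and helper name are hypothetical):
  // wait for a single fence and reset it once signaled, using the ArrayProxy overloads above; a single
  // Fence converts implicitly to a one-element ArrayProxy. Assumes exceptions are enabled, so only
  // eSuccess and eTimeout reach the caller here.
#if defined( VULKAN_HPP_FUNCS_USAGE_EXAMPLES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !defined( VULKAN_HPP_NO_EXCEPTIONS )
  VULKAN_HPP_INLINE bool exampleWaitAndResetFence( VULKAN_HPP_NAMESPACE::Device device, VULKAN_HPP_NAMESPACE::Fence fence, uint64_t timeoutNanoseconds )
  {
    if ( device.waitForFences( fence, VK_TRUE, timeoutNanoseconds ) == VULKAN_HPP_NAMESPACE::Result::eTimeout )
    {
      return false;  // the fence did not signal within the timeout
    }
    device.resetFences( fence );  // put the fence back into the unsignaled state
    return true;
  }
#endif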
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Semaphore * pSemaphore, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSemaphore *>( pSemaphore ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Semaphore>::type Device::createSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Semaphore semaphore; |
| VkResult result = d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSemaphore *>( &semaphore ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphore" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), semaphore ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>>::type Device::createSemaphoreUnique( const VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Semaphore semaphore; |
| VkResult result = d.vkCreateSemaphore( m_device, reinterpret_cast<const VkSemaphoreCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSemaphore *>( &semaphore ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSemaphoreUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Semaphore, Dispatch>( semaphore, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroySemaphore( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroySemaphore( m_device, static_cast<VkSemaphore>( semaphore ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Event * pEvent, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkEvent *>( pEvent ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Event>::type Device::createEvent( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Event event; |
| VkResult result = d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkEvent *>( &event ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEvent" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), event ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>>::type Device::createEventUnique( const VULKAN_HPP_NAMESPACE::EventCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Event event; |
| VkResult result = d.vkCreateEvent( m_device, reinterpret_cast<const VkEventCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkEvent *>( &event ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createEventUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Event, Dispatch>( event, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyEvent( VULKAN_HPP_NAMESPACE::Event event, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Event event, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyEvent( m_device, static_cast<VkEvent>( event ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getEventStatus( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkGetEventStatus( m_device, static_cast<VkEvent>( event ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getEventStatus", { VULKAN_HPP_NAMESPACE::Result::eEventSet, VULKAN_HPP_NAMESPACE::Result::eEventReset } ); |
| |
| return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); |
| } |
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkSetEvent( m_device, static_cast<VkEvent>( event ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setEvent" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetEvent( VULKAN_HPP_NAMESPACE::Event event, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkResetEvent( m_device, static_cast<VkEvent>( event ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetEvent" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
#endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */
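

  // Illustrative sketch, not part of the generated API (guard macro and helper name are hypothetical):
  // set an event from the host and poll it with the overloads above; getEventStatus reports eEventSet
  // or eEventReset, both of which are success codes. Assumes exceptions are enabled.
#if defined( VULKAN_HPP_FUNCS_USAGE_EXAMPLES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !defined( VULKAN_HPP_NO_EXCEPTIONS )
  VULKAN_HPP_INLINE bool exampleSetAndPollEvent( VULKAN_HPP_NAMESPACE::Device device, VULKAN_HPP_NAMESPACE::Event event )
  {
    device.setEvent( event );
    return device.getEventStatus( event ) == VULKAN_HPP_NAMESPACE::Result::eEventSet;
  }
#endif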
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::QueryPool * pQueryPool, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkQueryPool *>( pQueryPool ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::QueryPool>::type Device::createQueryPool( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::QueryPool queryPool; |
| VkResult result = d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkQueryPool *>( &queryPool ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPool" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), queryPool ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>>::type Device::createQueryPoolUnique( const VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::QueryPool queryPool; |
| VkResult result = d.vkCreateQueryPool( m_device, reinterpret_cast<const VkQueryPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkQueryPool *>( &queryPool ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createQueryPoolUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::QueryPool, Dispatch>( queryPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::QueryPool queryPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void * pData, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, dataSize, pData, static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename DataType, typename DataTypeAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<DataType, DataTypeAllocator>> Device::getQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| VULKAN_HPP_ASSERT( dataSize % sizeof( DataType ) == 0 ); |
| std::vector<DataType, DataTypeAllocator> data( dataSize / sizeof( DataType ) ); |
| VkResult result = d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, data.size() * sizeof( DataType ), reinterpret_cast<void *>( data.data() ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResults", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); |
| |
| return ResultValue<std::vector<DataType, DataTypeAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data ); |
| } |
| |
| template <typename DataType, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<DataType> Device::getQueryPoolResult( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| DataType data; |
| VkResult result = d.vkGetQueryPoolResults( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, sizeof( DataType ), reinterpret_cast<void *>( &data ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getQueryPoolResult", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eNotReady } ); |
| |
| return ResultValue<DataType>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
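

  // Illustrative sketch, not part of the generated API (guard macro and helper name are hypothetical):
  // fetch one tightly packed 64-bit value per query with the vector-returning overload above,
  // waiting until every result is available; the ResultValue carries both the Result and the data.
#if defined( VULKAN_HPP_FUNCS_USAGE_EXAMPLES ) && !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
  VULKAN_HPP_INLINE std::vector<uint64_t> exampleGetQueryResults64( VULKAN_HPP_NAMESPACE::Device device, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount )
  {
    return device.getQueryPoolResults<uint64_t>( queryPool, firstQuery, queryCount, queryCount * sizeof( uint64_t ), sizeof( uint64_t ), VULKAN_HPP_NAMESPACE::QueryResultFlagBits::e64 | VULKAN_HPP_NAMESPACE::QueryResultFlagBits::eWait ).value;
  }
#endif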
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Buffer * pBuffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkBuffer *>( pBuffer ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Buffer>::type Device::createBuffer( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Buffer buffer; |
| VkResult result = d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBuffer *>( &buffer ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBuffer" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), buffer ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>>::type Device::createBufferUnique( const VULKAN_HPP_NAMESPACE::BufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Buffer buffer; |
| VkResult result = d.vkCreateBuffer( m_device, reinterpret_cast<const VkBufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBuffer *>( &buffer ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Buffer, Dispatch>( buffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Buffer buffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyBuffer( m_device, static_cast<VkBuffer>( buffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::BufferView * pView, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkBufferView *>( pView ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::BufferView>::type Device::createBufferView( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::BufferView view; |
| VkResult result = d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBufferView *>( &view ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferView" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>>::type Device::createBufferViewUnique( const VULKAN_HPP_NAMESPACE::BufferViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::BufferView view; |
| VkResult result = d.vkCreateBufferView( m_device, reinterpret_cast<const VkBufferViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkBufferView *>( &view ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createBufferViewUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::BufferView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyBufferView( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::BufferView bufferView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyBufferView( m_device, static_cast<VkBufferView>( bufferView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Image * pImage, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkImage *>( pImage ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Image>::type Device::createImage( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Image image; |
| VkResult result = d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImage *>( &image ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImage" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), image ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>>::type Device::createImageUnique( const VULKAN_HPP_NAMESPACE::ImageCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Image image; |
| VkResult result = d.vkCreateImage( m_device, reinterpret_cast<const VkImageCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImage *>( &image ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Image, Dispatch>( image, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyImage( VULKAN_HPP_NAMESPACE::Image image, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Image image, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyImage( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource * pSubresource, VULKAN_HPP_NAMESPACE::SubresourceLayout * pLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource *>( pSubresource ), reinterpret_cast<VkSubresourceLayout *>( pLayout ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::SubresourceLayout Device::getImageSubresourceLayout( VULKAN_HPP_NAMESPACE::Image image, const VULKAN_HPP_NAMESPACE::ImageSubresource & subresource, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SubresourceLayout layout; |
| d.vkGetImageSubresourceLayout( m_device, static_cast<VkImage>( image ), reinterpret_cast<const VkImageSubresource *>( &subresource ), reinterpret_cast<VkSubresourceLayout *>( &layout ) ); |
| |
| |
| return layout; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
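| |
| // Example (illustrative sketch, not generated from the registry): querying the row pitch of a |
| // linearly tiled image so host writes can be addressed correctly. Assumes the default 'vk' |
| // namespace and dispatcher, and a valid vk::Image 'linearImage' created with |
| // vk::ImageTiling::eLinear on 'device': |
| // |
| //   vk::ImageSubresource subresource( vk::ImageAspectFlagBits::eColor, 0 /*mipLevel*/, 0 /*arrayLayer*/ ); |
| //   vk::SubresourceLayout layout = device.getImageSubresourceLayout( linearImage, subresource ); |
| //   vk::DeviceSize rowPitch = layout.rowPitch;   // bytes between successive texel rows |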
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ImageView * pView, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkImageView *>( pView ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageView>::type Device::createImageView( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ImageView view; |
| VkResult result = d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImageView *>( &view ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageView" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), view ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>>::type Device::createImageViewUnique( const VULKAN_HPP_NAMESPACE::ImageViewCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ImageView view; |
| VkResult result = d.vkCreateImageView( m_device, reinterpret_cast<const VkImageViewCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkImageView *>( &view ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createImageViewUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::ImageView, Dispatch>( view, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
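| |
| // Example (illustrative, assuming exceptions enabled, the default dispatcher, a valid vk::Device |
| // 'device', and a vk::Image 'image' of format eR8G8B8A8Unorm): the enhanced overload returns the |
| // created handle directly; the *Unique variant wraps it so the view destroys itself: |
| // |
| //   vk::ImageViewCreateInfo viewInfo( {}, image, vk::ImageViewType::e2D, vk::Format::eR8G8B8A8Unorm, |
| //                                     {}, vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) ); |
| //   vk::ImageView view = device.createImageView( viewInfo ); |
| //   // or: vk::UniqueImageView uniqueView = device.createImageViewUnique( viewInfo ); |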
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyImageView( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ImageView imageView, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyImageView( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::ShaderModule * pShaderModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkShaderModule *>( pShaderModule ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ShaderModule>::type Device::createShaderModule( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; |
| VkResult result = d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkShaderModule *>( &shaderModule ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModule" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), shaderModule ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>>::type Device::createShaderModuleUnique( const VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ShaderModule shaderModule; |
| VkResult result = d.vkCreateShaderModule( m_device, reinterpret_cast<const VkShaderModuleCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkShaderModule *>( &shaderModule ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createShaderModuleUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::ShaderModule, Dispatch>( shaderModule, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
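| |
| // Example (illustrative sketch): creating a shader module from SPIR-V words. Assumes exceptions |
| // enabled, the default dispatcher, a valid vk::Device 'device', and a hypothetical |
| // std::vector<uint32_t> 'spirv' holding compiled code; codeSize is given in bytes: |
| // |
| //   vk::ShaderModuleCreateInfo moduleInfo( {}, spirv.size() * sizeof( uint32_t ), spirv.data() ); |
| //   vk::UniqueShaderModule shaderModule = device.createShaderModuleUnique( moduleInfo ); |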
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyShaderModule( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::ShaderModule shaderModule, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyShaderModule( m_device, static_cast<VkShaderModule>( shaderModule ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PipelineCache * pPipelineCache, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipelineCache *>( pPipelineCache ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineCache>::type Device::createPipelineCache( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; |
| VkResult result = d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCache" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineCache ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>>::type Device::createPipelineCacheUnique( const VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache; |
| VkResult result = d.vkCreatePipelineCache( m_device, reinterpret_cast<const VkPipelineCacheCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineCache *>( &pipelineCache ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineCacheUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineCache, Dispatch>( pipelineCache, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
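| |
| // Example (illustrative, assuming exceptions enabled, the default dispatcher, and a valid |
| // vk::Device 'device'): creating a pipeline cache, optionally seeded with bytes saved from a |
| // previous run ('previousData' is a hypothetical std::vector<uint8_t> loaded by the application): |
| // |
| //   vk::PipelineCacheCreateInfo cacheInfo( {}, previousData.size(), previousData.data() ); |
| //   vk::UniquePipelineCache pipelineCache = device.createPipelineCacheUnique( cacheInfo ); |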
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyPipelineCache( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyPipelineCache( m_device, static_cast<VkPipelineCache>( pipelineCache ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, size_t * pDataSize, void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), pDataSize, pData ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Uint8_tAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<uint8_t, Uint8_tAllocator> data; |
| size_t dataSize = 0; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ); |
| if ( ( result == VK_SUCCESS ) && dataSize ) |
| { |
| data.resize( dataSize ); |
| result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); |
| VULKAN_HPP_ASSERT( dataSize <= data.size() ); |
| if ( dataSize < data.size() ) |
| { |
| data.resize( dataSize ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data ); |
| } |
| |
| template <typename Uint8_tAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getPipelineCacheData( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<uint8_t, Uint8_tAllocator> data( uint8_tAllocator ); |
| size_t dataSize = 0; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, nullptr ); |
| if ( ( result == VK_SUCCESS ) && dataSize ) |
| { |
| data.resize( dataSize ); |
| result = d.vkGetPipelineCacheData( m_device, static_cast<VkPipelineCache>( pipelineCache ), &dataSize, reinterpret_cast<void *>( data.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPipelineCacheData" ); |
| VULKAN_HPP_ASSERT( dataSize <= data.size() ); |
| if ( dataSize < data.size() ) |
| { |
| data.resize( dataSize ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
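| |
| // Usage note (illustrative): the enhanced overloads wrap the usual two-call size query. They call |
| // vkGetPipelineCacheData with a null data pointer to obtain the size, resize the vector, and retry |
| // while the driver reports VK_INCOMPLETE, so the caller simply receives the final byte vector. |
| // A minimal sketch, assuming exceptions enabled, the default dispatcher, and a vk::PipelineCache |
| // 'pipelineCache' owned by 'device': |
| // |
| //   std::vector<uint8_t> cacheBlob = device.getPipelineCacheData( pipelineCache ); |
| //   // cacheBlob can be written to disk and fed back later through PipelineCacheCreateInfo. |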
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, uint32_t srcCacheCount, const VULKAN_HPP_NAMESPACE::PipelineCache * pSrcCaches, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCacheCount, reinterpret_cast<const VkPipelineCache *>( pSrcCaches ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::mergePipelineCaches( VULKAN_HPP_NAMESPACE::PipelineCache dstCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::PipelineCache> const & srcCaches, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkMergePipelineCaches( m_device, static_cast<VkPipelineCache>( dstCache ), srcCaches.size(), reinterpret_cast<const VkPipelineCache *>( srcCaches.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::mergePipelineCaches" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
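| |
| // Example (illustrative sketch): merging per-thread caches into one destination cache before |
| // serializing it. Assumes exceptions enabled, the default dispatcher, a destination |
| // vk::PipelineCache 'mainCache', and hypothetical worker caches 'cacheA' and 'cacheB': |
| // |
| //   device.mergePipelineCaches( mainCache, { cacheA, cacheB } );   // ArrayProxy accepts an initializer list |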
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipeline *>( pPipelines ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename PipelineAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); |
| VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); |
| |
| return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines ); |
| } |
| |
| template <typename PipelineAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createGraphicsPipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); |
| VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); |
| |
| return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines ); |
| } |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> Device::createGraphicsPipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Pipeline pipeline; |
| VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); |
| |
| return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch, typename PipelineAllocator> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); |
| VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); |
| std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; |
| uniquePipelines.reserve( createInfos.size() ); |
| ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); |
| for ( auto const & pipeline : pipelines ) |
| { |
| uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); |
| } |
| return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) ); |
| } |
| |
| template <typename Dispatch, typename PipelineAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createGraphicsPipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); |
| VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); |
| std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); |
| uniquePipelines.reserve( createInfos.size() ); |
| ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); |
| for ( auto const & pipeline : pipelines ) |
| { |
| uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); |
| } |
| return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) ); |
| } |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> Device::createGraphicsPipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Pipeline pipeline; |
| VkResult result = d.vkCreateGraphicsPipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkGraphicsPipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createGraphicsPipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); |
| |
| return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
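| |
| // Usage note (illustrative): pipeline creation returns ResultValue rather than unwrapping the |
| // value, because ePipelineCompileRequiredEXT is accepted alongside eSuccess (see the resultCheck |
| // calls above). A minimal sketch, assuming exceptions enabled, the default dispatcher, a valid |
| // vk::Device 'device', a vk::PipelineCache 'pipelineCache', and a filled-in |
| // vk::GraphicsPipelineCreateInfo 'pipelineInfo': |
| // |
| //   vk::ResultValue<vk::Pipeline> created = device.createGraphicsPipeline( pipelineCache, pipelineInfo ); |
| //   if ( created.result == vk::Result::eSuccess ) |
| //   { |
| //     vk::Pipeline graphicsPipeline = created.value; |
| //     // ... later: device.destroyPipeline( graphicsPipeline ); |
| //   } |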
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, uint32_t createInfoCount, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Pipeline * pPipelines, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfoCount, reinterpret_cast<const VkComputePipelineCreateInfo *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipeline *>( pPipelines ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename PipelineAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size() ); |
| VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); |
| |
| return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines ); |
| } |
| |
| template <typename PipelineAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, Pipeline>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>> Device::createComputePipelines( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator> pipelines( createInfos.size(), pipelineAllocator ); |
| VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelines", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); |
| |
| return ResultValue<std::vector<VULKAN_HPP_NAMESPACE::Pipeline, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelines ); |
| } |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<VULKAN_HPP_NAMESPACE::Pipeline> Device::createComputePipeline( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Pipeline pipeline; |
| VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipeline", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); |
| |
| return ResultValue<VULKAN_HPP_NAMESPACE::Pipeline>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipeline ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch, typename PipelineAllocator> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); |
| VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); |
| std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines; |
| uniquePipelines.reserve( createInfos.size() ); |
| ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); |
| for ( auto const & pipeline : pipelines ) |
| { |
| uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); |
| } |
| return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) ); |
| } |
| |
| template <typename Dispatch, typename PipelineAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<Pipeline, Dispatch>>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>> Device::createComputePipelinesUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, PipelineAllocator & pipelineAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::Pipeline> pipelines( createInfos.size() ); |
| VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), createInfos.size(), reinterpret_cast<const VkComputePipelineCreateInfo *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( pipelines.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelinesUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); |
| std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator> uniquePipelines( pipelineAllocator ); |
| uniquePipelines.reserve( createInfos.size() ); |
| ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); |
| for ( auto const & pipeline : pipelines ) |
| { |
| uniquePipelines.push_back( UniqueHandle<Pipeline, Dispatch>( pipeline, deleter ) ); |
| } |
| return ResultValue<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>, PipelineAllocator>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniquePipelines ) ); |
| } |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>> Device::createComputePipelineUnique( VULKAN_HPP_NAMESPACE::PipelineCache pipelineCache, const VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Pipeline pipeline; |
| VkResult result = d.vkCreateComputePipelines( m_device, static_cast<VkPipelineCache>( pipelineCache ), 1, reinterpret_cast<const VkComputePipelineCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipeline *>( &pipeline ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createComputePipelineUnique", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::ePipelineCompileRequiredEXT } ); |
| |
| return ResultValue<UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Pipeline, Dispatch>( pipeline, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
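| |
| // Example (illustrative, assuming exceptions enabled, the default dispatcher, a valid vk::Device |
| // 'device', a vk::PipelineLayout 'computeLayout', and a vk::ShaderModule 'computeShader' with |
| // entry point "main"). Compute pipelines follow the same ResultValue pattern as graphics ones: |
| // |
| //   vk::PipelineShaderStageCreateInfo stageInfo( {}, vk::ShaderStageFlagBits::eCompute, computeShader, "main" ); |
| //   vk::ComputePipelineCreateInfo computeInfo( {}, stageInfo, computeLayout ); |
| //   vk::Pipeline computePipeline = device.createComputePipeline( nullptr, computeInfo ).value; |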
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyPipeline( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Pipeline pipeline, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyPipeline( m_device, static_cast<VkPipeline>( pipeline ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PipelineLayout * pPipelineLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPipelineLayout *>( pPipelineLayout ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PipelineLayout>::type Device::createPipelineLayout( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; |
| VkResult result = d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayout" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), pipelineLayout ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>>::type Device::createPipelineLayoutUnique( const VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout; |
| VkResult result = d.vkCreatePipelineLayout( m_device, reinterpret_cast<const VkPipelineLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPipelineLayout *>( &pipelineLayout ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPipelineLayoutUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PipelineLayout, Dispatch>( pipelineLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
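| |
| // Example (illustrative sketch, assuming exceptions enabled, the default dispatcher, a valid |
| // vk::Device 'device', and a vk::DescriptorSetLayout 'setLayout' created earlier): a pipeline |
| // layout referencing one set layout and no push-constant ranges: |
| // |
| //   vk::PipelineLayoutCreateInfo layoutInfo( {}, setLayout ); |
| //   vk::UniquePipelineLayout pipelineLayout = device.createPipelineLayoutUnique( layoutInfo ); |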
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyPipelineLayout( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PipelineLayout pipelineLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyPipelineLayout( m_device, static_cast<VkPipelineLayout>( pipelineLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Sampler * pSampler, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSampler *>( pSampler ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Sampler>::type Device::createSampler( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Sampler sampler; |
| VkResult result = d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSampler *>( &sampler ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSampler" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), sampler ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>>::type Device::createSamplerUnique( const VULKAN_HPP_NAMESPACE::SamplerCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Sampler sampler; |
| VkResult result = d.vkCreateSampler( m_device, reinterpret_cast<const VkSamplerCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSampler *>( &sampler ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Sampler, Dispatch>( sampler, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
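| |
| // Example (illustrative, assuming exceptions enabled, the default dispatcher, and a valid |
| // vk::Device 'device'): a basic linear-filtering sampler with repeat addressing; fields not set |
| // keep their structure defaults: |
| // |
| //   vk::SamplerCreateInfo samplerInfo; |
| //   samplerInfo.magFilter    = vk::Filter::eLinear; |
| //   samplerInfo.minFilter    = vk::Filter::eLinear; |
| //   samplerInfo.addressModeU = vk::SamplerAddressMode::eRepeat; |
| //   samplerInfo.addressModeV = vk::SamplerAddressMode::eRepeat; |
| //   vk::UniqueSampler sampler = device.createSamplerUnique( samplerInfo ); |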
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroySampler( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Sampler sampler, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroySampler( m_device, static_cast<VkSampler>( sampler ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorSetLayout * pSetLayout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorSetLayout *>( pSetLayout ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::type Device::createDescriptorSetLayout( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; |
| VkResult result = d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayout" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), setLayout ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>>::type Device::createDescriptorSetLayoutUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DescriptorSetLayout setLayout; |
| VkResult result = d.vkCreateDescriptorSetLayout( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorSetLayout *>( &setLayout ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorSetLayoutUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSetLayout, Dispatch>( setLayout, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
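| |
| // Usage sketch (assuming the default vk namespace alias and a valid vk::Device named device): a |
| // layout with a single uniform-buffer binding visible to the vertex stage might be created as |
| //   vk::DescriptorSetLayoutBinding binding( 0, vk::DescriptorType::eUniformBuffer, 1, vk::ShaderStageFlagBits::eVertex ); |
| //   vk::UniqueDescriptorSetLayout setLayout = device.createDescriptorSetLayoutUnique( vk::DescriptorSetLayoutCreateInfo( {}, binding ) ); |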
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyDescriptorSetLayout( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorSetLayout descriptorSetLayout, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyDescriptorSetLayout( m_device, static_cast<VkDescriptorSetLayout>( descriptorSetLayout ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorPool * pDescriptorPool, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorPool *>( pDescriptorPool ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorPool>::type Device::createDescriptorPool( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; |
| VkResult result = d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPool" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorPool ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>>::type Device::createDescriptorPoolUnique( const VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool; |
| VkResult result = d.vkCreateDescriptorPool( m_device, reinterpret_cast<const VkDescriptorPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorPool *>( &descriptorPool ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorPoolUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorPool, Dispatch>( descriptorPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
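| |
| // Usage sketch: a small pool that can hold a handful of uniform-buffer descriptors and allows |
| // individual sets to be freed (needed by the allocateDescriptorSetsUnique sketch further down): |
| //   vk::DescriptorPoolSize poolSize( vk::DescriptorType::eUniformBuffer, 16 ); |
| //   vk::UniqueDescriptorPool descriptorPool = device.createDescriptorPoolUnique( |
| //     vk::DescriptorPoolCreateInfo( vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, 16, poolSize ) ); |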
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE Result Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::resetDescriptorPool( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkResetDescriptorPool( m_device, static_cast<VkDescriptorPool>( descriptorPool ), static_cast<VkDescriptorPoolResetFlags>( flags ) ); |
| |
| |
| |
| } |
| #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo * pAllocateInfo, VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkDescriptorSet *>( pDescriptorSets ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename DescriptorSetAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount ); |
| VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets ); |
| } |
| |
| template <typename DescriptorSetAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, DescriptorSet>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator>>::type Device::allocateDescriptorSets( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet, DescriptorSetAllocator> descriptorSets( allocateInfo.descriptorSetCount, descriptorSetAllocator ); |
| VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSets" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorSets ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch, typename DescriptorSetAllocator> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); |
| VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); |
| std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets; |
| uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); |
| PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); |
| for ( auto const & descriptorSet : descriptorSets ) |
| { |
| uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) ); |
| } |
| |
| template <typename Dispatch, typename DescriptorSetAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<DescriptorSet, Dispatch>>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator>>::type Device::allocateDescriptorSetsUnique( const VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo & allocateInfo, DescriptorSetAllocator & descriptorSetAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DescriptorSet> descriptorSets( allocateInfo.descriptorSetCount ); |
| VkResult result = d.vkAllocateDescriptorSets( m_device, reinterpret_cast<const VkDescriptorSetAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkDescriptorSet *>( descriptorSets.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateDescriptorSetsUnique" ); |
| std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorSet, Dispatch>, DescriptorSetAllocator> uniqueDescriptorSets( descriptorSetAllocator ); |
| uniqueDescriptorSets.reserve( allocateInfo.descriptorSetCount ); |
| PoolFree<Device, DescriptorPool, Dispatch> deleter( *this, allocateInfo.descriptorPool, d ); |
| for ( auto const & descriptorSet : descriptorSets ) |
| { |
| uniqueDescriptorSets.push_back( UniqueHandle<DescriptorSet, Dispatch>( descriptorSet, deleter ) ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueDescriptorSets ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
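| |
| // Usage sketch (using the pool and layout from the sketches above): the Unique variant wraps each |
| // set with a PoolFree deleter, so it is returned to its pool when the handle goes out of scope: |
| //   vk::DescriptorSetAllocateInfo allocInfo( *descriptorPool, *setLayout ); |
| //   std::vector<vk::UniqueDescriptorSet> descriptorSets = device.allocateDescriptorSetsUnique( allocInfo ); |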
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE Result Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::freeDescriptorSets( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE Result ( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::DescriptorPool descriptorPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkFreeDescriptorSets( m_device, static_cast<VkDescriptorPool>( descriptorPool ), descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::updateDescriptorSets( uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, uint32_t descriptorCopyCount, const VULKAN_HPP_NAMESPACE::CopyDescriptorSet * pDescriptorCopies, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkUpdateDescriptorSets( m_device, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ), descriptorCopyCount, reinterpret_cast<const VkCopyDescriptorSet *>( pDescriptorCopies ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::updateDescriptorSets( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CopyDescriptorSet> const & descriptorCopies, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkUpdateDescriptorSets( m_device, descriptorWrites.size(), reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ), descriptorCopies.size(), reinterpret_cast<const VkCopyDescriptorSet *>( descriptorCopies.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
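| |
| // Usage sketch: pointing binding 0 of the set allocated above at a uniform buffer (uniformBuffer |
| // is assumed to be a valid vk::Buffer); no copies are performed: |
| //   vk::DescriptorBufferInfo bufferInfo( uniformBuffer, 0, VK_WHOLE_SIZE ); |
| //   vk::WriteDescriptorSet write( *descriptorSets[0], 0, 0, vk::DescriptorType::eUniformBuffer, {}, bufferInfo ); |
| //   device.updateDescriptorSets( write, {} ); |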
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Framebuffer * pFramebuffer, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFramebuffer *>( pFramebuffer ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Framebuffer>::type Device::createFramebuffer( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; |
| VkResult result = d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFramebuffer *>( &framebuffer ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebuffer" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), framebuffer ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>>::type Device::createFramebufferUnique( const VULKAN_HPP_NAMESPACE::FramebufferCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Framebuffer framebuffer; |
| VkResult result = d.vkCreateFramebuffer( m_device, reinterpret_cast<const VkFramebufferCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFramebuffer *>( &framebuffer ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createFramebufferUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Framebuffer, Dispatch>( framebuffer, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
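| |
| // Usage sketch: a framebuffer tying one color attachment view to a render pass (colorView, extent, |
| // and the render pass are assumed to come from the application's swapchain setup): |
| //   vk::FramebufferCreateInfo framebufferInfo( {}, *renderPass, colorView, extent.width, extent.height, 1 ); |
| //   vk::UniqueFramebuffer framebuffer = device.createFramebufferUnique( framebufferInfo ); |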
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyFramebuffer( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::Framebuffer framebuffer, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyFramebuffer( m_device, static_cast<VkFramebuffer>( framebuffer ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::RenderPass renderPass; |
| VkResult result = d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPassUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::RenderPass renderPass; |
| VkResult result = d.vkCreateRenderPass( m_device, reinterpret_cast<const VkRenderPassCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPassUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
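| |
| // Usage sketch: a minimal single-subpass render pass with one color attachment that is cleared on |
| // load and transitioned for presentation (swapchainFormat is assumed to exist): |
| //   vk::AttachmentDescription color( {}, swapchainFormat, vk::SampleCountFlagBits::e1, |
| //                                    vk::AttachmentLoadOp::eClear, vk::AttachmentStoreOp::eStore, |
| //                                    vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare, |
| //                                    vk::ImageLayout::eUndefined, vk::ImageLayout::ePresentSrcKHR ); |
| //   vk::AttachmentReference colorRef( 0, vk::ImageLayout::eColorAttachmentOptimal ); |
| //   vk::SubpassDescription subpass( {}, vk::PipelineBindPoint::eGraphics, {}, colorRef ); |
| //   vk::UniqueRenderPass renderPass = device.createRenderPassUnique( vk::RenderPassCreateInfo( {}, color, subpass ) ); |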
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyRenderPass( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyRenderPass( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, VULKAN_HPP_NAMESPACE::Extent2D * pGranularity, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( pGranularity ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Extent2D Device::getRenderAreaGranularity( VULKAN_HPP_NAMESPACE::RenderPass renderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Extent2D granularity; |
| d.vkGetRenderAreaGranularity( m_device, static_cast<VkRenderPass>( renderPass ), reinterpret_cast<VkExtent2D *>( &granularity ) ); |
| |
| |
| return granularity; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
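| |
| // Usage sketch: the returned granularity can be used to align render-area offsets and extents for |
| // the current implementation: |
| //   vk::Extent2D granularity = device.getRenderAreaGranularity( *renderPass ); |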
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CommandPool * pCommandPool, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkCommandPool *>( pCommandPool ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CommandPool>::type Device::createCommandPool( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::CommandPool commandPool; |
| VkResult result = d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCommandPool *>( &commandPool ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPool" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandPool ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>>::type Device::createCommandPoolUnique( const VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::CommandPool commandPool; |
| VkResult result = d.vkCreateCommandPool( m_device, reinterpret_cast<const VkCommandPoolCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCommandPool *>( &commandPool ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCommandPoolUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::CommandPool, Dispatch>( commandPool, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
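| |
| // Usage sketch: a command pool for a graphics-capable queue family, allowing individual command |
| // buffers to be reset (graphicsQueueFamilyIndex is assumed to exist): |
| //   vk::UniqueCommandPool commandPool = device.createCommandPoolUnique( |
| //     vk::CommandPoolCreateInfo( vk::CommandPoolCreateFlagBits::eResetCommandBuffer, graphicsQueueFamilyIndex ) ); |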
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CommandPool commandPool, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::resetCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolResetFlags flags, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkResetCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolResetFlags>( flags ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::resetCommandPool" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo * pAllocateInfo, VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( pAllocateInfo ), reinterpret_cast<VkCommandBuffer *>( pCommandBuffers ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename CommandBufferAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount ); |
| VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers ); |
| } |
| |
| template <typename CommandBufferAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, CommandBuffer>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator>>::type Device::allocateCommandBuffers( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer, CommandBufferAllocator> commandBuffers( allocateInfo.commandBufferCount, commandBufferAllocator ); |
| VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffers" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), commandBuffers ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch, typename CommandBufferAllocator> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount ); |
| VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); |
| std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers; |
| uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); |
| PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d ); |
| for ( auto const & commandBuffer : commandBuffers ) |
| { |
| uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) ); |
| } |
| |
| template <typename Dispatch, typename CommandBufferAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<CommandBuffer, Dispatch>>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator>>::type Device::allocateCommandBuffersUnique( const VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo & allocateInfo, CommandBufferAllocator & commandBufferAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::CommandBuffer> commandBuffers( allocateInfo.commandBufferCount ); |
| VkResult result = d.vkAllocateCommandBuffers( m_device, reinterpret_cast<const VkCommandBufferAllocateInfo *>( &allocateInfo ), reinterpret_cast<VkCommandBuffer *>( commandBuffers.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::allocateCommandBuffersUnique" ); |
| std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::CommandBuffer, Dispatch>, CommandBufferAllocator> uniqueCommandBuffers( commandBufferAllocator ); |
| uniqueCommandBuffers.reserve( allocateInfo.commandBufferCount ); |
| PoolFree<Device, CommandPool, Dispatch> deleter( *this, allocateInfo.commandPool, d ); |
| for ( auto const & commandBuffer : commandBuffers ) |
| { |
| uniqueCommandBuffers.push_back( UniqueHandle<CommandBuffer, Dispatch>( commandBuffer, deleter ) ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueCommandBuffers ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
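| |
| // Usage sketch: one primary command buffer from the pool above; the Unique variant frees it back |
| // to its pool via PoolFree when the handle is destroyed: |
| //   vk::CommandBufferAllocateInfo cbAllocInfo( *commandPool, vk::CommandBufferLevel::ePrimary, 1 ); |
| //   std::vector<vk::UniqueCommandBuffer> commandBuffers = device.allocateCommandBuffersUnique( cbAllocInfo ); |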
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::freeCommandBuffers( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void ( Device::free )( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkFreeCommandBuffers( m_device, static_cast<VkCommandPool>( commandPool ), commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( pBeginInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::begin( const VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo & beginInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkBeginCommandBuffer( m_commandBuffer, reinterpret_cast<const VkCommandBufferBeginInfo *>( &beginInfo ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::begin" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::end( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkEndCommandBuffer( m_commandBuffer ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::end( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkEndCommandBuffer( m_commandBuffer ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::end" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ |
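| |
| // Usage sketch: recording a command buffer for a single submission (cb stands for |
| // *commandBuffers[0] from the sketch above): |
| //   cb.begin( vk::CommandBufferBeginInfo( vk::CommandBufferUsageFlagBits::eOneTimeSubmit ) ); |
| //   // ... record state setup and draw commands here ... |
| //   cb.end(); |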
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE typename ResultValueType<void>::type CommandBuffer::reset( VULKAN_HPP_NAMESPACE::CommandBufferResetFlags flags, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkResetCommandBuffer( m_commandBuffer, static_cast<VkCommandBufferResetFlags>( flags ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::reset" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::bindPipeline( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::Pipeline pipeline, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBindPipeline( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipeline>( pipeline ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setViewport( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdSetViewport( m_commandBuffer, firstViewport, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setScissor( uint32_t firstScissor, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdSetScissor( m_commandBuffer, firstScissor, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
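| |
| // Usage sketch: a full-extent dynamic viewport and scissor (extent assumed, cb as above): |
| //   cb.setViewport( 0, vk::Viewport( 0.0f, 0.0f, static_cast<float>( extent.width ), static_cast<float>( extent.height ), 0.0f, 1.0f ) ); |
| //   cb.setScissor( 0, vk::Rect2D( vk::Offset2D( 0, 0 ), extent ) ); |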
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setLineWidth( float lineWidth, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetLineWidth( m_commandBuffer, lineWidth ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setDepthBias( float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetDepthBias( m_commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setBlendConstants( const float blendConstants[4], Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetBlendConstants( m_commandBuffer, blendConstants ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setDepthBounds( float minDepthBounds, float maxDepthBounds, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetDepthBounds( m_commandBuffer, minDepthBounds, maxDepthBounds ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setStencilCompareMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t compareMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetStencilCompareMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), compareMask ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setStencilWriteMask( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t writeMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetStencilWriteMask( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), writeMask ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setStencilReference( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, uint32_t reference, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetStencilReference( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), reference ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VULKAN_HPP_NAMESPACE::DescriptorSet * pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t * pDynamicOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSetCount, reinterpret_cast<const VkDescriptorSet *>( pDescriptorSets ), dynamicOffsetCount, pDynamicOffsets ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::bindDescriptorSets( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t firstSet, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DescriptorSet> const & descriptorSets, VULKAN_HPP_NAMESPACE::ArrayProxy<const uint32_t> const & dynamicOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdBindDescriptorSets( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), firstSet, descriptorSets.size(), reinterpret_cast<const VkDescriptorSet *>( descriptorSets.data() ), dynamicOffsets.size(), dynamicOffsets.data() ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
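| |
| // Illustrative usage sketch (not part of the generated command definitions): binding two descriptor |
| // sets with a single dynamic offset through the ArrayProxy overload above. The handles `cb`, |
| // `pipelineLayout`, `set0` and `set1` are assumed to have been created by the calling code. |
| // |
| //   std::array<vk::DescriptorSet, 2> sets = { set0, set1 }; |
| //   uint32_t dynamicOffset = 256; |
| //   cb.bindDescriptorSets( vk::PipelineBindPoint::eGraphics, pipelineLayout, 0, sets, dynamicOffset ); |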
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::bindIndexBuffer( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::IndexType indexType, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBindIndexBuffer( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkIndexType>( indexType ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| #ifdef VULKAN_HPP_NO_EXCEPTIONS |
| VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); |
| #else |
| if ( buffers.size() != offsets.size() ) |
| { |
| throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers: buffers.size() != offsets.size()" ); |
| } |
| #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ |
| |
| |
| d.vkCmdBindVertexBuffers( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
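| |
| // Illustrative usage sketch (not part of the generated command definitions): the ArrayProxy overload |
| // above requires buffers.size() == offsets.size(); with exceptions enabled a mismatch throws |
| // vk::LogicError, otherwise it only asserts. `cb`, `positionBuffer` and `normalBuffer` are assumed |
| // caller-created handles. |
| // |
| //   std::array<vk::Buffer, 2>     buffers = { positionBuffer, normalBuffer }; |
| //   std::array<vk::DeviceSize, 2> offsets = { 0, 0 }; |
| //   cb.bindVertexBuffers( 0, buffers, offsets ); |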
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::draw( uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDraw( m_commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::drawIndexed( uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDrawIndexed( m_commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::drawIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDrawIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, uint32_t drawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDrawIndexedIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), drawCount, stride ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::dispatch( uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDispatch( m_commandBuffer, groupCountX, groupCountY, groupCountZ ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::dispatchIndirect( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDispatchIndirect( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferCopy * pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferCopy *>( pRegions ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyBuffer( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdCopyBuffer( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferCopy *>( regions.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
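| |
| // Illustrative usage sketch (not part of the generated command definitions): copying 64 KiB from a |
| // staging buffer into a device-local buffer with a single BufferCopy region; the ArrayProxy parameter |
| // accepts the single element directly. `cb`, `stagingBuffer` and `deviceBuffer` are assumed |
| // caller-created handles. |
| // |
| //   vk::BufferCopy region( 0 /*srcOffset*/, 0 /*dstOffset*/, 65536 /*size*/ ); |
| //   cb.copyBuffer( stagingBuffer, deviceBuffer, region ); |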
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageCopy * pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageCopy *>( pRegions ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdCopyImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageCopy *>( regions.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageBlit * pRegions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageBlit *>( pRegions ), static_cast<VkFilter>( filter ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::blitImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageBlit> const & regions, VULKAN_HPP_NAMESPACE::Filter filter, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdBlitImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageBlit *>( regions.data() ), static_cast<VkFilter>( filter ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage( VULKAN_HPP_NAMESPACE::Buffer srcBuffer, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdCopyBufferToImage( m_commandBuffer, static_cast<VkBuffer>( srcBuffer ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::BufferImageCopy * pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regionCount, reinterpret_cast<const VkBufferImageCopy *>( pRegions ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferImageCopy> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdCopyImageToBuffer( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkBuffer>( dstBuffer ), regions.size(), reinterpret_cast<const VkBufferImageCopy *>( regions.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize dataSize, const void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( dataSize ), pData ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename DataType, typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::updateBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::ArrayProxy<const DataType> const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdUpdateBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), data.size() * sizeof( DataType ), reinterpret_cast<const void *>( data.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
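| |
| // Illustrative usage sketch (not part of the generated command definitions): the templated overload |
| // above derives the byte count as data.size() * sizeof( DataType ), so four floats write 16 bytes at |
| // dstOffset 0. vkCmdUpdateBuffer itself is limited to 65536 bytes and must be recorded outside a |
| // render pass. `cb` and `uniformBuffer` are assumed caller-created handles. |
| // |
| //   std::array<float, 4> color = { 1.0f, 0.5f, 0.25f, 1.0f }; |
| //   cb.updateBuffer<float>( uniformBuffer, 0, color ); |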
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::fillBuffer( VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize size, uint32_t data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdFillBuffer( m_commandBuffer, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( size ), data ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue * pColor, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( pColor ), rangeCount, reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::clearColorImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearColorValue & color, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdClearColorImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearColorValue *>( &color ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
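| |
| // Illustrative usage sketch (not part of the generated command definitions): clearing the first mip |
| // level of a color image that is currently in eTransferDstOptimal layout. `cb` and `image` are assumed |
| // caller-created handles. |
| // |
| //   vk::ClearColorValue       clearColor( std::array<float, 4>{ 0.0f, 0.0f, 0.0f, 1.0f } ); |
| //   vk::ImageSubresourceRange range( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ); |
| //   cb.clearColorImage( image, vk::ImageLayout::eTransferDstOptimal, clearColor, range ); |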
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue * pDepthStencil, uint32_t rangeCount, const VULKAN_HPP_NAMESPACE::ImageSubresourceRange * pRanges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( pDepthStencil ), rangeCount, reinterpret_cast<const VkImageSubresourceRange *>( pRanges ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::clearDepthStencilImage( VULKAN_HPP_NAMESPACE::Image image, VULKAN_HPP_NAMESPACE::ImageLayout imageLayout, const VULKAN_HPP_NAMESPACE::ClearDepthStencilValue & depthStencil, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageSubresourceRange> const & ranges, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdClearDepthStencilImage( m_commandBuffer, static_cast<VkImage>( image ), static_cast<VkImageLayout>( imageLayout ), reinterpret_cast<const VkClearDepthStencilValue *>( &depthStencil ), ranges.size(), reinterpret_cast<const VkImageSubresourceRange *>( ranges.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( uint32_t attachmentCount, const VULKAN_HPP_NAMESPACE::ClearAttachment * pAttachments, uint32_t rectCount, const VULKAN_HPP_NAMESPACE::ClearRect * pRects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdClearAttachments( m_commandBuffer, attachmentCount, reinterpret_cast<const VkClearAttachment *>( pAttachments ), rectCount, reinterpret_cast<const VkClearRect *>( pRects ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::clearAttachments( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearAttachment> const & attachments, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ClearRect> const & rects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdClearAttachments( m_commandBuffer, attachments.size(), reinterpret_cast<const VkClearAttachment *>( attachments.data() ), rects.size(), reinterpret_cast<const VkClearRect *>( rects.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, uint32_t regionCount, const VULKAN_HPP_NAMESPACE::ImageResolve * pRegions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regionCount, reinterpret_cast<const VkImageResolve *>( pRegions ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::resolveImage( VULKAN_HPP_NAMESPACE::Image srcImage, VULKAN_HPP_NAMESPACE::ImageLayout srcImageLayout, VULKAN_HPP_NAMESPACE::Image dstImage, VULKAN_HPP_NAMESPACE::ImageLayout dstImageLayout, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageResolve> const & regions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdResolveImage( m_commandBuffer, static_cast<VkImage>( srcImage ), static_cast<VkImageLayout>( srcImageLayout ), static_cast<VkImage>( dstImage ), static_cast<VkImageLayout>( dstImageLayout ), regions.size(), reinterpret_cast<const VkImageResolve *>( regions.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::resetEvent( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdResetEvent( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags>( stageMask ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::waitEvents( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdWaitEvents( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::waitEvents( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdWaitEvents( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VULKAN_HPP_NAMESPACE::MemoryBarrier * pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier * pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier * pImageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarrierCount, reinterpret_cast<const VkMemoryBarrier *>( pMemoryBarriers ), bufferMemoryBarrierCount, reinterpret_cast<const VkBufferMemoryBarrier *>( pBufferMemoryBarriers ), imageMemoryBarrierCount, reinterpret_cast<const VkImageMemoryBarrier *>( pImageMemoryBarriers ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier( VULKAN_HPP_NAMESPACE::PipelineStageFlags srcStageMask, VULKAN_HPP_NAMESPACE::PipelineStageFlags dstStageMask, VULKAN_HPP_NAMESPACE::DependencyFlags dependencyFlags, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::MemoryBarrier> const & memoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BufferMemoryBarrier> const & bufferMemoryBarriers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ImageMemoryBarrier> const & imageMemoryBarriers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdPipelineBarrier( m_commandBuffer, static_cast<VkPipelineStageFlags>( srcStageMask ), static_cast<VkPipelineStageFlags>( dstStageMask ), static_cast<VkDependencyFlags>( dependencyFlags ), memoryBarriers.size(), reinterpret_cast<const VkMemoryBarrier *>( memoryBarriers.data() ), bufferMemoryBarriers.size(), reinterpret_cast<const VkBufferMemoryBarrier *>( bufferMemoryBarriers.data() ), imageMemoryBarriers.size(), reinterpret_cast<const VkImageMemoryBarrier *>( imageMemoryBarriers.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
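| |
| // Illustrative usage sketch (not part of the generated command definitions): transitioning an image |
| // from eTransferDstOptimal to eShaderReadOnlyOptimal after a copy, using the ArrayProxy overload above |
| // with one ImageMemoryBarrier and no memory or buffer barriers. `cb` and `image` are assumed |
| // caller-created handles. |
| // |
| //   vk::ImageMemoryBarrier barrier( vk::AccessFlagBits::eTransferWrite, vk::AccessFlagBits::eShaderRead, |
| //                                   vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal, |
| //                                   VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, image, |
| //                                   vk::ImageSubresourceRange( vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 ) ); |
| //   cb.pipelineBarrier( vk::PipelineStageFlagBits::eTransfer, vk::PipelineStageFlagBits::eFragmentShader, |
| //                       {}, nullptr, nullptr, barrier ); |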
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBeginQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endQuery( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdEndQuery( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdResetQueryPool( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp( VULKAN_HPP_NAMESPACE::PipelineStageFlagBits pipelineStage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdWriteTimestamp( m_commandBuffer, static_cast<VkPipelineStageFlagBits>( pipelineStage ), static_cast<VkQueryPool>( queryPool ), query ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyQueryPoolResults( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VULKAN_HPP_NAMESPACE::Buffer dstBuffer, VULKAN_HPP_NAMESPACE::DeviceSize dstOffset, VULKAN_HPP_NAMESPACE::DeviceSize stride, VULKAN_HPP_NAMESPACE::QueryResultFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdCopyQueryPoolResults( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount, static_cast<VkBuffer>( dstBuffer ), static_cast<VkDeviceSize>( dstOffset ), static_cast<VkDeviceSize>( stride ), static_cast<VkQueryResultFlags>( flags ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void * pValues, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, size, pValues ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename ValuesType, typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::pushConstants( VULKAN_HPP_NAMESPACE::PipelineLayout layout, VULKAN_HPP_NAMESPACE::ShaderStageFlags stageFlags, uint32_t offset, VULKAN_HPP_NAMESPACE::ArrayProxy<const ValuesType> const & values, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdPushConstants( m_commandBuffer, static_cast<VkPipelineLayout>( layout ), static_cast<VkShaderStageFlags>( stageFlags ), offset, values.size() * sizeof( ValuesType ), reinterpret_cast<const void *>( values.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
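| |
| // Illustrative usage sketch (not part of the generated command definitions): the templated overload |
| // above computes the byte size as values.size() * sizeof( ValuesType ). Here a hypothetical 4x4 matrix |
| // type `Mat4` is pushed to the vertex stage at offset 0; `cb`, `pipelineLayout` and `computeMvp()` are |
| // likewise assumed to exist in the calling code. |
| // |
| //   Mat4 mvp = computeMvp(); |
| //   cb.pushConstants<Mat4>( pipelineLayout, vk::ShaderStageFlagBits::eVertex, 0, mvp ); |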
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), static_cast<VkSubpassContents>( contents ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdBeginRenderPass( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), static_cast<VkSubpassContents>( contents ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
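| |
| // Illustrative usage sketch (not part of the generated command definitions): beginning a render pass |
| // with one color clear value and inline subpass contents; the matching endRenderPass is defined below. |
| // `renderPass`, `framebuffer`, `extent` and `cb` are assumed caller-created. |
| // |
| //   vk::ClearValue          clearValue( vk::ClearColorValue( std::array<float, 4>{ 0.f, 0.f, 0.f, 1.f } ) ); |
| //   vk::RenderPassBeginInfo beginInfo( renderPass, framebuffer, vk::Rect2D( { 0, 0 }, extent ), 1, &clearValue ); |
| //   cb.beginRenderPass( beginInfo, vk::SubpassContents::eInline ); |
| //   // ... record draw commands ... |
| //   cb.endRenderPass(); |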
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::nextSubpass( VULKAN_HPP_NAMESPACE::SubpassContents contents, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdNextSubpass( m_commandBuffer, static_cast<VkSubpassContents>( contents ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endRenderPass( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdEndRenderPass( m_commandBuffer ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::executeCommands( uint32_t commandBufferCount, const VULKAN_HPP_NAMESPACE::CommandBuffer * pCommandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdExecuteCommands( m_commandBuffer, commandBufferCount, reinterpret_cast<const VkCommandBuffer *>( pCommandBuffers ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::executeCommands( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::CommandBuffer> const & commandBuffers, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdExecuteCommands( m_commandBuffer, commandBuffers.size(), reinterpret_cast<const VkCommandBuffer *>( commandBuffers.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
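| |
| // Illustrative usage sketch (not part of the generated command definitions): executing two secondary |
| // command buffers from a primary one through the ArrayProxy overload above. `primary`, `secondary0` |
| // and `secondary1` are assumed caller-created handles. |
| // |
| //   std::array<vk::CommandBuffer, 2> secondaries = { secondary0, secondary1 }; |
| //   primary.executeCommands( secondaries ); |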
| |
| //=== VK_VERSION_1_1 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result enumerateInstanceVersion( uint32_t * pApiVersion, Dispatch const & d ) VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkEnumerateInstanceVersion( pApiVersion ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint32_t>::type enumerateInstanceVersion( Dispatch const & d ) |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| uint32_t apiVersion; |
| VkResult result = d.vkEnumerateInstanceVersion( &apiVersion ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::enumerateInstanceVersion" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), apiVersion ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
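| |
| // Illustrative usage sketch (not part of the generated command definitions): with exceptions enabled |
| // the enhanced overload returns the packed API version directly and throws on failure; the standard |
| // VK_API_VERSION_* macros decode it. |
| // |
| //   uint32_t apiVersion = vk::enumerateInstanceVersion(); |
| //   uint32_t major      = VK_API_VERSION_MAJOR( apiVersion ); |
| //   uint32_t minor      = VK_API_VERSION_MINOR( apiVersion ); |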
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindBufferMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkBindBufferMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindBufferMemoryInfo *>( pBindInfos ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindBufferMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo> const & bindInfos, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkBindBufferMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindBufferMemoryInfo *>( bindInfos.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindBufferMemory2" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindImageMemory2( uint32_t bindInfoCount, const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo * pBindInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkBindImageMemory2( m_device, bindInfoCount, reinterpret_cast<const VkBindImageMemoryInfo *>( pBindInfos ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindImageMemory2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindImageMemoryInfo> const & bindInfos, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkBindImageMemory2( m_device, bindInfos.size(), reinterpret_cast<const VkBindImageMemoryInfo *>( bindInfos.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindImageMemory2" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeatures( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; |
| d.vkGetDeviceGroupPeerMemoryFeatures( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) ); |
| |
| |
| return peerMemoryFeatures; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setDeviceMask( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetDeviceMask( m_commandBuffer, deviceMask ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::dispatchBase( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDispatchBase( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroups( uint32_t * pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroups( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroups( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties; |
| uint32_t physicalDeviceGroupCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount ) |
| { |
| physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); |
| result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); |
| VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); |
| if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) |
| { |
| physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties ); |
| } |
| |
| template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroups( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( physicalDeviceGroupPropertiesAllocator ); |
| uint32_t physicalDeviceGroupCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount ) |
| { |
| physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); |
| result = d.vkEnumeratePhysicalDeviceGroups( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroups" ); |
| VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); |
| if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) |
| { |
| physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
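| |
| // Illustrative usage sketch (not part of the generated command definitions): the enhanced overload |
| // above hides the count/fill two-call pattern and the VK_INCOMPLETE retry loop and returns a |
| // std::vector. `instance` is an assumed caller-created vk::Instance. |
| // |
| //   std::vector<vk::PhysicalDeviceGroupProperties> groups = instance.enumeratePhysicalDeviceGroups(); |
| //   for ( auto const & group : groups ) |
| //   { |
| //     // group.physicalDeviceCount physical devices can be used together as one logical device group |
| //   } |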
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; |
| d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); |
| |
| |
| return memoryRequirements; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); |
| d.vkGetImageMemoryRequirements2( m_device, reinterpret_cast<const VkImageMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
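| |
| // Illustrative usage sketch (not part of the generated command definitions): the StructureChain |
| // overload above queries extension structures in the same call; here vk::MemoryDedicatedRequirements |
| // is chained to check whether a dedicated allocation is preferred. `device` and `image` are assumed |
| // caller-created handles. |
| // |
| //   auto chain = device.getImageMemoryRequirements2<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( |
| //     vk::ImageMemoryRequirementsInfo2( image ) ); |
| //   vk::MemoryRequirements2 const &         reqs      = chain.get<vk::MemoryRequirements2>(); |
| //   vk::MemoryDedicatedRequirements const & dedicated = chain.get<vk::MemoryDedicatedRequirements>(); |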
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; |
| d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); |
| |
| |
| return memoryRequirements; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements2( const VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); |
| d.vkGetBufferMemoryRequirements2( m_device, reinterpret_cast<const VkBufferMemoryRequirementsInfo2 *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; |
| uint32_t sparseMemoryRequirementCount; |
| d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); |
| sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); |
| d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); |
| |
| VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); |
| if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) |
| { |
| sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); |
| } |
| return sparseMemoryRequirements; |
| } |
| |
| template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements2( const VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator ); |
| uint32_t sparseMemoryRequirementCount; |
| d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, nullptr ); |
| sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); |
| d.vkGetImageSparseMemoryRequirements2( m_device, reinterpret_cast<const VkImageSparseMemoryRequirementsInfo2 *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); |
| |
| VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); |
| if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) |
| { |
| sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); |
| } |
| return sparseMemoryRequirements; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; |
| d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); |
| |
| |
| return features; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>(); |
| d.vkGetPhysicalDeviceFeatures2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
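| |
| // Illustrative usage sketch (not part of the generated command definitions): querying core features |
| // together with the Vulkan 1.2 feature struct in one StructureChain call, assuming a Vulkan 1.2 |
| // capable implementation. `physicalDevice` is an assumed caller-created vk::PhysicalDevice. |
| // |
| //   auto features = physicalDevice.getFeatures2<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan12Features>(); |
| //   vk::Bool32 hasBufferDeviceAddress = features.get<vk::PhysicalDeviceVulkan12Features>().bufferDeviceAddress; |
| //   vk::Bool32 hasSamplerAnisotropy   = features.get<vk::PhysicalDeviceFeatures2>().features.samplerAnisotropy; |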
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; |
| d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); |
| |
| |
| return properties; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>(); |
| d.vkGetPhysicalDeviceProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; |
| d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); |
| |
| |
| return formatProperties; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>(); |
| d.vkGetPhysicalDeviceFormatProperties2( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; |
| VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties ); |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>(); |
| VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
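
  // Usage sketch (editorial addition, illustrative only): with exceptions enabled, the enhanced
  // overload above throws a vk::SystemError (e.g. on VK_ERROR_FORMAT_NOT_SUPPORTED) instead of
  // returning an error code:
  //
  //   vk::PhysicalDeviceImageFormatInfo2 imageFormatInfo( vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled );
  //   vk::ImageFormatProperties2 imageFormatProperties = physicalDevice.getImageFormatProperties2( imageFormatInfo );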
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename QueueFamilyProperties2Allocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties; |
| uint32_t queueFamilyPropertyCount; |
| d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); |
| if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) |
| { |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| } |
| return queueFamilyProperties; |
| } |
| |
| template <typename QueueFamilyProperties2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator ); |
| uint32_t queueFamilyPropertyCount; |
| d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); |
| if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) |
| { |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| } |
| return queueFamilyProperties; |
| } |
| |
| template <typename StructureChain, typename StructureChainAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<StructureChain, StructureChainAllocator> structureChains; |
| std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; |
| uint32_t queueFamilyPropertyCount; |
| d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); |
| structureChains.resize( queueFamilyPropertyCount ); |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) |
| { |
| queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; |
| } |
| d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); |
| if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) |
| { |
| structureChains.resize( queueFamilyPropertyCount ); |
| } |
| for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) |
| { |
| structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; |
| } |
| return structureChains; |
| } |
| |
| template <typename StructureChain, typename StructureChainAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); |
| std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; |
| uint32_t queueFamilyPropertyCount; |
| d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); |
| structureChains.resize( queueFamilyPropertyCount ); |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) |
| { |
| queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; |
| } |
| d.vkGetPhysicalDeviceQueueFamilyProperties2( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); |
| if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) |
| { |
| structureChains.resize( queueFamilyPropertyCount ); |
| } |
| for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) |
| { |
| structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; |
| } |
| return structureChains; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
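
  // Usage sketch (editorial addition, illustrative only): the enhanced overloads above hide the
  // two-step count/fill query and return a std::vector directly:
  //
  //   std::vector<vk::QueueFamilyProperties2> queueFamilies = physicalDevice.getQueueFamilyProperties2();
  //   for ( uint32_t i = 0; i < queueFamilies.size(); ++i )
  //   {
  //     if ( queueFamilies[i].queueFamilyProperties.queueFlags & vk::QueueFlagBits::eGraphics )
  //     {
  //       // queue family i supports graphics work
  //     }
  //   }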
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; |
| d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); |
| |
| |
| return memoryProperties; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>(); |
| d.vkGetPhysicalDeviceMemoryProperties2( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
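
  // Usage sketch (editorial addition, illustrative only):
  //
  //   vk::PhysicalDeviceMemoryProperties2 memoryProperties = physicalDevice.getMemoryProperties2();
  //   uint32_t heapCount = memoryProperties.memoryProperties.memoryHeapCount;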
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename SparseImageFormatProperties2Allocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties; |
| uint32_t propertyCount; |
| d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); |
| properties.resize( propertyCount ); |
| d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return properties; |
| } |
| |
| template <typename SparseImageFormatProperties2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator ); |
| uint32_t propertyCount; |
| d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); |
| properties.resize( propertyCount ); |
| d.vkGetPhysicalDeviceSparseImageFormatProperties2( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return properties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::trimCommandPool( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkTrimCommandPool( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 * pQueueInfo, VULKAN_HPP_NAMESPACE::Queue * pQueue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( pQueueInfo ), reinterpret_cast<VkQueue *>( pQueue ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Queue Device::getQueue2( const VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 & queueInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Queue queue; |
| d.vkGetDeviceQueue2( m_device, reinterpret_cast<const VkDeviceQueueInfo2 *>( &queueInfo ), reinterpret_cast<VkQueue *>( &queue ) ); |
| |
| |
| return queue; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
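
  // Usage sketch (editorial addition, illustrative only): `device` and `queueFamilyIndex` are
  // hypothetical and must match the queues requested at device creation:
  //
  //   vk::DeviceQueueInfo2 queueInfo( {}, queueFamilyIndex, 0 );   // flags, queueFamilyIndex, queueIndex
  //   vk::Queue queue = device.getQueue2( queueInfo );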
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion * pYcbcrConversion, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSamplerYcbcrConversion *>( pYcbcrConversion ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::type Device::createSamplerYcbcrConversion( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; |
| VkResult result = d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversion" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), ycbcrConversion ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>>::type Device::createSamplerYcbcrConversionUnique( const VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion; |
| VkResult result = d.vkCreateSamplerYcbcrConversion( m_device, reinterpret_cast<const VkSamplerYcbcrConversionCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSamplerYcbcrConversion *>( &ycbcrConversion ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSamplerYcbcrConversionUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion, Dispatch>( ycbcrConversion, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
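
  // Usage sketch (editorial addition, illustrative only): the Unique variant wraps the conversion
  // in a UniqueHandle that destroys it automatically:
  //
  //   vk::SamplerYcbcrConversionCreateInfo createInfo( vk::Format::eG8B8R82Plane420Unorm,
  //                                                    vk::SamplerYcbcrModelConversion::eYcbcr709,
  //                                                    vk::SamplerYcbcrRange::eItuNarrow );
  //   auto ycbcrConversion = device.createSamplerYcbcrConversionUnique( createInfo );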
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroySamplerYcbcrConversion( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ycbcrConversion, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroySamplerYcbcrConversion( m_device, static_cast<VkSamplerYcbcrConversion>( ycbcrConversion ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplate( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; |
| VkResult result = d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplate" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type Device::createDescriptorUpdateTemplateUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; |
| VkResult result = d.vkCreateDescriptorUpdateTemplate( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplate( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyDescriptorUpdateTemplate( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename DataType, typename Dispatch> |
| VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplate( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkUpdateDescriptorSetWithTemplate( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const void *>( &data ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
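
  // Usage sketch (editorial addition, illustrative only): `entries`, `setLayout`, `descriptorSet`
  // and `bufferInfo` are hypothetical; the template entry offsets must describe the layout of the
  // data block passed to updateDescriptorSetWithTemplate:
  //
  //   vk::DescriptorUpdateTemplateCreateInfo createInfo( {}, static_cast<uint32_t>( entries.size() ), entries.data(),
  //                                                      vk::DescriptorUpdateTemplateType::eDescriptorSet, setLayout );
  //   auto updateTemplate = device.createDescriptorUpdateTemplateUnique( createInfo );
  //   device.updateDescriptorSetWithTemplate( descriptorSet, *updateTemplate, bufferInfo );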
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; |
| d.vkGetPhysicalDeviceExternalBufferProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); |
| |
| |
| return externalBufferProperties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFenceProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; |
| d.vkGetPhysicalDeviceExternalFenceProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) ); |
| |
| |
| return externalFenceProperties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphoreProperties( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; |
| d.vkGetPhysicalDeviceExternalSemaphoreProperties( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); |
| |
| |
| return externalSemaphoreProperties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo * pCreateInfo, VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport * pSupport, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( pCreateInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( pSupport ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport support; |
| d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); |
| |
| |
| return support; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getDescriptorSetLayoutSupport( const VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo & createInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport & support = structureChain.template get<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>(); |
| d.vkGetDescriptorSetLayoutSupport( m_device, reinterpret_cast<const VkDescriptorSetLayoutCreateInfo *>( &createInfo ), reinterpret_cast<VkDescriptorSetLayoutSupport *>( &support ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
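
  // Usage sketch (editorial addition, illustrative only): `bindings` is a hypothetical
  // std::vector<vk::DescriptorSetLayoutBinding>:
  //
  //   vk::DescriptorSetLayoutCreateInfo layoutCreateInfo( {}, static_cast<uint32_t>( bindings.size() ), bindings.data() );
  //   vk::DescriptorSetLayoutSupport support = device.getDescriptorSetLayoutSupport( layoutCreateInfo );
  //   if ( !support.supported )
  //   {
  //     // reduce the descriptor counts or split the layout
  //   }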
| |
| //=== VK_VERSION_1_2 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDrawIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCount( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDrawIndexedIndirectCount( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::RenderPass renderPass; |
| VkResult result = d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPass2Unique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::RenderPass renderPass; |
| VkResult result = d.vkCreateRenderPass2( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2Unique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
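
  // Usage sketch (editorial addition, illustrative only): `attachments`, `subpasses` and
  // `dependencies` are hypothetical vectors of the corresponding *2 description structs:
  //
  //   vk::RenderPassCreateInfo2 createInfo( {}, static_cast<uint32_t>( attachments.size() ), attachments.data(),
  //                                         static_cast<uint32_t>( subpasses.size() ), subpasses.data(),
  //                                         static_cast<uint32_t>( dependencies.size() ), dependencies.data() );
  //   auto renderPass = device.createRenderPass2Unique( createInfo );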
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdBeginRenderPass2( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdNextSubpass2( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdEndRenderPass2( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
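
  // Usage sketch (editorial addition, illustrative only): recording a two-subpass render pass with
  // the *2 entry points above; `commandBuffer` and `renderPassBeginInfo` are hypothetical:
  //
  //   vk::SubpassBeginInfo subpassBeginInfo( vk::SubpassContents::eInline );
  //   commandBuffer.beginRenderPass2( renderPassBeginInfo, subpassBeginInfo );
  //   // ... draws for subpass 0 ...
  //   commandBuffer.nextSubpass2( subpassBeginInfo, vk::SubpassEndInfo() );
  //   // ... draws for subpass 1 ...
  //   commandBuffer.endRenderPass2( vk::SubpassEndInfo() );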
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::resetQueryPool( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkResetQueryPool( m_device, static_cast<VkQueryPool>( queryPool ), firstQuery, queryCount ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, uint64_t * pValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), pValue ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSemaphoreCounterValue( VULKAN_HPP_NAMESPACE::Semaphore semaphore, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| uint64_t value; |
| VkResult result = d.vkGetSemaphoreCounterValue( m_device, static_cast<VkSemaphore>( semaphore ), &value ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreCounterValue" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), value ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo * pWaitInfo, uint64_t timeout, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( pWaitInfo ), timeout ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::waitSemaphores( const VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo & waitInfo, uint64_t timeout, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkWaitSemaphores( m_device, reinterpret_cast<const VkSemaphoreWaitInfo *>( &waitInfo ), timeout ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::waitSemaphores", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout } ); |
| |
| return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo * pSignalInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( pSignalInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::signalSemaphore( const VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo & signalInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkSignalSemaphore( m_device, reinterpret_cast<const VkSemaphoreSignalInfo *>( &signalInfo ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::signalSemaphore" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
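
  // Usage sketch (editorial addition, illustrative only): `timelineSemaphore` is a hypothetical
  // semaphore created with vk::SemaphoreType::eTimeline; since waitSemaphores may legitimately
  // return eTimeout, its enhanced overload returns the vk::Result instead of throwing on it:
  //
  //   uint64_t targetValue = device.getSemaphoreCounterValue( timelineSemaphore ) + 1;
  //   device.signalSemaphore( vk::SemaphoreSignalInfo( timelineSemaphore, targetValue ) );
  //   vk::SemaphoreWaitInfo waitInfo( {}, 1, &timelineSemaphore, &targetValue );
  //   vk::Result waitResult = device.waitSemaphores( waitInfo, UINT64_MAX );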
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<DeviceAddress>( d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::DeviceAddress Device::getBufferAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkDeviceAddress result = d.vkGetBufferDeviceAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); |
| |
| |
| return static_cast<VULKAN_HPP_NAMESPACE::DeviceAddress>( result ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
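
  // Usage sketch (editorial addition, illustrative only): `buffer` is a hypothetical vk::Buffer
  // created with vk::BufferUsageFlagBits::eShaderDeviceAddress and the bufferDeviceAddress feature
  // enabled:
  //
  //   vk::DeviceAddress address = device.getBufferAddress( vk::BufferDeviceAddressInfo( buffer ) );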
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( pInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE uint64_t Device::getBufferOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| uint64_t result = d.vkGetBufferOpaqueCaptureAddress( m_device, reinterpret_cast<const VkBufferDeviceAddressInfo *>( &info ) ); |
| |
| |
| return result; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( pInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE uint64_t Device::getMemoryOpaqueCaptureAddress( const VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| uint64_t result = d.vkGetDeviceMemoryOpaqueCaptureAddress( m_device, reinterpret_cast<const VkDeviceMemoryOpaqueCaptureAddressInfo *>( &info ) ); |
| |
| |
| return result; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| //=== VK_VERSION_1_3 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getToolProperties( uint32_t * pToolCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties * pToolProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, pToolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( pToolProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type PhysicalDevice::getToolProperties( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties; |
| uint32_t toolCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && toolCount ) |
| { |
| toolProperties.resize( toolCount ); |
| result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); |
| VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); |
| if ( toolCount < toolProperties.size() ) |
| { |
| toolProperties.resize( toolCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties ); |
| } |
| |
| template <typename PhysicalDeviceToolPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceToolProperties>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator>>::type PhysicalDevice::getToolProperties( PhysicalDeviceToolPropertiesAllocator & physicalDeviceToolPropertiesAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties, PhysicalDeviceToolPropertiesAllocator> toolProperties( physicalDeviceToolPropertiesAllocator ); |
| uint32_t toolCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && toolCount ) |
| { |
| toolProperties.resize( toolCount ); |
| result = d.vkGetPhysicalDeviceToolProperties( m_physicalDevice, &toolCount, reinterpret_cast<VkPhysicalDeviceToolProperties *>( toolProperties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getToolProperties" ); |
| VULKAN_HPP_ASSERT( toolCount <= toolProperties.size() ); |
| if ( toolCount < toolProperties.size() ) |
| { |
| toolProperties.resize( toolCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), toolProperties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
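
  // Usage sketch (editorial addition, illustrative only): lists active tools such as validation or
  // profiling layers:
  //
  //   std::vector<vk::PhysicalDeviceToolProperties> tools = physicalDevice.getToolProperties();
  //   for ( auto const & tool : tools )
  //   {
  //     std::string name = tool.name;
  //   }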
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::PrivateDataSlot * pPrivateDataSlot, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreatePrivateDataSlot( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkPrivateDataSlot *>( pPrivateDataSlot ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::type Device::createPrivateDataSlot( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; |
| VkResult result = d.vkCreatePrivateDataSlot( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlot" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), privateDataSlot ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>>::type Device::createPrivateDataSlotUnique( const VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot; |
| VkResult result = d.vkCreatePrivateDataSlot( m_device, reinterpret_cast<const VkPrivateDataSlotCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkPrivateDataSlot *>( &privateDataSlot ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createPrivateDataSlotUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::PrivateDataSlot, Dispatch>( privateDataSlot, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
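
  // Usage sketch (editorial addition, illustrative only):
  //
  //   auto privateDataSlot = device.createPrivateDataSlotUnique( vk::PrivateDataSlotCreateInfo() );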
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyPrivateDataSlot( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyPrivateDataSlot( m_device, static_cast<VkPrivateDataSlot>( privateDataSlot ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::setPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t data, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkSetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), data ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::setPrivateData" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, uint64_t * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), pData ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint64_t Device::getPrivateData( VULKAN_HPP_NAMESPACE::ObjectType objectType, uint64_t objectHandle, VULKAN_HPP_NAMESPACE::PrivateDataSlot privateDataSlot, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| uint64_t data; |
| d.vkGetPrivateData( m_device, static_cast<VkObjectType>( objectType ), objectHandle, static_cast<VkPrivateDataSlot>( privateDataSlot ), &data ); |
| |
| |
| return data; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
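|   // Usage sketch (illustrative comment, not generated from the registry): attaching a 64-bit payload |
|   // to a Vulkan object through the private data functions defined above. Assumes the default `vk` |
|   // namespace alias, exceptions enabled, and caller-provided handles `device` and `buffer`. |
|   // |
|   //   vk::PrivateDataSlot slot = device.createPrivateDataSlot( vk::PrivateDataSlotCreateInfo{} ); |
|   //   device.setPrivateData( vk::ObjectType::eBuffer, uint64_t( static_cast<VkBuffer>( buffer ) ), slot, 42 ); |
|   //   uint64_t payload = device.getPrivateData( vk::ObjectType::eBuffer, uint64_t( static_cast<VkBuffer>( buffer ) ), slot );  // payload == 42 |
|   //   device.destroyPrivateDataSlot( slot ); |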
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setEvent2( VULKAN_HPP_NAMESPACE::Event event, const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdSetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::resetEvent2( VULKAN_HPP_NAMESPACE::Event event, VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stageMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdResetEvent2( m_commandBuffer, static_cast<VkEvent>( event ), static_cast<VkPipelineStageFlags2>( stageMask ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( uint32_t eventCount, const VULKAN_HPP_NAMESPACE::Event * pEvents, const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdWaitEvents2( m_commandBuffer, eventCount, reinterpret_cast<const VkEvent *>( pEvents ), reinterpret_cast<const VkDependencyInfo *>( pDependencyInfos ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::waitEvents2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Event> const & events, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DependencyInfo> const & dependencyInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| #ifdef VULKAN_HPP_NO_EXCEPTIONS |
| VULKAN_HPP_ASSERT( events.size() == dependencyInfos.size() ); |
| #else |
| if ( events.size() != dependencyInfos.size() ) |
| { |
| throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::waitEvents2: events.size() != dependencyInfos.size()" ); |
| } |
| #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ |
| |
| |
| d.vkCmdWaitEvents2( m_commandBuffer, events.size(), reinterpret_cast<const VkEvent *>( events.data() ), reinterpret_cast<const VkDependencyInfo *>( dependencyInfos.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
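|   // Usage sketch (illustrative comment, not generated from the registry): a split barrier built from |
|   // the synchronization2 event commands defined above; the same vk::DependencyInfo typically describes |
|   // the dependency on both the signal and the wait side. Assumes the default `vk` namespace alias and |
|   // caller-provided `commandBuffer` and `event` handles. |
|   // |
|   //   vk::MemoryBarrier2 barrier; |
|   //   barrier.srcStageMask  = vk::PipelineStageFlagBits2::eTransfer; |
|   //   barrier.srcAccessMask = vk::AccessFlagBits2::eTransferWrite; |
|   //   barrier.dstStageMask  = vk::PipelineStageFlagBits2::eFragmentShader; |
|   //   barrier.dstAccessMask = vk::AccessFlagBits2::eShaderRead; |
|   // |
|   //   vk::DependencyInfo dependencyInfo; |
|   //   dependencyInfo.memoryBarrierCount = 1; |
|   //   dependencyInfo.pMemoryBarriers    = &barrier; |
|   // |
|   //   commandBuffer.setEvent2( event, dependencyInfo );    // signal once the producing work is recorded |
|   //   // ... record independent work ... |
|   //   commandBuffer.waitEvents2( event, dependencyInfo );  // ArrayProxy accepts single elements |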
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo * pDependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( pDependencyInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::pipelineBarrier2( const VULKAN_HPP_NAMESPACE::DependencyInfo & dependencyInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdPipelineBarrier2( m_commandBuffer, reinterpret_cast<const VkDependencyInfo *>( &dependencyInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
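|   // Usage sketch (illustrative comment, not generated from the registry): an image layout transition |
|   // recorded through the pipelineBarrier2 overloads defined above. Assumes the default `vk` namespace |
|   // alias and caller-provided `commandBuffer` and `image` handles. |
|   // |
|   //   vk::ImageMemoryBarrier2 imageBarrier; |
|   //   imageBarrier.srcStageMask     = vk::PipelineStageFlagBits2::eTopOfPipe; |
|   //   imageBarrier.dstStageMask     = vk::PipelineStageFlagBits2::eColorAttachmentOutput; |
|   //   imageBarrier.dstAccessMask    = vk::AccessFlagBits2::eColorAttachmentWrite; |
|   //   imageBarrier.oldLayout        = vk::ImageLayout::eUndefined; |
|   //   imageBarrier.newLayout        = vk::ImageLayout::eColorAttachmentOptimal; |
|   //   imageBarrier.image            = image; |
|   //   imageBarrier.subresourceRange = { vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1 }; |
|   // |
|   //   vk::DependencyInfo dependencyInfo; |
|   //   dependencyInfo.imageMemoryBarrierCount = 1; |
|   //   dependencyInfo.pImageMemoryBarriers    = &imageBarrier; |
|   // |
|   //   commandBuffer.pipelineBarrier2( dependencyInfo ); |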
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::writeTimestamp2( VULKAN_HPP_NAMESPACE::PipelineStageFlags2 stage, VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdWriteTimestamp2( m_commandBuffer, static_cast<VkPipelineStageFlags2>( stage ), static_cast<VkQueryPool>( queryPool ), query ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::submit2( uint32_t submitCount, const VULKAN_HPP_NAMESPACE::SubmitInfo2 * pSubmits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkQueueSubmit2( m_queue, submitCount, reinterpret_cast<const VkSubmitInfo2 *>( pSubmits ), static_cast<VkFence>( fence ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Queue::submit2( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SubmitInfo2> const & submits, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkQueueSubmit2( m_queue, submits.size(), reinterpret_cast<const VkSubmitInfo2 *>( submits.data() ), static_cast<VkFence>( fence ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::submit2" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
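|   // Usage sketch (illustrative comment, not generated from the registry): submitting one recorded |
|   // command buffer through the Queue::submit2 overloads defined above. Assumes the default `vk` |
|   // namespace alias and caller-provided `queue` and `commandBuffer` handles; a vk::Fence may be |
|   // passed as the second argument. |
|   // |
|   //   vk::CommandBufferSubmitInfo commandBufferInfo; |
|   //   commandBufferInfo.commandBuffer = commandBuffer; |
|   // |
|   //   vk::SubmitInfo2 submitInfo; |
|   //   submitInfo.commandBufferInfoCount = 1; |
|   //   submitInfo.pCommandBufferInfos    = &commandBufferInfo; |
|   // |
|   //   queue.submit2( submitInfo );  // ArrayProxy accepts a single vk::SubmitInfo2 |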
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 * pCopyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( pCopyBufferInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyBuffer2( const VULKAN_HPP_NAMESPACE::CopyBufferInfo2 & copyBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdCopyBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyBufferInfo2 *>( ©BufferInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
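|   // Usage sketch (illustrative comment, not generated from the registry): a single-region buffer copy |
|   // with the copyBuffer2 overloads defined above; the other *2 copy commands below follow the same |
|   // Info2 / Region2 pattern. Assumes the default `vk` namespace alias and caller-provided |
|   // `commandBuffer`, `srcBuffer`, `dstBuffer`, and `byteSize`. |
|   // |
|   //   vk::BufferCopy2 region; |
|   //   region.size = byteSize;  // srcOffset and dstOffset default to 0 |
|   // |
|   //   vk::CopyBufferInfo2 copyInfo; |
|   //   copyInfo.srcBuffer   = srcBuffer; |
|   //   copyInfo.dstBuffer   = dstBuffer; |
|   //   copyInfo.regionCount = 1; |
|   //   copyInfo.pRegions    = &region; |
|   // |
|   //   commandBuffer.copyBuffer2( copyInfo ); |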
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 * pCopyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( pCopyImageInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyImage2( const VULKAN_HPP_NAMESPACE::CopyImageInfo2 & copyImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdCopyImage2( m_commandBuffer, reinterpret_cast<const VkCopyImageInfo2 *>( ©ImageInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 * pCopyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( pCopyBufferToImageInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyBufferToImage2( const VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 & copyBufferToImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdCopyBufferToImage2( m_commandBuffer, reinterpret_cast<const VkCopyBufferToImageInfo2 *>( ©BufferToImageInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 * pCopyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( pCopyImageToBufferInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::copyImageToBuffer2( const VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 & copyImageToBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdCopyImageToBuffer2( m_commandBuffer, reinterpret_cast<const VkCopyImageToBufferInfo2 *>( ©ImageToBufferInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 * pBlitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( pBlitImageInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::blitImage2( const VULKAN_HPP_NAMESPACE::BlitImageInfo2 & blitImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdBlitImage2( m_commandBuffer, reinterpret_cast<const VkBlitImageInfo2 *>( &blitImageInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 * pResolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( pResolveImageInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::resolveImage2( const VULKAN_HPP_NAMESPACE::ResolveImageInfo2 & resolveImageInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdResolveImage2( m_commandBuffer, reinterpret_cast<const VkResolveImageInfo2 *>( &resolveImageInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginRendering( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdBeginRendering( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endRendering( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdEndRendering( m_commandBuffer ); |
| } |
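|   // Usage sketch (illustrative comment, not generated from the registry): a minimal dynamic rendering |
|   // pass with one color attachment, using the beginRendering / endRendering overloads defined above. |
|   // Assumes the default `vk` namespace alias and caller-provided `commandBuffer`, `colorImageView`, |
|   // and `extent` (vk::Extent2D). |
|   // |
|   //   vk::RenderingAttachmentInfo colorAttachment; |
|   //   colorAttachment.imageView   = colorImageView; |
|   //   colorAttachment.imageLayout = vk::ImageLayout::eColorAttachmentOptimal; |
|   //   colorAttachment.loadOp      = vk::AttachmentLoadOp::eClear; |
|   //   colorAttachment.storeOp     = vk::AttachmentStoreOp::eStore; |
|   // |
|   //   vk::RenderingInfo renderingInfo; |
|   //   renderingInfo.renderArea           = vk::Rect2D( { 0, 0 }, extent ); |
|   //   renderingInfo.layerCount           = 1; |
|   //   renderingInfo.colorAttachmentCount = 1; |
|   //   renderingInfo.pColorAttachments    = &colorAttachment; |
|   // |
|   //   commandBuffer.beginRendering( renderingInfo ); |
|   //   // ... bind pipeline and state, record draws ... |
|   //   commandBuffer.endRendering(); |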
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setCullMode( VULKAN_HPP_NAMESPACE::CullModeFlags cullMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetCullMode( m_commandBuffer, static_cast<VkCullModeFlags>( cullMode ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setFrontFace( VULKAN_HPP_NAMESPACE::FrontFace frontFace, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetFrontFace( m_commandBuffer, static_cast<VkFrontFace>( frontFace ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveTopology( VULKAN_HPP_NAMESPACE::PrimitiveTopology primitiveTopology, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetPrimitiveTopology( m_commandBuffer, static_cast<VkPrimitiveTopology>( primitiveTopology ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::Viewport * pViewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetViewportWithCount( m_commandBuffer, viewportCount, reinterpret_cast<const VkViewport *>( pViewports ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setViewportWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Viewport> const & viewports, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdSetViewportWithCount( m_commandBuffer, viewports.size(), reinterpret_cast<const VkViewport *>( viewports.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( uint32_t scissorCount, const VULKAN_HPP_NAMESPACE::Rect2D * pScissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetScissorWithCount( m_commandBuffer, scissorCount, reinterpret_cast<const VkRect2D *>( pScissors ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setScissorWithCount( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & scissors, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdSetScissorWithCount( m_commandBuffer, scissors.size(), reinterpret_cast<const VkRect2D *>( scissors.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, const VULKAN_HPP_NAMESPACE::DeviceSize * pStrides, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBindVertexBuffers2( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ), reinterpret_cast<const VkDeviceSize *>( pSizes ), reinterpret_cast<const VkDeviceSize *>( pStrides ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::bindVertexBuffers2( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & strides, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| #ifdef VULKAN_HPP_NO_EXCEPTIONS |
| VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); |
| VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); |
| VULKAN_HPP_ASSERT( strides.empty() || buffers.size() == strides.size() ); |
| #else |
| if ( buffers.size() != offsets.size() ) |
| { |
| throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != offsets.size()" ); |
| } |
| if ( !sizes.empty() && buffers.size() != sizes.size() ) |
| { |
| throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != sizes.size()" ); |
| } |
| if ( !strides.empty() && buffers.size() != strides.size() ) |
| { |
| throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindVertexBuffers2: buffers.size() != strides.size()" ); |
| } |
| #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ |
| |
| |
| d.vkCmdBindVertexBuffers2( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ), reinterpret_cast<const VkDeviceSize *>( strides.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setDepthTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetDepthTestEnable( m_commandBuffer, static_cast<VkBool32>( depthTestEnable ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setDepthWriteEnable( VULKAN_HPP_NAMESPACE::Bool32 depthWriteEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetDepthWriteEnable( m_commandBuffer, static_cast<VkBool32>( depthWriteEnable ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setDepthCompareOp( VULKAN_HPP_NAMESPACE::CompareOp depthCompareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetDepthCompareOp( m_commandBuffer, static_cast<VkCompareOp>( depthCompareOp ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setDepthBoundsTestEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBoundsTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetDepthBoundsTestEnable( m_commandBuffer, static_cast<VkBool32>( depthBoundsTestEnable ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setStencilTestEnable( VULKAN_HPP_NAMESPACE::Bool32 stencilTestEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetStencilTestEnable( m_commandBuffer, static_cast<VkBool32>( stencilTestEnable ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setStencilOp( VULKAN_HPP_NAMESPACE::StencilFaceFlags faceMask, VULKAN_HPP_NAMESPACE::StencilOp failOp, VULKAN_HPP_NAMESPACE::StencilOp passOp, VULKAN_HPP_NAMESPACE::StencilOp depthFailOp, VULKAN_HPP_NAMESPACE::CompareOp compareOp, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetStencilOp( m_commandBuffer, static_cast<VkStencilFaceFlags>( faceMask ), static_cast<VkStencilOp>( failOp ), static_cast<VkStencilOp>( passOp ), static_cast<VkStencilOp>( depthFailOp ), static_cast<VkCompareOp>( compareOp ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setRasterizerDiscardEnable( VULKAN_HPP_NAMESPACE::Bool32 rasterizerDiscardEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetRasterizerDiscardEnable( m_commandBuffer, static_cast<VkBool32>( rasterizerDiscardEnable ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setDepthBiasEnable( VULKAN_HPP_NAMESPACE::Bool32 depthBiasEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetDepthBiasEnable( m_commandBuffer, static_cast<VkBool32>( depthBiasEnable ) ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setPrimitiveRestartEnable( VULKAN_HPP_NAMESPACE::Bool32 primitiveRestartEnable, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetPrimitiveRestartEnable( m_commandBuffer, static_cast<VkBool32>( primitiveRestartEnable ) ); |
| } |
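|   // Usage sketch (illustrative comment, not generated from the registry): overriding state at record |
|   // time with the dynamic-state commands defined above (the bound pipeline must list the matching |
|   // dynamic states). Assumes the default `vk` namespace alias and caller-provided `commandBuffer`, |
|   // `viewport` (vk::Viewport), and `scissor` (vk::Rect2D). |
|   // |
|   //   commandBuffer.setCullMode( vk::CullModeFlagBits::eBack ); |
|   //   commandBuffer.setFrontFace( vk::FrontFace::eCounterClockwise ); |
|   //   commandBuffer.setPrimitiveTopology( vk::PrimitiveTopology::eTriangleList ); |
|   //   commandBuffer.setViewportWithCount( viewport );  // ArrayProxy: a single element or a container |
|   //   commandBuffer.setScissorWithCount( scissor ); |
|   //   commandBuffer.setDepthTestEnable( VK_TRUE ); |
|   //   commandBuffer.setDepthWriteEnable( VK_TRUE ); |
|   //   commandBuffer.setDepthCompareOp( vk::CompareOp::eLessOrEqual ); |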
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetDeviceBufferMemoryRequirements( m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; |
| d.vkGetDeviceBufferMemoryRequirements( m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); |
| |
| |
| return memoryRequirements; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getBufferMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); |
| d.vkGetDeviceBufferMemoryRequirements( m_device, reinterpret_cast<const VkDeviceBufferMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
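|   // Usage sketch (illustrative comment, not generated from the registry): querying memory requirements |
|   // for a buffer that has not been created yet, via the overloads defined above. Assumes the default |
|   // `vk` namespace alias and a caller-provided `device` handle. |
|   // |
|   //   vk::BufferCreateInfo bufferCreateInfo; |
|   //   bufferCreateInfo.size  = 65536; |
|   //   bufferCreateInfo.usage = vk::BufferUsageFlagBits::eStorageBuffer; |
|   // |
|   //   vk::DeviceBufferMemoryRequirements requirementsInfo; |
|   //   requirementsInfo.pCreateInfo = &bufferCreateInfo; |
|   // |
|   //   vk::MemoryRequirements2 memoryRequirements = device.getBufferMemoryRequirements( requirementsInfo ); |
|   // |
|   //   // or, through the StructureChain overload, pull extending structures in the same call: |
|   //   auto chain = device.getBufferMemoryRequirements<vk::MemoryRequirements2, vk::MemoryDedicatedRequirements>( requirementsInfo ); |
|   //   bool prefersDedicated = chain.get<vk::MemoryDedicatedRequirements>().prefersDedicatedAllocation; |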
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, VULKAN_HPP_NAMESPACE::MemoryRequirements2 * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetDeviceImageMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), reinterpret_cast<VkMemoryRequirements2 *>( pMemoryRequirements ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::MemoryRequirements2 Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::MemoryRequirements2 memoryRequirements; |
| d.vkGetDeviceImageMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); |
| |
| |
| return memoryRequirements; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> Device::getImageMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::MemoryRequirements2 & memoryRequirements = structureChain.template get<VULKAN_HPP_NAMESPACE::MemoryRequirements2>(); |
| d.vkGetDeviceImageMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), reinterpret_cast<VkMemoryRequirements2 *>( &memoryRequirements ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements * pInfo, uint32_t * pSparseMemoryRequirementCount, VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 * pSparseMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( pInfo ), pSparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( pSparseMemoryRequirements ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements; |
| uint32_t sparseMemoryRequirementCount; |
| d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); |
| sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); |
| d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); |
| |
| VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); |
| if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) |
| { |
| sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); |
| } |
| return sparseMemoryRequirements; |
| } |
| |
| template <typename SparseImageMemoryRequirements2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageMemoryRequirements2>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> Device::getImageSparseMemoryRequirements( const VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements & info, SparseImageMemoryRequirements2Allocator & sparseImageMemoryRequirements2Allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2, SparseImageMemoryRequirements2Allocator> sparseMemoryRequirements( sparseImageMemoryRequirements2Allocator ); |
| uint32_t sparseMemoryRequirementCount; |
| d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, nullptr ); |
| sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); |
| d.vkGetDeviceImageSparseMemoryRequirements( m_device, reinterpret_cast<const VkDeviceImageMemoryRequirements *>( &info ), &sparseMemoryRequirementCount, reinterpret_cast<VkSparseImageMemoryRequirements2 *>( sparseMemoryRequirements.data() ) ); |
| |
| VULKAN_HPP_ASSERT( sparseMemoryRequirementCount <= sparseMemoryRequirements.size() ); |
| if ( sparseMemoryRequirementCount < sparseMemoryRequirements.size() ) |
| { |
| sparseMemoryRequirements.resize( sparseMemoryRequirementCount ); |
| } |
| return sparseMemoryRequirements; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
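|   // Usage sketch (illustrative comment, not generated from the registry): querying sparse memory |
|   // requirements for a prospective image; with the default allocator the overload above returns a |
|   // std::vector. Assumes the default `vk` namespace alias, a caller-provided `device`, and a filled |
|   // vk::ImageCreateInfo named `imageCreateInfo`. |
|   // |
|   //   vk::DeviceImageMemoryRequirements requirementsInfo; |
|   //   requirementsInfo.pCreateInfo = &imageCreateInfo; |
|   // |
|   //   std::vector<vk::SparseImageMemoryRequirements2> sparseRequirements = |
|   //     device.getImageSparseMemoryRequirements( requirementsInfo ); |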
| |
| //=== VK_KHR_surface === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Instance::destroySurfaceKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroySurfaceKHR( m_instance, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::Bool32 * pSupported, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( pSupported ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Bool32>::type PhysicalDevice::getSurfaceSupportKHR( uint32_t queueFamilyIndex, VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Bool32 supported; |
| VkResult result = d.vkGetPhysicalDeviceSurfaceSupportKHR( m_physicalDevice, queueFamilyIndex, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkBool32 *>( &supported ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceSupportKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), supported ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR * pSurfaceCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( pSurfaceCapabilities ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::type PhysicalDevice::getSurfaceCapabilitiesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR surfaceCapabilities; |
| VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilitiesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilitiesKHR *>( &surfaceCapabilities ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilitiesKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t * pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormatKHR * pSurfaceFormats, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( pSurfaceFormats ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename SurfaceFormatKHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats; |
| uint32_t surfaceFormatCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) |
| { |
| surfaceFormats.resize( surfaceFormatCount ); |
| result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); |
| VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); |
| if ( surfaceFormatCount < surfaceFormats.size() ) |
| { |
| surfaceFormats.resize( surfaceFormatCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats ); |
| } |
| |
| template <typename SurfaceFormatKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormatKHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator>>::type PhysicalDevice::getSurfaceFormatsKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, SurfaceFormatKHRAllocator & surfaceFormatKHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR, SurfaceFormatKHRAllocator> surfaceFormats( surfaceFormatKHRAllocator ); |
| uint32_t surfaceFormatCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) |
| { |
| surfaceFormats.resize( surfaceFormatCount ); |
| result = d.vkGetPhysicalDeviceSurfaceFormatsKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormatKHR *>( surfaceFormats.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormatsKHR" ); |
| VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); |
| if ( surfaceFormatCount < surfaceFormats.size() ) |
| { |
| surfaceFormats.resize( surfaceFormatCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t * pPresentModeCount, VULKAN_HPP_NAMESPACE::PresentModeKHR * pPresentModes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pPresentModeCount, reinterpret_cast<VkPresentModeKHR *>( pPresentModes ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename PresentModeKHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes; |
| uint32_t presentModeCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && presentModeCount ) |
| { |
| presentModes.resize( presentModeCount ); |
| result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); |
| VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); |
| if ( presentModeCount < presentModes.size() ) |
| { |
| presentModes.resize( presentModeCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes ); |
| } |
| |
| template <typename PresentModeKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PresentModeKHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator>>::type PhysicalDevice::getSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, PresentModeKHRAllocator & presentModeKHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::PresentModeKHR, PresentModeKHRAllocator> presentModes( presentModeKHRAllocator ); |
| uint32_t presentModeCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && presentModeCount ) |
| { |
| presentModes.resize( presentModeCount ); |
| result = d.vkGetPhysicalDeviceSurfacePresentModesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &presentModeCount, reinterpret_cast<VkPresentModeKHR *>( presentModes.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfacePresentModesKHR" ); |
| VULKAN_HPP_ASSERT( presentModeCount <= presentModes.size() ); |
| if ( presentModeCount < presentModes.size() ) |
| { |
| presentModes.resize( presentModeCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentModes ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
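|   // Usage sketch (illustrative comment, not generated from the registry): the usual pre-swapchain |
|   // surface queries, using the overloads defined above. Assumes the default `vk` namespace alias and |
|   // caller-provided `physicalDevice`, `surface`, and `queueFamilyIndex`. |
|   // |
|   //   if ( physicalDevice.getSurfaceSupportKHR( queueFamilyIndex, surface ) ) |
|   //   { |
|   //     vk::SurfaceCapabilitiesKHR        capabilities = physicalDevice.getSurfaceCapabilitiesKHR( surface ); |
|   //     std::vector<vk::SurfaceFormatKHR> formats      = physicalDevice.getSurfaceFormatsKHR( surface ); |
|   //     std::vector<vk::PresentModeKHR>   presentModes = physicalDevice.getSurfacePresentModesKHR( surface ); |
|   //     // pick a format and present mode, and clamp the swapchain extent to the capabilities |
|   //   } |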
| |
| //=== VK_KHR_swapchain === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSwapchainKHR *>( pSwapchain ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; |
| VkResult result = d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type Device::createSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; |
| VkResult result = d.vkCreateSwapchainKHR( m_device, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSwapchainKHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroySwapchainKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroySwapchainKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t * pSwapchainImageCount, VULKAN_HPP_NAMESPACE::Image * pSwapchainImages, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), pSwapchainImageCount, reinterpret_cast<VkImage *>( pSwapchainImages ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename ImageAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages; |
| uint32_t swapchainImageCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && swapchainImageCount ) |
| { |
| swapchainImages.resize( swapchainImageCount ); |
| result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); |
| VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); |
| if ( swapchainImageCount < swapchainImages.size() ) |
| { |
| swapchainImages.resize( swapchainImageCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages ); |
| } |
| |
| template <typename ImageAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, Image>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator>>::type Device::getSwapchainImagesKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, ImageAllocator & imageAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::Image, ImageAllocator> swapchainImages( imageAllocator ); |
| uint32_t swapchainImageCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && swapchainImageCount ) |
| { |
| swapchainImages.resize( swapchainImageCount ); |
| result = d.vkGetSwapchainImagesKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), &swapchainImageCount, reinterpret_cast<VkImage *>( swapchainImages.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainImagesKHR" ); |
| VULKAN_HPP_ASSERT( swapchainImageCount <= swapchainImages.size() ); |
| if ( swapchainImageCount < swapchainImages.size() ) |
| { |
| swapchainImages.resize( swapchainImageCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchainImages ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
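|   // Usage sketch (illustrative comment, not generated from the registry): creating a swapchain and |
|   // fetching its images with the overloads defined above. Assumes the default `vk` namespace alias, |
|   // caller-provided `device`, `surface`, and `capabilities` (vk::SurfaceCapabilitiesKHR), and a |
|   // format / present mode chosen from the surface queries. |
|   // |
|   //   vk::SwapchainCreateInfoKHR swapchainCreateInfo; |
|   //   swapchainCreateInfo.surface          = surface; |
|   //   swapchainCreateInfo.minImageCount    = capabilities.minImageCount; |
|   //   swapchainCreateInfo.imageFormat      = vk::Format::eB8G8R8A8Unorm; |
|   //   swapchainCreateInfo.imageColorSpace  = vk::ColorSpaceKHR::eSrgbNonlinear; |
|   //   swapchainCreateInfo.imageExtent      = capabilities.currentExtent; |
|   //   swapchainCreateInfo.imageArrayLayers = 1; |
|   //   swapchainCreateInfo.imageUsage       = vk::ImageUsageFlagBits::eColorAttachment; |
|   //   swapchainCreateInfo.preTransform     = capabilities.currentTransform; |
|   //   swapchainCreateInfo.compositeAlpha   = vk::CompositeAlphaFlagBitsKHR::eOpaque; |
|   //   swapchainCreateInfo.presentMode      = vk::PresentModeKHR::eFifo; |
|   //   swapchainCreateInfo.clipped          = VK_TRUE; |
|   // |
|   //   vk::SwapchainKHR       swapchain       = device.createSwapchainKHR( swapchainCreateInfo ); |
|   //   std::vector<vk::Image> swapchainImages = device.getSwapchainImagesKHR( swapchain ); |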
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, uint32_t * pImageIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), pImageIndex ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImageKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint64_t timeout, VULKAN_HPP_NAMESPACE::Semaphore semaphore, VULKAN_HPP_NAMESPACE::Fence fence, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| uint32_t imageIndex; |
| VkResult result = d.vkAcquireNextImageKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ), timeout, static_cast<VkSemaphore>( semaphore ), static_cast<VkFence>( fence ), &imageIndex ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImageKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); |
| |
| return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
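| |
| // Usage sketch (illustrative comment only). Assumes `device`, `swapchain`, and a binary vk::Semaphore `imageAvailable` created elsewhere. The enhanced-mode overload returns a ResultValue<uint32_t> instead of the bare index because eTimeout, eNotReady, and eSuboptimalKHR are success codes the caller usually wants to inspect; eErrorOutOfDateKHR still throws when exceptions are enabled. |
| // |
| //   vk::ResultValue<uint32_t> acquired = device.acquireNextImageKHR( swapchain, UINT64_MAX, imageAvailable, nullptr ); |
| //   if ( acquired.result == vk::Result::eSuboptimalKHR ) |
| //   { |
| //     // still presentable, but consider recreating the swapchain after this frame |
| //   } |
| //   uint32_t imageIndex = acquired.value; |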
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR * pPresentInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( pPresentInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Queue::presentKHR( const VULKAN_HPP_NAMESPACE::PresentInfoKHR & presentInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkQueuePresentKHR( m_queue, reinterpret_cast<const VkPresentInfoKHR *>( &presentInfo ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Queue::presentKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); |
| |
| return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
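| |
| // Usage sketch (illustrative comment only). Assumes a vk::Queue `presentQueue`, a vk::Semaphore `renderFinished`, and the `swapchain` / `imageIndex` from the acquire sketch above. This overload returns the vk::Result so eSuboptimalKHR can be handled without an exception. |
| // |
| //   vk::PresentInfoKHR presentInfo( renderFinished, swapchain, imageIndex ); |
| //   vk::Result presentResult = presentQueue.presentKHR( presentInfo ); |
| //   if ( presentResult == vk::Result::eSuboptimalKHR ) |
| //   { |
| //     // recreate the swapchain with the current surface extent |
| //   } |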
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupPresentCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR * pDeviceGroupPresentCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( pDeviceGroupPresentCapabilities ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::type Device::getGroupPresentCapabilitiesKHR( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR deviceGroupPresentCapabilities; |
| VkResult result = d.vkGetDeviceGroupPresentCapabilitiesKHR( m_device, reinterpret_cast<VkDeviceGroupPresentCapabilitiesKHR *>( &deviceGroupPresentCapabilities ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupPresentCapabilitiesKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), deviceGroupPresentCapabilities ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
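| |
| // Usage sketch (illustrative comment only, assumes `device` belongs to a device group). |
| // |
| //   vk::DeviceGroupPresentCapabilitiesKHR groupCaps = device.getGroupPresentCapabilitiesKHR(); |
| //   // groupCaps.presentMask describes which physical devices in the group can present images from which others; |
| //   // groupCaps.modes holds the supported vk::DeviceGroupPresentModeFlagBitsKHR values. |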
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR * pModes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( pModes ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR>::type Device::getGroupSurfacePresentModesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR modes; |
| VkResult result = d.vkGetDeviceGroupSurfacePresentModesKHR( m_device, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkDeviceGroupPresentModeFlagsKHR *>( &modes ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getGroupSurfacePresentModesKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), modes ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
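| |
| // Usage sketch (illustrative comment only, assumes a vk::SurfaceKHR `surface`). |
| // |
| //   vk::DeviceGroupPresentModeFlagsKHR presentModes = device.getGroupSurfacePresentModesKHR( surface ); |
| //   if ( presentModes & vk::DeviceGroupPresentModeFlagBitsKHR::eLocal ) |
| //   { |
| //     // local presentation is available for this surface |
| //   } |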
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, uint32_t * pRectCount, VULKAN_HPP_NAMESPACE::Rect2D * pRects, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), pRectCount, reinterpret_cast<VkRect2D *>( pRects ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Rect2DAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects; |
| uint32_t rectCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && rectCount ) |
| { |
| rects.resize( rectCount ); |
| result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); |
| VULKAN_HPP_ASSERT( rectCount <= rects.size() ); |
| if ( rectCount < rects.size() ) |
| { |
| rects.resize( rectCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects ); |
| } |
| |
| template <typename Rect2DAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, Rect2D>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator>>::type PhysicalDevice::getPresentRectanglesKHR( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Rect2DAllocator & rect2DAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::Rect2D, Rect2DAllocator> rects( rect2DAllocator ); |
| uint32_t rectCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && rectCount ) |
| { |
| rects.resize( rectCount ); |
| result = d.vkGetPhysicalDevicePresentRectanglesKHR( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), &rectCount, reinterpret_cast<VkRect2D *>( rects.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getPresentRectanglesKHR" ); |
| VULKAN_HPP_ASSERT( rectCount <= rects.size() ); |
| if ( rectCount < rects.size() ) |
| { |
| rects.resize( rectCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), rects ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
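| |
| // Usage sketch (illustrative comment only, assumes a vk::PhysicalDevice `physicalDevice` and a vk::SurfaceKHR `surface`). As with the other enumerations, the wrapper re-queries on VK_INCOMPLETE, so the caller never sees a partially filled vector. |
| // |
| //   std::vector<vk::Rect2D> presentRects = physicalDevice.getPresentRectanglesKHR( surface ); |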
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR * pAcquireInfo, uint32_t * pImageIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( pAcquireInfo ), pImageIndex ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE ResultValue<uint32_t> Device::acquireNextImage2KHR( const VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR & acquireInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| uint32_t imageIndex; |
| VkResult result = d.vkAcquireNextImage2KHR( m_device, reinterpret_cast<const VkAcquireNextImageInfoKHR *>( &acquireInfo ), &imageIndex ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireNextImage2KHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eTimeout, VULKAN_HPP_NAMESPACE::Result::eNotReady, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); |
| |
| return ResultValue<uint32_t>( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageIndex ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
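| |
| // Usage sketch (illustrative comment only). vkAcquireNextImage2KHR is the device-group aware variant; deviceMask selects the physical devices that may access the acquired image. Assumes `swapchain` and `imageAvailable` as above, with a single-device mask of 0x1. |
| // |
| //   vk::AcquireNextImageInfoKHR acquireInfo( swapchain, UINT64_MAX, imageAvailable, {}, 0x1 ); |
| //   vk::ResultValue<uint32_t> acquired = device.acquireNextImage2KHR( acquireInfo ); |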
| |
| //=== VK_KHR_display === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPropertiesKHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( pProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename DisplayPropertiesKHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPropertiesKHR( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties; |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| |
| template <typename DisplayPropertiesKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayPropertiesKHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPropertiesKHR( DisplayPropertiesKHRAllocator & displayPropertiesKHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR, DisplayPropertiesKHRAllocator> properties( displayPropertiesKHRAllocator ); |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkGetPhysicalDeviceDisplayPropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPropertiesKHR *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPropertiesKHR" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
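| |
| // Usage sketch (illustrative comment only, assumes a vk::PhysicalDevice `physicalDevice`). Enumerates the displays directly attached to the physical device. |
| // |
| //   std::vector<vk::DisplayPropertiesKHR> displayProps = physicalDevice.getDisplayPropertiesKHR(); |
| //   for ( vk::DisplayPropertiesKHR const & dp : displayProps ) |
| //   { |
| //     // dp.display is the vk::DisplayKHR handle; dp.displayName may be a null pointer |
| //   } |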
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlanePropertiesKHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( pProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties; |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| |
| template <typename DisplayPlanePropertiesKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlanePropertiesKHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayPlanePropertiesKHR( DisplayPlanePropertiesKHRAllocator & displayPlanePropertiesKHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR, DisplayPlanePropertiesKHRAllocator> properties( displayPlanePropertiesKHRAllocator ); |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkGetPhysicalDeviceDisplayPlanePropertiesKHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlanePropertiesKHR *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlanePropertiesKHR" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, uint32_t * pDisplayCount, VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplays, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, pDisplayCount, reinterpret_cast<VkDisplayKHR *>( pDisplays ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename DisplayKHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays; |
| uint32_t displayCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && displayCount ) |
| { |
| displays.resize( displayCount ); |
| result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); |
| VULKAN_HPP_ASSERT( displayCount <= displays.size() ); |
| if ( displayCount < displays.size() ) |
| { |
| displays.resize( displayCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays ); |
| } |
| |
| template <typename DisplayKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayKHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator>>::type PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR( uint32_t planeIndex, DisplayKHRAllocator & displayKHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DisplayKHR, DisplayKHRAllocator> displays( displayKHRAllocator ); |
| uint32_t displayCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && displayCount ) |
| { |
| displays.resize( displayCount ); |
| result = d.vkGetDisplayPlaneSupportedDisplaysKHR( m_physicalDevice, planeIndex, &displayCount, reinterpret_cast<VkDisplayKHR *>( displays.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneSupportedDisplaysKHR" ); |
| VULKAN_HPP_ASSERT( displayCount <= displays.size() ); |
| if ( displayCount < displays.size() ) |
| { |
| displays.resize( displayCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displays ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
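| |
| // Usage sketch (illustrative comment only). Planes are enumerated with getDisplayPlanePropertiesKHR() above; for each plane index, this call lists the displays the plane can be placed on. Assumes `physicalDevice` as above. |
| // |
| //   uint32_t planeCount = static_cast<uint32_t>( physicalDevice.getDisplayPlanePropertiesKHR().size() ); |
| //   for ( uint32_t plane = 0; plane < planeCount; ++plane ) |
| //   { |
| //     std::vector<vk::DisplayKHR> supportedDisplays = physicalDevice.getDisplayPlaneSupportedDisplaysKHR( plane ); |
| //   } |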
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), pPropertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( pProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename DisplayModePropertiesKHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties; |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| |
| template <typename DisplayModePropertiesKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayModePropertiesKHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator>>::type PhysicalDevice::getDisplayModePropertiesKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, DisplayModePropertiesKHRAllocator & displayModePropertiesKHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR, DisplayModePropertiesKHRAllocator> properties( displayModePropertiesKHRAllocator ); |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkGetDisplayModePropertiesKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), &propertyCount, reinterpret_cast<VkDisplayModePropertiesKHR *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayModePropertiesKHR" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DisplayModeKHR * pMode, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDisplayModeKHR *>( pMode ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::type PhysicalDevice::createDisplayModeKHR( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; |
| VkResult result = d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDisplayModeKHR *>( &mode ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), mode ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>>::type PhysicalDevice::createDisplayModeKHRUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DisplayModeKHR mode; |
| VkResult result = d.vkCreateDisplayModeKHR( m_physicalDevice, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayModeCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDisplayModeKHR *>( &mode ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::createDisplayModeKHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayModeKHR, Dispatch>( mode, ObjectDestroy<PhysicalDevice, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
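| |
| // Usage sketch (illustrative comment only, assumes a vk::DisplayKHR `display` from getDisplayPropertiesKHR() and hypothetical 1920x1080 @ 60 Hz parameters; the refresh rate is given in millihertz). Built-in modes come from getDisplayModePropertiesKHR() above; this call requests an additional custom mode. |
| // |
| //   vk::DisplayModeParametersKHR parameters( vk::Extent2D( 1920, 1080 ), 60000 ); |
| //   vk::DisplayModeCreateInfoKHR modeCreateInfo( {}, parameters ); |
| //   vk::DisplayModeKHR mode = physicalDevice.createDisplayModeKHR( display, modeCreateInfo ); |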
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR * pCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( pCapabilities ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::type PhysicalDevice::getDisplayPlaneCapabilitiesKHR( VULKAN_HPP_NAMESPACE::DisplayModeKHR mode, uint32_t planeIndex, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR capabilities; |
| VkResult result = d.vkGetDisplayPlaneCapabilitiesKHR( m_physicalDevice, static_cast<VkDisplayModeKHR>( mode ), planeIndex, reinterpret_cast<VkDisplayPlaneCapabilitiesKHR *>( &capabilities ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneCapabilitiesKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
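| |
| // Usage sketch (illustrative comment only, reusing the hypothetical `mode` and `plane` from the sketches above). |
| // |
| //   vk::DisplayPlaneCapabilitiesKHR planeCaps = physicalDevice.getDisplayPlaneCapabilitiesKHR( mode, plane ); |
| //   // planeCaps.minDstExtent / planeCaps.maxDstExtent bound the presentable image size on this plane |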
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createDisplayPlaneSurfaceKHR( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createDisplayPlaneSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateDisplayPlaneSurfaceKHR( m_instance, reinterpret_cast<const VkDisplaySurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDisplayPlaneSurfaceKHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
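| |
| // Usage sketch (illustrative comment only, assumes a vk::Instance `instance` plus the hypothetical `mode` and `plane` chosen above; the remaining fields use placeholder values). |
| // |
| //   vk::DisplaySurfaceCreateInfoKHR surfaceCreateInfo; |
| //   surfaceCreateInfo.setDisplayMode( mode ) |
| //     .setPlaneIndex( plane ) |
| //     .setTransform( vk::SurfaceTransformFlagBitsKHR::eIdentity ) |
| //     .setAlphaMode( vk::DisplayPlaneAlphaFlagBitsKHR::eOpaque ) |
| //     .setImageExtent( vk::Extent2D( 1920, 1080 ) ); |
| //   vk::SurfaceKHR displaySurface = instance.createDisplayPlaneSurfaceKHR( surfaceCreateInfo ); |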
| |
| //=== VK_KHR_display_swapchain === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createSharedSwapchainsKHR( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR * pCreateInfos, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateSharedSwapchainsKHR( m_device, swapchainCount, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( pCreateInfos ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSwapchainKHR *>( pSwapchains ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename SwapchainKHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size() ); |
| VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains ); |
| } |
| |
| template <typename SwapchainKHRAllocator, typename Dispatch, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, SwapchainKHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHR( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainKHRAllocator> swapchains( createInfos.size(), swapchainKHRAllocator ); |
| VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchains ); |
| } |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SwapchainKHR>::type Device::createSharedSwapchainKHR( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; |
| VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), swapchain ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch, typename SwapchainKHRAllocator> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() ); |
| VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); |
| std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains; |
| uniqueSwapchains.reserve( createInfos.size() ); |
| ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); |
| for ( auto const & swapchain : swapchains ) |
| { |
| uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) ); |
| } |
| |
| template <typename Dispatch, typename SwapchainKHRAllocator, typename B0, typename std::enable_if<std::is_same<typename B0::value_type, UniqueHandle<SwapchainKHR, Dispatch>>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator>>::type Device::createSharedSwapchainsKHRUnique( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR> const & createInfos, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, SwapchainKHRAllocator & swapchainKHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SwapchainKHR> swapchains( createInfos.size() ); |
| VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, createInfos.size(), reinterpret_cast<const VkSwapchainCreateInfoKHR *>( createInfos.data() ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( swapchains.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainsKHRUnique" ); |
| std::vector<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>, SwapchainKHRAllocator> uniqueSwapchains( swapchainKHRAllocator ); |
| uniqueSwapchains.reserve( createInfos.size() ); |
| ObjectDestroy<Device, Dispatch> deleter( *this, allocator, d ); |
| for ( auto const & swapchain : swapchains ) |
| { |
| uniqueSwapchains.push_back( UniqueHandle<SwapchainKHR, Dispatch>( swapchain, deleter ) ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), std::move( uniqueSwapchains ) ); |
| } |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>>::type Device::createSharedSwapchainKHRUnique( const VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain; |
| VkResult result = d.vkCreateSharedSwapchainsKHR( m_device, 1, reinterpret_cast<const VkSwapchainCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSwapchainKHR *>( &swapchain ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createSharedSwapchainKHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR, Dispatch>( swapchain, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
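| |
| // Usage sketch (illustrative comment only). VK_KHR_display_swapchain allows several swapchains to share presentable images; the singular createSharedSwapchainKHR overloads above simply forward one create info. Assumes two fully populated vk::SwapchainCreateInfoKHR values. |
| // |
| //   std::array<vk::SwapchainCreateInfoKHR, 2> createInfos = { swapchainCreateInfo0, swapchainCreateInfo1 }; |
| //   std::vector<vk::SwapchainKHR> sharedSwapchains = device.createSharedSwapchainsKHR( createInfos ); |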
| |
| #if defined( VK_USE_PLATFORM_XLIB_KHR ) |
| //=== VK_KHR_xlib_surface === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXlibSurfaceKHR( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createXlibSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateXlibSurfaceKHR( m_instance, reinterpret_cast<const VkXlibSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXlibSurfaceKHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display * dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Bool32>( d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, dpy, visualID ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXlibPresentationSupportKHR( uint32_t queueFamilyIndex, Display & dpy, VisualID visualID, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkBool32 result = d.vkGetPhysicalDeviceXlibPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &dpy, visualID ); |
| |
| |
| return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
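| |
| // Usage sketch (illustrative comment only, assumes an X11 `Display * dpy`, the `visualID` of the window's visual, and a `queueFamilyIndex` selected earlier). Typically checked per queue family before creating the surface. |
| // |
| //   vk::Bool32 xlibSupport = physicalDevice.getXlibPresentationSupportKHR( queueFamilyIndex, *dpy, visualID ); |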
| #endif /*VK_USE_PLATFORM_XLIB_KHR*/ |
| |
| #if defined( VK_USE_PLATFORM_XCB_KHR ) |
| //=== VK_KHR_xcb_surface === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createXcbSurfaceKHR( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createXcbSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateXcbSurfaceKHR( m_instance, reinterpret_cast<const VkXcbSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createXcbSurfaceKHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t * connection, xcb_visualid_t visual_id, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Bool32>( d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, connection, visual_id ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getXcbPresentationSupportKHR( uint32_t queueFamilyIndex, xcb_connection_t & connection, xcb_visualid_t visual_id, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkBool32 result = d.vkGetPhysicalDeviceXcbPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &connection, visual_id ); |
| |
| |
| return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| #endif /*VK_USE_PLATFORM_XCB_KHR*/ |
| |
| #if defined( VK_USE_PLATFORM_WAYLAND_KHR ) |
| //=== VK_KHR_wayland_surface === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWaylandSurfaceKHR( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createWaylandSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateWaylandSurfaceKHR( m_instance, reinterpret_cast<const VkWaylandSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWaylandSurfaceKHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display * display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Bool32>( d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, display ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Bool32 PhysicalDevice::getWaylandPresentationSupportKHR( uint32_t queueFamilyIndex, struct wl_display & display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkBool32 result = d.vkGetPhysicalDeviceWaylandPresentationSupportKHR( m_physicalDevice, queueFamilyIndex, &display ); |
| |
| |
| return static_cast<VULKAN_HPP_NAMESPACE::Bool32>( result ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
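| |
| // Usage sketch (illustrative comment only, assumes a `wl_display * waylandDisplay` obtained from wl_display_connect and a `queueFamilyIndex` selected earlier). |
| // |
| //   vk::Bool32 waylandSupport = physicalDevice.getWaylandPresentationSupportKHR( queueFamilyIndex, *waylandDisplay ); |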
| #endif /*VK_USE_PLATFORM_WAYLAND_KHR*/ |
| |
| #if defined( VK_USE_PLATFORM_ANDROID_KHR ) |
| //=== VK_KHR_android_surface === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createAndroidSurfaceKHR( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createAndroidSurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateAndroidSurfaceKHR( m_instance, reinterpret_cast<const VkAndroidSurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createAndroidSurfaceKHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| #endif /*VK_USE_PLATFORM_ANDROID_KHR*/ |
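
  // Illustrative usage sketch for VK_KHR_android_surface (same assumptions as the Wayland sketch above;
  // `nativeWindow` is a hypothetical ANativeWindow * obtained from the Android surface):
  //
  //   vk::AndroidSurfaceCreateInfoKHR surfaceCreateInfo( {}, nativeWindow );
  //   vk::UniqueSurfaceKHR surface = instance.createAndroidSurfaceKHRUnique( surfaceCreateInfo );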
| |
| #if defined( VK_USE_PLATFORM_WIN32_KHR ) |
| //=== VK_KHR_win32_surface === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createWin32SurfaceKHR( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createWin32SurfaceKHRUnique( const VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateWin32SurfaceKHR( m_instance, reinterpret_cast<const VkWin32SurfaceCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createWin32SurfaceKHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE Bool32 PhysicalDevice::getWin32PresentationSupportKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Bool32>( d.vkGetPhysicalDeviceWin32PresentationSupportKHR( m_physicalDevice, queueFamilyIndex ) ); |
| } |
| #endif /*VK_USE_PLATFORM_WIN32_KHR*/ |
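
  // Illustrative usage sketch for VK_KHR_win32_surface (same assumptions; `hInstance` and `hWnd` are the
  // application's hypothetical Win32 instance and window handles):
  //
  //   vk::Win32SurfaceCreateInfoKHR surfaceCreateInfo( {}, hInstance, hWnd );
  //   vk::UniqueSurfaceKHR surface = instance.createWin32SurfaceKHRUnique( surfaceCreateInfo );
  //   vk::Bool32 presentable = physicalDevice.getWin32PresentationSupportKHR( queueFamilyIndex );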
| |
| //=== VK_EXT_debug_report === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT * pCallback, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDebugReportCallbackEXT *>( pCallback ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::type Instance::createDebugReportCallbackEXT( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; |
| VkResult result = d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXT" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), callback ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>>::type Instance::createDebugReportCallbackEXTUnique( const VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback; |
| VkResult result = d.vkCreateDebugReportCallbackEXT( m_instance, reinterpret_cast<const VkDebugReportCallbackCreateInfoEXT *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDebugReportCallbackEXT *>( &callback ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createDebugReportCallbackEXTUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT, Dispatch>( callback, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Instance::destroyDebugReportCallbackEXT( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Instance::destroy( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT callback, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyDebugReportCallbackEXT( m_instance, static_cast<VkDebugReportCallbackEXT>( callback ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char * pLayerPrefix, const char * pMessage, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, pLayerPrefix, pMessage ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Instance::debugReportMessageEXT( VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT flags, VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const std::string & layerPrefix, const std::string & message, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDebugReportMessageEXT( m_instance, static_cast<VkDebugReportFlagsEXT>( flags ), static_cast<VkDebugReportObjectTypeEXT>( objectType ), object, location, messageCode, layerPrefix.c_str(), message.c_str() ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
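
  // Illustrative usage sketch for VK_EXT_debug_report (same assumptions; `myDebugReportCallback` is a
  // hypothetical user function matching PFN_vkDebugReportCallbackEXT):
  //
  //   vk::DebugReportCallbackCreateInfoEXT callbackCreateInfo(
  //     vk::DebugReportFlagBitsEXT::eError | vk::DebugReportFlagBitsEXT::eWarning, &myDebugReportCallback );
  //   vk::UniqueDebugReportCallbackEXT callback = instance.createDebugReportCallbackEXTUnique( callbackCreateInfo );
  //   instance.debugReportMessageEXT( vk::DebugReportFlagBitsEXT::eInformation, vk::DebugReportObjectTypeEXT::eUnknown,
  //                                   0, 0, 0, "app", "manually injected report" );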
| |
| //=== VK_EXT_debug_marker === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT * pTagInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( pTagInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectTagEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT & tagInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkDebugMarkerSetObjectTagEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectTagInfoEXT *>( &tagInfo ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectTagEXT" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT * pNameInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( pNameInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::debugMarkerSetObjectNameEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT & nameInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkDebugMarkerSetObjectNameEXT( m_device, reinterpret_cast<const VkDebugMarkerObjectNameInfoEXT *>( &nameInfo ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::debugMarkerSetObjectNameEXT" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::debugMarkerBeginEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdDebugMarkerBeginEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::debugMarkerEndEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDebugMarkerEndEXT( m_commandBuffer ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT * pMarkerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( pMarkerInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::debugMarkerInsertEXT( const VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT & markerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdDebugMarkerInsertEXT( m_commandBuffer, reinterpret_cast<const VkDebugMarkerMarkerInfoEXT *>( &markerInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
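
  // Illustrative usage sketch for VK_EXT_debug_marker (same assumptions; `commandBuffer` is a hypothetical
  // vk::CommandBuffer in the recording state):
  //
  //   vk::DebugMarkerMarkerInfoEXT markerInfo( "shadow pass", { { 1.0f, 0.0f, 0.0f, 1.0f } } );
  //   commandBuffer.debugMarkerBeginEXT( markerInfo );
  //   // ... record the commands that belong to the region ...
  //   commandBuffer.debugMarkerEndEXT();
  //   commandBuffer.debugMarkerInsertEXT( vk::DebugMarkerMarkerInfoEXT( "single event" ) );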
| |
| //=== VK_KHR_video_queue === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR * pVideoProfile, VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR * pCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( pVideoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( pCapabilities ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::type PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR capabilities; |
| VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), capabilities ); |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getVideoCapabilitiesKHR( const VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR & videoProfile, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR & capabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>(); |
| VkResult result = d.vkGetPhysicalDeviceVideoCapabilitiesKHR( m_physicalDevice, reinterpret_cast<const VkVideoProfileInfoKHR *>( &videoProfile ), reinterpret_cast<VkVideoCapabilitiesKHR *>( &capabilities ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoCapabilitiesKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR * pVideoFormatInfo, uint32_t * pVideoFormatPropertyCount, VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR * pVideoFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( pVideoFormatInfo ), pVideoFormatPropertyCount, reinterpret_cast<VkVideoFormatPropertiesKHR *>( pVideoFormatProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties; |
| uint32_t videoFormatPropertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount ) |
| { |
| videoFormatProperties.resize( videoFormatPropertyCount ); |
| result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); |
| VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); |
| if ( videoFormatPropertyCount < videoFormatProperties.size() ) |
| { |
| videoFormatProperties.resize( videoFormatPropertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties ); |
| } |
| |
| template <typename VideoFormatPropertiesKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, VideoFormatPropertiesKHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator>>::type PhysicalDevice::getVideoFormatPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR & videoFormatInfo, VideoFormatPropertiesKHRAllocator & videoFormatPropertiesKHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR, VideoFormatPropertiesKHRAllocator> videoFormatProperties( videoFormatPropertiesKHRAllocator ); |
| uint32_t videoFormatPropertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && videoFormatPropertyCount ) |
| { |
| videoFormatProperties.resize( videoFormatPropertyCount ); |
| result = d.vkGetPhysicalDeviceVideoFormatPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceVideoFormatInfoKHR *>( &videoFormatInfo ), &videoFormatPropertyCount, reinterpret_cast<VkVideoFormatPropertiesKHR *>( videoFormatProperties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getVideoFormatPropertiesKHR" ); |
| VULKAN_HPP_ASSERT( videoFormatPropertyCount <= videoFormatProperties.size() ); |
| if ( videoFormatPropertyCount < videoFormatProperties.size() ) |
| { |
| videoFormatProperties.resize( videoFormatPropertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoFormatProperties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
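
  // Illustrative usage sketch for the VK_KHR_video_queue capability queries (same assumptions;
  // `profileInfo` is a hypothetical, fully populated vk::VideoProfileInfoKHR with the codec-specific
  // profile chained via pNext, and the format query is expected to carry the relevant video profiles in
  // its pNext chain as well):
  //
  //   vk::VideoCapabilitiesKHR capabilities = physicalDevice.getVideoCapabilitiesKHR( profileInfo );
  //   vk::PhysicalDeviceVideoFormatInfoKHR formatInfo( vk::ImageUsageFlagBits::eVideoDecodeDpbKHR );
  //   std::vector<vk::VideoFormatPropertiesKHR> formatProperties = physicalDevice.getVideoFormatPropertiesKHR( formatInfo );
  //
  // Codec-specific capability structures can be retrieved through the StructureChain overload of
  // getVideoCapabilitiesKHR shown above.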
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::VideoSessionKHR * pVideoSession, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateVideoSessionKHR( m_device, reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkVideoSessionKHR *>( pVideoSession ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::type Device::createVideoSessionKHR( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; |
| VkResult result = d.vkCreateVideoSessionKHR( m_device, reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSession ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>>::type Device::createVideoSessionKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession; |
| VkResult result = d.vkCreateVideoSessionKHR( m_device, reinterpret_cast<const VkVideoSessionCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkVideoSessionKHR *>( &videoSession ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionKHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionKHR, Dispatch>( videoSession, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyVideoSessionKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyVideoSessionKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, uint32_t * pMemoryRequirementsCount, VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR * pMemoryRequirements, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), pMemoryRequirementsCount, reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( pMemoryRequirements ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename VideoSessionMemoryRequirementsKHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements; |
| uint32_t memoryRequirementsCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && memoryRequirementsCount ) |
| { |
| memoryRequirements.resize( memoryRequirementsCount ); |
| result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ); |
| } |
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
    VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
    if ( memoryRequirementsCount < memoryRequirements.size() )
    {
      memoryRequirements.resize( memoryRequirementsCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryRequirements );
| } |
| |
| template <typename VideoSessionMemoryRequirementsKHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, VideoSessionMemoryRequirementsKHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator>>::type Device::getVideoSessionMemoryRequirementsKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, VideoSessionMemoryRequirementsKHRAllocator & videoSessionMemoryRequirementsKHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR, VideoSessionMemoryRequirementsKHRAllocator> memoryRequirements( videoSessionMemoryRequirementsKHRAllocator ); |
| uint32_t memoryRequirementsCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && memoryRequirementsCount ) |
| { |
| memoryRequirements.resize( memoryRequirementsCount ); |
| result = d.vkGetVideoSessionMemoryRequirementsKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), &memoryRequirementsCount, reinterpret_cast<VkVideoSessionMemoryRequirementsKHR *>( memoryRequirements.data() ) ); |
| } |
    } while ( result == VK_INCOMPLETE );
    resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getVideoSessionMemoryRequirementsKHR" );
    VULKAN_HPP_ASSERT( memoryRequirementsCount <= memoryRequirements.size() );
    if ( memoryRequirementsCount < memoryRequirements.size() )
    {
      memoryRequirements.resize( memoryRequirementsCount );
    }
    return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryRequirements );
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, uint32_t bindSessionMemoryInfoCount, const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR * pBindSessionMemoryInfos, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkBindVideoSessionMemoryKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), bindSessionMemoryInfoCount, reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( pBindSessionMemoryInfos ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::bindVideoSessionMemoryKHR( VULKAN_HPP_NAMESPACE::VideoSessionKHR videoSession, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR> const & bindSessionMemoryInfos, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkBindVideoSessionMemoryKHR( m_device, static_cast<VkVideoSessionKHR>( videoSession ), bindSessionMemoryInfos.size(), reinterpret_cast<const VkBindVideoSessionMemoryInfoKHR *>( bindSessionMemoryInfos.data() ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::bindVideoSessionMemoryKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
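
  // Illustrative usage sketch for video session creation and memory binding (same assumptions;
  // `sessionCreateInfo` is a hypothetical, fully populated vk::VideoSessionCreateInfoKHR, and `bindInfos`
  // is a hypothetical std::vector<vk::BindVideoSessionMemoryInfoKHR> pointing at memory allocated to
  // satisfy each reported requirement):
  //
  //   vk::UniqueVideoSessionKHR videoSession = device.createVideoSessionKHRUnique( sessionCreateInfo );
  //   std::vector<vk::VideoSessionMemoryRequirementsKHR> requirements =
  //     device.getVideoSessionMemoryRequirementsKHR( *videoSession );
  //   device.bindVideoSessionMemoryKHR( *videoSession, bindInfos );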
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR * pVideoSessionParameters, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateVideoSessionParametersKHR( m_device, reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkVideoSessionParametersKHR *>( pVideoSessionParameters ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::type Device::createVideoSessionParametersKHR( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; |
| VkResult result = d.vkCreateVideoSessionParametersKHR( m_device, reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), videoSessionParameters ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>>::type Device::createVideoSessionParametersKHRUnique( const VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters; |
| VkResult result = d.vkCreateVideoSessionParametersKHR( m_device, reinterpret_cast<const VkVideoSessionParametersCreateInfoKHR *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkVideoSessionParametersKHR *>( &videoSessionParameters ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createVideoSessionParametersKHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR, Dispatch>( videoSessionParameters, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR * pUpdateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkUpdateVideoSessionParametersKHR( m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( pUpdateInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::updateVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR & updateInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkUpdateVideoSessionParametersKHR( m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkVideoSessionParametersUpdateInfoKHR *>( &updateInfo ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::updateVideoSessionParametersKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyVideoSessionParametersKHR( m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyVideoSessionParametersKHR( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyVideoSessionParametersKHR( m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyVideoSessionParametersKHR( m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR videoSessionParameters, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyVideoSessionParametersKHR( m_device, static_cast<VkVideoSessionParametersKHR>( videoSessionParameters ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
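
  // Illustrative usage sketch for video session parameters (same assumptions; `paramsCreateInfo` is a
  // hypothetical vk::VideoSessionParametersCreateInfoKHR referencing *videoSession with the codec-specific
  // parameter sets chained via pNext):
  //
  //   vk::UniqueVideoSessionParametersKHR parameters = device.createVideoSessionParametersKHRUnique( paramsCreateInfo );
  //   vk::VideoSessionParametersUpdateInfoKHR updateInfo( /* updateSequenceCount */ 1 );
  //   device.updateVideoSessionParametersKHR( *parameters, updateInfo );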
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR * pBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( pBeginInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR & beginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdBeginVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoBeginCodingInfoKHR *>( &beginInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR * pEndCodingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( pEndCodingInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR & endCodingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdEndVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoEndCodingInfoKHR *>( &endCodingInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR * pCodingControlInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( pCodingControlInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::controlVideoCodingKHR( const VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR & codingControlInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdControlVideoCodingKHR( m_commandBuffer, reinterpret_cast<const VkVideoCodingControlInfoKHR *>( &codingControlInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
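
  // Illustrative usage sketch for recording a video coding scope (same assumptions; `beginInfo` is a
  // hypothetical vk::VideoBeginCodingInfoKHR referencing the session, its parameters object and the bound
  // reference picture slots):
  //
  //   commandBuffer.beginVideoCodingKHR( beginInfo );
  //   commandBuffer.controlVideoCodingKHR( vk::VideoCodingControlInfoKHR( vk::VideoCodingControlFlagBitsKHR::eReset ) );
  //   // ... record the per-picture coding operations ...
  //   commandBuffer.endVideoCodingKHR( vk::VideoEndCodingInfoKHR() );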
| |
| //=== VK_KHR_video_decode_queue === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR * pDecodeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( pDecodeInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::decodeVideoKHR( const VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR & decodeInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdDecodeVideoKHR( m_commandBuffer, reinterpret_cast<const VkVideoDecodeInfoKHR *>( &decodeInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
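
  // Illustrative usage sketch for VK_KHR_video_decode_queue (same assumptions; `decodeInfo` is a
  // hypothetical vk::VideoDecodeInfoKHR describing the source bitstream range, the destination picture
  // resource and the active reference slots), recorded inside a video coding scope:
  //
  //   commandBuffer.decodeVideoKHR( decodeInfo );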
| |
| //=== VK_EXT_transform_feedback === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, uint32_t bindingCount, const VULKAN_HPP_NAMESPACE::Buffer * pBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pOffsets, const VULKAN_HPP_NAMESPACE::DeviceSize * pSizes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, bindingCount, reinterpret_cast<const VkBuffer *>( pBuffers ), reinterpret_cast<const VkDeviceSize *>( pOffsets ), reinterpret_cast<const VkDeviceSize *>( pSizes ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::bindTransformFeedbackBuffersEXT( uint32_t firstBinding, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & buffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & offsets, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & sizes, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| #ifdef VULKAN_HPP_NO_EXCEPTIONS |
| VULKAN_HPP_ASSERT( buffers.size() == offsets.size() ); |
| VULKAN_HPP_ASSERT( sizes.empty() || buffers.size() == sizes.size() ); |
| #else |
| if ( buffers.size() != offsets.size() ) |
| { |
| throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != offsets.size()" ); |
| } |
| if ( !sizes.empty() && buffers.size() != sizes.size() ) |
| { |
| throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::bindTransformFeedbackBuffersEXT: buffers.size() != sizes.size()" ); |
| } |
| #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ |
| |
| |
| d.vkCmdBindTransformFeedbackBuffersEXT( m_commandBuffer, firstBinding, buffers.size(), reinterpret_cast<const VkBuffer *>( buffers.data() ), reinterpret_cast<const VkDeviceSize *>( offsets.data() ), reinterpret_cast<const VkDeviceSize *>( sizes.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer *>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginTransformFeedbackEXT( uint32_t firstCounterBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| #ifdef VULKAN_HPP_NO_EXCEPTIONS |
| VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); |
| #else |
| if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) |
| { |
| throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::beginTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); |
| } |
| #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ |
| |
| |
| d.vkCmdBeginTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size(), reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VULKAN_HPP_NAMESPACE::Buffer * pCounterBuffers, const VULKAN_HPP_NAMESPACE::DeviceSize * pCounterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBufferCount, reinterpret_cast<const VkBuffer *>( pCounterBuffers ), reinterpret_cast<const VkDeviceSize *>( pCounterBufferOffsets ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endTransformFeedbackEXT( uint32_t firstCounterBuffer, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Buffer> const & counterBuffers, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::DeviceSize> const & counterBufferOffsets, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| #ifdef VULKAN_HPP_NO_EXCEPTIONS |
| VULKAN_HPP_ASSERT( counterBufferOffsets.empty() || counterBuffers.size() == counterBufferOffsets.size() ); |
| #else |
| if ( !counterBufferOffsets.empty() && counterBuffers.size() != counterBufferOffsets.size() ) |
| { |
| throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::CommandBuffer::endTransformFeedbackEXT: counterBuffers.size() != counterBufferOffsets.size()" ); |
| } |
| #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ |
| |
| |
| d.vkCmdEndTransformFeedbackEXT( m_commandBuffer, firstCounterBuffer, counterBuffers.size(), reinterpret_cast<const VkBuffer *>( counterBuffers.data() ), reinterpret_cast<const VkDeviceSize *>( counterBufferOffsets.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, VULKAN_HPP_NAMESPACE::QueryControlFlags flags, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBeginQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, static_cast<VkQueryControlFlags>( flags ), index ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endQueryIndexedEXT( VULKAN_HPP_NAMESPACE::QueryPool queryPool, uint32_t query, uint32_t index, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdEndQueryIndexedEXT( m_commandBuffer, static_cast<VkQueryPool>( queryPool ), query, index ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::drawIndirectByteCountEXT( uint32_t instanceCount, uint32_t firstInstance, VULKAN_HPP_NAMESPACE::Buffer counterBuffer, VULKAN_HPP_NAMESPACE::DeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDrawIndirectByteCountEXT( m_commandBuffer, instanceCount, firstInstance, static_cast<VkBuffer>( counterBuffer ), static_cast<VkDeviceSize>( counterBufferOffset ), counterOffset, vertexStride ); |
| } |
| |
| //=== VK_NVX_binary_import === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CuModuleNVX * pModule, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateCuModuleNVX( m_device, reinterpret_cast<const VkCuModuleCreateInfoNVX *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkCuModuleNVX *>( pModule ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuModuleNVX>::type Device::createCuModuleNVX( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::CuModuleNVX module; |
| VkResult result = d.vkCreateCuModuleNVX( m_device, reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCuModuleNVX *>( &module ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVX" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), module ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>>::type Device::createCuModuleNVXUnique( const VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::CuModuleNVX module; |
| VkResult result = d.vkCreateCuModuleNVX( m_device, reinterpret_cast<const VkCuModuleCreateInfoNVX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCuModuleNVX *>( &module ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuModuleNVXUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::CuModuleNVX, Dispatch>( module, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::CuFunctionNVX * pFunction, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateCuFunctionNVX( m_device, reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkCuFunctionNVX *>( pFunction ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::type Device::createCuFunctionNVX( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::CuFunctionNVX function; |
| VkResult result = d.vkCreateCuFunctionNVX( m_device, reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCuFunctionNVX *>( &function ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVX" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), function ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>>::type Device::createCuFunctionNVXUnique( const VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::CuFunctionNVX function; |
| VkResult result = d.vkCreateCuFunctionNVX( m_device, reinterpret_cast<const VkCuFunctionCreateInfoNVX *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkCuFunctionNVX *>( &function ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createCuFunctionNVXUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::CuFunctionNVX, Dispatch>( function, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyCuModuleNVX( VULKAN_HPP_NAMESPACE::CuModuleNVX module, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuModuleNVX module, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyCuModuleNVX( m_device, static_cast<VkCuModuleNVX>( module ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyCuFunctionNVX( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroy( VULKAN_HPP_NAMESPACE::CuFunctionNVX function, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyCuFunctionNVX( m_device, static_cast<VkCuFunctionNVX>( function ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX * pLaunchInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( pLaunchInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::cuLaunchKernelNVX( const VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX & launchInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdCuLaunchKernelNVX( m_commandBuffer, reinterpret_cast<const VkCuLaunchInfoNVX *>( &launchInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
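
  // Usage sketch (illustrative, not part of the generated bindings): creating a CUDA module and
  // function through the wrappers above and launching the kernel from a command buffer. The
  // create-info and launch-info contents, `device`, and `commandBuffer` are assumed to be prepared
  // elsewhere; exceptions and the default argument values declared for these functions are assumed.
  //
  //   auto cuModule   = device.createCuModuleNVXUnique( moduleCreateInfo );     // vk::CuModuleCreateInfoNVX
  //   auto cuFunction = device.createCuFunctionNVXUnique( functionCreateInfo ); // vk::CuFunctionCreateInfoNVX
  //   commandBuffer.cuLaunchKernelNVX( launchInfo );                            // vk::CuLaunchInfoNVX referencing *cuFunction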
| |
| //=== VK_NVX_image_view_handle === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( pInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE uint32_t Device::getImageViewHandleNVX( const VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX & info, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| uint32_t result = d.vkGetImageViewHandleNVX( m_device, reinterpret_cast<const VkImageViewHandleInfoNVX *>( &info ) ); |
| |
| |
| return result; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( pProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::type Device::getImageViewAddressNVX( VULKAN_HPP_NAMESPACE::ImageView imageView, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX properties; |
| VkResult result = d.vkGetImageViewAddressNVX( m_device, static_cast<VkImageView>( imageView ), reinterpret_cast<VkImageViewAddressPropertiesNVX *>( &properties ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getImageViewAddressNVX" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
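
  // Usage sketch (illustrative, not part of the generated bindings): querying an image view handle
  // and its device address with the wrappers above. `imageViewHandleInfo` is an assumed
  // vk::ImageViewHandleInfoNVX filled in elsewhere and `imageView` an existing vk::ImageView.
  //
  //   uint32_t viewHandle = device.getImageViewHandleNVX( imageViewHandleInfo );
  //   vk::ImageViewAddressPropertiesNVX addressProperties = device.getImageViewAddressNVX( imageView );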
| |
| //=== VK_AMD_draw_indirect_count === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::drawIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDrawIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::drawIndexedIndirectCountAMD( VULKAN_HPP_NAMESPACE::Buffer buffer, VULKAN_HPP_NAMESPACE::DeviceSize offset, VULKAN_HPP_NAMESPACE::Buffer countBuffer, VULKAN_HPP_NAMESPACE::DeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDrawIndexedIndirectCountAMD( m_commandBuffer, static_cast<VkBuffer>( buffer ), static_cast<VkDeviceSize>( offset ), static_cast<VkBuffer>( countBuffer ), static_cast<VkDeviceSize>( countBufferOffset ), maxDrawCount, stride ); |
| } |
| |
| //=== VK_AMD_shader_info === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, size_t * pInfoSize, void * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), pInfoSize, pInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Uint8_tAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<uint8_t, Uint8_tAllocator> info; |
| size_t infoSize; |
| VkResult result; |
| do |
| { |
| result = d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, nullptr ); |
| if ( ( result == VK_SUCCESS ) && infoSize ) |
| { |
| info.resize( infoSize ); |
| result = d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void *>( info.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); |
| VULKAN_HPP_ASSERT( infoSize <= info.size() ); |
| if ( infoSize < info.size() ) |
| { |
| info.resize( infoSize ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info ); |
| } |
| |
| template <typename Uint8_tAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, uint8_t>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<uint8_t, Uint8_tAllocator>>::type Device::getShaderInfoAMD( VULKAN_HPP_NAMESPACE::Pipeline pipeline, VULKAN_HPP_NAMESPACE::ShaderStageFlagBits shaderStage, VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD infoType, Uint8_tAllocator & uint8_tAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<uint8_t, Uint8_tAllocator> info( uint8_tAllocator ); |
| size_t infoSize; |
| VkResult result; |
| do |
| { |
| result = d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, nullptr ); |
| if ( ( result == VK_SUCCESS ) && infoSize ) |
| { |
| info.resize( infoSize ); |
| result = d.vkGetShaderInfoAMD( m_device, static_cast<VkPipeline>( pipeline ), static_cast<VkShaderStageFlagBits>( shaderStage ), static_cast<VkShaderInfoTypeAMD>( infoType ), &infoSize, reinterpret_cast<void *>( info.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getShaderInfoAMD" ); |
| VULKAN_HPP_ASSERT( infoSize <= info.size() ); |
| if ( infoSize < info.size() ) |
| { |
| info.resize( infoSize ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), info ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
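
  // Usage sketch (illustrative, not part of the generated bindings): retrieving shader disassembly
  // for one pipeline stage with the enhanced wrapper above, assuming exceptions and the default
  // allocator/dispatcher arguments declared for it. `pipeline` is an existing vk::Pipeline.
  //
  //   std::vector<uint8_t> disassembly =
  //     device.getShaderInfoAMD( pipeline, vk::ShaderStageFlagBits::eFragment, vk::ShaderInfoTypeAMD::eDisassembly );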
| |
| //=== VK_KHR_dynamic_rendering === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo * pRenderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( pRenderingInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginRenderingKHR( const VULKAN_HPP_NAMESPACE::RenderingInfo & renderingInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdBeginRenderingKHR( m_commandBuffer, reinterpret_cast<const VkRenderingInfo *>( &renderingInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endRenderingKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdEndRenderingKHR( m_commandBuffer ); |
| } |
| |
| #if defined( VK_USE_PLATFORM_GGP ) |
| //=== VK_GGP_stream_descriptor_surface === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createStreamDescriptorSurfaceGGP( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGP" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createStreamDescriptorSurfaceGGPUnique( const VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateStreamDescriptorSurfaceGGP( m_instance, reinterpret_cast<const VkStreamDescriptorSurfaceCreateInfoGGP *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createStreamDescriptorSurfaceGGPUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| #endif /*VK_USE_PLATFORM_GGP*/ |
| |
| //=== VK_NV_external_memory_capabilities === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV * pExternalImageFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV *>( pExternalImageFormatProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::type PhysicalDevice::getExternalImageFormatPropertiesNV( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::ImageType type, VULKAN_HPP_NAMESPACE::ImageTiling tiling, VULKAN_HPP_NAMESPACE::ImageUsageFlags usage, VULKAN_HPP_NAMESPACE::ImageCreateFlags flags, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV externalHandleType, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV externalImageFormatProperties; |
| VkResult result = d.vkGetPhysicalDeviceExternalImageFormatPropertiesNV( m_physicalDevice, static_cast<VkFormat>( format ), static_cast<VkImageType>( type ), static_cast<VkImageTiling>( tiling ), static_cast<VkImageUsageFlags>( usage ), static_cast<VkImageCreateFlags>( flags ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( externalHandleType ), reinterpret_cast<VkExternalImageFormatPropertiesNV *>( &externalImageFormatProperties ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getExternalImageFormatPropertiesNV" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), externalImageFormatProperties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
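
  // Usage sketch (illustrative, not part of the generated bindings): querying external image
  // format support with the enhanced wrapper above. The format, usage, and handle type are example
  // values; exceptions and the default dispatcher argument are assumed.
  //
  //   vk::ExternalImageFormatPropertiesNV externalProperties = physicalDevice.getExternalImageFormatPropertiesNV(
  //     vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D, vk::ImageTiling::eOptimal,
  //     vk::ImageUsageFlagBits::eSampled, {}, vk::ExternalMemoryHandleTypeFlagBitsNV::eOpaqueWin32 );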
| |
| #if defined( VK_USE_PLATFORM_WIN32_KHR ) |
| //=== VK_NV_external_memory_win32 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), pHandle ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleNV( VULKAN_HPP_NAMESPACE::DeviceMemory memory, VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV handleType, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| HANDLE handle; |
| VkResult result = d.vkGetMemoryWin32HandleNV( m_device, static_cast<VkDeviceMemory>( memory ), static_cast<VkExternalMemoryHandleTypeFlagsNV>( handleType ), &handle ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleNV" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| #endif /*VK_USE_PLATFORM_WIN32_KHR*/ |
| |
| //=== VK_KHR_get_physical_device_properties2 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getFeatures2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 * pFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( pFeatures ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 features; |
| d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); |
| |
| |
| return features; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFeatures2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 & features = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>(); |
| d.vkGetPhysicalDeviceFeatures2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceFeatures2 *>( &features ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
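
  // Usage sketch (illustrative, not part of the generated bindings): querying extended features
  // through the StructureChain overload above. vk::PhysicalDeviceVulkan11Features is used as an
  // example chained structure; any structure extending PhysicalDeviceFeatures2 works the same way.
  //
  //   auto chain      = physicalDevice.getFeatures2KHR<vk::PhysicalDeviceFeatures2, vk::PhysicalDeviceVulkan11Features>();
  //   auto features11 = chain.get<vk::PhysicalDeviceVulkan11Features>();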
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( pProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 properties; |
| d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); |
| |
| |
| return properties; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 & properties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>(); |
| d.vkGetPhysicalDeviceProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceProperties2 *>( &properties ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, VULKAN_HPP_NAMESPACE::FormatProperties2 * pFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( pFormatProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::FormatProperties2 PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::FormatProperties2 formatProperties; |
| d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); |
| |
| |
| return formatProperties; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getFormatProperties2KHR( VULKAN_HPP_NAMESPACE::Format format, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::FormatProperties2 & formatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::FormatProperties2>(); |
| d.vkGetPhysicalDeviceFormatProperties2KHR( m_physicalDevice, static_cast<VkFormat>( format ), reinterpret_cast<VkFormatProperties2 *>( &formatProperties ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 * pImageFormatInfo, VULKAN_HPP_NAMESPACE::ImageFormatProperties2 * pImageFormatProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( pImageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( pImageFormatProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::type PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ImageFormatProperties2 imageFormatProperties; |
| VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), imageFormatProperties ); |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 & imageFormatInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::ImageFormatProperties2 & imageFormatProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>(); |
| VkResult result = d.vkGetPhysicalDeviceImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceImageFormatInfo2 *>( &imageFormatInfo ), reinterpret_cast<VkImageFormatProperties2 *>( &imageFormatProperties ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getImageFormatProperties2KHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
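
  // Usage sketch (illustrative, not part of the generated bindings): checking support for a
  // specific image configuration with the enhanced wrapper above. The format-info values are
  // examples; with exceptions enabled, an unsupported combination throws instead of returning.
  //
  //   vk::PhysicalDeviceImageFormatInfo2 imageFormatInfo( vk::Format::eR8G8B8A8Unorm, vk::ImageType::e2D,
  //                                                       vk::ImageTiling::eOptimal, vk::ImageUsageFlagBits::eSampled );
  //   vk::ImageFormatProperties2 imageFormatProperties = physicalDevice.getImageFormatProperties2KHR( imageFormatInfo );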
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyProperties2KHR( uint32_t * pQueueFamilyPropertyCount, VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 * pQueueFamilyProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, pQueueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( pQueueFamilyProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename QueueFamilyProperties2Allocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties; |
| uint32_t queueFamilyPropertyCount; |
| d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); |
| if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) |
| { |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| } |
| return queueFamilyProperties; |
| } |
| |
| template <typename QueueFamilyProperties2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, QueueFamilyProperties2>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> PhysicalDevice::getQueueFamilyProperties2KHR( QueueFamilyProperties2Allocator & queueFamilyProperties2Allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2, QueueFamilyProperties2Allocator> queueFamilyProperties( queueFamilyProperties2Allocator ); |
| uint32_t queueFamilyPropertyCount; |
| d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); |
| if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) |
| { |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| } |
| return queueFamilyProperties; |
| } |
| |
| template <typename StructureChain, typename StructureChainAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2KHR( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<StructureChain, StructureChainAllocator> structureChains; |
| std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; |
| uint32_t queueFamilyPropertyCount; |
| d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); |
| structureChains.resize( queueFamilyPropertyCount ); |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) |
| { |
| queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; |
| } |
| d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); |
| if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) |
| { |
| structureChains.resize( queueFamilyPropertyCount ); |
| } |
| for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) |
| { |
| structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; |
| } |
| return structureChains; |
| } |
| |
| template <typename StructureChain, typename StructureChainAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<StructureChain, StructureChainAllocator> PhysicalDevice::getQueueFamilyProperties2KHR( StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); |
| std::vector<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2> queueFamilyProperties; |
| uint32_t queueFamilyPropertyCount; |
| d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, nullptr ); |
| structureChains.resize( queueFamilyPropertyCount ); |
| queueFamilyProperties.resize( queueFamilyPropertyCount ); |
| for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) |
| { |
| queueFamilyProperties[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>().pNext; |
| } |
| d.vkGetPhysicalDeviceQueueFamilyProperties2KHR( m_physicalDevice, &queueFamilyPropertyCount, reinterpret_cast<VkQueueFamilyProperties2 *>( queueFamilyProperties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( queueFamilyPropertyCount <= queueFamilyProperties.size() ); |
| if ( queueFamilyPropertyCount < queueFamilyProperties.size() ) |
| { |
| structureChains.resize( queueFamilyPropertyCount ); |
| } |
| for ( uint32_t i = 0; i < queueFamilyPropertyCount; i++ ) |
| { |
| structureChains[i].template get<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>() = queueFamilyProperties[i]; |
| } |
| return structureChains; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
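
  // Usage sketch (illustrative, not part of the generated bindings): fetching all queue family
  // properties in one call with the enhanced wrapper above, assuming the default allocator and
  // dispatcher arguments declared for it.
  //
  //   std::vector<vk::QueueFamilyProperties2> queueFamilyProperties = physicalDevice.getQueueFamilyProperties2KHR();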
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getMemoryProperties2KHR( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 * pMemoryProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( pMemoryProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 memoryProperties; |
| d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); |
| |
| |
| return memoryProperties; |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE StructureChain<X, Y, Z...> PhysicalDevice::getMemoryProperties2KHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 & memoryProperties = structureChain.template get<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>(); |
| d.vkGetPhysicalDeviceMemoryProperties2KHR( m_physicalDevice, reinterpret_cast<VkPhysicalDeviceMemoryProperties2 *>( &memoryProperties ) ); |
| |
| |
| return structureChain; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 * pFormatInfo, uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( pFormatInfo ), pPropertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( pProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename SparseImageFormatProperties2Allocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties; |
| uint32_t propertyCount; |
| d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); |
| properties.resize( propertyCount ); |
| d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return properties; |
| } |
| |
| template <typename SparseImageFormatProperties2Allocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SparseImageFormatProperties2>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> PhysicalDevice::getSparseImageFormatProperties2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 & formatInfo, SparseImageFormatProperties2Allocator & sparseImageFormatProperties2Allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2, SparseImageFormatProperties2Allocator> properties( sparseImageFormatProperties2Allocator ); |
| uint32_t propertyCount; |
| d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, nullptr ); |
| properties.resize( propertyCount ); |
| d.vkGetPhysicalDeviceSparseImageFormatProperties2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSparseImageFormatInfo2 *>( &formatInfo ), &propertyCount, reinterpret_cast<VkSparseImageFormatProperties2 *>( properties.data() ) ); |
| |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return properties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| //=== VK_KHR_device_group === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags * pPeerMemoryFeatures, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( pPeerMemoryFeatures ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags Device::getGroupPeerMemoryFeaturesKHR( uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags peerMemoryFeatures; |
| d.vkGetDeviceGroupPeerMemoryFeaturesKHR( m_device, heapIndex, localDeviceIndex, remoteDeviceIndex, reinterpret_cast<VkPeerMemoryFeatureFlags *>( &peerMemoryFeatures ) ); |
| |
| |
| return peerMemoryFeatures; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setDeviceMaskKHR( uint32_t deviceMask, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetDeviceMaskKHR( m_commandBuffer, deviceMask ); |
| } |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::dispatchBaseKHR( uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdDispatchBaseKHR( m_commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ ); |
| } |
| |
| #if defined( VK_USE_PLATFORM_VI_NN ) |
| //=== VK_NN_vi_surface === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::SurfaceKHR * pSurface, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkSurfaceKHR *>( pSurface ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceKHR>::type Instance::createViSurfaceNN( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNN" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surface ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>>::type Instance::createViSurfaceNNUnique( const VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceKHR surface; |
| VkResult result = d.vkCreateViSurfaceNN( m_instance, reinterpret_cast<const VkViSurfaceCreateInfoNN *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkSurfaceKHR *>( &surface ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::createViSurfaceNNUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR, Dispatch>( surface, ObjectDestroy<Instance, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| #endif /*VK_USE_PLATFORM_VI_NN*/ |
| |
| //=== VK_KHR_maintenance1 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::trimCommandPoolKHR( VULKAN_HPP_NAMESPACE::CommandPool commandPool, VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags flags, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkTrimCommandPoolKHR( m_device, static_cast<VkCommandPool>( commandPool ), static_cast<VkCommandPoolTrimFlags>( flags ) ); |
| } |
| |
| //=== VK_KHR_device_group_creation === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Instance::enumeratePhysicalDeviceGroupsKHR( uint32_t * pPhysicalDeviceGroupCount, VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties * pPhysicalDeviceGroupProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, pPhysicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( pPhysicalDeviceGroupProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties; |
| uint32_t physicalDeviceGroupCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount ) |
| { |
| physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); |
| result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); |
| VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); |
| if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) |
| { |
| physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties ); |
| } |
| |
| template <typename PhysicalDeviceGroupPropertiesAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PhysicalDeviceGroupProperties>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator>>::type Instance::enumeratePhysicalDeviceGroupsKHR( PhysicalDeviceGroupPropertiesAllocator & physicalDeviceGroupPropertiesAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties, PhysicalDeviceGroupPropertiesAllocator> physicalDeviceGroupProperties( physicalDeviceGroupPropertiesAllocator ); |
| uint32_t physicalDeviceGroupCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && physicalDeviceGroupCount ) |
| { |
| physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); |
| result = d.vkEnumeratePhysicalDeviceGroupsKHR( m_instance, &physicalDeviceGroupCount, reinterpret_cast<VkPhysicalDeviceGroupProperties *>( physicalDeviceGroupProperties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Instance::enumeratePhysicalDeviceGroupsKHR" ); |
| VULKAN_HPP_ASSERT( physicalDeviceGroupCount <= physicalDeviceGroupProperties.size() ); |
| if ( physicalDeviceGroupCount < physicalDeviceGroupProperties.size() ) |
| { |
| physicalDeviceGroupProperties.resize( physicalDeviceGroupCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), physicalDeviceGroupProperties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
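
  // Usage sketch (illustrative, not part of the generated bindings): enumerating physical device
  // groups with the enhanced wrapper above, assuming exceptions and the default allocator and
  // dispatcher arguments declared for it.
  //
  //   std::vector<vk::PhysicalDeviceGroupProperties> groupProperties = instance.enumeratePhysicalDeviceGroupsKHR();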
| |
| //=== VK_KHR_external_memory_capabilities === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo * pExternalBufferInfo, VULKAN_HPP_NAMESPACE::ExternalBufferProperties * pExternalBufferProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( pExternalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( pExternalBufferProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalBufferProperties PhysicalDevice::getExternalBufferPropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo & externalBufferInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ExternalBufferProperties externalBufferProperties; |
| d.vkGetPhysicalDeviceExternalBufferPropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalBufferInfo *>( &externalBufferInfo ), reinterpret_cast<VkExternalBufferProperties *>( &externalBufferProperties ) ); |
| |
| |
| return externalBufferProperties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
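
  // Usage sketch (illustrative only, not generated from the registry): checking whether a buffer
  // usage can be exported through an opaque fd. Assumes a valid vk::PhysicalDevice `physicalDevice`;
  // the fields shown are the ones defined by VK_KHR_external_memory_capabilities.
  //
  //   vk::PhysicalDeviceExternalBufferInfo info( {}, vk::BufferUsageFlagBits::eStorageBuffer,
  //                                              vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   vk::ExternalBufferProperties props = physicalDevice.getExternalBufferPropertiesKHR( info );
  //   bool exportable = static_cast<bool>( props.externalMemoryProperties.externalMemoryFeatures &
  //                                        vk::ExternalMemoryFeatureFlagBits::eExportable );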
| |
| #if defined( VK_USE_PLATFORM_WIN32_KHR ) |
| //=== VK_KHR_external_memory_win32 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getMemoryWin32HandleKHR( const VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| HANDLE handle; |
| VkResult result = d.vkGetMemoryWin32HandleKHR( m_device, reinterpret_cast<const VkMemoryGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandleKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR * pMemoryWin32HandleProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( pMemoryWin32HandleProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::type Device::getMemoryWin32HandlePropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, HANDLE handle, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR memoryWin32HandleProperties; |
| VkResult result = d.vkGetMemoryWin32HandlePropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), handle, reinterpret_cast<VkMemoryWin32HandlePropertiesKHR *>( &memoryWin32HandleProperties ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryWin32HandlePropertiesKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryWin32HandleProperties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
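
  // Usage sketch (illustrative only, not generated from the registry): exporting a Win32 handle
  // for an exportable vk::DeviceMemory allocation. Assumes a valid vk::Device `device` and a
  // vk::DeviceMemory `memory` allocated with an export request for eOpaqueWin32; the returned
  // HANDLE is owned by the application and must eventually be closed with CloseHandle.
  //
  //   vk::MemoryGetWin32HandleInfoKHR getInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueWin32 );
  //   HANDLE handle = device.getMemoryWin32HandleKHR( getInfo );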
| #endif /*VK_USE_PLATFORM_WIN32_KHR*/ |
| |
| //=== VK_KHR_external_memory_fd === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getMemoryFdKHR( const VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR & getFdInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| int fd; |
| VkResult result = d.vkGetMemoryFdKHR( m_device, reinterpret_cast<const VkMemoryGetFdInfoKHR *>( &getFdInfo ), &fd ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR * pMemoryFdProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( pMemoryFdProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::type Device::getMemoryFdPropertiesKHR( VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits handleType, int fd, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR memoryFdProperties; |
| VkResult result = d.vkGetMemoryFdPropertiesKHR( m_device, static_cast<VkExternalMemoryHandleTypeFlagBits>( handleType ), fd, reinterpret_cast<VkMemoryFdPropertiesKHR *>( &memoryFdProperties ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getMemoryFdPropertiesKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), memoryFdProperties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
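
  // Usage sketch (illustrative only, not generated from the registry): exporting a POSIX file
  // descriptor for device memory allocated with an eOpaqueFd export request. Assumes a valid
  // vk::Device `device` and vk::DeviceMemory `memory`; ownership of the fd transfers to the
  // caller, who must close() it or import it exactly once.
  //
  //   vk::MemoryGetFdInfoKHR getFdInfo( memory, vk::ExternalMemoryHandleTypeFlagBits::eOpaqueFd );
  //   int fd = device.getMemoryFdKHR( getFdInfo );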
| |
| //=== VK_KHR_external_semaphore_capabilities === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo * pExternalSemaphoreInfo, VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties * pExternalSemaphoreProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( pExternalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( pExternalSemaphoreProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties PhysicalDevice::getExternalSemaphorePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo & externalSemaphoreInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties externalSemaphoreProperties; |
| d.vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalSemaphoreInfo *>( &externalSemaphoreInfo ), reinterpret_cast<VkExternalSemaphoreProperties *>( &externalSemaphoreProperties ) ); |
| |
| |
| return externalSemaphoreProperties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| #if defined( VK_USE_PLATFORM_WIN32_KHR ) |
| //=== VK_KHR_external_semaphore_win32 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR * pImportSemaphoreWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( pImportSemaphoreWin32HandleInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR & importSemaphoreWin32HandleInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkImportSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkImportSemaphoreWin32HandleInfoKHR *>( &importSemaphoreWin32HandleInfo ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreWin32HandleKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getSemaphoreWin32HandleKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| HANDLE handle; |
| VkResult result = d.vkGetSemaphoreWin32HandleKHR( m_device, reinterpret_cast<const VkSemaphoreGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreWin32HandleKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
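
  // Usage sketch (illustrative only, not generated from the registry): exporting a semaphore as a
  // Win32 handle. Assumes a valid vk::Device `device` and a vk::Semaphore `semaphore` created with
  // an export request for eOpaqueWin32.
  //
  //   vk::SemaphoreGetWin32HandleInfoKHR getInfo( semaphore, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueWin32 );
  //   HANDLE handle = device.getSemaphoreWin32HandleKHR( getInfo );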
| #endif /*VK_USE_PLATFORM_WIN32_KHR*/ |
| |
| //=== VK_KHR_external_semaphore_fd === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( pImportSemaphoreFdInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR & importSemaphoreFdInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkImportSemaphoreFdKHR( m_device, reinterpret_cast<const VkImportSemaphoreFdInfoKHR *>( &importSemaphoreFdInfo ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importSemaphoreFdKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getSemaphoreFdKHR( const VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR & getFdInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| int fd; |
| VkResult result = d.vkGetSemaphoreFdKHR( m_device, reinterpret_cast<const VkSemaphoreGetFdInfoKHR *>( &getFdInfo ), &fd ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSemaphoreFdKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
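
  // Usage sketch (illustrative only, not generated from the registry): importing a semaphore
  // payload from a POSIX fd previously exported by another device or process. Assumes a valid
  // vk::Device `device`, vk::Semaphore `semaphore`, and an fd `importedFd` obtained from
  // getSemaphoreFdKHR; a successful import takes ownership of the fd.
  //
  //   vk::ImportSemaphoreFdInfoKHR importInfo( semaphore, {}, vk::ExternalSemaphoreHandleTypeFlagBits::eOpaqueFd, importedFd );
  //   device.importSemaphoreFdKHR( importInfo );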
| |
| //=== VK_KHR_push_descriptor === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VULKAN_HPP_NAMESPACE::WriteDescriptorSet * pDescriptorWrites, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWriteCount, reinterpret_cast<const VkWriteDescriptorSet *>( pDescriptorWrites ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetKHR( VULKAN_HPP_NAMESPACE::PipelineBindPoint pipelineBindPoint, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::WriteDescriptorSet> const & descriptorWrites, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdPushDescriptorSetKHR( m_commandBuffer, static_cast<VkPipelineBindPoint>( pipelineBindPoint ), static_cast<VkPipelineLayout>( layout ), set, descriptorWrites.size(), reinterpret_cast<const VkWriteDescriptorSet *>( descriptorWrites.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, const void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, pData ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename DataType, typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::pushDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, VULKAN_HPP_NAMESPACE::PipelineLayout layout, uint32_t set, DataType const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdPushDescriptorSetWithTemplateKHR( m_commandBuffer, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), static_cast<VkPipelineLayout>( layout ), set, reinterpret_cast<const void *>( &data ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
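
  // Usage sketch (illustrative only, not generated from the registry): pushing a single
  // uniform-buffer descriptor without allocating a descriptor set. Assumes a valid recording
  // vk::CommandBuffer `cmd`, a vk::PipelineLayout `layout` whose set 0 uses a push-descriptor
  // set layout, and a vk::DescriptorBufferInfo `bufferInfo` describing the buffer range.
  //
  //   vk::WriteDescriptorSet write( {}, /*dstBinding=*/0, /*dstArrayElement=*/0, 1,
  //                                 vk::DescriptorType::eUniformBuffer, nullptr, &bufferInfo );
  //   cmd.pushDescriptorSetKHR( vk::PipelineBindPoint::eGraphics, layout, /*set=*/0, write );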
| |
| //=== VK_EXT_conditional_rendering === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT * pConditionalRenderingBegin, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( pConditionalRenderingBegin ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginConditionalRenderingEXT( const VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT & conditionalRenderingBegin, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdBeginConditionalRenderingEXT( m_commandBuffer, reinterpret_cast<const VkConditionalRenderingBeginInfoEXT *>( &conditionalRenderingBegin ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endConditionalRenderingEXT( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdEndConditionalRenderingEXT( m_commandBuffer ); |
| } |
| |
| //=== VK_KHR_descriptor_update_template === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate * pDescriptorUpdateTemplate, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkDescriptorUpdateTemplate *>( pDescriptorUpdateTemplate ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::type Device::createDescriptorUpdateTemplateKHR( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; |
| VkResult result = d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), descriptorUpdateTemplate ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>>::type Device::createDescriptorUpdateTemplateKHRUnique( const VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate; |
| VkResult result = d.vkCreateDescriptorUpdateTemplateKHR( m_device, reinterpret_cast<const VkDescriptorUpdateTemplateCreateInfo *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkDescriptorUpdateTemplate *>( &descriptorUpdateTemplate ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createDescriptorUpdateTemplateKHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate, Dispatch>( descriptorUpdateTemplate, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::destroyDescriptorUpdateTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkDestroyDescriptorUpdateTemplateKHR( m_device, static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, const void * pData, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), pData ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename DataType, typename Dispatch> |
| VULKAN_HPP_INLINE void Device::updateDescriptorSetWithTemplateKHR( VULKAN_HPP_NAMESPACE::DescriptorSet descriptorSet, VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate descriptorUpdateTemplate, DataType const & data, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkUpdateDescriptorSetWithTemplateKHR( m_device, static_cast<VkDescriptorSet>( descriptorSet ), static_cast<VkDescriptorUpdateTemplate>( descriptorUpdateTemplate ), reinterpret_cast<const void *>( &data ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
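
  // Usage sketch (illustrative only, not generated from the registry): updating a descriptor set
  // from a plain struct through a previously created template. Assumes a valid vk::Device `device`,
  // vk::DescriptorSet `descriptorSet`, a vk::DescriptorUpdateTemplate `updateTemplate` whose single
  // entry points at offset 0, and an application-defined struct `DescriptorData` laid out to match
  // the template entries.
  //
  //   DescriptorData data = { bufferInfo };   // e.g. one vk::DescriptorBufferInfo
  //   device.updateDescriptorSetWithTemplateKHR( descriptorSet, updateTemplate, data );
  //
  // The DataType overload above simply passes &data, so the struct layout must match the
  // offsets and strides recorded in the template.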
| |
| //=== VK_NV_clip_space_w_scaling === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, uint32_t viewportCount, const VULKAN_HPP_NAMESPACE::ViewportWScalingNV * pViewportWScalings, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportCount, reinterpret_cast<const VkViewportWScalingNV *>( pViewportWScalings ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setViewportWScalingNV( uint32_t firstViewport, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::ViewportWScalingNV> const & viewportWScalings, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdSetViewportWScalingNV( m_commandBuffer, firstViewport, viewportWScalings.size(), reinterpret_cast<const VkViewportWScalingNV *>( viewportWScalings.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| //=== VK_EXT_direct_mode_display === |
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE Result PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::releaseDisplayEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkReleaseDisplayEXT( m_physicalDevice, static_cast<VkDisplayKHR>( display ) ); |
| |
| |
| |
| } |
| #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ |
| |
| #if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT ) |
| //=== VK_EXT_acquire_xlib_display === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::acquireXlibDisplayEXT( Display * dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkAcquireXlibDisplayEXT( m_physicalDevice, dpy, static_cast<VkDisplayKHR>( display ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type PhysicalDevice::acquireXlibDisplayEXT( Display & dpy, VULKAN_HPP_NAMESPACE::DisplayKHR display, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkAcquireXlibDisplayEXT( m_physicalDevice, &dpy, static_cast<VkDisplayKHR>( display ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::acquireXlibDisplayEXT" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getRandROutputDisplayEXT( Display * dpy, RROutput rrOutput, VULKAN_HPP_NAMESPACE::DisplayKHR * pDisplay, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetRandROutputDisplayEXT( m_physicalDevice, dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( pDisplay ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::DisplayKHR>::type PhysicalDevice::getRandROutputDisplayEXT( Display & dpy, RROutput rrOutput, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DisplayKHR display; |
| VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXT" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), display ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>>::type PhysicalDevice::getRandROutputDisplayEXTUnique( Display & dpy, RROutput rrOutput, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::DisplayKHR display; |
| VkResult result = d.vkGetRandROutputDisplayEXT( m_physicalDevice, &dpy, rrOutput, reinterpret_cast<VkDisplayKHR *>( &display ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getRandROutputDisplayEXTUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::DisplayKHR, Dispatch>( display, ObjectRelease<PhysicalDevice, Dispatch>( *this, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
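
  // Usage sketch (illustrative only, not generated from the registry): taking exclusive control of
  // a display currently owned by an X11 server. Assumes an open Display* `dpy`, an RROutput
  // `output` identifying the connector, and a valid vk::PhysicalDevice `physicalDevice`.
  //
  //   vk::DisplayKHR display = physicalDevice.getRandROutputDisplayEXT( *dpy, output );
  //   physicalDevice.acquireXlibDisplayEXT( *dpy, display );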
| #endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/ |
| |
| //=== VK_EXT_display_surface_counter === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT * pSurfaceCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( pSurfaceCapabilities ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::type PhysicalDevice::getSurfaceCapabilities2EXT( VULKAN_HPP_NAMESPACE::SurfaceKHR surface, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT surfaceCapabilities; |
| VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2EXT( m_physicalDevice, static_cast<VkSurfaceKHR>( surface ), reinterpret_cast<VkSurfaceCapabilities2EXT *>( &surfaceCapabilities ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2EXT" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
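
  // Usage sketch (illustrative only, not generated from the registry): querying surface
  // capabilities together with the supported surface counters. Assumes a valid
  // vk::PhysicalDevice `physicalDevice` and vk::SurfaceKHR `surface`.
  //
  //   vk::SurfaceCapabilities2EXT caps = physicalDevice.getSurfaceCapabilities2EXT( surface );
  //   bool hasVBlankCounter = static_cast<bool>( caps.supportedSurfaceCounters &
  //                                              vk::SurfaceCounterFlagBitsEXT::eVblank );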
| |
| //=== VK_EXT_display_control === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT * pDisplayPowerInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( pDisplayPowerInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
  VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::displayPowerControlEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT & displayPowerInfo, Dispatch const & d ) const
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkDisplayPowerControlEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayPowerInfoEXT *>( &displayPowerInfo ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::displayPowerControlEXT" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT * pDeviceEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( pDeviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFence *>( pFence ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerEventEXT( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Fence fence; |
| VkResult result = d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXT" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::registerEventEXTUnique( const VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT & deviceEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Fence fence; |
| VkResult result = d.vkRegisterDeviceEventEXT( m_device, reinterpret_cast<const VkDeviceEventInfoEXT *>( &deviceEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerEventEXTUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT * pDisplayEventInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::Fence * pFence, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( pDisplayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkFence *>( pFence ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::Fence>::type Device::registerDisplayEventEXT( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Fence fence; |
| VkResult result = d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXT" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fence ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>>::type Device::registerDisplayEventEXTUnique( VULKAN_HPP_NAMESPACE::DisplayKHR display, const VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT & displayEventInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::Fence fence; |
| VkResult result = d.vkRegisterDisplayEventEXT( m_device, static_cast<VkDisplayKHR>( display ), reinterpret_cast<const VkDisplayEventInfoEXT *>( &displayEventInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkFence *>( &fence ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::registerDisplayEventEXTUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::Fence, Dispatch>( fence, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, uint64_t * pCounterValue, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), pCounterValue ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<uint64_t>::type Device::getSwapchainCounterEXT( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT counter, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| uint64_t counterValue; |
| VkResult result = d.vkGetSwapchainCounterEXT( m_device, static_cast<VkSwapchainKHR>( swapchain ), static_cast<VkSurfaceCounterFlagBitsEXT>( counter ), &counterValue ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainCounterEXT" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), counterValue ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
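
  // Usage sketch (illustrative only, not generated from the registry): registering a
  // display-event fence and reading the swapchain's vblank counter. Assumes a valid vk::Device
  // `device`, vk::DisplayKHR `display`, and a vk::SwapchainKHR `swapchain` created on a surface
  // that supports eVblank. The returned fence is owned by the application and must be destroyed
  // with device.destroyFence.
  //
  //   vk::DisplayEventInfoEXT eventInfo( vk::DisplayEventTypeEXT::eFirstPixelOut );
  //   vk::Fence fence = device.registerDisplayEventEXT( display, eventInfo );
  //   uint64_t vblanks = device.getSwapchainCounterEXT( swapchain, vk::SurfaceCounterFlagBitsEXT::eVblank );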
| |
| //=== VK_GOOGLE_display_timing === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE * pDisplayTimingProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( pDisplayTimingProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::type Device::getRefreshCycleDurationGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE displayTimingProperties; |
| VkResult result = d.vkGetRefreshCycleDurationGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), reinterpret_cast<VkRefreshCycleDurationGOOGLE *>( &displayTimingProperties ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getRefreshCycleDurationGOOGLE" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), displayTimingProperties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, uint32_t * pPresentationTimingCount, VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE * pPresentationTimings, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), pPresentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE *>( pPresentationTimings ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings; |
| uint32_t presentationTimingCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && presentationTimingCount ) |
| { |
| presentationTimings.resize( presentationTimingCount ); |
| result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); |
| VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); |
| if ( presentationTimingCount < presentationTimings.size() ) |
| { |
| presentationTimings.resize( presentationTimingCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings ); |
| } |
| |
| template <typename PastPresentationTimingGOOGLEAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, PastPresentationTimingGOOGLE>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator>>::type Device::getPastPresentationTimingGOOGLE( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, PastPresentationTimingGOOGLEAllocator & pastPresentationTimingGOOGLEAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE, PastPresentationTimingGOOGLEAllocator> presentationTimings( pastPresentationTimingGOOGLEAllocator ); |
| uint32_t presentationTimingCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && presentationTimingCount ) |
| { |
| presentationTimings.resize( presentationTimingCount ); |
| result = d.vkGetPastPresentationTimingGOOGLE( m_device, static_cast<VkSwapchainKHR>( swapchain ), &presentationTimingCount, reinterpret_cast<VkPastPresentationTimingGOOGLE *>( presentationTimings.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getPastPresentationTimingGOOGLE" ); |
| VULKAN_HPP_ASSERT( presentationTimingCount <= presentationTimings.size() ); |
| if ( presentationTimingCount < presentationTimings.size() ) |
| { |
| presentationTimings.resize( presentationTimingCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), presentationTimings ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
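
  // Usage sketch (illustrative only, not generated from the registry): reading the refresh cycle
  // and recent presentation timings for a swapchain. Assumes a valid vk::Device `device` and
  // vk::SwapchainKHR `swapchain` with VK_GOOGLE_display_timing enabled.
  //
  //   vk::RefreshCycleDurationGOOGLE refresh = device.getRefreshCycleDurationGOOGLE( swapchain );
  //   std::vector<vk::PastPresentationTimingGOOGLE> timings = device.getPastPresentationTimingGOOGLE( swapchain );
  //   // refresh.refreshDuration is in nanoseconds; timings may be empty shortly after swapchain creation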
| |
| //=== VK_EXT_discard_rectangles === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VULKAN_HPP_NAMESPACE::Rect2D * pDiscardRectangles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangleCount, reinterpret_cast<const VkRect2D *>( pDiscardRectangles ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::setDiscardRectangleEXT( uint32_t firstDiscardRectangle, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::Rect2D> const & discardRectangles, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdSetDiscardRectangleEXT( m_commandBuffer, firstDiscardRectangle, discardRectangles.size(), reinterpret_cast<const VkRect2D *>( discardRectangles.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| //=== VK_EXT_hdr_metadata === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( uint32_t swapchainCount, const VULKAN_HPP_NAMESPACE::SwapchainKHR * pSwapchains, const VULKAN_HPP_NAMESPACE::HdrMetadataEXT * pMetadata, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkSetHdrMetadataEXT( m_device, swapchainCount, reinterpret_cast<const VkSwapchainKHR *>( pSwapchains ), reinterpret_cast<const VkHdrMetadataEXT *>( pMetadata ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::setHdrMetadataEXT( VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::SwapchainKHR> const & swapchains, VULKAN_HPP_NAMESPACE::ArrayProxy<const VULKAN_HPP_NAMESPACE::HdrMetadataEXT> const & metadata, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| #ifdef VULKAN_HPP_NO_EXCEPTIONS |
| VULKAN_HPP_ASSERT( swapchains.size() == metadata.size() ); |
| #else |
| if ( swapchains.size() != metadata.size() ) |
| { |
| throw LogicError( VULKAN_HPP_NAMESPACE_STRING "::Device::setHdrMetadataEXT: swapchains.size() != metadata.size()" ); |
| } |
| #endif /*VULKAN_HPP_NO_EXCEPTIONS*/ |
| |
| |
| d.vkSetHdrMetadataEXT( m_device, swapchains.size(), reinterpret_cast<const VkSwapchainKHR *>( swapchains.data() ), reinterpret_cast<const VkHdrMetadataEXT *>( metadata.data() ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
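
  // Usage sketch (illustrative only, not generated from the registry): attaching HDR10 mastering
  // metadata to a single swapchain. Assumes a valid vk::Device `device`, vk::SwapchainKHR
  // `swapchain`, and a filled-in vk::HdrMetadataEXT `metadata`. The ArrayProxy overload above
  // requires swapchains.size() == metadata.size() and throws vk::LogicError otherwise (or asserts
  // when VULKAN_HPP_NO_EXCEPTIONS is defined).
  //
  //   device.setHdrMetadataEXT( swapchain, metadata );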
| |
| //=== VK_KHR_create_renderpass2 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 * pCreateInfo, const VULKAN_HPP_NAMESPACE::AllocationCallbacks * pAllocator, VULKAN_HPP_NAMESPACE::RenderPass * pRenderPass, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( pCreateInfo ), reinterpret_cast<const VkAllocationCallbacks *>( pAllocator ), reinterpret_cast<VkRenderPass *>( pRenderPass ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::RenderPass>::type Device::createRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::RenderPass renderPass; |
| VkResult result = d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), renderPass ); |
| } |
| |
| # ifndef VULKAN_HPP_NO_SMART_HANDLE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>>::type Device::createRenderPass2KHRUnique( const VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 & createInfo, Optional<const VULKAN_HPP_NAMESPACE::AllocationCallbacks> allocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::RenderPass renderPass; |
| VkResult result = d.vkCreateRenderPass2KHR( m_device, reinterpret_cast<const VkRenderPassCreateInfo2 *>( &createInfo ), reinterpret_cast<const VkAllocationCallbacks *>( static_cast<const VULKAN_HPP_NAMESPACE::AllocationCallbacks *>( allocator ) ), reinterpret_cast<VkRenderPass *>( &renderPass ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::createRenderPass2KHRUnique" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), UniqueHandle<VULKAN_HPP_NAMESPACE::RenderPass, Dispatch>( renderPass, ObjectDestroy<Device, Dispatch>( *this, allocator, d ) ) ); |
| } |
| # endif /* VULKAN_HPP_NO_SMART_HANDLE */ |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo * pRenderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( pRenderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::beginRenderPass2KHR( const VULKAN_HPP_NAMESPACE::RenderPassBeginInfo & renderPassBegin, const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdBeginRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkRenderPassBeginInfo *>( &renderPassBegin ), reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo * pSubpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( pSubpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::nextSubpass2KHR( const VULKAN_HPP_NAMESPACE::SubpassBeginInfo & subpassBeginInfo, const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdNextSubpass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassBeginInfo *>( &subpassBeginInfo ), reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo * pSubpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( pSubpassEndInfo ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void CommandBuffer::endRenderPass2KHR( const VULKAN_HPP_NAMESPACE::SubpassEndInfo & subpassEndInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| d.vkCmdEndRenderPass2KHR( m_commandBuffer, reinterpret_cast<const VkSubpassEndInfo *>( &subpassEndInfo ) ); |
| |
| |
| |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
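
  // Usage sketch (illustrative only, not generated from the registry): recording a render pass
  // through the *2KHR entry points. Assumes a valid recording vk::CommandBuffer `cmd` and a
  // vk::RenderPassBeginInfo `renderPassBegin` referencing a render pass created with
  // createRenderPass2KHR and a matching framebuffer.
  //
  //   vk::SubpassBeginInfo subpassBegin( vk::SubpassContents::eInline );
  //   vk::SubpassEndInfo   subpassEnd;
  //   cmd.beginRenderPass2KHR( renderPassBegin, subpassBegin );
  //   // ... record subpass 0, calling cmd.nextSubpass2KHR( subpassBegin, subpassEnd ) for each further subpass ...
  //   cmd.endRenderPass2KHR( subpassEnd );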
| |
| //=== VK_KHR_shared_presentable_image === |
| |
| |
| #ifdef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ) ); |
| } |
| #else |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::Result Device::getSwapchainStatusKHR( VULKAN_HPP_NAMESPACE::SwapchainKHR swapchain, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkGetSwapchainStatusKHR( m_device, static_cast<VkSwapchainKHR>( swapchain ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getSwapchainStatusKHR", { VULKAN_HPP_NAMESPACE::Result::eSuccess, VULKAN_HPP_NAMESPACE::Result::eSuboptimalKHR } ); |
| |
| return static_cast<VULKAN_HPP_NAMESPACE::Result>( result ); |
| } |
| #endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/ |
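| |
|   // Usage sketch (illustrative only): polling a shared presentable swapchain. Assumes the default `vk` namespace |
|   // and dispatcher and hypothetical `device` / `sharedSwapchain` handles. In enhanced mode the wrapper returns |
|   // the Result directly, because eSuccess and eSuboptimalKHR are both success codes here; error codes throw a |
|   // vk::SystemError when exceptions are enabled. |
|   // |
|   //   vk::Result status = device.getSwapchainStatusKHR( sharedSwapchain ); |
|   //   if ( status == vk::Result::eSuboptimalKHR ) |
|   //   { |
|   //     // the swapchain still presents, but no longer matches the surface optimally |
|   //   } |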
| |
| //=== VK_KHR_external_fence_capabilities === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo * pExternalFenceInfo, VULKAN_HPP_NAMESPACE::ExternalFenceProperties * pExternalFenceProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( pExternalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( pExternalFenceProperties ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE VULKAN_HPP_NAMESPACE::ExternalFenceProperties PhysicalDevice::getExternalFencePropertiesKHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo & externalFenceInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::ExternalFenceProperties externalFenceProperties; |
| d.vkGetPhysicalDeviceExternalFencePropertiesKHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceExternalFenceInfo *>( &externalFenceInfo ), reinterpret_cast<VkExternalFenceProperties *>( &externalFenceProperties ) ); |
| |
| |
| return externalFenceProperties; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
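| |
|   // Usage sketch (illustrative only): querying external fence capabilities. Assumes the default `vk` namespace |
|   // and dispatcher and a hypothetical `physicalDevice` handle. |
|   // |
|   //   vk::PhysicalDeviceExternalFenceInfo fenceInfo( vk::ExternalFenceHandleTypeFlagBits::eOpaqueFd ); |
|   //   vk::ExternalFenceProperties properties = physicalDevice.getExternalFencePropertiesKHR( fenceInfo ); |
|   //   if ( properties.externalFenceFeatures & vk::ExternalFenceFeatureFlagBits::eExportable ) |
|   //   { |
|   //     // fences using this handle type can be exported |
|   //   } |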
| |
| #if defined( VK_USE_PLATFORM_WIN32_KHR ) |
| //=== VK_KHR_external_fence_win32 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR * pImportFenceWin32HandleInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( pImportFenceWin32HandleInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR & importFenceWin32HandleInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkImportFenceWin32HandleKHR( m_device, reinterpret_cast<const VkImportFenceWin32HandleInfoKHR *>( &importFenceWin32HandleInfo ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceWin32HandleKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR * pGetWin32HandleInfo, HANDLE * pHandle, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( pGetWin32HandleInfo ), pHandle ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<HANDLE>::type Device::getFenceWin32HandleKHR( const VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR & getWin32HandleInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| HANDLE handle; |
| VkResult result = d.vkGetFenceWin32HandleKHR( m_device, reinterpret_cast<const VkFenceGetWin32HandleInfoKHR *>( &getWin32HandleInfo ), &handle ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceWin32HandleKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), handle ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
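| |
|   // Usage sketch (illustrative only): exporting a fence payload as a Win32 handle. Assumes the default `vk` |
|   // namespace and dispatcher and hypothetical `device` / `fence` handles, where the fence was created with the |
|   // matching export info. When exceptions are enabled, failures throw a vk::SystemError. |
|   // |
|   //   vk::FenceGetWin32HandleInfoKHR getHandleInfo( fence, vk::ExternalFenceHandleTypeFlagBits::eOpaqueWin32 ); |
|   //   HANDLE handle = device.getFenceWin32HandleKHR( getHandleInfo ); |
| |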
| #endif /*VK_USE_PLATFORM_WIN32_KHR*/ |
| |
| //=== VK_KHR_external_fence_fd === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR * pImportFenceFdInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( pImportFenceFdInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::importFenceFdKHR( const VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR & importFenceFdInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkImportFenceFdKHR( m_device, reinterpret_cast<const VkImportFenceFdInfoKHR *>( &importFenceFdInfo ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::importFenceFdKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR * pGetFdInfo, int * pFd, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( pGetFdInfo ), pFd ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<int>::type Device::getFenceFdKHR( const VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR & getFdInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| int fd; |
| VkResult result = d.vkGetFenceFdKHR( m_device, reinterpret_cast<const VkFenceGetFdInfoKHR *>( &getFdInfo ), &fd ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::getFenceFdKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), fd ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
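| |
|   // Usage sketch (illustrative only): exporting and re-importing a fence payload as a POSIX file descriptor. |
|   // Assumes the default `vk` namespace and dispatcher and hypothetical `device`, `exportFence` and `importFence` |
|   // handles created with compatible external-fence create info. |
|   // |
|   //   vk::FenceGetFdInfoKHR getFdInfo( exportFence, vk::ExternalFenceHandleTypeFlagBits::eSyncFd ); |
|   //   int fd = device.getFenceFdKHR( getFdInfo ); |
|   // |
|   //   vk::ImportFenceFdInfoKHR importInfo( importFence, vk::FenceImportFlagBits::eTemporary, |
|   //                                        vk::ExternalFenceHandleTypeFlagBits::eSyncFd, fd ); |
|   //   device.importFenceFdKHR( importInfo );  // on success, ownership of fd passes to the implementation |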
| |
| //=== VK_KHR_performance_query === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, uint32_t * pCounterCount, VULKAN_HPP_NAMESPACE::PerformanceCounterKHR * pCounters, VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR * pCounterDescriptions, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, pCounterCount, reinterpret_cast<VkPerformanceCounterKHR *>( pCounters ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( pCounterDescriptions ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> data; |
| std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first; |
| std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second; |
| uint32_t counterCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ); |
| if ( ( result == VK_SUCCESS ) && counterCount ) |
| { |
| counters.resize( counterCount ); |
| counterDescriptions.resize( counterCount ); |
| result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); |
| VULKAN_HPP_ASSERT( counterCount <= counters.size() ); |
| if ( counterCount < counters.size() ) |
| { |
| counters.resize( counterCount ); |
| counterDescriptions.resize( counterCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data ); |
| } |
| |
| template <typename PerformanceCounterKHRAllocator, typename PerformanceCounterDescriptionKHRAllocator, typename Dispatch, typename B1, typename B2, typename std::enable_if<std::is_same<typename B1::value_type, PerformanceCounterKHR>::value && std::is_same<typename B2::value_type, PerformanceCounterDescriptionKHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>>>::type PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR( uint32_t queueFamilyIndex, PerformanceCounterKHRAllocator & performanceCounterKHRAllocator, PerformanceCounterDescriptionKHRAllocator & performanceCounterDescriptionKHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::pair<std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator>, std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator>> data( std::piecewise_construct, std::forward_as_tuple( performanceCounterKHRAllocator ), std::forward_as_tuple( performanceCounterDescriptionKHRAllocator ) ); |
| std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR, PerformanceCounterKHRAllocator> & counters = data.first; |
| std::vector<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR, PerformanceCounterDescriptionKHRAllocator> & counterDescriptions = data.second; |
| uint32_t counterCount; |
| VkResult result; |
| do |
| { |
| result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, nullptr, nullptr ); |
| if ( ( result == VK_SUCCESS ) && counterCount ) |
| { |
| counters.resize( counterCount ); |
| counterDescriptions.resize( counterCount ); |
| result = d.vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( m_physicalDevice, queueFamilyIndex, &counterCount, reinterpret_cast<VkPerformanceCounterKHR *>( counters.data() ), reinterpret_cast<VkPerformanceCounterDescriptionKHR *>( counterDescriptions.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::enumerateQueueFamilyPerformanceQueryCountersKHR" ); |
| VULKAN_HPP_ASSERT( counterCount <= counters.size() ); |
| if ( counterCount < counters.size() ) |
| { |
| counters.resize( counterCount ); |
| counterDescriptions.resize( counterCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), data ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
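| |
|   // Usage sketch (illustrative only): listing the performance counters of a queue family. Assumes the default |
|   // `vk` namespace and dispatcher, a hypothetical `physicalDevice` handle and a hypothetical `queueFamilyIndex`. |
|   // The wrappers above repeat the two-call pattern while the driver reports VK_INCOMPLETE, so the returned |
|   // vectors are complete. |
|   // |
|   //   auto data = physicalDevice.enumerateQueueFamilyPerformanceQueryCountersKHR( queueFamilyIndex ); |
|   //   std::vector<vk::PerformanceCounterKHR> const &            counters     = data.first; |
|   //   std::vector<vk::PerformanceCounterDescriptionKHR> const & descriptions = data.second; |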
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR * pPerformanceQueryCreateInfo, uint32_t * pNumPasses, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( pPerformanceQueryCreateInfo ), pNumPasses ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE uint32_t PhysicalDevice::getQueueFamilyPerformanceQueryPassesKHR( const VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR & performanceQueryCreateInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| uint32_t numPasses; |
| d.vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( m_physicalDevice, reinterpret_cast<const VkQueryPoolPerformanceCreateInfoKHR *>( &performanceQueryCreateInfo ), &numPasses ); |
| |
| |
| return numPasses; |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
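| |
|   // Usage sketch (illustrative only): querying how many passes a performance query needs. Assumes the default |
|   // `vk` namespace and dispatcher, a hypothetical `physicalDevice` handle and a hypothetical, already filled |
|   // vk::QueryPoolPerformanceCreateInfoKHR `perfCreateInfo` (queue family index plus selected counter indices). |
|   // |
|   //   uint32_t numPasses = physicalDevice.getQueueFamilyPerformanceQueryPassesKHR( perfCreateInfo ); |
|   //   // the command buffer has to be submitted numPasses times to collect all selected counters |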
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR * pInfo, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( pInfo ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS VULKAN_HPP_INLINE typename ResultValueType<void>::type Device::acquireProfilingLockKHR( const VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR & info, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| |
| VkResult result = d.vkAcquireProfilingLockKHR( m_device, reinterpret_cast<const VkAcquireProfilingLockInfoKHR *>( &info ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::Device::acquireProfilingLockKHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ) ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_INLINE void Device::releaseProfilingLockKHR( Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| d.vkReleaseProfilingLockKHR( m_device ); |
| } |
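| |
|   // Usage sketch (illustrative only): holding the profiling lock while command buffers that contain performance |
|   // queries are recorded and submitted. Assumes the default `vk` namespace and dispatcher and a hypothetical |
|   // `device` handle. When exceptions are enabled, a failed acquire throws a vk::SystemError. |
|   // |
|   //   vk::AcquireProfilingLockInfoKHR lockInfo( {}, UINT64_MAX );  // flags, timeout in nanoseconds |
|   //   device.acquireProfilingLockKHR( lockInfo ); |
|   //   // ... record and submit the performance query work ... |
|   //   device.releaseProfilingLockKHR(); |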
| |
| //=== VK_KHR_get_surface_capabilities2 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR * pSurfaceCapabilities, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( pSurfaceCapabilities ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::type PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR surfaceCapabilities; |
| VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceCapabilities ); |
| } |
| |
| template <typename X, typename Y, typename... Z, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<StructureChain<X, Y, Z...>>::type PhysicalDevice::getSurfaceCapabilities2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| StructureChain<X, Y, Z...> structureChain; |
| VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR & surfaceCapabilities = structureChain.template get<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>(); |
| VkResult result = d.vkGetPhysicalDeviceSurfaceCapabilities2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), reinterpret_cast<VkSurfaceCapabilities2KHR *>( &surfaceCapabilities ) ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceCapabilities2KHR" ); |
| |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChain ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
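| |
|   // Usage sketch (illustrative only): querying extended surface capabilities. Assumes the default `vk` namespace |
|   // and dispatcher and hypothetical `physicalDevice` / `surface` handles. The StructureChain overload above pulls |
|   // extension structures through the pNext chain; vk::SurfaceProtectedCapabilitiesKHR is used here purely as an |
|   // example and requires VK_KHR_surface_protected_capabilities. |
|   // |
|   //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface ); |
|   //   vk::SurfaceCapabilities2KHR caps = physicalDevice.getSurfaceCapabilities2KHR( surfaceInfo ); |
|   // |
|   //   auto chain = physicalDevice.getSurfaceCapabilities2KHR<vk::SurfaceCapabilities2KHR, |
|   //                                                          vk::SurfaceProtectedCapabilitiesKHR>( surfaceInfo ); |
|   //   if ( chain.get<vk::SurfaceProtectedCapabilitiesKHR>().supportsProtected ) |
|   //   { |
|   //     // protected swapchains can be created for this surface |
|   //   } |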
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR * pSurfaceInfo, uint32_t * pSurfaceFormatCount, VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR * pSurfaceFormats, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( pSurfaceInfo ), pSurfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( pSurfaceFormats ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename SurfaceFormat2KHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats; |
| uint32_t surfaceFormatCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) |
| { |
| surfaceFormats.resize( surfaceFormatCount ); |
| result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); |
| VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); |
| if ( surfaceFormatCount < surfaceFormats.size() ) |
| { |
| surfaceFormats.resize( surfaceFormatCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats ); |
| } |
| |
| template <typename SurfaceFormat2KHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, SurfaceFormat2KHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, SurfaceFormat2KHRAllocator & surfaceFormat2KHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR, SurfaceFormat2KHRAllocator> surfaceFormats( surfaceFormat2KHRAllocator ); |
| uint32_t surfaceFormatCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) |
| { |
| surfaceFormats.resize( surfaceFormatCount ); |
| result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); |
| VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); |
| if ( surfaceFormatCount < surfaceFormats.size() ) |
| { |
| surfaceFormats.resize( surfaceFormatCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), surfaceFormats ); |
| } |
| |
| template <typename StructureChain, typename StructureChainAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<StructureChain, StructureChainAllocator> structureChains; |
| std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats; |
| uint32_t surfaceFormatCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) |
| { |
| structureChains.resize( surfaceFormatCount ); |
| surfaceFormats.resize( surfaceFormatCount ); |
| for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) |
| { |
| surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext; |
| } |
| result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); |
| VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); |
| if ( surfaceFormatCount < surfaceFormats.size() ) |
| { |
| structureChains.resize( surfaceFormatCount ); |
| } |
| for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) |
| { |
| structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i]; |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains ); |
| } |
| |
| template <typename StructureChain, typename StructureChainAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, StructureChain>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<StructureChain, StructureChainAllocator>>::type PhysicalDevice::getSurfaceFormats2KHR( const VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR & surfaceInfo, StructureChainAllocator & structureChainAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<StructureChain, StructureChainAllocator> structureChains( structureChainAllocator ); |
| std::vector<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR> surfaceFormats; |
| uint32_t surfaceFormatCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && surfaceFormatCount ) |
| { |
| structureChains.resize( surfaceFormatCount ); |
| surfaceFormats.resize( surfaceFormatCount ); |
| for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) |
| { |
| surfaceFormats[i].pNext = structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>().pNext; |
| } |
| result = d.vkGetPhysicalDeviceSurfaceFormats2KHR( m_physicalDevice, reinterpret_cast<const VkPhysicalDeviceSurfaceInfo2KHR *>( &surfaceInfo ), &surfaceFormatCount, reinterpret_cast<VkSurfaceFormat2KHR *>( surfaceFormats.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getSurfaceFormats2KHR" ); |
| VULKAN_HPP_ASSERT( surfaceFormatCount <= surfaceFormats.size() ); |
| if ( surfaceFormatCount < surfaceFormats.size() ) |
| { |
| structureChains.resize( surfaceFormatCount ); |
| } |
| for ( uint32_t i = 0; i < surfaceFormatCount; i++ ) |
| { |
| structureChains[i].template get<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>() = surfaceFormats[i]; |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), structureChains ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
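| |
|   // Usage sketch (illustrative only): enumerating surface formats through the *2KHR query. Assumes the default |
|   // `vk` namespace and dispatcher and hypothetical `physicalDevice` / `surface` handles. The wrappers above loop |
|   // while the driver reports VK_INCOMPLETE, so the returned vector is complete. |
|   // |
|   //   vk::PhysicalDeviceSurfaceInfo2KHR surfaceInfo( surface ); |
|   //   std::vector<vk::SurfaceFormat2KHR> formats = physicalDevice.getSurfaceFormats2KHR( surfaceInfo ); |
|   //   for ( auto const & f : formats ) |
|   //   { |
|   //     // f.surfaceFormat.format and f.surfaceFormat.colorSpace describe one supported combination |
|   //   } |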
| |
| //=== VK_KHR_get_display_properties2 === |
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayProperties2KHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayProperties2KHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( pProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename DisplayProperties2KHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type PhysicalDevice::getDisplayProperties2KHR( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties; |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| |
| template <typename DisplayProperties2KHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayProperties2KHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator>>::type PhysicalDevice::getDisplayProperties2KHR( DisplayProperties2KHRAllocator & displayProperties2KHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR, DisplayProperties2KHRAllocator> properties( displayProperties2KHRAllocator ); |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkGetPhysicalDeviceDisplayProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayProperties2KHR *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayProperties2KHR" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| #endif /* VULKAN_HPP_DISABLE_ENHANCED_MODE */ |
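| |
|   // Usage sketch (illustrative only): listing the displays attached to a physical device. Assumes the default |
|   // `vk` namespace and dispatcher and a hypothetical `physicalDevice` handle. |
|   // |
|   //   std::vector<vk::DisplayProperties2KHR> displays = physicalDevice.getDisplayProperties2KHR(); |
|   //   for ( auto const & p : displays ) |
|   //   { |
|   //     // p.displayProperties.display and p.displayProperties.displayName identify one display |
|   //   } |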
| |
| |
| template <typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE Result PhysicalDevice::getDisplayPlaneProperties2KHR( uint32_t * pPropertyCount, VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR * pProperties, Dispatch const & d ) const VULKAN_HPP_NOEXCEPT |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| return static_cast<Result>( d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, pPropertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( pProperties ) ) ); |
| } |
| |
| #ifndef VULKAN_HPP_DISABLE_ENHANCED_MODE |
| template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR( Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties; |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| return createResultValueType( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), properties ); |
| } |
| |
| template <typename DisplayPlaneProperties2KHRAllocator, typename Dispatch, typename B1, typename std::enable_if<std::is_same<typename B1::value_type, DisplayPlaneProperties2KHR>::value, int>::type> |
| VULKAN_HPP_NODISCARD VULKAN_HPP_INLINE typename ResultValueType<std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator>>::type PhysicalDevice::getDisplayPlaneProperties2KHR( DisplayPlaneProperties2KHRAllocator & displayPlaneProperties2KHRAllocator, Dispatch const & d ) const |
| { |
| VULKAN_HPP_ASSERT( d.getVkHeaderVersion() == VK_HEADER_VERSION ); |
| |
| |
| std::vector<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR, DisplayPlaneProperties2KHRAllocator> properties( displayPlaneProperties2KHRAllocator ); |
| uint32_t propertyCount; |
| VkResult result; |
| do |
| { |
| result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, nullptr ); |
| if ( ( result == VK_SUCCESS ) && propertyCount ) |
| { |
| properties.resize( propertyCount ); |
| result = d.vkGetPhysicalDeviceDisplayPlaneProperties2KHR( m_physicalDevice, &propertyCount, reinterpret_cast<VkDisplayPlaneProperties2KHR *>( properties.data() ) ); |
| } |
| } while ( result == VK_INCOMPLETE ); |
| resultCheck( static_cast<VULKAN_HPP_NAMESPACE::Result>( result ), VULKAN_HPP_NAMESPACE_STRING "::PhysicalDevice::getDisplayPlaneProperties2KHR" ); |
| VULKAN_HPP_ASSERT( propertyCount <= properties.size() ); |
| if ( propertyCount < properties.size() ) |
| { |
| properties.resize( propertyCount ); |
| } |
| |