Reduce the complexity of VkNonDispatchableHandle

- Simplified VkNonDispatchableHandle as much as possible so
  that it's just a wrapper over a uint64_t
- Centralized casting logic in VkObject.hpp
- Explicitly changed "Cast" to "vk::Cast" to differentiate
  from vk::<object>::Cast
- Moved VkDescriptorSet operators from VulkanPlatform.h to
  VkDescriptorPool.cpp

Bug b/129979580

Change-Id: I4fe5fcfe56029c21594088d274bae862999597ad
Reviewed-on: https://swiftshader-review.googlesource.com/c/SwiftShader/+/32928
Tested-by: Alexis Hétu <sugoi@google.com>
Reviewed-by: Chris Forbes <chrisforbes@google.com>
Kokoro-Presubmit: kokoro <noreply+kokoro@google.com>
diff --git a/src/Vulkan/VkBuffer.hpp b/src/Vulkan/VkBuffer.hpp
index 13e8732..286e0ed 100644
--- a/src/Vulkan/VkBuffer.hpp
+++ b/src/Vulkan/VkBuffer.hpp
@@ -53,7 +53,7 @@
 
 static inline Buffer* Cast(VkBuffer object)
 {
-	return reinterpret_cast<Buffer*>(object.get());
+	return Buffer::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkBufferView.cpp b/src/Vulkan/VkBufferView.cpp
index 5973bc8..0f4848d 100644
--- a/src/Vulkan/VkBufferView.cpp
+++ b/src/Vulkan/VkBufferView.cpp
@@ -20,7 +20,7 @@
 {
 
 BufferView::BufferView(const VkBufferViewCreateInfo* pCreateInfo, void* mem) :
-    buffer(Cast(pCreateInfo->buffer)), format(pCreateInfo->format), offset(pCreateInfo->offset)
+    buffer(vk::Cast(pCreateInfo->buffer)), format(pCreateInfo->format), offset(pCreateInfo->offset)
 {
     if (pCreateInfo->range == VK_WHOLE_SIZE)
     {
diff --git a/src/Vulkan/VkBufferView.hpp b/src/Vulkan/VkBufferView.hpp
index 4d67ab2..45a87e9 100644
--- a/src/Vulkan/VkBufferView.hpp
+++ b/src/Vulkan/VkBufferView.hpp
@@ -49,7 +49,7 @@
 
 static inline BufferView* Cast(VkBufferView object)
 {
-	return reinterpret_cast<BufferView*>(object.get());
+	return BufferView::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkCommandBuffer.cpp b/src/Vulkan/VkCommandBuffer.cpp
index de72b92..7c4edac 100644
--- a/src/Vulkan/VkCommandBuffer.cpp
+++ b/src/Vulkan/VkCommandBuffer.cpp
@@ -1384,7 +1384,7 @@
 		ASSERT(dynamicOffsetCount >= numDynamicDescriptors);
 
 		addCommand<BindDescriptorSet>(
-				pipelineBindPoint, layout, descriptorSetIndex, Cast(pDescriptorSets[i]),
+				pipelineBindPoint, layout, descriptorSetIndex, vk::Cast(pDescriptorSets[i]),
 				dynamicOffsetCount, pDynamicOffsets);
 
 		pDynamicOffsets += numDynamicDescriptors;
diff --git a/src/Vulkan/VkCommandBuffer.hpp b/src/Vulkan/VkCommandBuffer.hpp
index adef7e7..8c4351b 100644
--- a/src/Vulkan/VkCommandBuffer.hpp
+++ b/src/Vulkan/VkCommandBuffer.hpp
@@ -49,6 +49,11 @@
 
 	CommandBuffer(VkCommandBufferLevel pLevel);
 
+	static inline CommandBuffer* Cast(VkCommandBuffer object)
+	{
+		return reinterpret_cast<CommandBuffer*>(object);
+	}
+
 	void destroy(const VkAllocationCallbacks* pAllocator);
 
 	VkResult begin(VkCommandBufferUsageFlags flags, const VkCommandBufferInheritanceInfo* pInheritanceInfo);
diff --git a/src/Vulkan/VkCommandPool.cpp b/src/Vulkan/VkCommandPool.cpp
index 0a7dc1b..17934ea 100644
--- a/src/Vulkan/VkCommandPool.cpp
+++ b/src/Vulkan/VkCommandPool.cpp
@@ -95,7 +95,7 @@
 	//  the command pool are put in the initial state."
 	for(auto commandBuffer : *commandBuffers)
 	{
-		Cast(commandBuffer)->reset(flags);
+		vk::Cast(commandBuffer)->reset(flags);
 	}
 
 	// According the Vulkan 1.1 spec:
diff --git a/src/Vulkan/VkCommandPool.hpp b/src/Vulkan/VkCommandPool.hpp
index 69479a0..e07a248 100644
--- a/src/Vulkan/VkCommandPool.hpp
+++ b/src/Vulkan/VkCommandPool.hpp
@@ -40,7 +40,7 @@
 
 static inline CommandPool* Cast(VkCommandPool object)
 {
-	return reinterpret_cast<CommandPool*>(object.get());
+	return CommandPool::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkDescriptorPool.cpp b/src/Vulkan/VkDescriptorPool.cpp
index fa4df1f..3acb014 100644
--- a/src/Vulkan/VkDescriptorPool.cpp
+++ b/src/Vulkan/VkDescriptorPool.cpp
@@ -20,18 +20,43 @@
 #include <algorithm>
 #include <memory>
 
+namespace
+{
+
+inline uintptr_t toPtr(const VkDescriptorSet& descSet)
+{
+	return reinterpret_cast<uintptr_t>(vk::Cast(descSet));
+}
+
+inline uint64_t operator+(const VkDescriptorSet& lhs, size_t offset)
+{
+	return static_cast<uint64_t>(toPtr(lhs) + offset);
+}
+
+inline void operator+=(VkDescriptorSet& lhs, size_t offset)
+{
+	lhs = static_cast<uint64_t>(toPtr(lhs) + offset);
+}
+
+inline uintptr_t operator-(const VkDescriptorSet& lhs, const VkDescriptorSet& rhs)
+{
+	return toPtr(lhs) - toPtr(rhs);
+}
+
+}
+
 namespace vk
 {
 
 DescriptorPool::DescriptorPool(const VkDescriptorPoolCreateInfo* pCreateInfo, void* mem) :
-	pool(static_cast<uint8_t*>(mem)),
+	pool(static_cast<uint64_t>(reinterpret_cast<uintptr_t>(mem))),
 	poolSize(ComputeRequiredAllocationSize(pCreateInfo))
 {
 }
 
 void DescriptorPool::destroy(const VkAllocationCallbacks* pAllocator)
 {
-	vk::deallocate(pool, pAllocator);
+	vk::deallocate(pool.get(), pAllocator);
 }
 
 size_t DescriptorPool::ComputeRequiredAllocationSize(const VkDescriptorPoolCreateInfo* pCreateInfo)
@@ -54,7 +79,7 @@
 	for(uint32_t i = 0; i < descriptorSetCount; i++)
 	{
 		pDescriptorSets[i] = VK_NULL_HANDLE;
-		layoutSizes[i] = Cast(pSetLayouts[i])->getDescriptorSetAllocationSize();
+		layoutSizes[i] = vk::Cast(pSetLayouts[i])->getDescriptorSetAllocationSize();
 	}
 
 	VkResult result = allocateSets(&(layoutSizes[0]), descriptorSetCount, pDescriptorSets);
@@ -62,7 +87,7 @@
 	{
 		for(uint32_t i = 0; i < descriptorSetCount; i++)
 		{
-			Cast(pSetLayouts[i])->initialize(vk::Cast(pDescriptorSets[i]));
+			vk::Cast(pSetLayouts[i])->initialize(vk::Cast(pDescriptorSets[i]));
 		}
 	}
 	return result;
diff --git a/src/Vulkan/VkDescriptorPool.hpp b/src/Vulkan/VkDescriptorPool.hpp
index 731dd01..b363ce1 100644
--- a/src/Vulkan/VkDescriptorPool.hpp
+++ b/src/Vulkan/VkDescriptorPool.hpp
@@ -55,7 +55,7 @@
 
 	static inline DescriptorPool* Cast(VkDescriptorPool object)
 	{
-		return reinterpret_cast<DescriptorPool*>(object.get());
+		return DescriptorPool::Cast(object);
 	}
 
 } // namespace vk
diff --git a/src/Vulkan/VkDescriptorSet.hpp b/src/Vulkan/VkDescriptorSet.hpp
index 38ee493..b681573 100644
--- a/src/Vulkan/VkDescriptorSet.hpp
+++ b/src/Vulkan/VkDescriptorSet.hpp
@@ -32,6 +32,11 @@
 	class DescriptorSet
 	{
 	public:
+		static inline DescriptorSet* Cast(VkDescriptorSet object)
+		{
+			return static_cast<DescriptorSet*>(object.get());
+		}
+
 		using Bindings = std::array<vk::DescriptorSet*, vk::MAX_BOUND_DESCRIPTOR_SETS>;
 		using DynamicOffsets = std::array<uint32_t, vk::MAX_DESCRIPTOR_SET_COMBINED_BUFFERS_DYNAMIC>;
 
@@ -41,7 +46,7 @@
 
 	inline DescriptorSet* Cast(VkDescriptorSet object)
 	{
-		return reinterpret_cast<DescriptorSet*>(object.get());
+		return DescriptorSet::Cast(object);
 	}
 
 } // namespace vk
diff --git a/src/Vulkan/VkDescriptorSetLayout.cpp b/src/Vulkan/VkDescriptorSetLayout.cpp
index 16fb5b9..65e625e 100644
--- a/src/Vulkan/VkDescriptorSetLayout.cpp
+++ b/src/Vulkan/VkDescriptorSetLayout.cpp
@@ -303,7 +303,7 @@
 		for (uint32_t i = 0; i < entry.descriptorCount; i++)
 		{
 			auto update = reinterpret_cast<VkBufferView const *>(src + entry.offset + entry.stride * i);
-			auto bufferView = Cast(*update);
+			auto bufferView = vk::Cast(*update);
 
 			imageSampler[i].type = VK_IMAGE_VIEW_TYPE_1D;
 			imageSampler[i].imageViewId = bufferView->id;
@@ -438,7 +438,7 @@
 		for(uint32_t i = 0; i < entry.descriptorCount; i++)
 		{
 			auto update = reinterpret_cast<VkDescriptorImageInfo const *>(src + entry.offset + entry.stride * i);
-			auto imageView = Cast(update->imageView);
+			auto imageView = vk::Cast(update->imageView);
 			descriptor[i].ptr = imageView->getOffsetPointer({0, 0, 0}, VK_IMAGE_ASPECT_COLOR_BIT, 0, 0);
 			descriptor[i].extent = imageView->getMipLevelExtent(0);
 			descriptor[i].rowPitchBytes = imageView->rowPitchBytes(VK_IMAGE_ASPECT_COLOR_BIT, 0);
@@ -467,7 +467,7 @@
 		for (uint32_t i = 0; i < entry.descriptorCount; i++)
 		{
 			auto update = reinterpret_cast<VkBufferView const *>(src + entry.offset + entry.stride * i);
-			auto bufferView = Cast(*update);
+			auto bufferView = vk::Cast(*update);
 			descriptor[i].ptr = bufferView->getPointer();
 			descriptor[i].extent = {bufferView->getElementCount(), 1, 1};
 			descriptor[i].rowPitchBytes = 0;
@@ -487,7 +487,7 @@
 		for (uint32_t i = 0; i < entry.descriptorCount; i++)
 		{
 			auto update = reinterpret_cast<VkDescriptorBufferInfo const *>(src + entry.offset + entry.stride * i);
-			auto buffer = Cast(update->buffer);
+			auto buffer = vk::Cast(update->buffer);
 			descriptor[i].ptr = buffer->getOffsetPointer(update->offset);
 			descriptor[i].sizeInBytes = static_cast<int>((update->range == VK_WHOLE_SIZE) ? buffer->getSize() - update->offset : update->range);
 			descriptor[i].robustnessSize = static_cast<int>(buffer->getSize() - update->offset);
diff --git a/src/Vulkan/VkDescriptorSetLayout.hpp b/src/Vulkan/VkDescriptorSetLayout.hpp
index 63f1211..44ac8f2 100644
--- a/src/Vulkan/VkDescriptorSetLayout.hpp
+++ b/src/Vulkan/VkDescriptorSetLayout.hpp
@@ -141,7 +141,7 @@
 
 static inline DescriptorSetLayout* Cast(VkDescriptorSetLayout object)
 {
-	return reinterpret_cast<DescriptorSetLayout*>(object.get());
+	return DescriptorSetLayout::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkDescriptorUpdateTemplate.cpp b/src/Vulkan/VkDescriptorUpdateTemplate.cpp
index 7ea84f8..76acbe7 100644
--- a/src/Vulkan/VkDescriptorUpdateTemplate.cpp
+++ b/src/Vulkan/VkDescriptorUpdateTemplate.cpp
@@ -22,7 +22,7 @@
 	DescriptorUpdateTemplate::DescriptorUpdateTemplate(const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, void* mem) :
 		descriptorUpdateEntryCount(pCreateInfo->descriptorUpdateEntryCount),
 		descriptorUpdateEntries(reinterpret_cast<VkDescriptorUpdateTemplateEntry*>(mem)),
-		descriptorSetLayout(Cast(pCreateInfo->descriptorSetLayout))
+		descriptorSetLayout(vk::Cast(pCreateInfo->descriptorSetLayout))
 	{
 		for(uint32_t i = 0; i < descriptorUpdateEntryCount; i++)
 		{
diff --git a/src/Vulkan/VkDescriptorUpdateTemplate.hpp b/src/Vulkan/VkDescriptorUpdateTemplate.hpp
index 90ea650..7f0e5be 100644
--- a/src/Vulkan/VkDescriptorUpdateTemplate.hpp
+++ b/src/Vulkan/VkDescriptorUpdateTemplate.hpp
@@ -38,7 +38,7 @@
 
 	static inline DescriptorUpdateTemplate* Cast(VkDescriptorUpdateTemplate object)
 	{
-		return reinterpret_cast<DescriptorUpdateTemplate*>(object.get());
+		return DescriptorUpdateTemplate::Cast(object);
 	}
 
 } // namespace vk
diff --git a/src/Vulkan/VkDeviceMemory.hpp b/src/Vulkan/VkDeviceMemory.hpp
index e50b21d..f4eb05f 100644
--- a/src/Vulkan/VkDeviceMemory.hpp
+++ b/src/Vulkan/VkDeviceMemory.hpp
@@ -42,7 +42,7 @@
 
 static inline DeviceMemory* Cast(VkDeviceMemory object)
 {
-	return reinterpret_cast<DeviceMemory*>(object.get());
+	return DeviceMemory::Cast(object);
 }
 
 
diff --git a/src/Vulkan/VkEvent.hpp b/src/Vulkan/VkEvent.hpp
index 62eb47a..57901ef 100644
--- a/src/Vulkan/VkEvent.hpp
+++ b/src/Vulkan/VkEvent.hpp
@@ -70,7 +70,7 @@
 
 static inline Event* Cast(VkEvent object)
 {
-	return reinterpret_cast<Event*>(object.get());
+	return Event::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkFence.hpp b/src/Vulkan/VkFence.hpp
index 2a043e6..df5129e 100644
--- a/src/Vulkan/VkFence.hpp
+++ b/src/Vulkan/VkFence.hpp
@@ -80,7 +80,7 @@
 
 static inline Fence* Cast(VkFence object)
 {
-	return reinterpret_cast<Fence*>(object.get());
+	return Fence::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkFramebuffer.cpp b/src/Vulkan/VkFramebuffer.cpp
index f9047c1..768c492 100644
--- a/src/Vulkan/VkFramebuffer.cpp
+++ b/src/Vulkan/VkFramebuffer.cpp
@@ -27,7 +27,7 @@
 {
 	for(uint32_t i = 0; i < attachmentCount; i++)
 	{
-		attachments[i] = Cast(pCreateInfo->pAttachments[i]);
+		attachments[i] = vk::Cast(pCreateInfo->pAttachments[i]);
 	}
 }
 
diff --git a/src/Vulkan/VkFramebuffer.hpp b/src/Vulkan/VkFramebuffer.hpp
index 9dab4b4..cfc5e07 100644
--- a/src/Vulkan/VkFramebuffer.hpp
+++ b/src/Vulkan/VkFramebuffer.hpp
@@ -43,7 +43,7 @@
 
 static inline Framebuffer* Cast(VkFramebuffer object)
 {
-	return reinterpret_cast<Framebuffer*>(object.get());
+	return Framebuffer::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkImage.hpp b/src/Vulkan/VkImage.hpp
index ff9511a..d56fd05 100644
--- a/src/Vulkan/VkImage.hpp
+++ b/src/Vulkan/VkImage.hpp
@@ -102,7 +102,7 @@
 
 static inline Image* Cast(VkImage object)
 {
-	return reinterpret_cast<Image*>(object.get());
+	return Image::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkImageView.cpp b/src/Vulkan/VkImageView.cpp
index 94b56d5..a65f391 100644
--- a/src/Vulkan/VkImageView.cpp
+++ b/src/Vulkan/VkImageView.cpp
@@ -56,7 +56,7 @@
 std::atomic<uint32_t> ImageView::nextID(1);
 
 ImageView::ImageView(const VkImageViewCreateInfo* pCreateInfo, void* mem, const vk::SamplerYcbcrConversion *ycbcrConversion) :
-	image(Cast(pCreateInfo->image)), viewType(pCreateInfo->viewType), format(pCreateInfo->format),
+	image(vk::Cast(pCreateInfo->image)), viewType(pCreateInfo->viewType), format(pCreateInfo->format),
 	components(ResolveComponentMapping(pCreateInfo->components, format)),
 	subresourceRange(ResolveRemainingLevelsLayers(pCreateInfo->subresourceRange, image)),
 	ycbcrConversion(ycbcrConversion)
diff --git a/src/Vulkan/VkImageView.hpp b/src/Vulkan/VkImageView.hpp
index 73ee4ab..db3848e 100644
--- a/src/Vulkan/VkImageView.hpp
+++ b/src/Vulkan/VkImageView.hpp
@@ -104,7 +104,7 @@
 
 static inline ImageView* Cast(VkImageView object)
 {
-	return reinterpret_cast<ImageView*>(object.get());
+	return ImageView::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkObject.hpp b/src/Vulkan/VkObject.hpp
index b8a9800..9aac1fe 100644
--- a/src/Vulkan/VkObject.hpp
+++ b/src/Vulkan/VkObject.hpp
@@ -25,6 +25,19 @@
 
 namespace vk
 {
+
+template<typename T, typename VkT>
+static inline T* VkTtoT(VkT vkObject)
+{
+	return static_cast<T*>(vkObject.get());
+}
+
+template<typename T, typename VkT>
+static inline VkT TtoVkT(T* object)
+{
+	return VkT(static_cast<uint64_t>(reinterpret_cast<uintptr_t>(object)));
+}
+
 // For use in the placement new to make it verbose that we're allocating an object using device memory
 static constexpr VkAllocationCallbacks* DEVICE_MEMORY = nullptr;
 
@@ -92,7 +105,12 @@
 	{
 		// The static_cast<T*> is used to make sure the returned pointer points to the
 		// beginning of the object, even if the derived class uses multiple inheritance
-		return reinterpret_cast<typename VkT::HandleType>(static_cast<T*>(this));
+		return vk::TtoVkT<T, VkT>(static_cast<T*>(this));
+	}
+
+	static inline T* Cast(VkT vkObject)
+	{
+		return vk::VkTtoT<T, VkT>(vkObject);
 	}
 };
 
diff --git a/src/Vulkan/VkPipeline.cpp b/src/Vulkan/VkPipeline.cpp
index 18f5324..971c7ec 100644
--- a/src/Vulkan/VkPipeline.cpp
+++ b/src/Vulkan/VkPipeline.cpp
@@ -228,7 +228,7 @@
 Pipeline::Pipeline(PipelineLayout const *layout) : layout(layout) {}
 
 GraphicsPipeline::GraphicsPipeline(const VkGraphicsPipelineCreateInfo* pCreateInfo, void* mem)
-	: Pipeline(Cast(pCreateInfo->layout))
+	: Pipeline(vk::Cast(pCreateInfo->layout))
 {
 	if(((pCreateInfo->flags &
 		~(VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT |
@@ -451,12 +451,12 @@
 			UNIMPLEMENTED("pStage->flags");
 		}
 
-		auto module = Cast(pStage->module);
+		auto module = vk::Cast(pStage->module);
 		auto code = preprocessSpirv(module->getCode(), pStage->pSpecializationInfo);
 
 		// FIXME (b/119409619): use an allocator here so we can control all memory allocations
 		// TODO: also pass in any pipeline state which will affect shader compilation
-		auto spirvShader = new sw::SpirvShader{pStage, code, Cast(pCreateInfo->renderPass), pCreateInfo->subpass};
+		auto spirvShader = new sw::SpirvShader{pStage, code, vk::Cast(pCreateInfo->renderPass), pCreateInfo->subpass};
 
 		switch (pStage->stage)
 		{
@@ -525,7 +525,7 @@
 }
 
 ComputePipeline::ComputePipeline(const VkComputePipelineCreateInfo* pCreateInfo, void* mem)
-	: Pipeline(Cast(pCreateInfo->layout))
+	: Pipeline(vk::Cast(pCreateInfo->layout))
 {
 }
 
@@ -542,7 +542,7 @@
 
 void ComputePipeline::compileShaders(const VkAllocationCallbacks* pAllocator, const VkComputePipelineCreateInfo* pCreateInfo)
 {
-	auto module = Cast(pCreateInfo->stage.module);
+	auto module = vk::Cast(pCreateInfo->stage.module);
 
 	auto code = preprocessSpirv(module->getCode(), pCreateInfo->stage.pSpecializationInfo);
 
diff --git a/src/Vulkan/VkPipeline.hpp b/src/Vulkan/VkPipeline.hpp
index 0a11eb1..eba4b36 100644
--- a/src/Vulkan/VkPipeline.hpp
+++ b/src/Vulkan/VkPipeline.hpp
@@ -38,7 +38,12 @@
 
 	operator VkPipeline()
 	{
-		return reinterpret_cast<VkPipeline::HandleType>(this);
+		return vk::TtoVkT<Pipeline, VkPipeline>(this);
+	}
+
+	static inline Pipeline* Cast(VkPipeline object)
+	{
+		return vk::VkTtoT<Pipeline, VkPipeline>(object);
 	}
 
 	void destroy(const VkAllocationCallbacks* pAllocator)
@@ -124,7 +129,7 @@
 
 static inline Pipeline* Cast(VkPipeline object)
 {
-	return reinterpret_cast<Pipeline*>(object.get());
+	return Pipeline::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkPipelineCache.hpp b/src/Vulkan/VkPipelineCache.hpp
index c75510b..42c3921 100644
--- a/src/Vulkan/VkPipelineCache.hpp
+++ b/src/Vulkan/VkPipelineCache.hpp
@@ -47,7 +47,7 @@
 
 static inline PipelineCache* Cast(VkPipelineCache object)
 {
-	return reinterpret_cast<PipelineCache*>(object.get());
+	return PipelineCache::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkPipelineLayout.cpp b/src/Vulkan/VkPipelineLayout.cpp
index 5ea8cb8..cd47bab 100644
--- a/src/Vulkan/VkPipelineLayout.cpp
+++ b/src/Vulkan/VkPipelineLayout.cpp
@@ -27,7 +27,7 @@
 	setLayouts = reinterpret_cast<DescriptorSetLayout**>(hostMem);
 	for(uint32_t i = 0; i < pCreateInfo->setLayoutCount; i++)
 	{
-		setLayouts[i] = Cast(pCreateInfo->pSetLayouts[i]);
+		setLayouts[i] = vk::Cast(pCreateInfo->pSetLayouts[i]);
 	}
 	hostMem += setLayoutsSize;
 
diff --git a/src/Vulkan/VkPipelineLayout.hpp b/src/Vulkan/VkPipelineLayout.hpp
index ef1f9a4..f450c5d 100644
--- a/src/Vulkan/VkPipelineLayout.hpp
+++ b/src/Vulkan/VkPipelineLayout.hpp
@@ -45,7 +45,7 @@
 
 static inline PipelineLayout* Cast(VkPipelineLayout object)
 {
-	return reinterpret_cast<PipelineLayout*>(object.get());
+	return PipelineLayout::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkQueryPool.hpp b/src/Vulkan/VkQueryPool.hpp
index 88ca21b..45891b2 100644
--- a/src/Vulkan/VkQueryPool.hpp
+++ b/src/Vulkan/VkQueryPool.hpp
@@ -114,7 +114,7 @@
 
 static inline QueryPool* Cast(VkQueryPool object)
 {
-	return reinterpret_cast<QueryPool*>(object.get());
+	return QueryPool::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkRenderPass.hpp b/src/Vulkan/VkRenderPass.hpp
index b111066..0fe9e22 100644
--- a/src/Vulkan/VkRenderPass.hpp
+++ b/src/Vulkan/VkRenderPass.hpp
@@ -89,7 +89,7 @@
 
 static inline RenderPass* Cast(VkRenderPass object)
 {
-	return reinterpret_cast<RenderPass*>(object.get());
+	return RenderPass::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkSampler.hpp b/src/Vulkan/VkSampler.hpp
index 4313e3c..af9da6d 100644
--- a/src/Vulkan/VkSampler.hpp
+++ b/src/Vulkan/VkSampler.hpp
@@ -116,12 +116,12 @@
 
 static inline Sampler* Cast(VkSampler object)
 {
-	return reinterpret_cast<Sampler*>(object.get());
+	return Sampler::Cast(object);
 }
 
 static inline SamplerYcbcrConversion* Cast(VkSamplerYcbcrConversion object)
 {
-	return reinterpret_cast<SamplerYcbcrConversion*>(object.get());
+	return SamplerYcbcrConversion::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkSemaphore.hpp b/src/Vulkan/VkSemaphore.hpp
index 5ebad95..d3ecae4 100644
--- a/src/Vulkan/VkSemaphore.hpp
+++ b/src/Vulkan/VkSemaphore.hpp
@@ -52,7 +52,7 @@
 
 static inline Semaphore* Cast(VkSemaphore object)
 {
-	return reinterpret_cast<Semaphore*>(object.get());
+	return Semaphore::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VkShaderModule.hpp b/src/Vulkan/VkShaderModule.hpp
index 18b9a60..0bc1309 100644
--- a/src/Vulkan/VkShaderModule.hpp
+++ b/src/Vulkan/VkShaderModule.hpp
@@ -44,7 +44,7 @@
 
 static inline ShaderModule* Cast(VkShaderModule object)
 {
-	return reinterpret_cast<ShaderModule*>(object.get());
+	return ShaderModule::Cast(object);
 }
 
 } // namespace vk
diff --git a/src/Vulkan/VulkanPlatform.h b/src/Vulkan/VulkanPlatform.h
index 1b309be..519cfd1 100644
--- a/src/Vulkan/VulkanPlatform.h
+++ b/src/Vulkan/VulkanPlatform.h
@@ -18,90 +18,26 @@
 #include <cstddef>
 #include <cstdint>
 
-template<typename HandleType> class VkHandle
+template<typename T> class VkNonDispatchableHandle
 {
 public:
-	VkHandle(HandleType handle)
-	{
-		u.dummy = 0;
-		u.handle = handle;
-	}
-
-	HandleType get() const
-	{
-		return u.handle;
-	}
-
-	operator HandleType() const
-	{
-		return u.handle;
-	}
-
-protected:
-	HandleType set(HandleType handle)
-	{
-		return (u.handle = handle);
-	}
-
-private:
-	union PointerHandleUnion
-	{
-		HandleType handle;
-		uint64_t dummy; // VkNonDispatchableHandle's size must always be 64 bits even when void* is 32 bits
-	};
-	PointerHandleUnion u;
-};
-
-template<typename T> class VkNonDispatchableHandleBase : public VkHandle<T>
-{
-public:
-	using HandleType = T;
-
-	VkNonDispatchableHandleBase(HandleType handle) : VkHandle<T>(handle)
-	{
-	}
-
-	void operator=(HandleType handle)
-	{
-		this->set(handle);
-	}
-};
-
-// VkDescriptorSet objects are really just memory in the VkDescriptorPool
-// object, so define different/more convenient operators for this object.
-struct VkDescriptorSet_T;
-template<> class VkNonDispatchableHandleBase<VkDescriptorSet_T*> : public VkHandle<uint8_t*>
-{
-public:
-	using HandleType = uint8_t*;
-
-	VkNonDispatchableHandleBase(HandleType handle) : VkHandle<uint8_t*>(handle)
-	{
-	}
-
-	HandleType operator+(ptrdiff_t rhs) const
-	{
-		return get() + rhs;
-	}
-
-	HandleType operator+=(ptrdiff_t rhs)
-	{
-		return this->set(get() + rhs);
-	}
-
-	ptrdiff_t operator-(const HandleType rhs) const
-	{
-		return get() - rhs;
-	}
-};
-
-template<typename T> class VkNonDispatchableHandle : public VkNonDispatchableHandleBase<T>
-{
-public:
-	VkNonDispatchableHandle(typename VkNonDispatchableHandleBase<T>::HandleType handle) : VkNonDispatchableHandleBase<T>(handle)
+	VkNonDispatchableHandle(uint64_t h) : handle(h)
 	{
 		static_assert(sizeof(VkNonDispatchableHandle) == sizeof(uint64_t), "Size is not 64 bits!");
 	}
+
+	void* get() const
+	{
+		return reinterpret_cast<void*>(static_cast<uintptr_t>(handle));
+	}
+
+	operator void*() const
+	{
+		return get();
+	}
+
+private:
+	uint64_t handle;
 };
 
 #define VK_DEFINE_NON_DISPATCHABLE_HANDLE(object) \
diff --git a/src/WSI/VkSurfaceKHR.hpp b/src/WSI/VkSurfaceKHR.hpp
index 9a2f3ff..e25e95d 100644
--- a/src/WSI/VkSurfaceKHR.hpp
+++ b/src/WSI/VkSurfaceKHR.hpp
@@ -61,7 +61,12 @@
 
 	operator VkSurfaceKHR()
 	{
-		return reinterpret_cast<VkSurfaceKHR::HandleType>(this);
+		return vk::TtoVkT<SurfaceKHR, VkSurfaceKHR>(this);
+	}
+
+	static inline SurfaceKHR* Cast(VkSurfaceKHR object)
+	{
+		return vk::VkTtoT<SurfaceKHR, VkSurfaceKHR>(object);
 	}
 
 	void destroy(const VkAllocationCallbacks* pAllocator)
@@ -93,7 +98,7 @@
 
 static inline SurfaceKHR* Cast(VkSurfaceKHR object)
 {
-	return reinterpret_cast<SurfaceKHR*>(object.get());
+	return SurfaceKHR::Cast(object);
 }
 
 }
diff --git a/src/WSI/VkSwapchainKHR.cpp b/src/WSI/VkSwapchainKHR.cpp
index 0ca781d..7c9ae2a 100644
--- a/src/WSI/VkSwapchainKHR.cpp
+++ b/src/WSI/VkSwapchainKHR.cpp
@@ -26,7 +26,7 @@
 {
 
 SwapchainKHR::SwapchainKHR(const VkSwapchainCreateInfoKHR *pCreateInfo, void *mem) :
-	surface(Cast(pCreateInfo->surface)),
+	surface(vk::Cast(pCreateInfo->surface)),
 	images(reinterpret_cast<PresentImage*>(mem)),
 	imageCount(pCreateInfo->minImageCount),
 	retired(false)
diff --git a/src/WSI/VkSwapchainKHR.hpp b/src/WSI/VkSwapchainKHR.hpp
index 4ae8446..3e6bf33 100644
--- a/src/WSI/VkSwapchainKHR.hpp
+++ b/src/WSI/VkSwapchainKHR.hpp
@@ -59,7 +59,7 @@
 
 static inline SwapchainKHR* Cast(VkSwapchainKHR object)
 {
-	return reinterpret_cast<SwapchainKHR*>(object.get());
+	return SwapchainKHR::Cast(object);
 }
 
 }