// Copyright 2018 The SwiftShader Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "VkPipeline.hpp"

#include "VkDevice.hpp"
#include "VkPipelineCache.hpp"
#include "VkPipelineLayout.hpp"
#include "VkRenderPass.hpp"
#include "VkShaderModule.hpp"
#include "VkStringify.hpp"
#include "Pipeline/ComputeProgram.hpp"
#include "Pipeline/SpirvShader.hpp"

#include "marl/trace.h"

#include "spirv-tools/optimizer.hpp"

#include <iostream>

namespace {

// preprocessSpirv applies the pipeline's specialization constants and, when optimization is
// enabled, runs the SPIR-V optimizer's performance passes (which freeze spec constants and
// inline functions).
std::vector<uint32_t> preprocessSpirv(
    std::vector<uint32_t> const &code,
    VkSpecializationInfo const *specializationInfo,
    bool optimize)
{
    spvtools::Optimizer opt{ SPV_ENV_VULKAN_1_1 };

    opt.SetMessageConsumer([](spv_message_level_t level, const char *, const spv_position_t &p, const char *m) {
        switch(level)
        {
            case SPV_MSG_FATAL: sw::warn("SPIR-V FATAL: %d:%d %s\n", int(p.line), int(p.column), m); break;
            case SPV_MSG_INTERNAL_ERROR: sw::warn("SPIR-V INTERNAL_ERROR: %d:%d %s\n", int(p.line), int(p.column), m); break;
            case SPV_MSG_ERROR: sw::warn("SPIR-V ERROR: %d:%d %s\n", int(p.line), int(p.column), m); break;
            case SPV_MSG_WARNING: sw::warn("SPIR-V WARNING: %d:%d %s\n", int(p.line), int(p.column), m); break;
            case SPV_MSG_INFO: sw::trace("SPIR-V INFO: %d:%d %s\n", int(p.line), int(p.column), m); break;
            case SPV_MSG_DEBUG: sw::trace("SPIR-V DEBUG: %d:%d %s\n", int(p.line), int(p.column), m); break;
            default: sw::trace("SPIR-V MESSAGE: %d:%d %s\n", int(p.line), int(p.column), m); break;
        }
    });

    // If the pipeline uses specialization, apply the specializations before freezing
    if(specializationInfo)
    {
        std::unordered_map<uint32_t, std::vector<uint32_t>> specializations;
        for(auto i = 0u; i < specializationInfo->mapEntryCount; ++i)
        {
            auto const &e = specializationInfo->pMapEntries[i];
            auto value_ptr = static_cast<uint32_t const *>(specializationInfo->pData) + e.offset / sizeof(uint32_t);
            specializations.emplace(e.constantID,
                                    std::vector<uint32_t>{ value_ptr, value_ptr + e.size / sizeof(uint32_t) });
        }
        opt.RegisterPass(spvtools::CreateSetSpecConstantDefaultValuePass(specializations));
    }

    if(optimize)
    {
        // Full optimization list taken from spirv-opt.
        opt.RegisterPerformancePasses();
    }

    std::vector<uint32_t> optimized;
    opt.Run(code.data(), code.size(), &optimized);

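    // Debug aid: enable this block to dump the SPIR-V disassembly before and after optimization.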
    if(false)
    {
        spvtools::SpirvTools core(SPV_ENV_VULKAN_1_1);
        std::string preOpt;
        core.Disassemble(code, &preOpt, SPV_BINARY_TO_TEXT_OPTION_NONE);
        std::string postOpt;
        core.Disassemble(optimized, &postOpt, SPV_BINARY_TO_TEXT_OPTION_NONE);
        std::cout << "PRE-OPT: " << preOpt << std::endl
                  << "POST-OPT: " << postOpt << std::endl;
    }

    return optimized;
}

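// createShader preprocesses the module's SPIR-V (unless a debugger context is attached) and
// wraps the result in a sw::SpirvShader for the pipeline stage described by the cache key.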
std::shared_ptr<sw::SpirvShader> createShader(
    const vk::PipelineCache::SpirvShaderKey &key,
    const vk::ShaderModule *module,
    bool robustBufferAccess,
    const std::shared_ptr<vk::dbg::Context> &dbgctx)
{
    // Do not optimize the shader if we have a debugger context.
    // Optimization passes are likely to damage debug information, and reorder
    // instructions.
    const bool optimize = !dbgctx;

    // TODO(b/147726513): Do not preprocess the shader if we have a debugger
    // context.
    // This is a work-around for the SPIR-V tools incorrectly reporting errors
    // when debug information is provided. This can be removed once the
    // following SPIR-V tools bugs are fixed:
    // https://github.com/KhronosGroup/SPIRV-Tools/issues/3102
    // https://github.com/KhronosGroup/SPIRV-Tools/issues/3103
    // https://github.com/KhronosGroup/SPIRV-Tools/issues/3118
    auto code = dbgctx ? key.getInsns() : preprocessSpirv(key.getInsns(), key.getSpecializationInfo(), optimize);
    ASSERT(code.size() > 0);

    // If the pipeline has specialization constants, assume they're unique and
    // use a new serial ID so the shader gets recompiled.
    uint32_t codeSerialID = (key.getSpecializationInfo() ? vk::ShaderModule::nextSerialID() : module->getSerialID());

    // TODO(b/119409619): use allocator.
    return std::make_shared<sw::SpirvShader>(codeSerialID, key.getPipelineStage(), key.getEntryPointName().c_str(),
                                             code, key.getRenderPass(), key.getSubpassIndex(), robustBufferAccess, dbgctx);
}

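// createProgram generates and finalizes the sw::ComputeProgram for a shader/pipeline-layout pair.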
std::shared_ptr<sw::ComputeProgram> createProgram(const vk::PipelineCache::ComputeProgramKey &key)
{
    MARL_SCOPED_EVENT("createProgram");

    vk::DescriptorSet::Bindings descriptorSets;  // FIXME(b/129523279): Delay code generation until invoke time.
    // TODO(b/119409619): use allocator.
    auto program = std::make_shared<sw::ComputeProgram>(key.getShader(), key.getLayout(), descriptorSets);
    program->generate();
    program->finalize();
    return program;
}

}  // anonymous namespace

namespace vk {

Pipeline::Pipeline(PipelineLayout const *layout, const Device *device)
    : layout(layout)
    , device(device)
    , robustBufferAccess(device->getEnabledFeatures().robustBufferAccess)
{
}

GraphicsPipeline::GraphicsPipeline(const VkGraphicsPipelineCreateInfo *pCreateInfo, void *mem, const Device *device)
    : Pipeline(vk::Cast(pCreateInfo->layout), device)
{
    context.robustBufferAccess = robustBufferAccess;

    if((pCreateInfo->flags &
        ~(VK_PIPELINE_CREATE_DISABLE_OPTIMIZATION_BIT |
          VK_PIPELINE_CREATE_DERIVATIVE_BIT |
          VK_PIPELINE_CREATE_ALLOW_DERIVATIVES_BIT)) != 0)
    {
        UNSUPPORTED("pCreateInfo->flags %d", int(pCreateInfo->flags));
    }

    if(pCreateInfo->pTessellationState != nullptr)
    {
        UNSUPPORTED("pCreateInfo->pTessellationState");
    }

    if(pCreateInfo->pDynamicState)
    {
        if(pCreateInfo->pDynamicState->flags != 0)
        {
            // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
            UNSUPPORTED("pCreateInfo->pDynamicState->flags %d", int(pCreateInfo->pDynamicState->flags));
        }

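        // Record each supported dynamic state in dynamicStateFlags; anything else is rejected.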
        for(uint32_t i = 0; i < pCreateInfo->pDynamicState->dynamicStateCount; i++)
        {
            VkDynamicState dynamicState = pCreateInfo->pDynamicState->pDynamicStates[i];
            switch(dynamicState)
            {
                case VK_DYNAMIC_STATE_VIEWPORT:
                case VK_DYNAMIC_STATE_SCISSOR:
                case VK_DYNAMIC_STATE_LINE_WIDTH:
                case VK_DYNAMIC_STATE_DEPTH_BIAS:
                case VK_DYNAMIC_STATE_BLEND_CONSTANTS:
                case VK_DYNAMIC_STATE_DEPTH_BOUNDS:
                case VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK:
                case VK_DYNAMIC_STATE_STENCIL_WRITE_MASK:
                case VK_DYNAMIC_STATE_STENCIL_REFERENCE:
                    ASSERT(dynamicState < (sizeof(dynamicStateFlags) * 8));
                    dynamicStateFlags |= (1 << dynamicState);
                    break;
                default:
                    UNSUPPORTED("VkDynamicState %d", int(dynamicState));
            }
        }
    }

    const VkPipelineVertexInputStateCreateInfo *vertexInputState = pCreateInfo->pVertexInputState;

    if(vertexInputState->flags != 0)
    {
        // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
        UNSUPPORTED("vertexInputState->flags");
    }

    // Context must always have a PipelineLayout set.
    context.pipelineLayout = layout;

    // Temporary in-binding-order representation of buffer strides, to be consumed below
    // when considering attributes. TODO: unfuse buffers from attributes in backend, is old GL model.
    uint32_t vertexStrides[MAX_VERTEX_INPUT_BINDINGS];
    uint32_t instanceStrides[MAX_VERTEX_INPUT_BINDINGS];
    for(uint32_t i = 0; i < vertexInputState->vertexBindingDescriptionCount; i++)
    {
        auto const &desc = vertexInputState->pVertexBindingDescriptions[i];
        vertexStrides[desc.binding] = desc.inputRate == VK_VERTEX_INPUT_RATE_VERTEX ? desc.stride : 0;
        instanceStrides[desc.binding] = desc.inputRate == VK_VERTEX_INPUT_RATE_INSTANCE ? desc.stride : 0;
    }

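    // Describe each vertex attribute's format and offset, and resolve its binding to the strides gathered above.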
    for(uint32_t i = 0; i < vertexInputState->vertexAttributeDescriptionCount; i++)
    {
        auto const &desc = vertexInputState->pVertexAttributeDescriptions[i];
        sw::Stream &input = context.input[desc.location];
        input.format = desc.format;
        input.offset = desc.offset;
        input.binding = desc.binding;
        input.vertexStride = vertexStrides[desc.binding];
        input.instanceStride = instanceStrides[desc.binding];
    }

    const VkPipelineInputAssemblyStateCreateInfo *inputAssemblyState = pCreateInfo->pInputAssemblyState;

    if(inputAssemblyState->flags != 0)
    {
        // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
        UNSUPPORTED("pCreateInfo->pInputAssemblyState->flags %d", int(pCreateInfo->pInputAssemblyState->flags));
    }

    primitiveRestartEnable = (inputAssemblyState->primitiveRestartEnable != VK_FALSE);
    context.topology = inputAssemblyState->topology;

    const VkPipelineViewportStateCreateInfo *viewportState = pCreateInfo->pViewportState;
    if(viewportState)
    {
        if(viewportState->flags != 0)
        {
            // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
            UNSUPPORTED("pCreateInfo->pViewportState->flags %d", int(pCreateInfo->pViewportState->flags));
        }

        if((viewportState->viewportCount != 1) ||
           (viewportState->scissorCount != 1))
        {
            UNSUPPORTED("VkPhysicalDeviceFeatures::multiViewport");
        }

        if(!hasDynamicState(VK_DYNAMIC_STATE_SCISSOR))
        {
            scissor = viewportState->pScissors[0];
        }

        if(!hasDynamicState(VK_DYNAMIC_STATE_VIEWPORT))
        {
            viewport = viewportState->pViewports[0];
        }
    }

    const VkPipelineRasterizationStateCreateInfo *rasterizationState = pCreateInfo->pRasterizationState;

    if(rasterizationState->flags != 0)
    {
        // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
        UNSUPPORTED("pCreateInfo->pRasterizationState->flags %d", int(pCreateInfo->pRasterizationState->flags));
    }

    if(rasterizationState->depthClampEnable != VK_FALSE)
    {
        UNSUPPORTED("VkPhysicalDeviceFeatures::depthClamp");
    }

    context.rasterizerDiscard = (rasterizationState->rasterizerDiscardEnable != VK_FALSE);
    context.cullMode = rasterizationState->cullMode;
    context.frontFace = rasterizationState->frontFace;
    context.polygonMode = rasterizationState->polygonMode;
    context.depthBias = (rasterizationState->depthBiasEnable != VK_FALSE) ? rasterizationState->depthBiasConstantFactor : 0.0f;
    context.slopeDepthBias = (rasterizationState->depthBiasEnable != VK_FALSE) ? rasterizationState->depthBiasSlopeFactor : 0.0f;

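    // Walk the rasterization state's pNext chain for supported extension structures.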
    const VkBaseInStructure *extensionCreateInfo = reinterpret_cast<const VkBaseInStructure *>(rasterizationState->pNext);
    while(extensionCreateInfo)
    {
        // Casting to a long since some structures, such as
        // VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT
        // are not enumerated in the official Vulkan header
        switch((long)(extensionCreateInfo->sType))
        {
            case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_LINE_STATE_CREATE_INFO_EXT:
            {
                const VkPipelineRasterizationLineStateCreateInfoEXT *lineStateCreateInfo = reinterpret_cast<const VkPipelineRasterizationLineStateCreateInfoEXT *>(extensionCreateInfo);
                context.lineRasterizationMode = lineStateCreateInfo->lineRasterizationMode;
            }
            break;
            case VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_PROVOKING_VERTEX_STATE_CREATE_INFO_EXT:
            {
                const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *provokingVertexModeCreateInfo =
                    reinterpret_cast<const VkPipelineRasterizationProvokingVertexStateCreateInfoEXT *>(extensionCreateInfo);
                context.provokingVertexMode = provokingVertexModeCreateInfo->provokingVertexMode;
            }
            break;
            default:
                WARN("pCreateInfo->pRasterizationState->pNext sType = %s", vk::Stringify(extensionCreateInfo->sType).c_str());
                break;
        }

        extensionCreateInfo = extensionCreateInfo->pNext;
    }

    const VkPipelineMultisampleStateCreateInfo *multisampleState = pCreateInfo->pMultisampleState;
    if(multisampleState)
    {
        if(multisampleState->flags != 0)
        {
            // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
            UNSUPPORTED("pCreateInfo->pMultisampleState->flags %d", int(pCreateInfo->pMultisampleState->flags));
        }

        if(multisampleState->sampleShadingEnable != VK_FALSE)
        {
            UNSUPPORTED("VkPhysicalDeviceFeatures::sampleRateShading");
        }

        if(multisampleState->alphaToOneEnable != VK_FALSE)
        {
            UNSUPPORTED("VkPhysicalDeviceFeatures::alphaToOne");
        }

        switch(multisampleState->rasterizationSamples)
        {
            case VK_SAMPLE_COUNT_1_BIT:
                context.sampleCount = 1;
                break;
            case VK_SAMPLE_COUNT_4_BIT:
                context.sampleCount = 4;
                break;
            default:
                UNSUPPORTED("Unsupported sample count");
        }

        if(multisampleState->pSampleMask)
        {
            context.sampleMask = multisampleState->pSampleMask[0];
        }

        context.alphaToCoverage = (multisampleState->alphaToCoverageEnable != VK_FALSE);
    }
    else
    {
        context.sampleCount = 1;
    }

    const VkPipelineDepthStencilStateCreateInfo *depthStencilState = pCreateInfo->pDepthStencilState;
    if(depthStencilState)
    {
        if(depthStencilState->flags != 0)
        {
            // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
            UNSUPPORTED("pCreateInfo->pDepthStencilState->flags %d", int(pCreateInfo->pDepthStencilState->flags));
        }

        if(depthStencilState->depthBoundsTestEnable != VK_FALSE)
        {
            UNSUPPORTED("VkPhysicalDeviceFeatures::depthBounds");
        }

        context.depthBoundsTestEnable = (depthStencilState->depthBoundsTestEnable != VK_FALSE);
        context.depthBufferEnable = (depthStencilState->depthTestEnable != VK_FALSE);
        context.depthWriteEnable = (depthStencilState->depthWriteEnable != VK_FALSE);
        context.depthCompareMode = depthStencilState->depthCompareOp;

        context.stencilEnable = (depthStencilState->stencilTestEnable != VK_FALSE);
        if(context.stencilEnable)
        {
            context.frontStencil = depthStencilState->front;
            context.backStencil = depthStencilState->back;
        }
    }

    const VkPipelineColorBlendStateCreateInfo *colorBlendState = pCreateInfo->pColorBlendState;
    if(colorBlendState)
    {
        if(pCreateInfo->pColorBlendState->flags != 0)
        {
            // Vulkan 1.2: "flags is reserved for future use." "flags must be 0"
            UNSUPPORTED("pCreateInfo->pColorBlendState->flags %d", int(pCreateInfo->pColorBlendState->flags));
        }

        if(colorBlendState->logicOpEnable != VK_FALSE)
        {
            UNSUPPORTED("VkPhysicalDeviceFeatures::logicOp");
        }

        if(!hasDynamicState(VK_DYNAMIC_STATE_BLEND_CONSTANTS))
        {
            blendConstants.r = colorBlendState->blendConstants[0];
            blendConstants.g = colorBlendState->blendConstants[1];
            blendConstants.b = colorBlendState->blendConstants[2];
            blendConstants.a = colorBlendState->blendConstants[3];
        }

        for(auto i = 0u; i < colorBlendState->attachmentCount; i++)
        {
            const VkPipelineColorBlendAttachmentState &attachment = colorBlendState->pAttachments[i];
            context.colorWriteMask[i] = attachment.colorWriteMask;

            context.setBlendState(i, { (attachment.blendEnable != VK_FALSE),
                                       attachment.srcColorBlendFactor, attachment.dstColorBlendFactor, attachment.colorBlendOp,
                                       attachment.srcAlphaBlendFactor, attachment.dstAlphaBlendFactor, attachment.alphaBlendOp });
        }
    }

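    // Restrict the sample mask to the bits covered by the pipeline's sample count.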
    context.multiSampleMask = context.sampleMask & ((unsigned)0xFFFFFFFF >> (32 - context.sampleCount));
}

void GraphicsPipeline::destroyPipeline(const VkAllocationCallbacks *pAllocator)
{
    vertexShader.reset();
    fragmentShader.reset();
}

size_t GraphicsPipeline::ComputeRequiredAllocationSize(const VkGraphicsPipelineCreateInfo *pCreateInfo)
{
    return 0;
}

void GraphicsPipeline::setShader(const VkShaderStageFlagBits &stage, const std::shared_ptr<sw::SpirvShader> spirvShader)
{
    switch(stage)
    {
        case VK_SHADER_STAGE_VERTEX_BIT:
            ASSERT(vertexShader.get() == nullptr);
            vertexShader = spirvShader;
            context.vertexShader = vertexShader.get();
            break;

        case VK_SHADER_STAGE_FRAGMENT_BIT:
            ASSERT(fragmentShader.get() == nullptr);
            fragmentShader = spirvShader;
            context.pixelShader = fragmentShader.get();
            break;

        default:
            UNSUPPORTED("Unsupported stage");
            break;
    }
}

const std::shared_ptr<sw::SpirvShader> GraphicsPipeline::getShader(const VkShaderStageFlagBits &stage) const
{
    switch(stage)
    {
        case VK_SHADER_STAGE_VERTEX_BIT:
            return vertexShader;
        case VK_SHADER_STAGE_FRAGMENT_BIT:
            return fragmentShader;
        default:
            UNSUPPORTED("Unsupported stage");
            return fragmentShader;
    }
}

void GraphicsPipeline::compileShaders(const VkAllocationCallbacks *pAllocator, const VkGraphicsPipelineCreateInfo *pCreateInfo, PipelineCache *pPipelineCache)
{
    for(auto pStage = pCreateInfo->pStages; pStage != pCreateInfo->pStages + pCreateInfo->stageCount; pStage++)
    {
        if(pStage->flags != 0)
        {
            // Vulkan 1.2: "flags must be 0"
            UNSUPPORTED("pStage->flags %d", int(pStage->flags));
        }

        const ShaderModule *module = vk::Cast(pStage->module);
        const PipelineCache::SpirvShaderKey key(pStage->stage, pStage->pName, module->getCode(),
                                                vk::Cast(pCreateInfo->renderPass), pCreateInfo->subpass,
                                                pStage->pSpecializationInfo);
        auto pipelineStage = key.getPipelineStage();

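        // With a pipeline cache, look up the shader under the cache's shader mutex and compile it only on a miss.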
        if(pPipelineCache)
        {
            PipelineCache &pipelineCache = *pPipelineCache;
            {
                std::unique_lock<std::mutex> lock(pipelineCache.getShaderMutex());
                const std::shared_ptr<sw::SpirvShader> *spirvShader = pipelineCache[key];
                if(!spirvShader)
                {
                    auto shader = createShader(key, module, robustBufferAccess, device->getDebuggerContext());
                    setShader(pipelineStage, shader);
                    pipelineCache.insert(key, getShader(pipelineStage));
                }
                else
                {
                    setShader(pipelineStage, *spirvShader);
                }
            }
        }
        else
        {
            auto shader = createShader(key, module, robustBufferAccess, device->getDebuggerContext());
            setShader(pipelineStage, shader);
        }
    }
}

uint32_t GraphicsPipeline::computePrimitiveCount(uint32_t vertexCount) const
{
    switch(context.topology)
    {
        case VK_PRIMITIVE_TOPOLOGY_POINT_LIST:
            return vertexCount;
        case VK_PRIMITIVE_TOPOLOGY_LINE_LIST:
            return vertexCount / 2;
        case VK_PRIMITIVE_TOPOLOGY_LINE_STRIP:
            return std::max<uint32_t>(vertexCount, 1) - 1;
        case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST:
            return vertexCount / 3;
        case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
            return std::max<uint32_t>(vertexCount, 2) - 2;
        case VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN:
            return std::max<uint32_t>(vertexCount, 2) - 2;
        default:
            UNSUPPORTED("VkPrimitiveTopology %d", int(context.topology));
    }

    return 0;
}

const sw::Context &GraphicsPipeline::getContext() const
{
    return context;
}

const VkRect2D &GraphicsPipeline::getScissor() const
{
    return scissor;
}

const VkViewport &GraphicsPipeline::getViewport() const
{
    return viewport;
}

const sw::Color<float> &GraphicsPipeline::getBlendConstants() const
{
    return blendConstants;
}

bool GraphicsPipeline::hasDynamicState(VkDynamicState dynamicState) const
{
    return (dynamicStateFlags & (1 << dynamicState)) != 0;
}

ComputePipeline::ComputePipeline(const VkComputePipelineCreateInfo *pCreateInfo, void *mem, const Device *device)
    : Pipeline(vk::Cast(pCreateInfo->layout), device)
{
}

void ComputePipeline::destroyPipeline(const VkAllocationCallbacks *pAllocator)
{
    shader.reset();
    program.reset();
}

size_t ComputePipeline::ComputeRequiredAllocationSize(const VkComputePipelineCreateInfo *pCreateInfo)
{
    return 0;
}

void ComputePipeline::compileShaders(const VkAllocationCallbacks *pAllocator, const VkComputePipelineCreateInfo *pCreateInfo, PipelineCache *pPipelineCache)
{
    auto &stage = pCreateInfo->stage;
    const ShaderModule *module = vk::Cast(stage.module);

    ASSERT(shader.get() == nullptr);
    ASSERT(program.get() == nullptr);

    const PipelineCache::SpirvShaderKey shaderKey(
        stage.stage, stage.pName, module->getCode(), nullptr, 0, stage.pSpecializationInfo);
    if(pPipelineCache)
    {
        PipelineCache &pipelineCache = *pPipelineCache;
        {
            std::unique_lock<std::mutex> lock(pipelineCache.getShaderMutex());
            const std::shared_ptr<sw::SpirvShader> *spirvShader = pipelineCache[shaderKey];
            if(!spirvShader)
            {
                shader = createShader(shaderKey, module, robustBufferAccess, device->getDebuggerContext());
                pipelineCache.insert(shaderKey, shader);
            }
            else
            {
                shader = *spirvShader;
            }
        }

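        // With the shader resolved, look up or build the ComputeProgram under the cache's program mutex.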
        {
            const PipelineCache::ComputeProgramKey programKey(shader.get(), layout);
            std::unique_lock<std::mutex> lock(pipelineCache.getProgramMutex());
            const std::shared_ptr<sw::ComputeProgram> *computeProgram = pipelineCache[programKey];
            if(!computeProgram)
            {
                program = createProgram(programKey);
                pipelineCache.insert(programKey, program);
            }
            else
            {
                program = *computeProgram;
            }
        }
    }
    else
    {
        shader = createShader(shaderKey, module, robustBufferAccess, device->getDebuggerContext());
        const PipelineCache::ComputeProgramKey programKey(shader.get(), layout);
        program = createProgram(programKey);
    }
}

void ComputePipeline::run(uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ,
                          uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ,
                          vk::DescriptorSet::Bindings const &descriptorSets,
                          vk::DescriptorSet::DynamicOffsets const &descriptorDynamicOffsets,
                          sw::PushConstantStorage const &pushConstants)
{
    ASSERT_OR_RETURN(program != nullptr);
    program->run(
        descriptorSets, descriptorDynamicOffsets, pushConstants,
        baseGroupX, baseGroupY, baseGroupZ,
        groupCountX, groupCountY, groupCountZ);
}

}  // namespace vk