// Copyright 2019 The SwiftShader Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "SpirvShader.hpp"

#include "ShaderCore.hpp"

#include "Vulkan/VkPipelineLayout.hpp"

#include <spirv/unified1/spirv.hpp>

namespace sw {

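// EmitLoad handles OpLoad and OpAtomicLoad. It resolves the pointer operand to a
// SIMD pointer (UniformConstant pointers are simply propagated), determines the
// memory order for atomic loads from the semantics operand, and loads each scalar
// element of the result through VisitMemoryObject.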
SpirvShader::EmitResult SpirvShader::EmitLoad(InsnIterator insn, EmitState *state) const
{
    bool atomic = (insn.opcode() == spv::OpAtomicLoad);
    Object::ID resultId = insn.word(2);
    Object::ID pointerId = insn.word(3);
    auto &result = getObject(resultId);
    auto &resultTy = getType(result);
    auto &pointer = getObject(pointerId);
    auto &pointerTy = getType(pointer);
    std::memory_order memoryOrder = std::memory_order_relaxed;

    ASSERT(getType(pointer).element == result.typeId());
    ASSERT(Type::ID(insn.word(1)) == result.typeId());
    ASSERT(!atomic || getType(getType(pointer).element).opcode() == spv::OpTypeInt);  // Vulkan 1.1: "Atomic instructions must declare a scalar 32-bit integer type, for the value pointed to by Pointer."

    if(pointerTy.storageClass == spv::StorageClassUniformConstant)
    {
        // Just propagate the pointer.
        auto &ptr = state->getPointer(pointerId);
        state->createPointer(resultId, ptr);
        return EmitResult::Continue;
    }

    if(atomic)
    {
        Object::ID semanticsId = insn.word(5);
        auto memorySemantics = static_cast<spv::MemorySemanticsMask>(getObject(semanticsId).constantValue[0]);
        memoryOrder = MemoryOrder(memorySemantics);
    }

    auto ptr = GetPointerToData(pointerId, 0, state);
    bool interleavedByLane = IsStorageInterleavedByLane(pointerTy.storageClass);
    auto &dst = state->createIntermediate(resultId, resultTy.componentCount);
    auto robustness = state->getOutOfBoundsBehavior(pointerTy.storageClass);

    VisitMemoryObject(pointerId, [&](const MemoryElement &el) {
        auto p = ptr + el.offset;
        if(interleavedByLane) { p = InterleaveByLane(p); }  // TODO: Interleave once, then add offset?
        dst.move(el.index, p.Load<SIMD::Float>(robustness, state->activeLaneMask(), atomic, memoryOrder));
    });

    return EmitResult::Continue;
}

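// EmitStore handles OpStore and OpAtomicStore. It extracts the memory order for
// atomic stores from the semantics operand and delegates the per-element writes
// to Store().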
SpirvShader::EmitResult SpirvShader::EmitStore(InsnIterator insn, EmitState *state) const
{
    bool atomic = (insn.opcode() == spv::OpAtomicStore);
    Object::ID pointerId = insn.word(1);
    Object::ID objectId = insn.word(atomic ? 4 : 2);
    std::memory_order memoryOrder = std::memory_order_relaxed;

    if(atomic)
    {
        Object::ID semanticsId = insn.word(3);
        auto memorySemantics = static_cast<spv::MemorySemanticsMask>(getObject(semanticsId).constantValue[0]);
        memoryOrder = MemoryOrder(memorySemantics);
    }

    const auto &value = Operand(this, state, objectId);

    Store(pointerId, value, atomic, memoryOrder, state);

    return EmitResult::Continue;
}

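// Store writes 'value' element by element through the pointer identified by
// pointerId, masking out inactive lanes and, for storage classes that must not be
// written by helper invocations, lanes disabled by the stores-and-atomics mask.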
void SpirvShader::Store(Object::ID pointerId, const Operand &value, bool atomic, std::memory_order memoryOrder, EmitState *state) const
{
    auto &pointer = getObject(pointerId);
    auto &pointerTy = getType(pointer);
    auto &elementTy = getType(pointerTy.element);

    ASSERT(!atomic || elementTy.opcode() == spv::OpTypeInt);  // Vulkan 1.1: "Atomic instructions must declare a scalar 32-bit integer type, for the value pointed to by Pointer."

    auto ptr = GetPointerToData(pointerId, 0, state);
    bool interleavedByLane = IsStorageInterleavedByLane(pointerTy.storageClass);
    auto robustness = state->getOutOfBoundsBehavior(pointerTy.storageClass);

    SIMD::Int mask = state->activeLaneMask();
    if(!StoresInHelperInvocation(pointerTy.storageClass))
    {
        mask = mask & state->storesAndAtomicsMask();
    }

    VisitMemoryObject(pointerId, [&](const MemoryElement &el) {
        auto p = ptr + el.offset;
        if(interleavedByLane) { p = InterleaveByLane(p); }
        p.Store(value.Float(el.index), robustness, mask, atomic, memoryOrder);
    });
}

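// EmitVariable materializes an OpVariable as a SIMD::Pointer whose base depends on
// the storage class: routine-local memory for Output/Private/Function/Input,
// workgroup memory, descriptor bindings for UniformConstant/Uniform/StorageBuffer,
// or the push constant block. A constant initializer, if present, is then stored
// through the newly created pointer.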
SpirvShader::EmitResult SpirvShader::EmitVariable(InsnIterator insn, EmitState *state) const
{
    auto routine = state->routine;
    Object::ID resultId = insn.word(2);
    auto &object = getObject(resultId);
    auto &objectTy = getType(object);

    switch(objectTy.storageClass)
    {
        case spv::StorageClassOutput:
        case spv::StorageClassPrivate:
        case spv::StorageClassFunction:
        {
            ASSERT(objectTy.opcode() == spv::OpTypePointer);
            auto base = &routine->getVariable(resultId)[0];
            auto elementTy = getType(objectTy.element);
            auto size = elementTy.componentCount * static_cast<uint32_t>(sizeof(float)) * SIMD::Width;
            state->createPointer(resultId, SIMD::Pointer(base, size));
            break;
        }
        case spv::StorageClassWorkgroup:
        {
            ASSERT(objectTy.opcode() == spv::OpTypePointer);
            auto base = &routine->workgroupMemory[0];
            auto size = workgroupMemory.size();
            state->createPointer(resultId, SIMD::Pointer(base, size, workgroupMemory.offsetOf(resultId)));
            break;
        }
        case spv::StorageClassInput:
        {
            if(object.kind == Object::Kind::InterfaceVariable)
            {
                auto &dst = routine->getVariable(resultId);
                int offset = 0;
                VisitInterface(resultId,
                               [&](Decorations const &d, AttribType type) {
                                   auto scalarSlot = d.Location << 2 | d.Component;
                                   dst[offset++] = routine->inputs[scalarSlot];
                               });
            }
            ASSERT(objectTy.opcode() == spv::OpTypePointer);
            auto base = &routine->getVariable(resultId)[0];
            auto elementTy = getType(objectTy.element);
            auto size = elementTy.componentCount * static_cast<uint32_t>(sizeof(float)) * SIMD::Width;
            state->createPointer(resultId, SIMD::Pointer(base, size));
            break;
        }
        case spv::StorageClassUniformConstant:
        {
            const auto &d = descriptorDecorations.at(resultId);
            ASSERT(d.DescriptorSet >= 0);
            ASSERT(d.Binding >= 0);

            uint32_t arrayIndex = 0;  // TODO(b/129523279)
            auto setLayout = routine->pipelineLayout->getDescriptorSetLayout(d.DescriptorSet);
            if(setLayout->hasBinding(d.Binding))
            {
                uint32_t bindingOffset = static_cast<uint32_t>(setLayout->getBindingOffset(d.Binding, arrayIndex));
                Pointer<Byte> set = routine->descriptorSets[d.DescriptorSet];  // DescriptorSet*
                Pointer<Byte> binding = Pointer<Byte>(set + bindingOffset);    // vk::SampledImageDescriptor*
                auto size = 0;  // Not required as this pointer is not directly used by SIMD::Read or SIMD::Write.
                state->createPointer(resultId, SIMD::Pointer(binding, size));
            }
            else
            {
                // TODO: Error if the variable with the non-existent binding is
                // used? Or perhaps strip these unused variable declarations as
                // a preprocess on the SPIR-V?
            }
            break;
        }
        case spv::StorageClassUniform:
        case spv::StorageClassStorageBuffer:
        {
            const auto &d = descriptorDecorations.at(resultId);
            ASSERT(d.DescriptorSet >= 0);
            auto size = 0;  // Not required as this pointer is not directly used by SIMD::Read or SIMD::Write.
            // Note: the module may contain descriptor set references that are not suitable for this implementation -- using a set index higher than the number
            // of descriptor set binding points we support. As long as the selected entrypoint doesn't actually touch the out of range binding points, this
            // is valid. In this case make the value nullptr to make it easier to diagnose an attempt to dereference it.
            if(d.DescriptorSet < vk::MAX_BOUND_DESCRIPTOR_SETS)
            {
                state->createPointer(resultId, SIMD::Pointer(routine->descriptorSets[d.DescriptorSet], size));
            }
            else
            {
                state->createPointer(resultId, SIMD::Pointer(nullptr, 0));
            }
            break;
        }
        case spv::StorageClassPushConstant:
        {
            state->createPointer(resultId, SIMD::Pointer(routine->pushConstants, vk::MAX_PUSH_CONSTANT_SIZE));
            break;
        }
        default:
            UNREACHABLE("Storage class %d", objectTy.storageClass);
            break;
    }

    if(insn.wordCount() > 4)
    {
        Object::ID initializerId = insn.word(4);
        if(getObject(initializerId).kind != Object::Kind::Constant)
        {
            UNIMPLEMENTED("b/148241854: Non-constant initializers not yet implemented");  // FIXME(b/148241854)
        }

        switch(objectTy.storageClass)
        {
            case spv::StorageClassOutput:
            case spv::StorageClassPrivate:
            case spv::StorageClassFunction:
            {
                bool interleavedByLane = IsStorageInterleavedByLane(objectTy.storageClass);
                auto ptr = GetPointerToData(resultId, 0, state);
                Operand initialValue(this, state, initializerId);
                VisitMemoryObject(resultId, [&](const MemoryElement &el) {
                    auto p = ptr + el.offset;
                    if(interleavedByLane) { p = InterleaveByLane(p); }
                    auto robustness = OutOfBoundsBehavior::UndefinedBehavior;  // Local variables are always within bounds.
                    p.Store(initialValue.Float(el.index), robustness, state->activeLaneMask());
                });
                break;
            }
            default:
                ASSERT_MSG(initializerId == 0, "Vulkan does not permit variables of storage class %d to have initializers", int(objectTy.storageClass));
        }
    }

    return EmitResult::Continue;
}

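// EmitCopyMemory copies every scalar element of the source object to the
// destination object, matching elements by index so that source and destination
// may use different memory layouts.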
SpirvShader::EmitResult SpirvShader::EmitCopyMemory(InsnIterator insn, EmitState *state) const
{
    Object::ID dstPtrId = insn.word(1);
    Object::ID srcPtrId = insn.word(2);
    auto &dstPtrTy = getType(getObject(dstPtrId));
    auto &srcPtrTy = getType(getObject(srcPtrId));
    ASSERT(dstPtrTy.element == srcPtrTy.element);

    bool dstInterleavedByLane = IsStorageInterleavedByLane(dstPtrTy.storageClass);
    bool srcInterleavedByLane = IsStorageInterleavedByLane(srcPtrTy.storageClass);
    auto dstPtr = GetPointerToData(dstPtrId, 0, state);
    auto srcPtr = GetPointerToData(srcPtrId, 0, state);

    std::unordered_map<uint32_t, uint32_t> srcOffsets;

    VisitMemoryObject(srcPtrId, [&](const MemoryElement &el) { srcOffsets[el.index] = el.offset; });

    VisitMemoryObject(dstPtrId, [&](const MemoryElement &el) {
        auto it = srcOffsets.find(el.index);
        ASSERT(it != srcOffsets.end());
        auto srcOffset = it->second;
        auto dstOffset = el.offset;

        auto dst = dstPtr + dstOffset;
        auto src = srcPtr + srcOffset;
        if(dstInterleavedByLane) { dst = InterleaveByLane(dst); }
        if(srcInterleavedByLane) { src = InterleaveByLane(src); }

        // TODO(b/131224163): Optimize based on src/dst storage classes.
        auto robustness = OutOfBoundsBehavior::RobustBufferAccess;

        auto value = src.Load<SIMD::Float>(robustness, state->activeLaneMask());
        dst.Store(value, robustness, state->activeLaneMask());
    });
    return EmitResult::Continue;
}

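// EmitMemoryBarrier lowers OpMemoryBarrier to a fence built from the memory
// semantics operand.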
SpirvShader::EmitResult SpirvShader::EmitMemoryBarrier(InsnIterator insn, EmitState *state) const
{
    auto semantics = spv::MemorySemanticsMask(GetConstScalarInt(insn.word(2)));
    // TODO: We probably want to consider the memory scope here. For now,
    // just always emit the full fence.
    Fence(semantics);
    return EmitResult::Continue;
}

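// VisitMemoryObjectInner recursively walks a (possibly nested) type with explicit
// layout decorations, invoking the visitor once per scalar element with its running
// element index and byte offset.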
void SpirvShader::VisitMemoryObjectInner(sw::SpirvShader::Type::ID id, sw::SpirvShader::Decorations d, uint32_t &index, uint32_t offset, const MemoryVisitor &f) const
{
    ApplyDecorationsForId(&d, id);
    auto const &type = getType(id);

    if(d.HasOffset)
    {
        offset += d.Offset;
        d.HasOffset = false;
    }

    switch(type.opcode())
    {
        case spv::OpTypePointer:
            VisitMemoryObjectInner(type.definition.word(3), d, index, offset, f);
            break;
        case spv::OpTypeInt:
        case spv::OpTypeFloat:
        case spv::OpTypeRuntimeArray:
            f(MemoryElement{ index++, offset, type });
            break;
        case spv::OpTypeVector:
        {
            auto elemStride = (d.InsideMatrix && d.HasRowMajor && d.RowMajor) ? d.MatrixStride : static_cast<int32_t>(sizeof(float));
            for(auto i = 0u; i < type.definition.word(3); i++)
            {
                VisitMemoryObjectInner(type.definition.word(2), d, index, offset + elemStride * i, f);
            }
            break;
        }
        case spv::OpTypeMatrix:
        {
            auto columnStride = (d.HasRowMajor && d.RowMajor) ? static_cast<int32_t>(sizeof(float)) : d.MatrixStride;
            d.InsideMatrix = true;
            for(auto i = 0u; i < type.definition.word(3); i++)
            {
                ASSERT(d.HasMatrixStride);
                VisitMemoryObjectInner(type.definition.word(2), d, index, offset + columnStride * i, f);
            }
            break;
        }
        case spv::OpTypeStruct:
            for(auto i = 0u; i < type.definition.wordCount() - 2; i++)
            {
                ApplyDecorationsForIdMember(&d, id, i);
                VisitMemoryObjectInner(type.definition.word(i + 2), d, index, offset, f);
            }
            break;
        case spv::OpTypeArray:
        {
            auto arraySize = GetConstScalarInt(type.definition.word(3));
            for(auto i = 0u; i < arraySize; i++)
            {
                ASSERT(d.HasArrayStride);
                VisitMemoryObjectInner(type.definition.word(2), d, index, offset + i * d.ArrayStride, f);
            }
            break;
        }
        default:
            UNREACHABLE("%s", OpcodeName(type.opcode()).c_str());
    }
}

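// VisitMemoryObject visits each scalar element of the object behind 'id'. Types in
// explicitly laid out storage classes use their Offset/ArrayStride/MatrixStride
// decorations; all other objects are treated as tightly packed scalars.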
void SpirvShader::VisitMemoryObject(Object::ID id, const MemoryVisitor &f) const
{
    auto typeId = getObject(id).typeId();
    auto const &type = getType(typeId);

    if(IsExplicitLayout(type.storageClass))
    {
        Decorations d{};
        ApplyDecorationsForId(&d, id);
        uint32_t index = 0;
        VisitMemoryObjectInner(typeId, d, index, 0, f);
    }
    else
    {
        // Objects without explicit layout are tightly packed.
        auto &elType = getType(type.element);
        for(auto index = 0u; index < elType.componentCount; index++)
        {
            auto offset = static_cast<uint32_t>(index * sizeof(float));
            f({ index, offset, elType });
        }
    }
}

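// GetPointerToData returns a SIMD::Pointer to the start of the data backing 'id':
// either a previously created pointer, or, for descriptor-set objects, the buffer
// address and size read from the bound descriptor, with the dynamic offset applied
// when the binding is dynamic.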
SIMD::Pointer SpirvShader::GetPointerToData(Object::ID id, int arrayIndex, EmitState const *state) const
{
    auto routine = state->routine;
    auto &object = getObject(id);
    switch(object.kind)
    {
        case Object::Kind::Pointer:
        case Object::Kind::InterfaceVariable:
            return state->getPointer(id);

        case Object::Kind::DescriptorSet:
        {
            const auto &d = descriptorDecorations.at(id);
            ASSERT(d.DescriptorSet >= 0 && d.DescriptorSet < vk::MAX_BOUND_DESCRIPTOR_SETS);
            ASSERT(d.Binding >= 0);

            auto set = state->getPointer(id);

            auto setLayout = routine->pipelineLayout->getDescriptorSetLayout(d.DescriptorSet);
            ASSERT_MSG(setLayout->hasBinding(d.Binding), "Descriptor set %d does not contain binding %d", int(d.DescriptorSet), int(d.Binding));
            int bindingOffset = static_cast<int>(setLayout->getBindingOffset(d.Binding, arrayIndex));

            Pointer<Byte> descriptor = set.base + bindingOffset;                                           // BufferDescriptor*
            Pointer<Byte> data = *Pointer<Pointer<Byte>>(descriptor + OFFSET(vk::BufferDescriptor, ptr));  // void*
            Int size = *Pointer<Int>(descriptor + OFFSET(vk::BufferDescriptor, sizeInBytes));
            if(setLayout->isBindingDynamic(d.Binding))
            {
                uint32_t dynamicBindingIndex =
                    routine->pipelineLayout->getDynamicOffsetBase(d.DescriptorSet) +
                    setLayout->getDynamicDescriptorOffset(d.Binding) +
                    arrayIndex;
                Int offset = routine->descriptorDynamicOffsets[dynamicBindingIndex];
                Int robustnessSize = *Pointer<Int>(descriptor + OFFSET(vk::BufferDescriptor, robustnessSize));
                return SIMD::Pointer(data + offset, Min(size, robustnessSize - offset));
            }
            else
            {
                return SIMD::Pointer(data, size);
            }
        }

        default:
            UNREACHABLE("Invalid pointer kind %d", int(object.kind));
            return SIMD::Pointer(Pointer<Byte>(), 0);
    }
}

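// MemoryOrder maps the ordering bits of a SPIR-V memory semantics mask to the
// closest std::memory_order.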
std::memory_order SpirvShader::MemoryOrder(spv::MemorySemanticsMask memorySemantics)
{
    auto control = static_cast<uint32_t>(memorySemantics) & static_cast<uint32_t>(
                                                                spv::MemorySemanticsAcquireMask |
                                                                spv::MemorySemanticsReleaseMask |
                                                                spv::MemorySemanticsAcquireReleaseMask |
                                                                spv::MemorySemanticsSequentiallyConsistentMask);
    switch(control)
    {
        case spv::MemorySemanticsMaskNone: return std::memory_order_relaxed;
        case spv::MemorySemanticsAcquireMask: return std::memory_order_acquire;
        case spv::MemorySemanticsReleaseMask: return std::memory_order_release;
        case spv::MemorySemanticsAcquireReleaseMask: return std::memory_order_acq_rel;
        case spv::MemorySemanticsSequentiallyConsistentMask: return std::memory_order_acq_rel;  // Vulkan 1.1: "SequentiallyConsistent is treated as AcquireRelease"
        default:
            // "it is invalid for more than one of these four bits to be set:
            // Acquire, Release, AcquireRelease, or SequentiallyConsistent."
            UNREACHABLE("MemorySemanticsMask: %x", int(control));
            return std::memory_order_acq_rel;
    }
}

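// StoresInHelperInvocation returns whether helper invocations still perform stores
// to the given storage class; stores to Uniform, StorageBuffer, and Image memory
// are masked out for helper invocations.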
bool SpirvShader::StoresInHelperInvocation(spv::StorageClass storageClass)
{
    switch(storageClass)
    {
        case spv::StorageClassUniform:
        case spv::StorageClassStorageBuffer:
        case spv::StorageClassImage:
            return false;
        default:
            return true;
    }
}

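// IsExplicitLayout returns whether the storage class requires explicit layout
// decorations (Offset, ArrayStride, MatrixStride) on its types.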
bool SpirvShader::IsExplicitLayout(spv::StorageClass storageClass)
{
    switch(storageClass)
    {
        case spv::StorageClassUniform:
        case spv::StorageClassStorageBuffer:
        case spv::StorageClassPushConstant:
            return true;
        default:
            return false;
    }
}

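// InterleaveByLane rescales a pointer so that consecutive scalar elements of a
// variable are spaced SIMD::Width floats apart, with each lane's copy occupying
// its own 4-byte slot within that stride.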
sw::SIMD::Pointer SpirvShader::InterleaveByLane(sw::SIMD::Pointer p)
{
    p *= sw::SIMD::Width;
    p.staticOffsets[0] += 0 * sizeof(float);
    p.staticOffsets[1] += 1 * sizeof(float);
    p.staticOffsets[2] += 2 * sizeof(float);
    p.staticOffsets[3] += 3 * sizeof(float);
    return p;
}

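// IsStorageInterleavedByLane returns whether variables of the given storage class
// keep a separate value per SIMD lane (and thus need InterleaveByLane addressing),
// as opposed to memory shared by all lanes such as buffers, push constants,
// workgroup memory, and images.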
bool SpirvShader::IsStorageInterleavedByLane(spv::StorageClass storageClass)
{
    switch(storageClass)
    {
        case spv::StorageClassUniform:
        case spv::StorageClassStorageBuffer:
        case spv::StorageClassPushConstant:
        case spv::StorageClassWorkgroup:
        case spv::StorageClassImage:
            return false;
        default:
            return true;
    }
}

}  // namespace sw