// Copyright 2019 The SwiftShader Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "SpirvShader.hpp"

#include "ShaderCore.hpp"

#include "Vulkan/VkPipelineLayout.hpp"

#include <spirv/unified1/spirv.hpp>

namespace sw {

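// EmitLoad implements OpLoad and OpAtomicLoad. For OpLoad the operands are
// word(1) = result type, word(2) = result id, word(3) = pointer; OpAtomicLoad
// additionally carries word(4) = memory scope and word(5) = memory semantics.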
SpirvShader::EmitResult SpirvShader::EmitLoad(InsnIterator insn, EmitState *state) const
{
	bool atomic = (insn.opcode() == spv::OpAtomicLoad);
	Object::ID resultId = insn.word(2);
	Object::ID pointerId = insn.word(3);
	auto &result = getObject(resultId);
	auto &resultTy = getType(result);
	auto &pointer = getObject(pointerId);
	auto &pointerTy = getType(pointer);
	std::memory_order memoryOrder = std::memory_order_relaxed;

	ASSERT(getType(pointer).element == result.typeId());
	ASSERT(Type::ID(insn.word(1)) == result.typeId());
	ASSERT(!atomic || getType(getType(pointer).element).opcode() == spv::OpTypeInt);  // Vulkan 1.1: "Atomic instructions must declare a scalar 32-bit integer type, for the value pointed to by Pointer."

	if(pointerTy.storageClass == spv::StorageClassUniformConstant)
	{
		// Just propagate the pointer.
		auto &ptr = state->getPointer(pointerId);
		state->createPointer(resultId, ptr);
		return EmitResult::Continue;
	}

	if(atomic)
	{
		Object::ID semanticsId = insn.word(5);
		auto memorySemantics = static_cast<spv::MemorySemanticsMask>(getObject(semanticsId).constantValue[0]);
		memoryOrder = MemoryOrder(memorySemantics);
	}

	auto ptr = GetPointerToData(pointerId, 0, state);
	bool interleavedByLane = IsStorageInterleavedByLane(pointerTy.storageClass);
	auto &dst = state->createIntermediate(resultId, resultTy.componentCount);
	auto robustness = state->getOutOfBoundsBehavior(pointerTy.storageClass);

	VisitMemoryObject(pointerId, [&](const MemoryElement &el) {
		auto p = ptr + el.offset;
		if(interleavedByLane) { p = InterleaveByLane(p); }  // TODO: Interleave once, then add offset?
		dst.move(el.index, p.Load<SIMD::Float>(robustness, state->activeLaneMask(), atomic, memoryOrder));
	});

	return EmitResult::Continue;
}

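// EmitStore implements OpStore (word(1) = pointer, word(2) = object) and
// OpAtomicStore (word(1) = pointer, word(2) = memory scope, word(3) = memory
// semantics, word(4) = value), deferring the actual write to Store() below.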
SpirvShader::EmitResult SpirvShader::EmitStore(InsnIterator insn, EmitState *state) const
{
	bool atomic = (insn.opcode() == spv::OpAtomicStore);
	Object::ID pointerId = insn.word(1);
	Object::ID objectId = insn.word(atomic ? 4 : 2);
	std::memory_order memoryOrder = std::memory_order_relaxed;

	if(atomic)
	{
		Object::ID semanticsId = insn.word(3);
		auto memorySemantics = static_cast<spv::MemorySemanticsMask>(getObject(semanticsId).constantValue[0]);
		memoryOrder = MemoryOrder(memorySemantics);
	}

	const auto &value = Operand(this, state, objectId);

	Store(pointerId, value, atomic, memoryOrder, state);

	return EmitResult::Continue;
}

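// Store writes a SIMD operand to the memory addressed by pointerId, honoring
// the active lane mask. For storage classes where helper invocations must not
// perform stores, the mask is further restricted by the stores-and-atomics
// mask (see StoresInHelperInvocation below).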
void SpirvShader::Store(Object::ID pointerId, const Operand &value, bool atomic, std::memory_order memoryOrder, EmitState *state) const
{
	auto &pointer = getObject(pointerId);
	auto &pointerTy = getType(pointer);
	auto &elementTy = getType(pointerTy.element);

	ASSERT(!atomic || elementTy.opcode() == spv::OpTypeInt);  // Vulkan 1.1: "Atomic instructions must declare a scalar 32-bit integer type, for the value pointed to by Pointer."

	auto ptr = GetPointerToData(pointerId, 0, state);
	bool interleavedByLane = IsStorageInterleavedByLane(pointerTy.storageClass);
	auto robustness = state->getOutOfBoundsBehavior(pointerTy.storageClass);

	SIMD::Int mask = state->activeLaneMask();
	if(!StoresInHelperInvocation(pointerTy.storageClass))
	{
		mask = mask & state->storesAndAtomicsMask();
	}

	VisitMemoryObject(pointerId, [&](const MemoryElement &el) {
		auto p = ptr + el.offset;
		if(interleavedByLane) { p = InterleaveByLane(p); }
		p.Store(value.Float(el.index), robustness, mask, atomic, memoryOrder);
	});
}

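// EmitVariable implements OpVariable (word(1) = result type, word(2) =
// result id, word(3) = storage class, optional word(4) = initializer). It
// creates a pointer to the variable's backing storage, which depends on the
// storage class, and then applies any constant initializer.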
SpirvShader::EmitResult SpirvShader::EmitVariable(InsnIterator insn, EmitState *state) const
{
	auto routine = state->routine;
	Object::ID resultId = insn.word(2);
	auto &object = getObject(resultId);
	auto &objectTy = getType(object);

	switch(objectTy.storageClass)
	{
		case spv::StorageClassOutput:
		case spv::StorageClassPrivate:
		case spv::StorageClassFunction:
		{
			ASSERT(objectTy.opcode() == spv::OpTypePointer);
			auto base = &routine->getVariable(resultId)[0];
			auto elementTy = getType(objectTy.element);
			auto size = elementTy.componentCount * static_cast<uint32_t>(sizeof(float)) * SIMD::Width;
			state->createPointer(resultId, SIMD::Pointer(base, size));
			break;
		}
		case spv::StorageClassWorkgroup:
		{
			ASSERT(objectTy.opcode() == spv::OpTypePointer);
			auto base = &routine->workgroupMemory[0];
			auto size = workgroupMemory.size();
			state->createPointer(resultId, SIMD::Pointer(base, size, workgroupMemory.offsetOf(resultId)));
			break;
		}
		case spv::StorageClassInput:
		{
			if(object.kind == Object::Kind::InterfaceVariable)
			{
				auto &dst = routine->getVariable(resultId);
				int offset = 0;
				VisitInterface(resultId,
				               [&](Decorations const &d, AttribType type) {
					               auto scalarSlot = d.Location << 2 | d.Component;
					               dst[offset++] = routine->inputs[scalarSlot];
				               });
			}
			ASSERT(objectTy.opcode() == spv::OpTypePointer);
			auto base = &routine->getVariable(resultId)[0];
			auto elementTy = getType(objectTy.element);
			auto size = elementTy.componentCount * static_cast<uint32_t>(sizeof(float)) * SIMD::Width;
			state->createPointer(resultId, SIMD::Pointer(base, size));
			break;
		}
		case spv::StorageClassUniformConstant:
		{
			const auto &d = descriptorDecorations.at(resultId);
			ASSERT(d.DescriptorSet >= 0);
			ASSERT(d.Binding >= 0);

			uint32_t arrayIndex = 0;  // TODO(b/129523279)
			auto setLayout = routine->pipelineLayout->getDescriptorSetLayout(d.DescriptorSet);
			if(setLayout->hasBinding(d.Binding))
			{
				uint32_t bindingOffset = static_cast<uint32_t>(setLayout->getBindingOffset(d.Binding, arrayIndex));
				Pointer<Byte> set = routine->descriptorSets[d.DescriptorSet];  // DescriptorSet*
				Pointer<Byte> binding = Pointer<Byte>(set + bindingOffset);    // vk::SampledImageDescriptor*
				auto size = 0;  // Not required as this pointer is not directly used by SIMD::Read or SIMD::Write.
				state->createPointer(resultId, SIMD::Pointer(binding, size));
			}
			else
			{
				// TODO: Error if the variable with the non-existent binding is
				// used? Or perhaps strip these unused variable declarations as
				// a preprocess on the SPIR-V?
			}
			break;
		}
		case spv::StorageClassUniform:
		case spv::StorageClassStorageBuffer:
		{
			const auto &d = descriptorDecorations.at(resultId);
			ASSERT(d.DescriptorSet >= 0);
			auto size = 0;  // Not required as this pointer is not directly used by SIMD::Read or SIMD::Write.
			// Note: the module may contain descriptor set references that are not suitable for this
			// implementation, i.e. using a set index higher than the number of descriptor set binding
			// points we support. As long as the selected entry point doesn't actually touch the
			// out-of-range binding points, this is valid. In this case make the value nullptr to make
			// it easier to diagnose an attempt to dereference it.
			if(d.DescriptorSet < vk::MAX_BOUND_DESCRIPTOR_SETS)
			{
				state->createPointer(resultId, SIMD::Pointer(routine->descriptorSets[d.DescriptorSet], size));
			}
			else
			{
				state->createPointer(resultId, SIMD::Pointer(nullptr, 0));
			}
			break;
		}
		case spv::StorageClassPushConstant:
		{
			state->createPointer(resultId, SIMD::Pointer(routine->pushConstants, vk::MAX_PUSH_CONSTANT_SIZE));
			break;
		}
		default:
			UNREACHABLE("Storage class %d", objectTy.storageClass);
			break;
	}

	if(insn.wordCount() > 4)
	{
		Object::ID initializerId = insn.word(4);
		if(getObject(initializerId).kind != Object::Kind::Constant)
		{
			UNIMPLEMENTED("b/148241854: Non-constant initializers not yet implemented");  // FIXME(b/148241854)
		}

		switch(objectTy.storageClass)
		{
			case spv::StorageClassOutput:
			case spv::StorageClassPrivate:
			case spv::StorageClassFunction:
			{
				bool interleavedByLane = IsStorageInterleavedByLane(objectTy.storageClass);
				auto ptr = GetPointerToData(resultId, 0, state);
				Operand initialValue(this, state, initializerId);
				VisitMemoryObject(resultId, [&](const MemoryElement &el) {
					auto p = ptr + el.offset;
					if(interleavedByLane) { p = InterleaveByLane(p); }
					auto robustness = OutOfBoundsBehavior::UndefinedBehavior;  // Local variables are always within bounds.
					p.Store(initialValue.Float(el.index), robustness, state->activeLaneMask());
				});
				break;
			}
			default:
				ASSERT_MSG(initializerId == 0, "Vulkan does not permit variables of storage class %d to have initializers", int(objectTy.storageClass));
		}
	}

	return EmitResult::Continue;
}

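// EmitCopyMemory implements OpCopyMemory (word(1) = target pointer, word(2) =
// source pointer). Both objects share the same element type but may use
// different layouts, so the copy matches up scalar elements by flattened
// index and re-interleaves as each side's storage class requires.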
SpirvShader::EmitResult SpirvShader::EmitCopyMemory(InsnIterator insn, EmitState *state) const
{
	Object::ID dstPtrId = insn.word(1);
	Object::ID srcPtrId = insn.word(2);
	auto &dstPtrTy = getType(getObject(dstPtrId));
	auto &srcPtrTy = getType(getObject(srcPtrId));
	ASSERT(dstPtrTy.element == srcPtrTy.element);

	bool dstInterleavedByLane = IsStorageInterleavedByLane(dstPtrTy.storageClass);
	bool srcInterleavedByLane = IsStorageInterleavedByLane(srcPtrTy.storageClass);
	auto dstPtr = GetPointerToData(dstPtrId, 0, state);
	auto srcPtr = GetPointerToData(srcPtrId, 0, state);

	std::unordered_map<uint32_t, uint32_t> srcOffsets;

	VisitMemoryObject(srcPtrId, [&](const MemoryElement &el) { srcOffsets[el.index] = el.offset; });

	VisitMemoryObject(dstPtrId, [&](const MemoryElement &el) {
		auto it = srcOffsets.find(el.index);
		ASSERT(it != srcOffsets.end());
		auto srcOffset = it->second;
		auto dstOffset = el.offset;

		auto dst = dstPtr + dstOffset;
		auto src = srcPtr + srcOffset;
		if(dstInterleavedByLane) { dst = InterleaveByLane(dst); }
		if(srcInterleavedByLane) { src = InterleaveByLane(src); }

		// TODO(b/131224163): Optimize based on src/dst storage classes.
		auto robustness = OutOfBoundsBehavior::RobustBufferAccess;

		auto value = src.Load<SIMD::Float>(robustness, state->activeLaneMask());
		dst.Store(value, robustness, state->activeLaneMask());
	});
	return EmitResult::Continue;
}

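// EmitMemoryBarrier implements OpMemoryBarrier (word(1) = memory scope,
// word(2) = memory semantics) by emitting a fence for the given semantics.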
SpirvShader::EmitResult SpirvShader::EmitMemoryBarrier(InsnIterator insn, EmitState *state) const
{
	auto semantics = spv::MemorySemanticsMask(GetConstScalarInt(insn.word(2)));
	// TODO: We probably want to consider the memory scope here. For now,
	// just always emit the full fence.
	Fence(semantics);
	return EmitResult::Continue;
}

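// VisitMemoryObjectInner recursively walks the type identified by id,
// applying explicit layout decorations (Offset, ArrayStride, MatrixStride,
// RowMajor) to compute the byte offset of every scalar element, and invokes
// f once per scalar with its flattened index and offset.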
void SpirvShader::VisitMemoryObjectInner(sw::SpirvShader::Type::ID id, sw::SpirvShader::Decorations d, uint32_t &index, uint32_t offset, const MemoryVisitor &f) const
{
	ApplyDecorationsForId(&d, id);
	auto const &type = getType(id);

	if(d.HasOffset)
	{
		offset += d.Offset;
		d.HasOffset = false;
	}

	switch(type.opcode())
	{
		case spv::OpTypePointer:
			VisitMemoryObjectInner(type.definition.word(3), d, index, offset, f);
			break;
		case spv::OpTypeInt:
		case spv::OpTypeFloat:
		case spv::OpTypeRuntimeArray:
			f(MemoryElement{ index++, offset, type });
			break;
		case spv::OpTypeVector:
		{
			auto elemStride = (d.InsideMatrix && d.HasRowMajor && d.RowMajor) ? d.MatrixStride : static_cast<int32_t>(sizeof(float));
			for(auto i = 0u; i < type.definition.word(3); i++)
			{
				VisitMemoryObjectInner(type.definition.word(2), d, index, offset + elemStride * i, f);
			}
			break;
		}
		case spv::OpTypeMatrix:
		{
			auto columnStride = (d.HasRowMajor && d.RowMajor) ? static_cast<int32_t>(sizeof(float)) : d.MatrixStride;
			d.InsideMatrix = true;
			for(auto i = 0u; i < type.definition.word(3); i++)
			{
				ASSERT(d.HasMatrixStride);
				VisitMemoryObjectInner(type.definition.word(2), d, index, offset + columnStride * i, f);
			}
			break;
		}
		case spv::OpTypeStruct:
			for(auto i = 0u; i < type.definition.wordCount() - 2; i++)
			{
				ApplyDecorationsForIdMember(&d, id, i);
				VisitMemoryObjectInner(type.definition.word(i + 2), d, index, offset, f);
			}
			break;
		case spv::OpTypeArray:
		{
			auto arraySize = GetConstScalarInt(type.definition.word(3));
			for(auto i = 0u; i < arraySize; i++)
			{
				ASSERT(d.HasArrayStride);
				VisitMemoryObjectInner(type.definition.word(2), d, index, offset + i * d.ArrayStride, f);
			}
			break;
		}
		default:
			UNREACHABLE("%s", OpcodeName(type.opcode()).c_str());
	}
}

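// VisitMemoryObject invokes f for each scalar element of the object
// identified by id. Explicitly laid out objects (Uniform, StorageBuffer,
// PushConstant) are walked using their layout decorations; all other objects
// are tightly packed, one float-sized component per element.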
void SpirvShader::VisitMemoryObject(Object::ID id, const MemoryVisitor &f) const
{
	auto typeId = getObject(id).typeId();
	auto const &type = getType(typeId);

	if(IsExplicitLayout(type.storageClass))
	{
		Decorations d{};
		ApplyDecorationsForId(&d, id);
		uint32_t index = 0;
		VisitMemoryObjectInner(typeId, d, index, 0, f);
	}
	else
	{
		// Objects without explicit layout are tightly packed.
		auto &elType = getType(type.element);
		for(auto index = 0u; index < elType.componentCount; index++)
		{
			auto offset = static_cast<uint32_t>(index * sizeof(float));
			f({ index, offset, elType });
		}
	}
}

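// GetPointerToData returns a SIMD pointer to the underlying data of the given
// object: either a previously created pointer, or, for descriptor-set
// objects, a pointer into the bound buffer descriptor, adjusted by the
// dynamic offset for dynamic bindings and clamped for robust buffer access.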
SIMD::Pointer SpirvShader::GetPointerToData(Object::ID id, int arrayIndex, EmitState const *state) const
{
	auto routine = state->routine;
	auto &object = getObject(id);
	switch(object.kind)
	{
		case Object::Kind::Pointer:
		case Object::Kind::InterfaceVariable:
			return state->getPointer(id);

		case Object::Kind::DescriptorSet:
		{
			const auto &d = descriptorDecorations.at(id);
			ASSERT(d.DescriptorSet >= 0 && d.DescriptorSet < vk::MAX_BOUND_DESCRIPTOR_SETS);
			ASSERT(d.Binding >= 0);

			auto set = state->getPointer(id);

			auto setLayout = routine->pipelineLayout->getDescriptorSetLayout(d.DescriptorSet);
			ASSERT_MSG(setLayout->hasBinding(d.Binding), "Descriptor set %d does not contain binding %d", int(d.DescriptorSet), int(d.Binding));
			int bindingOffset = static_cast<int>(setLayout->getBindingOffset(d.Binding, arrayIndex));

			Pointer<Byte> descriptor = set.base + bindingOffset;                                           // BufferDescriptor*
			Pointer<Byte> data = *Pointer<Pointer<Byte>>(descriptor + OFFSET(vk::BufferDescriptor, ptr));  // void*
			Int size = *Pointer<Int>(descriptor + OFFSET(vk::BufferDescriptor, sizeInBytes));
			if(setLayout->isBindingDynamic(d.Binding))
			{
				uint32_t dynamicBindingIndex =
				    routine->pipelineLayout->getDynamicOffsetBase(d.DescriptorSet) +
				    setLayout->getDynamicDescriptorOffset(d.Binding) +
				    arrayIndex;
				Int offset = routine->descriptorDynamicOffsets[dynamicBindingIndex];
				Int robustnessSize = *Pointer<Int>(descriptor + OFFSET(vk::BufferDescriptor, robustnessSize));
				return SIMD::Pointer(data + offset, Min(size, robustnessSize - offset));
			}
			else
			{
				return SIMD::Pointer(data, size);
			}
		}

		default:
			UNREACHABLE("Invalid pointer kind %d", int(object.kind));
			return SIMD::Pointer(Pointer<Byte>(), 0);
	}
}

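// MemoryOrder translates SPIR-V memory semantics into the closest C++11
// memory order. Only the ordering bits (Acquire, Release, AcquireRelease,
// SequentiallyConsistent) are considered; at most one of them may be set.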
std::memory_order SpirvShader::MemoryOrder(spv::MemorySemanticsMask memorySemantics)
{
	auto control = static_cast<uint32_t>(memorySemantics) & static_cast<uint32_t>(
	                                                            spv::MemorySemanticsAcquireMask |
	                                                            spv::MemorySemanticsReleaseMask |
	                                                            spv::MemorySemanticsAcquireReleaseMask |
	                                                            spv::MemorySemanticsSequentiallyConsistentMask);
	switch(control)
	{
		case spv::MemorySemanticsMaskNone: return std::memory_order_relaxed;
		case spv::MemorySemanticsAcquireMask: return std::memory_order_acquire;
		case spv::MemorySemanticsReleaseMask: return std::memory_order_release;
		case spv::MemorySemanticsAcquireReleaseMask: return std::memory_order_acq_rel;
		case spv::MemorySemanticsSequentiallyConsistentMask: return std::memory_order_acq_rel;  // Vulkan 1.1: "SequentiallyConsistent is treated as AcquireRelease"
		default:
			// "it is invalid for more than one of these four bits to be set:
			// Acquire, Release, AcquireRelease, or SequentiallyConsistent."
			UNREACHABLE("MemorySemanticsMask: %x", int(control));
			return std::memory_order_acq_rel;
	}
}

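// StoresInHelperInvocation returns true if stores and atomics may be
// performed by helper invocations for the given storage class. Stores to
// memory visible outside the invocation (Uniform, StorageBuffer, Image) are
// masked off for helper invocations in Store() above.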
bool SpirvShader::StoresInHelperInvocation(spv::StorageClass storageClass)
{
	switch(storageClass)
	{
		case spv::StorageClassUniform:
		case spv::StorageClassStorageBuffer:
		case spv::StorageClassImage:
			return false;
		default:
			return true;
	}
}

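// IsExplicitLayout returns true for storage classes whose types carry
// explicit layout decorations (Offset, ArrayStride, MatrixStride) in SPIR-V.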
bool SpirvShader::IsExplicitLayout(spv::StorageClass storageClass)
{
	switch(storageClass)
	{
		case spv::StorageClassUniform:
		case spv::StorageClassStorageBuffer:
		case spv::StorageClassPushConstant:
			return true;
		default:
			return false;
	}
}

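// InterleaveByLane rescales a pointer so that consecutive components of a
// variable are spaced SIMD::Width floats apart, with each lane offset by its
// lane index: component i of lane l ends up at byte offset
// i * SIMD::Width * sizeof(float) + l * sizeof(float). For example, with the
// four lanes below, a vec2 is laid out as x0 x1 x2 x3 y0 y1 y2 y3.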
sw::SIMD::Pointer SpirvShader::InterleaveByLane(sw::SIMD::Pointer p)
{
	p *= sw::SIMD::Width;
	p.staticOffsets[0] += 0 * sizeof(float);
	p.staticOffsets[1] += 1 * sizeof(float);
	p.staticOffsets[2] += 2 * sizeof(float);
	p.staticOffsets[3] += 3 * sizeof(float);
	return p;
}

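// IsStorageInterleavedByLane returns true for storage classes that are
// private to the invocation and therefore stored interleaved by lane;
// memory shared with other invocations or the host keeps its API layout.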
bool SpirvShader::IsStorageInterleavedByLane(spv::StorageClass storageClass)
{
	switch(storageClass)
	{
		case spv::StorageClassUniform:
		case spv::StorageClassStorageBuffer:
		case spv::StorageClassPushConstant:
		case spv::StorageClassWorkgroup:
		case spv::StorageClassImage:
			return false;
		default:
			return true;
	}
}

}  // namespace sw