blob: d281d08b70956a6d6e8438b47032b9bbca5406c5 [file] [log] [blame]
Ben Claytonf3e2cc22019-11-28 12:02:15 +00001// Copyright 2019 The SwiftShader Authors. All Rights Reserved.
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// http://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15#include "SpirvShader.hpp"
Ben Claytonfc951cd2019-05-15 17:16:56 +010016#include "SpirvShaderDebug.hpp"
Ben Claytonf3e2cc22019-11-28 12:02:15 +000017
18#include "ShaderCore.hpp"
Nicolas Capens9e727fa2021-11-22 12:06:33 -050019#include "Reactor/Assert.hpp"
Ben Claytonf3e2cc22019-11-28 12:02:15 +000020#include "Vulkan/VkPipelineLayout.hpp"
21
22#include <spirv/unified1/spirv.hpp>
Ben Claytonf3e2cc22019-11-28 12:02:15 +000023
24namespace sw {
25
Nicolas Capens1ab775a2022-10-12 15:27:02 -040026void SpirvEmitter::EmitLoad(InsnIterator insn)
Ben Claytonf3e2cc22019-11-28 12:02:15 +000027{
28 bool atomic = (insn.opcode() == spv::OpAtomicLoad);
29 Object::ID resultId = insn.word(2);
30 Object::ID pointerId = insn.word(3);
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040031 auto &result = shader.getObject(resultId);
32 auto &resultTy = shader.getType(result);
33 auto &pointer = shader.getObject(pointerId);
34 auto &pointerTy = shader.getType(pointer);
Ben Claytonf3e2cc22019-11-28 12:02:15 +000035 std::memory_order memoryOrder = std::memory_order_relaxed;
36
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040037 ASSERT(shader.getType(pointer).element == result.typeId());
Nicolas Capens72f089c2020-04-08 23:37:08 -040038 ASSERT(Type::ID(insn.word(1)) == result.typeId());
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040039 ASSERT(!atomic || shader.getType(shader.getType(pointer).element).opcode() == spv::OpTypeInt); // Vulkan 1.1: "Atomic instructions must declare a scalar 32-bit integer type, for the value pointed to by Pointer."
Ben Claytonf3e2cc22019-11-28 12:02:15 +000040
41 if(pointerTy.storageClass == spv::StorageClassUniformConstant)
42 {
43 // Just propagate the pointer.
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040044 auto &ptr = getPointer(pointerId);
45 createPointer(resultId, ptr);
Ben Claytonf3e2cc22019-11-28 12:02:15 +000046 }
47
48 if(atomic)
49 {
50 Object::ID semanticsId = insn.word(5);
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040051 auto memorySemantics = static_cast<spv::MemorySemanticsMask>(shader.getObject(semanticsId).constantValue[0]);
Nicolas Capensfdf41472022-09-26 00:40:11 -040052 memoryOrder = shader.MemoryOrder(memorySemantics);
Ben Claytonf3e2cc22019-11-28 12:02:15 +000053 }
54
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040055 auto ptr = GetPointerToData(pointerId, 0, false);
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040056 auto robustness = shader.getOutOfBoundsBehavior(pointerId, routine->pipelineLayout);
Ben Claytonf3e2cc22019-11-28 12:02:15 +000057
Alexis Hetuda978d82022-06-14 17:51:58 -040058 if(result.kind == Object::Kind::Pointer)
59 {
Nicolas Capens71134742022-10-12 12:44:16 -040060 shader.VisitMemoryObject(pointerId, true, [&](const Spirv::MemoryElement &el) {
Alexis Hetuda978d82022-06-14 17:51:58 -040061 ASSERT(el.index == 0);
Nicolas Capensfdf41472022-09-26 00:40:11 -040062 auto p = GetElementPointer(ptr, el.offset, pointerTy.storageClass);
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040063 createPointer(resultId, p.Load<SIMD::Pointer>(robustness, activeLaneMask(), atomic, memoryOrder, sizeof(void *)));
Alexis Hetuda978d82022-06-14 17:51:58 -040064 });
Alexis Hetuec31f542022-06-22 16:47:48 -040065
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040066 SPIRV_SHADER_DBG("Load(atomic: {0}, order: {1}, ptr: {2}, mask: {3})", atomic, int(memoryOrder), ptr, activeLaneMask());
Alexis Hetuda978d82022-06-14 17:51:58 -040067 }
68 else
69 {
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040070 auto &dst = createIntermediate(resultId, resultTy.componentCount);
Nicolas Capens71134742022-10-12 12:44:16 -040071 shader.VisitMemoryObject(pointerId, false, [&](const Spirv::MemoryElement &el) {
Nicolas Capensfdf41472022-09-26 00:40:11 -040072 auto p = GetElementPointer(ptr, el.offset, pointerTy.storageClass);
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040073 dst.move(el.index, p.Load<SIMD::Float>(robustness, activeLaneMask(), atomic, memoryOrder));
Alexis Hetuda978d82022-06-14 17:51:58 -040074 });
Ben Claytonf3e2cc22019-11-28 12:02:15 +000075
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040076 SPIRV_SHADER_DBG("Load(atomic: {0}, order: {1}, ptr: {2}, val: {3}, mask: {4})", atomic, int(memoryOrder), ptr, dst, activeLaneMask());
Alexis Hetuec31f542022-06-22 16:47:48 -040077 }
Ben Claytonf3e2cc22019-11-28 12:02:15 +000078}
79
Nicolas Capens1ab775a2022-10-12 15:27:02 -040080void SpirvEmitter::EmitStore(InsnIterator insn)
Ben Claytonf3e2cc22019-11-28 12:02:15 +000081{
82 bool atomic = (insn.opcode() == spv::OpAtomicStore);
83 Object::ID pointerId = insn.word(1);
84 Object::ID objectId = insn.word(atomic ? 4 : 2);
Ben Claytonf3e2cc22019-11-28 12:02:15 +000085 std::memory_order memoryOrder = std::memory_order_relaxed;
86
87 if(atomic)
88 {
89 Object::ID semanticsId = insn.word(3);
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040090 auto memorySemantics = static_cast<spv::MemorySemanticsMask>(shader.getObject(semanticsId).constantValue[0]);
Nicolas Capensfdf41472022-09-26 00:40:11 -040091 memoryOrder = shader.MemoryOrder(memorySemantics);
Ben Claytonf3e2cc22019-11-28 12:02:15 +000092 }
93
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040094 const auto &value = Operand(shader, *this, objectId);
Nicolas Capens0b77aa52020-04-09 02:48:16 -040095
Nicolas Capense1b8cbd2022-09-12 09:13:06 -040096 Store(pointerId, value, atomic, memoryOrder);
Nicolas Capens0b77aa52020-04-09 02:48:16 -040097}
98
Nicolas Capens1ab775a2022-10-12 15:27:02 -040099void SpirvEmitter::Store(Object::ID pointerId, const Operand &value, bool atomic, std::memory_order memoryOrder) const
Nicolas Capens0b77aa52020-04-09 02:48:16 -0400100{
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400101 auto &pointer = shader.getObject(pointerId);
102 auto &pointerTy = shader.getType(pointer);
103 auto &elementTy = shader.getType(pointerTy.element);
Nicolas Capens0b77aa52020-04-09 02:48:16 -0400104
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000105 ASSERT(!atomic || elementTy.opcode() == spv::OpTypeInt); // Vulkan 1.1: "Atomic instructions must declare a scalar 32-bit integer type, for the value pointed to by Pointer."
106
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400107 auto ptr = GetPointerToData(pointerId, 0, false);
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400108 auto robustness = shader.getOutOfBoundsBehavior(pointerId, routine->pipelineLayout);
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000109
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400110 SIMD::Int mask = activeLaneMask();
Nicolas Capens3a82d572022-10-22 21:50:25 -0400111 if(shader.StoresInHelperInvocationsHaveNoEffect(pointerTy.storageClass))
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000112 {
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400113 mask = mask & storesAndAtomicsMask();
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000114 }
115
Ben Claytonfc951cd2019-05-15 17:16:56 +0100116 SPIRV_SHADER_DBG("Store(atomic: {0}, order: {1}, ptr: {2}, val: {3}, mask: {4}", atomic, int(memoryOrder), ptr, value, mask);
117
Alexis Hetuda978d82022-06-14 17:51:58 -0400118 if(value.isPointer())
119 {
Nicolas Capens71134742022-10-12 12:44:16 -0400120 shader.VisitMemoryObject(pointerId, true, [&](const Spirv::MemoryElement &el) {
Nicolas Capens08dfcbb2022-09-02 14:32:12 -0400121 ASSERT(el.index == 0);
Nicolas Capensfdf41472022-09-26 00:40:11 -0400122 auto p = GetElementPointer(ptr, el.offset, pointerTy.storageClass);
Nicolas Capens08dfcbb2022-09-02 14:32:12 -0400123 p.Store(value.Pointer(), robustness, mask, atomic, memoryOrder);
Alexis Hetuda978d82022-06-14 17:51:58 -0400124 });
125 }
126 else
127 {
Nicolas Capens71134742022-10-12 12:44:16 -0400128 shader.VisitMemoryObject(pointerId, false, [&](const Spirv::MemoryElement &el) {
Nicolas Capensfdf41472022-09-26 00:40:11 -0400129 auto p = GetElementPointer(ptr, el.offset, pointerTy.storageClass);
Alexis Hetuda978d82022-06-14 17:51:58 -0400130 p.Store(value.Float(el.index), robustness, mask, atomic, memoryOrder);
131 });
132 }
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000133}
134
// Emits code for OpVariable: materializes a SIMD::Pointer for the variable
// according to its storage class, and applies the optional constant
// initializer (word 4) where Vulkan permits one.
void SpirvEmitter::EmitVariable(InsnIterator insn)
{
	Object::ID resultId = insn.word(2);
	auto &object = shader.getObject(resultId);
	auto &objectTy = shader.getType(object);

	switch(objectTy.storageClass)
	{
	// Per-invocation variables backed by routine-local lane-interleaved storage.
	case spv::StorageClassOutput:
	case spv::StorageClassPrivate:
	case spv::StorageClassFunction:
		{
			ASSERT(objectTy.opcode() == spv::OpTypePointer);
			auto base = &routine->getVariable(resultId)[0];
			auto elementTy = shader.getType(objectTy.element);
			// One float slot per component per SIMD lane.
			auto size = elementTy.componentCount * static_cast<uint32_t>(sizeof(float)) * SIMD::Width;
			createPointer(resultId, SIMD::Pointer(base, size));
		}
		break;
	// Workgroup-shared memory: all variables share one arena; this variable's
	// placement within it comes from workgroupMemory.offsetOf().
	case spv::StorageClassWorkgroup:
		{
			ASSERT(objectTy.opcode() == spv::OpTypePointer);
			auto base = &routine->workgroupMemory[0];
			auto size = shader.workgroupMemory.size();
			createPointer(resultId, SIMD::Pointer(base, size, shader.workgroupMemory.offsetOf(resultId)));
		}
		break;
	case spv::StorageClassInput:
		{
			// Interface variables are first populated from the routine's input
			// slots (location/component addressing) before taking its address.
			if(object.kind == Object::Kind::InterfaceVariable)
			{
				auto &dst = routine->getVariable(resultId);
				int offset = 0;
				shader.VisitInterface(resultId,
				                      [&](const Decorations &d, Spirv::AttribType type) {
					                      // Four scalar slots per location; Component selects within it.
					                      auto scalarSlot = d.Location << 2 | d.Component;
					                      dst[offset++] = routine->inputs[scalarSlot];
				                      });
			}
			ASSERT(objectTy.opcode() == spv::OpTypePointer);
			auto base = &routine->getVariable(resultId)[0];
			auto elementTy = shader.getType(objectTy.element);
			auto size = elementTy.componentCount * static_cast<uint32_t>(sizeof(float)) * SIMD::Width;
			createPointer(resultId, SIMD::Pointer(base, size));
		}
		break;
	// Opaque resources (samplers/images): point at the descriptor itself.
	case spv::StorageClassUniformConstant:
		{
			const auto &d = shader.descriptorDecorations.at(resultId);
			ASSERT(d.DescriptorSet >= 0);
			ASSERT(d.Binding >= 0);

			uint32_t bindingOffset = routine->pipelineLayout->getBindingOffset(d.DescriptorSet, d.Binding);
			Pointer<Byte> set = routine->descriptorSets[d.DescriptorSet];  // DescriptorSet*
			Pointer<Byte> binding = Pointer<Byte>(set + bindingOffset);    // vk::SampledImageDescriptor*
			auto size = 0;  // Not required as this pointer is not directly used by SIMD::Read or SIMD::Write.
			createPointer(resultId, SIMD::Pointer(binding, size));
		}
		break;
	// Buffer resources: point at the descriptor set; the actual buffer data
	// pointer is resolved later by GetPointerToData().
	case spv::StorageClassUniform:
	case spv::StorageClassStorageBuffer:
	case spv::StorageClassPhysicalStorageBuffer:
		{
			const auto &d = shader.descriptorDecorations.at(resultId);
			ASSERT(d.DescriptorSet >= 0);
			auto size = 0;  // Not required as this pointer is not directly used by SIMD::Read or SIMD::Write.
			// Note: the module may contain descriptor set references that are not suitable for this implementation -- using a set index higher than the number
			// of descriptor set binding points we support. As long as the selected entrypoint doesn't actually touch the out of range binding points, this
			// is valid. In this case make the value nullptr to make it easier to diagnose an attempt to dereference it.
			if(static_cast<uint32_t>(d.DescriptorSet) < vk::MAX_BOUND_DESCRIPTOR_SETS)
			{
				createPointer(resultId, SIMD::Pointer(routine->descriptorSets[d.DescriptorSet], size));
			}
			else
			{
				createPointer(resultId, SIMD::Pointer(nullptr, 0));
			}
		}
		break;
	case spv::StorageClassPushConstant:
		{
			createPointer(resultId, SIMD::Pointer(routine->pushConstants, vk::MAX_PUSH_CONSTANT_SIZE));
		}
		break;
	default:
		UNREACHABLE("Storage class %d", objectTy.storageClass);
		break;
	}

	// Optional initializer operand (word 4).
	if(insn.wordCount() > 4)
	{
		Object::ID initializerId = insn.word(4);
		if(shader.getObject(initializerId).kind != Object::Kind::Constant)
		{
			UNIMPLEMENTED("b/148241854: Non-constant initializers not yet implemented");  // FIXME(b/148241854)
		}

		switch(objectTy.storageClass)
		{
		case spv::StorageClassOutput:
		case spv::StorageClassPrivate:
		case spv::StorageClassFunction:
		case spv::StorageClassWorkgroup:
			{
				// Store the constant initializer element-by-element into the
				// freshly created variable.
				auto ptr = GetPointerToData(resultId, 0, false);
				Operand initialValue(shader, *this, initializerId);

				shader.VisitMemoryObject(resultId, false, [&](const Spirv::MemoryElement &el) {
					auto p = GetElementPointer(ptr, el.offset, objectTy.storageClass);
					auto robustness = OutOfBoundsBehavior::UndefinedBehavior;  // Local variables are always within bounds.
					p.Store(initialValue.Float(el.index), robustness, activeLaneMask());
				});

				if(objectTy.storageClass == spv::StorageClassWorkgroup)
				{
					// Initialization of workgroup memory is done by each subgroup and requires waiting on a barrier.
					// TODO(b/221242292): Initialize just once per workgroup and eliminate the barrier.
					Yield(YieldResult::ControlBarrier);
				}
			}
			break;
		default:
			ASSERT_MSG(initializerId == 0, "Vulkan does not permit variables of storage class %d to have initializers", int(objectTy.storageClass));
		}
	}
}
261
Nicolas Capens1ab775a2022-10-12 15:27:02 -0400262void SpirvEmitter::EmitCopyMemory(InsnIterator insn)
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000263{
264 Object::ID dstPtrId = insn.word(1);
265 Object::ID srcPtrId = insn.word(2);
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400266 auto &dstPtrTy = shader.getObjectType(dstPtrId);
267 auto &srcPtrTy = shader.getObjectType(srcPtrId);
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000268 ASSERT(dstPtrTy.element == srcPtrTy.element);
269
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400270 auto dstPtr = GetPointerToData(dstPtrId, 0, false);
271 auto srcPtr = GetPointerToData(srcPtrId, 0, false);
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000272
273 std::unordered_map<uint32_t, uint32_t> srcOffsets;
274
Nicolas Capens71134742022-10-12 12:44:16 -0400275 shader.VisitMemoryObject(srcPtrId, false, [&](const Spirv::MemoryElement &el) { srcOffsets[el.index] = el.offset; });
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000276
Nicolas Capens71134742022-10-12 12:44:16 -0400277 shader.VisitMemoryObject(dstPtrId, false, [&](const Spirv::MemoryElement &el) {
Ben Clayton18c6a782019-12-03 12:08:16 +0000278 auto it = srcOffsets.find(el.index);
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000279 ASSERT(it != srcOffsets.end());
280 auto srcOffset = it->second;
Ben Clayton18c6a782019-12-03 12:08:16 +0000281 auto dstOffset = el.offset;
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000282
Nicolas Capensfdf41472022-09-26 00:40:11 -0400283 auto dst = GetElementPointer(dstPtr, dstOffset, dstPtrTy.storageClass);
284 auto src = GetElementPointer(srcPtr, srcOffset, srcPtrTy.storageClass);
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000285
286 // TODO(b/131224163): Optimize based on src/dst storage classes.
287 auto robustness = OutOfBoundsBehavior::RobustBufferAccess;
288
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400289 auto value = src.Load<SIMD::Float>(robustness, activeLaneMask());
290 dst.Store(value, robustness, activeLaneMask());
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000291 });
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000292}
293
Nicolas Capens1ab775a2022-10-12 15:27:02 -0400294void SpirvEmitter::EmitMemoryBarrier(InsnIterator insn)
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000295{
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400296 auto semantics = spv::MemorySemanticsMask(shader.GetConstScalarInt(insn.word(2)));
Nicolas Capens4c629802021-12-08 02:05:19 -0500297 // TODO(b/176819536): We probably want to consider the memory scope here.
298 // For now, just always emit the full fence.
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000299 Fence(semantics);
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000300}
301
// Recursively walks an explicitly laid out type, invoking `f` once per scalar
// (or pointer) element with its running element index and byte offset.
// Decorations (Offset, ArrayStride, MatrixStride, RowMajor) are accumulated in
// the by-value `d` as the recursion descends; `index` is shared across the
// whole walk so elements are numbered in visitation order.
void Spirv::VisitMemoryObjectInner(Type::ID id, Decorations d, uint32_t &index, uint32_t offset, bool resultIsPointer, const MemoryVisitor &f) const
{
	ApplyDecorationsForId(&d, id);
	const auto &type = getType(id);

	// Consume a pending Offset decoration exactly once at this level.
	if(d.HasOffset)
	{
		offset += d.Offset;
		d.HasOffset = false;
	}

	switch(type.opcode())
	{
	case spv::OpTypePointer:
		if(resultIsPointer)
		{
			// Load/Store the pointer itself, rather than the structure pointed to by the pointer
			f(MemoryElement{ index++, offset, type });
		}
		else
		{
			// Recurse into the pointee type (word 3 of OpTypePointer).
			VisitMemoryObjectInner(type.definition.word(3), d, index, offset, resultIsPointer, f);
		}
		break;
	// Leaf elements: emit one MemoryElement.
	case spv::OpTypeInt:
	case spv::OpTypeFloat:
	case spv::OpTypeRuntimeArray:
		f(MemoryElement{ index++, offset, type });
		break;
	case spv::OpTypeVector:
		{
			// Row-major matrix columns are strided by MatrixStride; otherwise
			// vector components are tightly packed floats.
			auto elemStride = (d.InsideMatrix && d.HasRowMajor && d.RowMajor) ? d.MatrixStride : static_cast<int32_t>(sizeof(float));
			for(auto i = 0u; i < type.definition.word(3); i++)
			{
				VisitMemoryObjectInner(type.definition.word(2), d, index, offset + elemStride * i, resultIsPointer, f);
			}
		}
		break;
	case spv::OpTypeMatrix:
		{
			// For row-major matrices consecutive columns are a float apart;
			// for column-major they are MatrixStride apart.
			auto columnStride = (d.HasRowMajor && d.RowMajor) ? static_cast<int32_t>(sizeof(float)) : d.MatrixStride;
			d.InsideMatrix = true;
			for(auto i = 0u; i < type.definition.word(3); i++)
			{
				ASSERT(d.HasMatrixStride);
				VisitMemoryObjectInner(type.definition.word(2), d, index, offset + columnStride * i, resultIsPointer, f);
			}
		}
		break;
	case spv::OpTypeStruct:
		// Visit each member (member types start at word 2), applying
		// per-member decorations (e.g. Offset) before recursing.
		for(auto i = 0u; i < type.definition.wordCount() - 2; i++)
		{
			ApplyDecorationsForIdMember(&d, id, i);
			VisitMemoryObjectInner(type.definition.word(i + 2), d, index, offset, resultIsPointer, f);
		}
		break;
	case spv::OpTypeArray:
		{
			// Array length is a constant object (word 3); elements are
			// ArrayStride bytes apart.
			auto arraySize = GetConstScalarInt(type.definition.word(3));
			for(auto i = 0u; i < arraySize; i++)
			{
				ASSERT(d.HasArrayStride);
				VisitMemoryObjectInner(type.definition.word(2), d, index, offset + i * d.ArrayStride, resultIsPointer, f);
			}
		}
		break;
	default:
		UNREACHABLE("%s", OpcodeName(type.opcode()));
	}
}
372
Nicolas Capens71134742022-10-12 12:44:16 -0400373void Spirv::VisitMemoryObject(Object::ID id, bool resultIsPointer, const MemoryVisitor &f) const
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000374{
Nicolas Capens72f089c2020-04-08 23:37:08 -0400375 auto typeId = getObject(id).typeId();
Shahbaz Youssefi4dbbcd02022-09-13 22:23:30 -0400376 const auto &type = getType(typeId);
Nicolas Capens72f089c2020-04-08 23:37:08 -0400377
Nicolas Capens81bc9d92019-12-16 15:05:57 -0500378 if(IsExplicitLayout(type.storageClass))
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000379 {
Nicolas Capensd6806b32022-03-02 10:16:31 -0500380 Decorations d = GetDecorationsForId(id);
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000381 uint32_t index = 0;
Alexis Hetuda978d82022-06-14 17:51:58 -0400382 VisitMemoryObjectInner(typeId, d, index, 0, resultIsPointer, f);
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000383 }
384 else
385 {
386 // Objects without explicit layout are tightly packed.
Ben Clayton18c6a782019-12-03 12:08:16 +0000387 auto &elType = getType(type.element);
Nicolas Capensff9f9b52020-04-14 00:46:38 -0400388 for(auto index = 0u; index < elType.componentCount; index++)
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000389 {
Ben Clayton18c6a782019-12-03 12:08:16 +0000390 auto offset = static_cast<uint32_t>(index * sizeof(float));
Ben Claytonbc1c067be2019-12-17 20:37:37 +0000391 f({ index, offset, elType });
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000392 }
393 }
394}
395
// Resolves an object id to a SIMD::Pointer to the actual data it addresses.
// For plain pointers/interface variables this is a lookup; for descriptor-set
// backed objects it dereferences the buffer descriptor (per-lane when
// `nonUniform` is true), applying `arrayIndices` for arrayed bindings and any
// dynamic descriptor offsets.
SIMD::Pointer SpirvEmitter::GetPointerToData(Object::ID id, SIMD::Int arrayIndices, bool nonUniform) const
{
	auto &object = shader.getObject(id);
	switch(object.kind)
	{
	// Already a data pointer; return it as-is.
	case Object::Kind::Pointer:
	case Object::Kind::InterfaceVariable:
		return getPointer(id);

	case Object::Kind::DescriptorSet:
		{
			const auto &d = shader.descriptorDecorations.at(id);
			ASSERT(d.DescriptorSet >= 0 && static_cast<uint32_t>(d.DescriptorSet) < vk::MAX_BOUND_DESCRIPTOR_SETS);
			ASSERT(d.Binding >= 0);
			ASSERT(routine->pipelineLayout->getDescriptorCount(d.DescriptorSet, d.Binding) != 0);  // "If descriptorCount is zero this binding entry is reserved and the resource must not be accessed from any stage via this binding within any pipeline using the set layout."

			uint32_t bindingOffset = routine->pipelineLayout->getBindingOffset(d.DescriptorSet, d.Binding);
			uint32_t descriptorSize = routine->pipelineLayout->getDescriptorSize(d.DescriptorSet, d.Binding);

			auto set = getPointer(id);
			if(nonUniform)
			{
				// Non-uniform indexing: each lane may address a different
				// descriptor, so dereference the buffer pointer per lane.
				SIMD::Int descriptorOffset = bindingOffset + descriptorSize * arrayIndices;
				auto robustness = shader.getOutOfBoundsBehavior(id, routine->pipelineLayout);
				ASSERT(routine->pipelineLayout->getDescriptorType(d.DescriptorSet, d.Binding) != VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT);

				std::vector<Pointer<Byte>> pointers(SIMD::Width);
				for(int i = 0; i < SIMD::Width; i++)
				{
					pointers[i] = *Pointer<Pointer<Byte>>(set.getPointerForLane(i) + Extract(descriptorOffset, i) + OFFSET(vk::BufferDescriptor, ptr));
				}

				SIMD::Pointer ptr(pointers);

				if(routine->pipelineLayout->isDescriptorDynamic(d.DescriptorSet, d.Binding))
				{
					// Add the dynamic offset supplied at bind time, loaded
					// per lane from the routine's dynamic-offset array.
					SIMD::Int dynamicOffsetIndex = SIMD::Int(routine->pipelineLayout->getDynamicOffsetIndex(d.DescriptorSet, d.Binding) + arrayIndices);
					SIMD::Pointer routineDynamicOffsets = SIMD::Pointer(routine->descriptorDynamicOffsets, 0, sizeof(int) * dynamicOffsetIndex);
					SIMD::Int dynamicOffsets = routineDynamicOffsets.Load<SIMD::Int>(robustness, activeLaneMask());
					ptr += dynamicOffsets;
				}
				return ptr;
			}
			else
			{
				// Uniform indexing: all lanes use the same descriptor, so the
				// index is taken from lane 0.
				rr::Int arrayIdx = Extract(arrayIndices, 0);
				rr::Int descriptorOffset = bindingOffset + descriptorSize * arrayIdx;
				Pointer<Byte> descriptor = set.getUniformPointer() + descriptorOffset;  // BufferDescriptor* or inline uniform block

				auto descriptorType = routine->pipelineLayout->getDescriptorType(d.DescriptorSet, d.Binding);
				if(descriptorType == VK_DESCRIPTOR_TYPE_INLINE_UNIFORM_BLOCK_EXT)
				{
					// Note: there is no bounds checking for inline uniform blocks.
					// MAX_INLINE_UNIFORM_BLOCK_SIZE represents the maximum size of
					// an inline uniform block, but this value should remain unused.
					return SIMD::Pointer(descriptor, vk::MAX_INLINE_UNIFORM_BLOCK_SIZE);
				}
				else
				{
					// Dereference the descriptor's data pointer and size.
					Pointer<Byte> data = *Pointer<Pointer<Byte>>(descriptor + OFFSET(vk::BufferDescriptor, ptr));  // void*
					rr::Int size = *Pointer<Int>(descriptor + OFFSET(vk::BufferDescriptor, sizeInBytes));

					if(routine->pipelineLayout->isDescriptorDynamic(d.DescriptorSet, d.Binding))
					{
						// Apply the bind-time dynamic offset and clamp the
						// addressable range for robustness.
						rr::Int dynamicOffsetIndex =
						    routine->pipelineLayout->getDynamicOffsetIndex(d.DescriptorSet, d.Binding) +
						    arrayIdx;
						rr::Int offset = routine->descriptorDynamicOffsets[dynamicOffsetIndex];
						rr::Int robustnessSize = *Pointer<rr::Int>(descriptor + OFFSET(vk::BufferDescriptor, robustnessSize));

						return SIMD::Pointer(data + offset, Min(size, robustnessSize - offset));
					}
					else
					{
						return SIMD::Pointer(data, size);
					}
				}
			}
		}

	default:
		UNREACHABLE("Invalid pointer kind %d", int(object.kind));
		return SIMD::Pointer(Pointer<Byte>(), 0);
	}
}
481
Nicolas Capens1ab775a2022-10-12 15:27:02 -0400482void SpirvEmitter::OffsetToElement(SIMD::Pointer &ptr, Object::ID elementId, int32_t arrayStride) const
Alexis Hetu47c22462022-06-06 18:00:16 -0400483{
484 if(elementId != 0 && arrayStride != 0)
485 {
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400486 auto &elementObject = shader.getObject(elementId);
Alexis Hetu47c22462022-06-06 18:00:16 -0400487 ASSERT(elementObject.kind == Object::Kind::Constant || elementObject.kind == Object::Kind::Intermediate);
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400488
Alexis Hetu47c22462022-06-06 18:00:16 -0400489 if(elementObject.kind == Object::Kind::Constant)
490 {
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400491 ptr += shader.GetConstScalarInt(elementId) * arrayStride;
Alexis Hetu47c22462022-06-06 18:00:16 -0400492 }
493 else
494 {
Nicolas Capense1b8cbd2022-09-12 09:13:06 -0400495 ptr += getIntermediate(elementId).Int(0) * arrayStride;
Alexis Hetu47c22462022-06-06 18:00:16 -0400496 }
497 }
498}
499
Nicolas Capens1ab775a2022-10-12 15:27:02 -0400500void SpirvEmitter::Fence(spv::MemorySemanticsMask semantics) const
Nicolas Capens4c629802021-12-08 02:05:19 -0500501{
502 if(semantics != spv::MemorySemanticsMaskNone)
503 {
Nicolas Capensfdf41472022-09-26 00:40:11 -0400504 rr::Fence(shader.MemoryOrder(semantics));
Nicolas Capens4c629802021-12-08 02:05:19 -0500505 }
506}
507
Nicolas Capens71134742022-10-12 12:44:16 -0400508std::memory_order Spirv::MemoryOrder(spv::MemorySemanticsMask memorySemantics)
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000509{
Nicolas Capens4c629802021-12-08 02:05:19 -0500510 uint32_t control = static_cast<uint32_t>(memorySemantics) & static_cast<uint32_t>(
511 spv::MemorySemanticsAcquireMask |
512 spv::MemorySemanticsReleaseMask |
513 spv::MemorySemanticsAcquireReleaseMask |
514 spv::MemorySemanticsSequentiallyConsistentMask);
Nicolas Capens81bc9d92019-12-16 15:05:57 -0500515 switch(control)
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000516 {
Nicolas Capens112faf42019-12-13 17:32:26 -0500517 case spv::MemorySemanticsMaskNone: return std::memory_order_relaxed;
518 case spv::MemorySemanticsAcquireMask: return std::memory_order_acquire;
519 case spv::MemorySemanticsReleaseMask: return std::memory_order_release;
520 case spv::MemorySemanticsAcquireReleaseMask: return std::memory_order_acq_rel;
521 case spv::MemorySemanticsSequentiallyConsistentMask: return std::memory_order_acq_rel; // Vulkan 1.1: "SequentiallyConsistent is treated as AcquireRelease"
522 default:
523 // "it is invalid for more than one of these four bits to be set:
Nicolas Capens4c629802021-12-08 02:05:19 -0500524 // Acquire, Release, AcquireRelease, or SequentiallyConsistent."
Nicolas Capens112faf42019-12-13 17:32:26 -0500525 UNREACHABLE("MemorySemanticsMask: %x", int(control));
526 return std::memory_order_acq_rel;
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000527 }
528}
529
Nicolas Capens71134742022-10-12 12:44:16 -0400530bool Spirv::StoresInHelperInvocationsHaveNoEffect(spv::StorageClass storageClass)
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000531{
Nicolas Capens81bc9d92019-12-16 15:05:57 -0500532 switch(storageClass)
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000533 {
Nicolas Capens3a82d572022-10-22 21:50:25 -0400534 // "Stores and atomics performed by helper invocations must not have any effect on memory..."
Nicolas Capens112faf42019-12-13 17:32:26 -0500535 default:
536 return true;
Nicolas Capensbb5d3bd2022-11-23 14:03:20 -0500537 // "...except for the Function, Private and Output storage classes".
Nicolas Capens3a82d572022-10-22 21:50:25 -0400538 case spv::StorageClassFunction:
539 case spv::StorageClassPrivate:
Nicolas Capensbb5d3bd2022-11-23 14:03:20 -0500540 case spv::StorageClassOutput:
Nicolas Capens3a82d572022-10-22 21:50:25 -0400541 return false;
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000542 }
543}
544
Nicolas Capens71134742022-10-12 12:44:16 -0400545bool Spirv::IsExplicitLayout(spv::StorageClass storageClass)
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000546{
Alexis Hetu71ec98e2022-06-14 16:58:44 -0400547 // From the Vulkan spec:
548 // "Composite objects in the StorageBuffer, PhysicalStorageBuffer, Uniform,
549 // and PushConstant Storage Classes must be explicitly laid out."
Nicolas Capens81bc9d92019-12-16 15:05:57 -0500550 switch(storageClass)
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000551 {
Nicolas Capens112faf42019-12-13 17:32:26 -0500552 case spv::StorageClassUniform:
553 case spv::StorageClassStorageBuffer:
Alexis Hetu71ec98e2022-06-14 16:58:44 -0400554 case spv::StorageClassPhysicalStorageBuffer:
Nicolas Capens112faf42019-12-13 17:32:26 -0500555 case spv::StorageClassPushConstant:
556 return true;
557 default:
558 return false;
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000559 }
560}
561
Nicolas Capens1ab775a2022-10-12 15:27:02 -0400562sw::SIMD::Pointer SpirvEmitter::GetElementPointer(sw::SIMD::Pointer structure, uint32_t offset, spv::StorageClass storageClass)
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000563{
Nicolas Capensfdf41472022-09-26 00:40:11 -0400564 if(IsStorageInterleavedByLane(storageClass))
Nicolas Capens94c73622022-06-06 13:05:38 -0400565 {
Nicolas Capensafdb5122022-06-30 11:31:35 -0400566 for(int i = 0; i < SIMD::Width; i++)
567 {
568 structure.staticOffsets[i] += i * sizeof(float);
569 }
Nicolas Capens94c73622022-06-06 13:05:38 -0400570
571 return structure + offset * sw::SIMD::Width;
572 }
573 else
574 {
575 return structure + offset;
576 }
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000577}
578
Nicolas Capens1ab775a2022-10-12 15:27:02 -0400579bool SpirvEmitter::IsStorageInterleavedByLane(spv::StorageClass storageClass)
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000580{
Nicolas Capens81bc9d92019-12-16 15:05:57 -0500581 switch(storageClass)
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000582 {
Nicolas Capens112faf42019-12-13 17:32:26 -0500583 case spv::StorageClassUniform:
584 case spv::StorageClassStorageBuffer:
Alexis Hetu71ec98e2022-06-14 16:58:44 -0400585 case spv::StorageClassPhysicalStorageBuffer:
Nicolas Capens112faf42019-12-13 17:32:26 -0500586 case spv::StorageClassPushConstant:
587 case spv::StorageClassWorkgroup:
588 case spv::StorageClassImage:
589 return false;
590 default:
591 return true;
Ben Claytonf3e2cc22019-11-28 12:02:15 +0000592 }
593}
594
595} // namespace sw