//===- subzero/src/IceTargetLowering.cpp - Basic lowering implementation --===//
//
// The Subzero Code Generator
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements the skeleton of the TargetLowering class, specifically
/// invoking the appropriate lowering method for a given instruction kind and
/// driving global register allocation. It also implements the non-deleted
/// instruction iteration in LoweringContext.
///
//===----------------------------------------------------------------------===//

#include "IceTargetLowering.h"

#include "IceAssemblerARM32.h"
#include "IceAssemblerMIPS32.h"
#include "IceAssemblerX8632.h"
#include "IceAssemblerX8664.h"
#include "IceCfg.h" // setError()
#include "IceCfgNode.h"
#include "IceGlobalInits.h"
#include "IceInstVarIter.h"
#include "IceOperand.h"
#include "IceRegAlloc.h"
#include "IceTargetLoweringARM32.h"
#include "IceTargetLoweringMIPS32.h"
#include "IceTargetLoweringX8632.h"
#include "IceTargetLoweringX8664.h"

namespace Ice {

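// Initializes the iteration state over Node's instruction list: Cur points at
// the first non-deleted instruction, Next at the non-deleted instruction
// after it, and End marks the end of the list.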
void LoweringContext::init(CfgNode *N) {
  Node = N;
  End = getNode()->getInsts().end();
  rewind();
  advanceForward(Next);
}

void LoweringContext::rewind() {
  Begin = getNode()->getInsts().begin();
  Cur = Begin;
  skipDeleted(Cur);
  Next = Cur;
}

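// Inserts a new instruction before the Next insertion point and records it as
// the most recently inserted instruction.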
void LoweringContext::insert(Inst *Inst) {
  getNode()->getInsts().insert(Next, Inst);
  LastInserted = Inst;
}

void LoweringContext::skipDeleted(InstList::iterator &I) const {
  while (I != End && I->isDeleted())
    ++I;
}

void LoweringContext::advanceForward(InstList::iterator &I) const {
  if (I != End) {
    ++I;
    skipDeleted(I);
  }
}

Inst *LoweringContext::getLastInserted() const {
  assert(LastInserted);
  return LastInserted;
}

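// Creates the target-specific lowering object for the given target
// architecture, by expanding the target list in SZTargets.def.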
TargetLowering *TargetLowering::createLowering(TargetArch Target, Cfg *Func) {
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return Target##X::create(Func);
#include "llvm/Config/SZTargets.def"

  Func->setError("Unsupported target");
  return nullptr;
}

TargetLowering::TargetLowering(Cfg *Func)
    : Func(Func), Ctx(Func->getContext()), Context() {}

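// Creates the integrated assembler for the given target architecture, again
// driven by the target list in SZTargets.def.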
std::unique_ptr<Assembler> TargetLowering::createAssembler(TargetArch Target,
                                                           Cfg *Func) {
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return std::unique_ptr<Assembler>(new X::Assembler##X());
#include "llvm/Config/SZTargets.def"

  Func->setError("Unsupported target assembler");
  return nullptr;
}

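// Invokes the target-specific address-mode optimization hook for the current
// instruction if it is a load or a store, then advances the context.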
void TargetLowering::doAddressOpt() {
  if (llvm::isa<InstLoad>(*Context.getCur()))
    doAddressOptLoad();
  else if (llvm::isa<InstStore>(*Context.getCur()))
    doAddressOptStore();
  Context.advanceCur();
  Context.advanceNext();
}

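// Randomly inserts NOPs at the current instruction, up to
// MaxNopsPerInstruction times, each with the configured probability. Fake
// instructions, redundant assignments, and deleted instructions are skipped.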
void TargetLowering::doNopInsertion(RandomNumberGenerator &RNG) {
  Inst *I = Context.getCur();
  bool ShouldSkip = llvm::isa<InstFakeUse>(I) || llvm::isa<InstFakeDef>(I) ||
                    llvm::isa<InstFakeKill>(I) || I->isRedundantAssign() ||
                    I->isDeleted();
  if (!ShouldSkip) {
    int Probability = Ctx->getFlags().getNopProbabilityAsPercentage();
    for (int I = 0; I < Ctx->getFlags().getMaxNopsPerInstruction(); ++I) {
      randomlyInsertNop(Probability / 100.0, RNG);
    }
  }
}

// Lowers a single instruction according to the information in Context, by
// checking the Context.Cur instruction kind and calling the appropriate
// lowering method. The lowering method should insert target instructions at
// the Cur.Next insertion point, and should not delete the Context.Cur
// instruction or advance Context.Cur.
//
// The lowering method may look ahead in the instruction stream as desired, and
// lower additional instructions in conjunction with the current one, for
// example fusing a compare and branch. If it does, it should advance
// Context.Cur to point to the next non-deleted instruction to process, and it
// should delete any additional instructions it consumes.
void TargetLowering::lower() {
  assert(!Context.atEnd());
  Inst *Inst = Context.getCur();
  Inst->deleteIfDead();
  if (!Inst->isDeleted() && !llvm::isa<InstFakeDef>(Inst) &&
      !llvm::isa<InstFakeUse>(Inst)) {
    // Mark the current instruction as deleted before lowering, otherwise the
    // Dest variable will likely get marked as non-SSA. See
    // Variable::setDefinition(). However, just pass through FakeDef and
    // FakeUse instructions that might have been inserted prior to lowering.
    Inst->setDeleted();
    switch (Inst->getKind()) {
    case Inst::Alloca:
      lowerAlloca(llvm::cast<InstAlloca>(Inst));
      break;
    case Inst::Arithmetic:
      lowerArithmetic(llvm::cast<InstArithmetic>(Inst));
      break;
    case Inst::Assign:
      lowerAssign(llvm::cast<InstAssign>(Inst));
      break;
    case Inst::Br:
      lowerBr(llvm::cast<InstBr>(Inst));
      break;
    case Inst::Call:
      lowerCall(llvm::cast<InstCall>(Inst));
      break;
    case Inst::Cast:
      lowerCast(llvm::cast<InstCast>(Inst));
      break;
    case Inst::ExtractElement:
      lowerExtractElement(llvm::cast<InstExtractElement>(Inst));
      break;
    case Inst::Fcmp:
      lowerFcmp(llvm::cast<InstFcmp>(Inst));
      break;
    case Inst::Icmp:
      lowerIcmp(llvm::cast<InstIcmp>(Inst));
      break;
    case Inst::InsertElement:
      lowerInsertElement(llvm::cast<InstInsertElement>(Inst));
      break;
    case Inst::IntrinsicCall: {
      InstIntrinsicCall *Call = llvm::cast<InstIntrinsicCall>(Inst);
      if (Call->getIntrinsicInfo().ReturnsTwice)
        setCallsReturnsTwice(true);
      lowerIntrinsicCall(Call);
      break;
    }
    case Inst::Load:
      lowerLoad(llvm::cast<InstLoad>(Inst));
      break;
    case Inst::Phi:
      lowerPhi(llvm::cast<InstPhi>(Inst));
      break;
    case Inst::Ret:
      lowerRet(llvm::cast<InstRet>(Inst));
      break;
    case Inst::Select:
      lowerSelect(llvm::cast<InstSelect>(Inst));
      break;
    case Inst::Store:
      lowerStore(llvm::cast<InstStore>(Inst));
      break;
    case Inst::Switch:
      lowerSwitch(llvm::cast<InstSwitch>(Inst));
      break;
    case Inst::Unreachable:
      lowerUnreachable(llvm::cast<InstUnreachable>(Inst));
      break;
    default:
      lowerOther(Inst);
      break;
    }

    postLower();
  }

  Context.advanceCur();
  Context.advanceNext();
}

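// Lowers a single high-level instruction that has been inserted into Node at
// the Next position, by pointing the lowering context at it and invoking
// lower().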
void TargetLowering::lowerInst(CfgNode *Node, InstList::iterator Next,
                               InstHighLevel *Instr) {
  // TODO(stichnot): Consider modifying the design/implementation to avoid
  // multiple init() calls when using lowerInst() to lower several instructions
  // in the same node.
  Context.init(Node);
  Context.setNext(Next);
  Context.insert(Instr);
  --Next;
  assert(&*Next == Instr);
  Context.setCur(Next);
  lower();
}

void TargetLowering::lowerOther(const Inst *Instr) {
  (void)Instr;
  Func->setError("Can't lower unsupported instruction type");
}

// Drives register allocation, allowing all physical registers (except perhaps
// for the frame pointer) to be allocated. This set of registers could
// potentially be parameterized if we want to restrict registers e.g. for
// performance testing.
void TargetLowering::regAlloc(RegAllocKind Kind) {
  TimerMarker T(TimerStack::TT_regAlloc, Func);
  LinearScan LinearScan(Func);
  RegSetMask RegInclude = RegSet_None;
  RegSetMask RegExclude = RegSet_None;
  RegInclude |= RegSet_CallerSave;
  RegInclude |= RegSet_CalleeSave;
  if (hasFramePointer())
    RegExclude |= RegSet_FramePointer;
  LinearScan.init(Kind);
  llvm::SmallBitVector RegMask = getRegisterSet(RegInclude, RegExclude);
  LinearScan.scan(RegMask, Ctx->getFlags().shouldRandomizeRegAlloc());
}

void TargetLowering::inferTwoAddress() {
  // Find two-address non-SSA instructions where Dest==Src0, and set the
  // DestNonKillable flag to keep liveness analysis consistent.
  for (auto Inst = Context.getCur(), E = Context.getNext(); Inst != E; ++Inst) {
    if (Inst->isDeleted())
      continue;
    if (Variable *Dest = Inst->getDest()) {
      // TODO(stichnot): We may need to consider all source operands, not just
      // the first one, if using 3-address instructions.
      if (Inst->getSrcSize() > 0 && Inst->getSrc(0) == Dest)
        Inst->setDestNonKillable();
    }
  }
}

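// Copies Source into Dest, sorted by decreasing type width on the stack (a
// proxy for alignment), so that the most strictly aligned variables come
// first.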
void TargetLowering::sortVarsByAlignment(VarList &Dest,
                                         const VarList &Source) const {
  Dest = Source;
  // Instead of std::sort, we could do a bucket sort with log2(alignment) as
  // the buckets, if performance is an issue.
  std::sort(Dest.begin(), Dest.end(),
            [this](const Variable *V1, const Variable *V2) {
              return typeWidthInBytesOnStack(V1->getType()) >
                     typeWidthInBytesOnStack(V2->getType());
            });
}

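// Computes the stack frame layout parameters for spilled variables: collects
// the variables that need stack slots (skipping register-allocated variables,
// arguments, unreferenced variables, and variables claimed by TargetVarHook),
// sorts them by alignment, and accumulates the sizes and alignments of the
// globals and locals spill areas.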
void TargetLowering::getVarStackSlotParams(
    VarList &SortedSpilledVariables, llvm::SmallBitVector &RegsUsed,
    size_t *GlobalsSize, size_t *SpillAreaSizeBytes,
    uint32_t *SpillAreaAlignmentBytes, uint32_t *LocalsSlotsAlignmentBytes,
    std::function<bool(Variable *)> TargetVarHook) {
  const VariablesMetadata *VMetadata = Func->getVMetadata();
  llvm::BitVector IsVarReferenced(Func->getNumVariables());
  for (CfgNode *Node : Func->getNodes()) {
    for (Inst &Inst : Node->getInsts()) {
      if (Inst.isDeleted())
        continue;
      if (const Variable *Var = Inst.getDest())
        IsVarReferenced[Var->getIndex()] = true;
      FOREACH_VAR_IN_INST(Var, Inst) {
        IsVarReferenced[Var->getIndex()] = true;
      }
    }
  }

  // If SimpleCoalescing is false, each variable without a register gets its
  // own unique stack slot, which leads to large stack frames. If
  // SimpleCoalescing is true, then each "global" variable without a register
  // gets its own slot, but "local" variable slots are reused across basic
  // blocks. E.g., if A and B are local to block 1 and C is local to block 2,
  // then C may share a slot with A or B.
  //
  // We cannot coalesce stack slots if this function calls a "returns twice"
  // function. In that case, basic blocks may be revisited, and variables local
  // to those basic blocks are actually live until after the called function
  // returns a second time.
  const bool SimpleCoalescing = !callsReturnsTwice();

  std::vector<size_t> LocalsSize(Func->getNumNodes());
  const VarList &Variables = Func->getVariables();
  VarList SpilledVariables;
  for (Variable *Var : Variables) {
    if (Var->hasReg()) {
      RegsUsed[Var->getRegNum()] = true;
      continue;
    }
    // An argument either does not need a stack slot (if passed in a register)
    // or already has one (if passed on the stack).
    if (Var->getIsArg())
      continue;
    // An unreferenced variable doesn't need a stack slot.
    if (!IsVarReferenced[Var->getIndex()])
      continue;
    // A target-specific variable may end up sharing stack slots, so let the
    // hook account for it instead of handling it here.
    if (TargetVarHook(Var))
      continue;
    SpilledVariables.push_back(Var);
  }

  SortedSpilledVariables.reserve(SpilledVariables.size());
  sortVarsByAlignment(SortedSpilledVariables, SpilledVariables);

  for (Variable *Var : SortedSpilledVariables) {
    size_t Increment = typeWidthInBytesOnStack(Var->getType());
    // We have sorted by alignment, so the first variable we encounter that is
    // located in each area determines the max alignment for the area.
    if (!*SpillAreaAlignmentBytes)
      *SpillAreaAlignmentBytes = Increment;
    if (SimpleCoalescing && VMetadata->isTracked(Var)) {
      if (VMetadata->isMultiBlock(Var)) {
        *GlobalsSize += Increment;
      } else {
        SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex();
        LocalsSize[NodeIndex] += Increment;
        if (LocalsSize[NodeIndex] > *SpillAreaSizeBytes)
          *SpillAreaSizeBytes = LocalsSize[NodeIndex];
        if (!*LocalsSlotsAlignmentBytes)
          *LocalsSlotsAlignmentBytes = Increment;
      }
    } else {
      *SpillAreaSizeBytes += Increment;
    }
  }
  // For testing legalization of large stack offsets on targets with limited
  // offset bits in instruction encodings, add some padding.
  *SpillAreaSizeBytes += Ctx->getFlags().getTestStackExtra();
}

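// Computes the padding needed to align the start of the spill area to
// SpillAreaAlignmentBytes, and the padding needed after the globals area so
// that the locals area starts at LocalsSlotsAlignmentBytes alignment.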
void TargetLowering::alignStackSpillAreas(uint32_t SpillAreaStartOffset,
                                          uint32_t SpillAreaAlignmentBytes,
                                          size_t GlobalsSize,
                                          uint32_t LocalsSlotsAlignmentBytes,
                                          uint32_t *SpillAreaPaddingBytes,
                                          uint32_t *LocalsSlotsPaddingBytes) {
  if (SpillAreaAlignmentBytes) {
    uint32_t PaddingStart = SpillAreaStartOffset;
    uint32_t SpillAreaStart =
        Utils::applyAlignment(PaddingStart, SpillAreaAlignmentBytes);
    *SpillAreaPaddingBytes = SpillAreaStart - PaddingStart;
  }

  // If there are separate globals and locals areas, make sure the locals area
  // is aligned by padding the end of the globals area.
  if (LocalsSlotsAlignmentBytes) {
    uint32_t GlobalsAndSubsequentPaddingSize = GlobalsSize;
    GlobalsAndSubsequentPaddingSize =
        Utils::applyAlignment(GlobalsSize, LocalsSlotsAlignmentBytes);
    *LocalsSlotsPaddingBytes = GlobalsAndSubsequentPaddingSize - GlobalsSize;
  }
}

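// Assigns a stack offset to each sorted spilled variable, reusing slots for
// single-block ("local") variables when stack slot coalescing is possible.
// Offsets are negative relative to the frame pointer, or positive relative to
// the stack pointer, depending on UsesFramePointer.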
void TargetLowering::assignVarStackSlots(VarList &SortedSpilledVariables,
                                         size_t SpillAreaPaddingBytes,
                                         size_t SpillAreaSizeBytes,
                                         size_t GlobalsAndSubsequentPaddingSize,
                                         bool UsesFramePointer) {
  const VariablesMetadata *VMetadata = Func->getVMetadata();
  // For testing legalization of large stack offsets on targets with limited
  // offset bits in instruction encodings, add some padding. This assumes that
  // SpillAreaSizeBytes has accounted for the extra test padding. When
  // UsesFramePointer is true, the offset depends on the padding, not just the
  // SpillAreaSizeBytes. On the other hand, when UsesFramePointer is false, the
  // offsets depend on the gap between SpillAreaSizeBytes and
  // SpillAreaPaddingBytes, so we don't increment that.
  size_t TestPadding = Ctx->getFlags().getTestStackExtra();
  if (UsesFramePointer)
    SpillAreaPaddingBytes += TestPadding;
  size_t GlobalsSpaceUsed = SpillAreaPaddingBytes;
  size_t NextStackOffset = SpillAreaPaddingBytes;
  std::vector<size_t> LocalsSize(Func->getNumNodes());
  const bool SimpleCoalescing = !callsReturnsTwice();

  for (Variable *Var : SortedSpilledVariables) {
    size_t Increment = typeWidthInBytesOnStack(Var->getType());
    if (SimpleCoalescing && VMetadata->isTracked(Var)) {
      if (VMetadata->isMultiBlock(Var)) {
        GlobalsSpaceUsed += Increment;
        NextStackOffset = GlobalsSpaceUsed;
      } else {
        SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex();
        LocalsSize[NodeIndex] += Increment;
        NextStackOffset = SpillAreaPaddingBytes +
                          GlobalsAndSubsequentPaddingSize +
                          LocalsSize[NodeIndex];
      }
    } else {
      NextStackOffset += Increment;
    }
    if (UsesFramePointer)
      Var->setStackOffset(-NextStackOffset);
    else
      Var->setStackOffset(SpillAreaSizeBytes - NextStackOffset);
  }
}

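// Builds (but does not insert) a call instruction to the named runtime helper
// function, with room for MaxSrcs arguments; the helper is referenced as an
// external symbol, and the call is never a tail call.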
InstCall *TargetLowering::makeHelperCall(const IceString &Name, Variable *Dest,
                                         SizeT MaxSrcs) {
  const bool HasTailCall = false;
  Constant *CallTarget = Ctx->getConstantExternSym(Name);
  InstCall *Call =
      InstCall::create(Func, MaxSrcs, Dest, CallTarget, HasTailCall);
  return Call;
}

bool TargetLowering::shouldOptimizeMemIntrins() {
  return Ctx->getFlags().getOptLevel() >= Opt_1 ||
         Ctx->getFlags().getForceMemIntrinOpt();
}

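// Emits the (possibly mangled) symbol name and any nonzero offset of a
// relocatable constant, without the target's constant prefix.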
void TargetLowering::emitWithoutPrefix(const ConstantRelocatable *C) const {
  if (!BuildDefs::dump())
    return;
  Ostream &Str = Ctx->getStrEmit();
  if (C->getSuppressMangling())
    Str << C->getName();
  else
    Str << Ctx->mangleName(C->getName());
  RelocOffsetT Offset = C->getOffset();
  if (Offset) {
    if (Offset > 0)
      Str << "+";
    Str << Offset;
  }
}

void TargetLowering::emit(const ConstantRelocatable *C) const {
  if (!BuildDefs::dump())
    return;
  Ostream &Str = Ctx->getStrEmit();
  Str << getConstantPrefix();
  emitWithoutPrefix(C);
}

std::unique_ptr<TargetDataLowering>
TargetDataLowering::createLowering(GlobalContext *Ctx) {
  TargetArch Target = Ctx->getFlags().getTargetArch();
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return TargetData##X::create(Ctx);
#include "llvm/Config/SZTargets.def"

  llvm::report_fatal_error("Unsupported target data lowering");
}

TargetDataLowering::~TargetDataLowering() = default;

namespace {

// dataSectionSuffix decides whether to use SectionSuffix or MangledVarName as
// the data section suffix. Essentially, when using separate data sections for
// globals, SectionSuffix is not necessary.
IceString dataSectionSuffix(const IceString &SectionSuffix,
                            const IceString &MangledVarName,
                            const bool DataSections) {
  if (SectionSuffix.empty() && !DataSections) {
    return "";
  }

  if (DataSections) {
    // With data sections we don't need to use the SectionSuffix.
    return "." + MangledVarName;
  }

  assert(!SectionSuffix.empty());
  return "." + SectionSuffix;
}

} // end of anonymous namespace

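// Emits the assembly for one global variable: a .type directive, the
// appropriate section (.rodata, .data, or .bss, possibly with a per-global
// suffix), optional .globl and .p2align directives, the label, the
// initializer bytes (or .zero), and a closing .size directive. As an
// illustrative sketch only, a hypothetical 4-byte constant global foo
// (externally visible, alignment 4, with data sections enabled) would emit
// roughly:
//
//   .type foo,%object
//   .section .rodata.foo,"a",%progbits
//   .globl foo
//   .p2align 2
// foo:
//   .byte 1
//   .byte 0
//   .byte 0
//   .byte 0
//   .size foo, 4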
void TargetDataLowering::emitGlobal(const VariableDeclaration &Var,
                                    const IceString &SectionSuffix) {
  if (!BuildDefs::dump())
    return;

  // If external and not initialized, this must be a cross test. Don't generate
  // a declaration for such cases.
  const bool IsExternal =
      Var.isExternal() || Ctx->getFlags().getDisableInternal();
  if (IsExternal && !Var.hasInitializer())
    return;

  Ostream &Str = Ctx->getStrEmit();
  const bool HasNonzeroInitializer = Var.hasNonzeroInitializer();
  const bool IsConstant = Var.getIsConstant();
  const SizeT Size = Var.getNumBytes();
  const IceString MangledName = Var.mangleName(Ctx);

  Str << "\t.type\t" << MangledName << ",%object\n";

  const bool UseDataSections = Ctx->getFlags().getDataSections();
  const IceString Suffix =
      dataSectionSuffix(SectionSuffix, MangledName, UseDataSections);
  if (IsConstant)
    Str << "\t.section\t.rodata" << Suffix << ",\"a\",%progbits\n";
  else if (HasNonzeroInitializer)
    Str << "\t.section\t.data" << Suffix << ",\"aw\",%progbits\n";
  else
    Str << "\t.section\t.bss" << Suffix << ",\"aw\",%nobits\n";

  if (IsExternal)
    Str << "\t.globl\t" << MangledName << "\n";

  const uint32_t Align = Var.getAlignment();
  if (Align > 1) {
    assert(llvm::isPowerOf2_32(Align));
    // Use the .p2align directive, since the .align N directive can either
    // interpret N as bytes, or power of 2 bytes, depending on the target.
    Str << "\t.p2align\t" << llvm::Log2_32(Align) << "\n";
  }

  Str << MangledName << ":\n";

  if (HasNonzeroInitializer) {
    for (const std::unique_ptr<VariableDeclaration::Initializer> &Init :
         Var.getInitializers()) {
      switch (Init->getKind()) {
      case VariableDeclaration::Initializer::DataInitializerKind: {
        const auto &Data =
            llvm::cast<VariableDeclaration::DataInitializer>(Init.get())
                ->getContents();
        for (SizeT i = 0; i < Init->getNumBytes(); ++i) {
          Str << "\t.byte\t" << (((unsigned)Data[i]) & 0xff) << "\n";
        }
        break;
      }
      case VariableDeclaration::Initializer::ZeroInitializerKind:
        Str << "\t.zero\t" << Init->getNumBytes() << "\n";
        break;
      case VariableDeclaration::Initializer::RelocInitializerKind: {
        const auto *Reloc =
            llvm::cast<VariableDeclaration::RelocInitializer>(Init.get());
        Str << "\t" << getEmit32Directive() << "\t";
        Str << Reloc->getDeclaration()->mangleName(Ctx);
        if (RelocOffsetT Offset = Reloc->getOffset()) {
          if (Offset >= 0 || (Offset == INT32_MIN))
            Str << " + " << Offset;
          else
            Str << " - " << -Offset;
        }
        Str << "\n";
        break;
      }
      }
    }
  } else {
    // NOTE: for non-constant zero initializers, this is BSS (no bits), so an
    // ELF writer would not write to the file, and only track virtual offsets,
    // but the .s writer still needs this .zero and cannot simply use the .size
    // to advance offsets.
    Str << "\t.zero\t" << Size << "\n";
  }

  Str << "\t.size\t" << MangledName << ", " << Size << "\n";
}

std::unique_ptr<TargetHeaderLowering>
TargetHeaderLowering::createLowering(GlobalContext *Ctx) {
  TargetArch Target = Ctx->getFlags().getTargetArch();
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return TargetHeader##X::create(Ctx);
#include "llvm/Config/SZTargets.def"

  llvm::report_fatal_error("Unsupported target header lowering");
}

TargetHeaderLowering::~TargetHeaderLowering() = default;

} // end of namespace Ice