//===- subzero/src/IceTargetLowering.cpp - Basic lowering implementation --===//
//
// The Subzero Code Generator
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file implements the skeleton of the TargetLowering class,
/// specifically invoking the appropriate lowering method for a given
/// instruction kind and driving global register allocation. It also
/// implements the non-deleted instruction iteration in LoweringContext.
///
//===----------------------------------------------------------------------===//

#include "IceTargetLowering.h"

#include "IceAssemblerARM32.h"
#include "IceAssemblerMIPS32.h"
#include "IceAssemblerX8632.h"
#include "IceAssemblerX8664.h"
#include "IceCfg.h" // setError()
#include "IceCfgNode.h"
#include "IceGlobalInits.h"
#include "IceOperand.h"
#include "IceRegAlloc.h"
#include "IceTargetLoweringARM32.h"
#include "IceTargetLoweringMIPS32.h"
#include "IceTargetLoweringX8632.h"
#include "IceTargetLoweringX8664.h"

namespace Ice {

void LoweringContext::init(CfgNode *N) {
  Node = N;
  End = getNode()->getInsts().end();
  rewind();
  advanceForward(Next);
}

void LoweringContext::rewind() {
  Begin = getNode()->getInsts().begin();
  Cur = Begin;
  skipDeleted(Cur);
  Next = Cur;
}

void LoweringContext::insert(Inst *Inst) {
  getNode()->getInsts().insert(Next, Inst);
  LastInserted = Inst;
}

void LoweringContext::skipDeleted(InstList::iterator &I) const {
  while (I != End && I->isDeleted())
    ++I;
}

void LoweringContext::advanceForward(InstList::iterator &I) const {
  if (I != End) {
    ++I;
    skipDeleted(I);
  }
}

Inst *LoweringContext::getLastInserted() const {
  assert(LastInserted);
  return LastInserted;
}

TargetLowering *TargetLowering::createLowering(TargetArch Target, Cfg *Func) {
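// The supported targets are defined by X-macro expansion of
// llvm/Config/SZTargets.def: for each target X registered there (e.g.,
// X8632), the include below roughly expands to
//   if (Target == Target_X8632)
//     return TargetX8632::create(Func);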
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return Target##X::create(Func);
#include "llvm/Config/SZTargets.def"

  Func->setError("Unsupported target");
  return nullptr;
}

TargetLowering::TargetLowering(Cfg *Func)
    : Func(Func), Ctx(Func->getContext()), Context() {}

std::unique_ptr<Assembler> TargetLowering::createAssembler(TargetArch Target,
                                                           Cfg *Func) {
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return std::unique_ptr<Assembler>(new X::Assembler##X());
#include "llvm/Config/SZTargets.def"

  Func->setError("Unsupported target assembler");
  return nullptr;
}

void TargetLowering::doAddressOpt() {
  if (llvm::isa<InstLoad>(*Context.getCur()))
    doAddressOptLoad();
  else if (llvm::isa<InstStore>(*Context.getCur()))
    doAddressOptStore();
  Context.advanceCur();
  Context.advanceNext();
}

void TargetLowering::doNopInsertion() {
  Inst *I = Context.getCur();
  bool ShouldSkip = llvm::isa<InstFakeUse>(I) || llvm::isa<InstFakeDef>(I) ||
                    llvm::isa<InstFakeKill>(I) || I->isRedundantAssign() ||
                    I->isDeleted();
  if (!ShouldSkip) {
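    // Insert up to MaxNopsPerInstruction nops before this instruction, each
    // one independently with the configured probability (expressed as a
    // percentage, hence the division by 100.0 below).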
    int Probability = Ctx->getFlags().getNopProbabilityAsPercentage();
    for (int J = 0; J < Ctx->getFlags().getMaxNopsPerInstruction(); ++J) {
      randomlyInsertNop(Probability / 100.0);
    }
  }
}

// Lowers a single instruction according to the information in Context, by
// checking the Context.Cur instruction kind and calling the appropriate
// lowering method. The lowering method should insert target instructions at
// the Cur.Next insertion point, and should not delete the Context.Cur
// instruction or advance Context.Cur.
//
// The lowering method may look ahead in the instruction stream as desired,
// and lower additional instructions in conjunction with the current one, for
// example fusing a compare and branch. If it does, it should advance
// Context.Cur to point to the next non-deleted instruction to process, and
// it should delete any additional instructions it consumes.
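//
// As an illustrative sketch only (hypothetical, not code from this file), a
// target's lowering method might look like:
//
//   void TargetX8632::lowerAssign(const InstAssign *Inst) {
//     Variable *Dest = Inst->getDest();
//     Operand *Src0 = Inst->getSrc(0);
//     _mov(Dest, Src0); // emitted at the Context.Next insertion point
//   }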
void TargetLowering::lower() {
  assert(!Context.atEnd());
  Inst *Inst = Context.getCur();
  Inst->deleteIfDead();
  if (!Inst->isDeleted() && !llvm::isa<InstFakeDef>(Inst) &&
      !llvm::isa<InstFakeUse>(Inst)) {
    // Mark the current instruction as deleted before lowering; otherwise the
    // Dest variable will likely get marked as non-SSA. See
    // Variable::setDefinition(). However, just pass through FakeDef and
    // FakeUse instructions that might have been inserted prior to lowering.
    Inst->setDeleted();
    switch (Inst->getKind()) {
    case Inst::Alloca:
      lowerAlloca(llvm::cast<InstAlloca>(Inst));
      break;
    case Inst::Arithmetic:
      lowerArithmetic(llvm::cast<InstArithmetic>(Inst));
      break;
    case Inst::Assign:
      lowerAssign(llvm::cast<InstAssign>(Inst));
      break;
    case Inst::Br:
      lowerBr(llvm::cast<InstBr>(Inst));
      break;
    case Inst::Call:
      lowerCall(llvm::cast<InstCall>(Inst));
      break;
    case Inst::Cast:
      lowerCast(llvm::cast<InstCast>(Inst));
      break;
    case Inst::ExtractElement:
      lowerExtractElement(llvm::cast<InstExtractElement>(Inst));
      break;
    case Inst::Fcmp:
      lowerFcmp(llvm::cast<InstFcmp>(Inst));
      break;
    case Inst::Icmp:
      lowerIcmp(llvm::cast<InstIcmp>(Inst));
      break;
    case Inst::InsertElement:
      lowerInsertElement(llvm::cast<InstInsertElement>(Inst));
      break;
    case Inst::IntrinsicCall: {
      InstIntrinsicCall *Call = llvm::cast<InstIntrinsicCall>(Inst);
      if (Call->getIntrinsicInfo().ReturnsTwice)
        setCallsReturnsTwice(true);
      lowerIntrinsicCall(Call);
      break;
    }
    case Inst::Load:
      lowerLoad(llvm::cast<InstLoad>(Inst));
      break;
    case Inst::Phi:
      lowerPhi(llvm::cast<InstPhi>(Inst));
      break;
    case Inst::Ret:
      lowerRet(llvm::cast<InstRet>(Inst));
      break;
    case Inst::Select:
      lowerSelect(llvm::cast<InstSelect>(Inst));
      break;
    case Inst::Store:
      lowerStore(llvm::cast<InstStore>(Inst));
      break;
    case Inst::Switch:
      lowerSwitch(llvm::cast<InstSwitch>(Inst));
      break;
    case Inst::Unreachable:
      lowerUnreachable(llvm::cast<InstUnreachable>(Inst));
      break;
    default:
      lowerOther(Inst);
      break;
    }

    postLower();
  }

  Context.advanceCur();
  Context.advanceNext();
}

void TargetLowering::lowerOther(const Inst *Instr) {
  (void)Instr;
  Func->setError("Can't lower unsupported instruction type");
}

// Drives register allocation, allowing all physical registers (except
// perhaps for the frame pointer) to be allocated. This set of registers
// could potentially be parameterized if we want to restrict registers, e.g.,
// for performance testing.
void TargetLowering::regAlloc(RegAllocKind Kind) {
  TimerMarker T(TimerStack::TT_regAlloc, Func);
  LinearScan LinearScan(Func);
  RegSetMask RegInclude = RegSet_None;
  RegSetMask RegExclude = RegSet_None;
  RegInclude |= RegSet_CallerSave;
  RegInclude |= RegSet_CalleeSave;
  if (hasFramePointer())
    RegExclude |= RegSet_FramePointer;
  LinearScan.init(Kind);
  llvm::SmallBitVector RegMask = getRegisterSet(RegInclude, RegExclude);
  LinearScan.scan(RegMask, Ctx->getFlags().shouldRandomizeRegAlloc());
}

void TargetLowering::inferTwoAddress() {
  // Find two-address non-SSA instructions where Dest == Src0, and set the
  // DestNonKillable flag to keep liveness analysis consistent.
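  // For example, a two-address x86-style add implementing "a = a + b" both
  // reads and writes a, so a's incoming value must stay live into the
  // instruction rather than being killed by the redefinition.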
  for (auto Inst = Context.getCur(), E = Context.getNext(); Inst != E; ++Inst) {
    if (Inst->isDeleted())
      continue;
    if (Variable *Dest = Inst->getDest()) {
      // TODO(stichnot): We may need to consider all source operands, not
      // just the first one, if using 3-address instructions.
      if (Inst->getSrcSize() > 0 && Inst->getSrc(0) == Dest)
        Inst->setDestNonKillable();
    }
  }
}

void TargetLowering::sortVarsByAlignment(VarList &Dest,
                                         const VarList &Source) const {
  Dest = Source;
  // Instead of std::sort, we could do a bucket sort with log2(alignment) as
  // the buckets, if performance is an issue.
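  // Sorting by decreasing on-stack width acts as a proxy for sorting by
  // alignment: placing the widest variables first keeps each subsequent
  // slot's offset naturally aligned for its type.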
  std::sort(Dest.begin(), Dest.end(),
            [this](const Variable *V1, const Variable *V2) {
              return typeWidthInBytesOnStack(V1->getType()) >
                     typeWidthInBytesOnStack(V2->getType());
            });
}

void TargetLowering::getVarStackSlotParams(
    VarList &SortedSpilledVariables, llvm::SmallBitVector &RegsUsed,
    size_t *GlobalsSize, size_t *SpillAreaSizeBytes,
    uint32_t *SpillAreaAlignmentBytes, uint32_t *LocalsSlotsAlignmentBytes,
    std::function<bool(Variable *)> TargetVarHook) {
  const VariablesMetadata *VMetadata = Func->getVMetadata();
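  // First pass: mark every variable that is a dest or source of some
  // non-deleted instruction, so that unreferenced variables can be skipped
  // when assigning stack slots.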
  llvm::BitVector IsVarReferenced(Func->getNumVariables());
  for (CfgNode *Node : Func->getNodes()) {
    for (Inst &Inst : Node->getInsts()) {
      if (Inst.isDeleted())
        continue;
      if (const Variable *Var = Inst.getDest())
        IsVarReferenced[Var->getIndex()] = true;
      for (SizeT I = 0; I < Inst.getSrcSize(); ++I) {
        Operand *Src = Inst.getSrc(I);
        SizeT NumVars = Src->getNumVars();
        for (SizeT J = 0; J < NumVars; ++J) {
          const Variable *Var = Src->getVar(J);
          IsVarReferenced[Var->getIndex()] = true;
        }
      }
    }
  }

  // If SimpleCoalescing is false, each variable without a register gets its
  // own unique stack slot, which leads to large stack frames. If
  // SimpleCoalescing is true, then each "global" variable without a register
  // gets its own slot, but "local" variable slots are reused across basic
  // blocks. E.g., if A and B are local to block 1 and C is local to block 2,
  // then C may share a slot with A or B.
  //
  // We cannot coalesce stack slots if this function calls a "returns twice"
  // function. In that case, basic blocks may be revisited, and variables
  // local to those basic blocks are actually live until after the called
  // function returns a second time.
  const bool SimpleCoalescing = !callsReturnsTwice();

  std::vector<size_t> LocalsSize(Func->getNumNodes());
  const VarList &Variables = Func->getVariables();
  VarList SpilledVariables;
  for (Variable *Var : Variables) {
    if (Var->hasReg()) {
      RegsUsed[Var->getRegNum()] = true;
      continue;
    }
    // An argument either does not need a stack slot (if passed in a
    // register) or already has one (if passed on the stack).
    if (Var->getIsArg())
      continue;
    // An unreferenced variable doesn't need a stack slot.
    if (!IsVarReferenced[Var->getIndex()])
      continue;
    // A target-specific variable (e.g., one that may end up sharing a stack
    // slot) is handled by TargetVarHook and needs no accounting here.
    if (TargetVarHook(Var))
      continue;
    SpilledVariables.push_back(Var);
  }

  SortedSpilledVariables.reserve(SpilledVariables.size());
  sortVarsByAlignment(SortedSpilledVariables, SpilledVariables);

  for (Variable *Var : SortedSpilledVariables) {
    size_t Increment = typeWidthInBytesOnStack(Var->getType());
    // We have sorted by alignment, so the first variable we encounter that
    // is located in each area determines the max alignment for the area.
    if (!*SpillAreaAlignmentBytes)
      *SpillAreaAlignmentBytes = Increment;
    if (SimpleCoalescing && VMetadata->isTracked(Var)) {
      if (VMetadata->isMultiBlock(Var)) {
        *GlobalsSize += Increment;
      } else {
        SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex();
        LocalsSize[NodeIndex] += Increment;
        if (LocalsSize[NodeIndex] > *SpillAreaSizeBytes)
          *SpillAreaSizeBytes = LocalsSize[NodeIndex];
        if (!*LocalsSlotsAlignmentBytes)
          *LocalsSlotsAlignmentBytes = Increment;
      }
    } else {
      *SpillAreaSizeBytes += Increment;
    }
  }
}

void TargetLowering::alignStackSpillAreas(uint32_t SpillAreaStartOffset,
                                          uint32_t SpillAreaAlignmentBytes,
                                          size_t GlobalsSize,
                                          uint32_t LocalsSlotsAlignmentBytes,
                                          uint32_t *SpillAreaPaddingBytes,
                                          uint32_t *LocalsSlotsPaddingBytes) {
  if (SpillAreaAlignmentBytes) {
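    // Round the spill-area start up to the required alignment (assuming
    // Utils::applyAlignment rounds up); e.g., SpillAreaStartOffset == 4 with
    // 16-byte alignment gives SpillAreaStart == 16 and 12 bytes of padding.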
    uint32_t PaddingStart = SpillAreaStartOffset;
    uint32_t SpillAreaStart =
        Utils::applyAlignment(PaddingStart, SpillAreaAlignmentBytes);
    *SpillAreaPaddingBytes = SpillAreaStart - PaddingStart;
  }

  // If there are separate globals and locals areas, make sure the locals
  // area is aligned by padding the end of the globals area.
  if (LocalsSlotsAlignmentBytes) {
    uint32_t GlobalsAndSubsequentPaddingSize =
        Utils::applyAlignment(GlobalsSize, LocalsSlotsAlignmentBytes);
    *LocalsSlotsPaddingBytes = GlobalsAndSubsequentPaddingSize - GlobalsSize;
  }
}

void TargetLowering::assignVarStackSlots(VarList &SortedSpilledVariables,
                                         size_t SpillAreaPaddingBytes,
                                         size_t SpillAreaSizeBytes,
                                         size_t GlobalsAndSubsequentPaddingSize,
                                         bool UsesFramePointer) {
  const VariablesMetadata *VMetadata = Func->getVMetadata();
  size_t GlobalsSpaceUsed = SpillAreaPaddingBytes;
  size_t NextStackOffset = SpillAreaPaddingBytes;
  std::vector<size_t> LocalsSize(Func->getNumNodes());
  const bool SimpleCoalescing = !callsReturnsTwice();
  for (Variable *Var : SortedSpilledVariables) {
    size_t Increment = typeWidthInBytesOnStack(Var->getType());
    if (SimpleCoalescing && VMetadata->isTracked(Var)) {
      if (VMetadata->isMultiBlock(Var)) {
        GlobalsSpaceUsed += Increment;
        NextStackOffset = GlobalsSpaceUsed;
      } else {
        SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex();
        LocalsSize[NodeIndex] += Increment;
        NextStackOffset = SpillAreaPaddingBytes +
                          GlobalsAndSubsequentPaddingSize +
                          LocalsSize[NodeIndex];
      }
    } else {
      NextStackOffset += Increment;
    }
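    // With a frame pointer, spill slots live below it and are addressed with
    // negative offsets; otherwise they are addressed with positive offsets
    // from the stack pointer, measured from the far end of the spill area.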
    if (UsesFramePointer)
      Var->setStackOffset(-NextStackOffset);
    else
      Var->setStackOffset(SpillAreaSizeBytes - NextStackOffset);
  }
}

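// Builds (but does not insert) a call to the runtime helper routine Name,
// with room for MaxSrcs call arguments. Helper calls are never tail calls,
// and their target is an external symbol.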
InstCall *TargetLowering::makeHelperCall(const IceString &Name, Variable *Dest,
                                         SizeT MaxSrcs) {
  const bool HasTailCall = false;
  Constant *CallTarget = Ctx->getConstantExternSym(Name);
  InstCall *Call =
      InstCall::create(Func, MaxSrcs, Dest, CallTarget, HasTailCall);
  return Call;
}

void TargetLowering::emitWithoutPrefix(const ConstantRelocatable *C) const {
  if (!BuildDefs::dump())
    return;
  Ostream &Str = Ctx->getStrEmit();
  if (C->getSuppressMangling())
    Str << C->getName();
  else
    Str << Ctx->mangleName(C->getName());
  RelocOffsetT Offset = C->getOffset();
  if (Offset) {
    if (Offset > 0)
      Str << "+";
    Str << Offset;
  }
}

void TargetLowering::emit(const ConstantRelocatable *C) const {
  if (!BuildDefs::dump())
    return;
  Ostream &Str = Ctx->getStrEmit();
  Str << getConstantPrefix();
  emitWithoutPrefix(C);
}

std::unique_ptr<TargetDataLowering>
TargetDataLowering::createLowering(GlobalContext *Ctx) {
  TargetArch Target = Ctx->getFlags().getTargetArch();
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return TargetData##X::create(Ctx);
#include "llvm/Config/SZTargets.def"

  llvm::report_fatal_error("Unsupported target data lowering");
}

TargetDataLowering::~TargetDataLowering() = default;

namespace {

// dataSectionSuffix decides whether to use SectionSuffix or MangledVarName
// as the data section suffix. Essentially, when using separate data sections
// for globals, SectionSuffix is not necessary.
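// For example, with data sections enabled, a variable mangled as "foo" gets
// the suffix ".foo" (yielding sections like ".rodata.foo"); with data
// sections disabled, a non-empty SectionSuffix "mysect" yields
// ".rodata.mysect", and an empty one yields plain ".rodata".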
IceString dataSectionSuffix(const IceString &SectionSuffix,
                            const IceString &MangledVarName,
                            const bool DataSections) {
  if (SectionSuffix.empty() && !DataSections) {
    return "";
  }

  if (DataSections) {
    // With data sections we don't need to use the SectionSuffix.
    return "." + MangledVarName;
  }

  assert(!SectionSuffix.empty());
  return "." + SectionSuffix;
}

} // end of anonymous namespace

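// Emits one global variable declaration as assembly. As a rough sketch
// (exact directives depend on the flags, alignment, and the target), an
// internal zero-initialized 4-byte global "foo" would come out approximately
// as:
//
//   .type foo,%object
//   .section .bss,"aw",%nobits
//   foo:
//   .zero 4
//   .size foo, 4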
void TargetDataLowering::emitGlobal(const VariableDeclaration &Var,
                                    const IceString &SectionSuffix) {
  if (!BuildDefs::dump())
    return;

  // If external and not initialized, this must be a cross test. Don't
  // generate a declaration for such cases.
  const bool IsExternal =
      Var.isExternal() || Ctx->getFlags().getDisableInternal();
  if (IsExternal && !Var.hasInitializer())
    return;

  Ostream &Str = Ctx->getStrEmit();
  const bool HasNonzeroInitializer = Var.hasNonzeroInitializer();
  const bool IsConstant = Var.getIsConstant();
  const SizeT Size = Var.getNumBytes();
  const IceString MangledName = Var.mangleName(Ctx);

  Str << "\t.type\t" << MangledName << ",%object\n";

  const bool UseDataSections = Ctx->getFlags().getDataSections();
  const IceString Suffix =
      dataSectionSuffix(SectionSuffix, MangledName, UseDataSections);
  if (IsConstant)
    Str << "\t.section\t.rodata" << Suffix << ",\"a\",%progbits\n";
  else if (HasNonzeroInitializer)
    Str << "\t.section\t.data" << Suffix << ",\"aw\",%progbits\n";
  else
    Str << "\t.section\t.bss" << Suffix << ",\"aw\",%nobits\n";

  if (IsExternal)
    Str << "\t.globl\t" << MangledName << "\n";

  const uint32_t Align = Var.getAlignment();
  if (Align > 1) {
    assert(llvm::isPowerOf2_32(Align));
    // Use the .p2align directive, since the .align N directive can either
    // interpret N as bytes, or power of 2 bytes, depending on the target.
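    // E.g., Align == 16 emits ".p2align 4".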
    Str << "\t.p2align\t" << llvm::Log2_32(Align) << "\n";
  }

  Str << MangledName << ":\n";

  if (HasNonzeroInitializer) {
    for (const std::unique_ptr<VariableDeclaration::Initializer> &Init :
         Var.getInitializers()) {
      switch (Init->getKind()) {
      case VariableDeclaration::Initializer::DataInitializerKind: {
        const auto &Data =
            llvm::cast<VariableDeclaration::DataInitializer>(Init.get())
                ->getContents();
        for (SizeT i = 0; i < Init->getNumBytes(); ++i) {
          Str << "\t.byte\t" << (((unsigned)Data[i]) & 0xff) << "\n";
        }
        break;
      }
      case VariableDeclaration::Initializer::ZeroInitializerKind:
        Str << "\t.zero\t" << Init->getNumBytes() << "\n";
        break;
      case VariableDeclaration::Initializer::RelocInitializerKind: {
        const auto *Reloc =
            llvm::cast<VariableDeclaration::RelocInitializer>(Init.get());
        Str << "\t" << getEmit32Directive() << "\t";
        Str << Reloc->getDeclaration()->mangleName(Ctx);
        if (RelocOffsetT Offset = Reloc->getOffset()) {
          if (Offset >= 0 || (Offset == INT32_MIN))
            Str << " + " << Offset;
          else
            Str << " - " << -Offset;
        }
        Str << "\n";
        break;
      }
      }
    }
  } else {
    // NOTE: For non-constant zero initializers this is BSS (no bits), so an
    // ELF writer would not write to the file and would only track virtual
    // offsets; the .s writer, however, still needs this .zero and cannot
    // simply use the .size to advance offsets.
    Str << "\t.zero\t" << Size << "\n";
  }

  Str << "\t.size\t" << MangledName << ", " << Size << "\n";
}

std::unique_ptr<TargetHeaderLowering>
TargetHeaderLowering::createLowering(GlobalContext *Ctx) {
  TargetArch Target = Ctx->getFlags().getTargetArch();
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return TargetHeader##X::create(Ctx);
#include "llvm/Config/SZTargets.def"

  llvm::report_fatal_error("Unsupported target header lowering");
}

TargetHeaderLowering::~TargetHeaderLowering() = default;

} // end of namespace Ice