Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 1 | //===- subzero/src/IceTargetLowering.cpp - Basic lowering implementation --===// |
| 2 | // |
| 3 | // The Subzero Code Generator |
| 4 | // |
| 5 | // This file is distributed under the University of Illinois Open Source |
| 6 | // License. See LICENSE.TXT for details. |
| 7 | // |
| 8 | //===----------------------------------------------------------------------===// |
| 9 | // |
| 10 | // This file implements the skeleton of the TargetLowering class, |
| 11 | // specifically invoking the appropriate lowering method for a given |
| 12 | // instruction kind and driving global register allocation. It also |
| 13 | // implements the non-deleted instruction iteration in |
| 14 | // LoweringContext. |
| 15 | // |
| 16 | //===----------------------------------------------------------------------===// |
| 17 | |
John Porto | aff4ccf | 2015-06-10 16:35:06 -0700 | [diff] [blame] | 18 | #include "IceAssemblerARM32.h" |
| 19 | #include "IceAssemblerX8632.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 20 | #include "IceCfg.h" // setError() |
| 21 | #include "IceCfgNode.h" |
| 22 | #include "IceOperand.h" |
Jim Stichnoth | d97c7df | 2014-06-04 11:57:08 -0700 | [diff] [blame] | 23 | #include "IceRegAlloc.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 24 | #include "IceTargetLowering.h" |
Jan Voung | b36ad9b | 2015-04-21 17:01:49 -0700 | [diff] [blame] | 25 | #include "IceTargetLoweringARM32.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 26 | #include "IceTargetLoweringX8632.h" |
| 27 | |
| 28 | namespace Ice { |
| 29 | |
// Initializes the context to iterate over the instructions of node N.
// End must be captured before rewind(), because rewind()'s call to
// skipDeleted() compares its iterator against End.  After rewind(),
// Cur points at the first non-deleted instruction and Next is advanced
// to the non-deleted instruction after it.
void LoweringContext::init(CfgNode *N) {
  Node = N;
  End = getNode()->getInsts().end();
  rewind();
  advanceForward(Next);
}
| 36 | |
| 37 | void LoweringContext::rewind() { |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 38 | Begin = getNode()->getInsts().begin(); |
| 39 | Cur = Begin; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 40 | skipDeleted(Cur); |
| 41 | Next = Cur; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 42 | } |
| 43 | |
| 44 | void LoweringContext::insert(Inst *Inst) { |
| 45 | getNode()->getInsts().insert(Next, Inst); |
Jim Stichnoth | 98712a3 | 2014-10-24 10:59:02 -0700 | [diff] [blame] | 46 | LastInserted = Inst; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 47 | } |
| 48 | |
Jan Voung | c820ddf | 2014-07-29 14:38:51 -0700 | [diff] [blame] | 49 | void LoweringContext::skipDeleted(InstList::iterator &I) const { |
Jim Stichnoth | 607e9f0 | 2014-11-06 13:32:05 -0800 | [diff] [blame] | 50 | while (I != End && I->isDeleted()) |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 51 | ++I; |
| 52 | } |
| 53 | |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 54 | void LoweringContext::advanceForward(InstList::iterator &I) const { |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 55 | if (I != End) { |
| 56 | ++I; |
| 57 | skipDeleted(I); |
| 58 | } |
| 59 | } |
| 60 | |
// Returns the most recently inserted instruction.  Asserts that
// insert() has been called at least once on this context.
Inst *LoweringContext::getLastInserted() const {
  assert(LastInserted);
  return LastInserted;
}
| 65 | |
// Creates the target-specific lowering object for the requested
// architecture.  SZTargets.def expands SUBZERO_TARGET into one "if"
// test per enabled target.  On an unsupported target, records an
// error on the Cfg and returns nullptr.
TargetLowering *TargetLowering::createLowering(TargetArch Target, Cfg *Func) {
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return Target##X::create(Func);
#include "llvm/Config/SZTargets.def"

  Func->setError("Unsupported target");
  return nullptr;
}
| 75 | |
// Constructs the base lowering state for Func.  All flags and
// counters start zeroed/false; the GlobalContext is cached from the
// Cfg for convenience.
TargetLowering::TargetLowering(Cfg *Func)
    : Func(Func), Ctx(Func->getContext()), HasComputedFrame(false),
      CallsReturnsTwice(false), StackAdjustment(0), NextLabelNumber(0),
      Context(), SnapshotStackAdjustment(0) {}
Jim Stichnoth | e6d2478 | 2014-12-19 05:42:24 -0800 | [diff] [blame] | 80 | |
// Creates the target-specific in-memory assembler.  SZTargets.def
// expands SUBZERO_TARGET into one "if" test per enabled target.  On
// an unsupported target, records an error on the Cfg and returns
// nullptr.
std::unique_ptr<Assembler> TargetLowering::createAssembler(TargetArch Target,
                                                           Cfg *Func) {
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return std::unique_ptr<Assembler>(new X::Assembler##X());
#include "llvm/Config/SZTargets.def"

  Func->setError("Unsupported target assembler");
  return nullptr;
}
| 91 | |
Jim Stichnoth | d97c7df | 2014-06-04 11:57:08 -0700 | [diff] [blame] | 92 | void TargetLowering::doAddressOpt() { |
| 93 | if (llvm::isa<InstLoad>(*Context.getCur())) |
| 94 | doAddressOptLoad(); |
| 95 | else if (llvm::isa<InstStore>(*Context.getCur())) |
| 96 | doAddressOptStore(); |
| 97 | Context.advanceCur(); |
| 98 | Context.advanceNext(); |
| 99 | } |
| 100 | |
Matt Wala | c330274 | 2014-08-15 16:21:56 -0700 | [diff] [blame] | 101 | void TargetLowering::doNopInsertion() { |
Jim Stichnoth | 607e9f0 | 2014-11-06 13:32:05 -0800 | [diff] [blame] | 102 | Inst *I = Context.getCur(); |
Matt Wala | c330274 | 2014-08-15 16:21:56 -0700 | [diff] [blame] | 103 | bool ShouldSkip = llvm::isa<InstFakeUse>(I) || llvm::isa<InstFakeDef>(I) || |
| 104 | llvm::isa<InstFakeKill>(I) || I->isRedundantAssign() || |
| 105 | I->isDeleted(); |
| 106 | if (!ShouldSkip) { |
Jan Voung | 1f47ad0 | 2015-03-20 15:01:26 -0700 | [diff] [blame] | 107 | int Probability = Ctx->getFlags().getNopProbabilityAsPercentage(); |
| 108 | for (int I = 0; I < Ctx->getFlags().getMaxNopsPerInstruction(); ++I) { |
| 109 | randomlyInsertNop(Probability / 100.0); |
Matt Wala | c330274 | 2014-08-15 16:21:56 -0700 | [diff] [blame] | 110 | } |
| 111 | } |
| 112 | } |
| 113 | |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 114 | // Lowers a single instruction according to the information in |
| 115 | // Context, by checking the Context.Cur instruction kind and calling |
| 116 | // the appropriate lowering method. The lowering method should insert |
| 117 | // target instructions at the Cur.Next insertion point, and should not |
| 118 | // delete the Context.Cur instruction or advance Context.Cur. |
| 119 | // |
| 120 | // The lowering method may look ahead in the instruction stream as |
| 121 | // desired, and lower additional instructions in conjunction with the |
| 122 | // current one, for example fusing a compare and branch. If it does, |
| 123 | // it should advance Context.Cur to point to the next non-deleted |
| 124 | // instruction to process, and it should delete any additional |
| 125 | // instructions it consumes. |
| 126 | void TargetLowering::lower() { |
| 127 | assert(!Context.atEnd()); |
Jim Stichnoth | 607e9f0 | 2014-11-06 13:32:05 -0800 | [diff] [blame] | 128 | Inst *Inst = Context.getCur(); |
Jim Stichnoth | a59ae6f | 2015-05-17 10:11:41 -0700 | [diff] [blame] | 129 | Inst->deleteIfDead(); |
| 130 | if (!Inst->isDeleted()) { |
| 131 | // Mark the current instruction as deleted before lowering, |
| 132 | // otherwise the Dest variable will likely get marked as non-SSA. |
| 133 | // See Variable::setDefinition(). |
| 134 | Inst->setDeleted(); |
| 135 | switch (Inst->getKind()) { |
| 136 | case Inst::Alloca: |
| 137 | lowerAlloca(llvm::cast<InstAlloca>(Inst)); |
| 138 | break; |
| 139 | case Inst::Arithmetic: |
| 140 | lowerArithmetic(llvm::cast<InstArithmetic>(Inst)); |
| 141 | break; |
| 142 | case Inst::Assign: |
| 143 | lowerAssign(llvm::cast<InstAssign>(Inst)); |
| 144 | break; |
| 145 | case Inst::Br: |
| 146 | lowerBr(llvm::cast<InstBr>(Inst)); |
| 147 | break; |
| 148 | case Inst::Call: |
| 149 | lowerCall(llvm::cast<InstCall>(Inst)); |
| 150 | break; |
| 151 | case Inst::Cast: |
| 152 | lowerCast(llvm::cast<InstCast>(Inst)); |
| 153 | break; |
| 154 | case Inst::ExtractElement: |
| 155 | lowerExtractElement(llvm::cast<InstExtractElement>(Inst)); |
| 156 | break; |
| 157 | case Inst::Fcmp: |
| 158 | lowerFcmp(llvm::cast<InstFcmp>(Inst)); |
| 159 | break; |
| 160 | case Inst::Icmp: |
| 161 | lowerIcmp(llvm::cast<InstIcmp>(Inst)); |
| 162 | break; |
| 163 | case Inst::InsertElement: |
| 164 | lowerInsertElement(llvm::cast<InstInsertElement>(Inst)); |
| 165 | break; |
| 166 | case Inst::IntrinsicCall: { |
| 167 | InstIntrinsicCall *Call = llvm::cast<InstIntrinsicCall>(Inst); |
| 168 | if (Call->getIntrinsicInfo().ReturnsTwice) |
| 169 | setCallsReturnsTwice(true); |
| 170 | lowerIntrinsicCall(Call); |
| 171 | break; |
| 172 | } |
| 173 | case Inst::Load: |
| 174 | lowerLoad(llvm::cast<InstLoad>(Inst)); |
| 175 | break; |
| 176 | case Inst::Phi: |
| 177 | lowerPhi(llvm::cast<InstPhi>(Inst)); |
| 178 | break; |
| 179 | case Inst::Ret: |
| 180 | lowerRet(llvm::cast<InstRet>(Inst)); |
| 181 | break; |
| 182 | case Inst::Select: |
| 183 | lowerSelect(llvm::cast<InstSelect>(Inst)); |
| 184 | break; |
| 185 | case Inst::Store: |
| 186 | lowerStore(llvm::cast<InstStore>(Inst)); |
| 187 | break; |
| 188 | case Inst::Switch: |
| 189 | lowerSwitch(llvm::cast<InstSwitch>(Inst)); |
| 190 | break; |
| 191 | case Inst::Unreachable: |
| 192 | lowerUnreachable(llvm::cast<InstUnreachable>(Inst)); |
| 193 | break; |
| 194 | case Inst::BundleLock: |
| 195 | case Inst::BundleUnlock: |
| 196 | case Inst::FakeDef: |
| 197 | case Inst::FakeUse: |
| 198 | case Inst::FakeKill: |
| 199 | case Inst::Target: |
| 200 | // These are all Target instruction types and shouldn't be |
| 201 | // encountered at this stage. |
| 202 | Func->setError("Can't lower unsupported instruction type"); |
| 203 | break; |
| 204 | } |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 205 | |
Jim Stichnoth | a59ae6f | 2015-05-17 10:11:41 -0700 | [diff] [blame] | 206 | postLower(); |
| 207 | } |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 208 | |
| 209 | Context.advanceCur(); |
| 210 | Context.advanceNext(); |
| 211 | } |
| 212 | |
Jim Stichnoth | d97c7df | 2014-06-04 11:57:08 -0700 | [diff] [blame] | 213 | // Drives register allocation, allowing all physical registers (except |
| 214 | // perhaps for the frame pointer) to be allocated. This set of |
| 215 | // registers could potentially be parameterized if we want to restrict |
| 216 | // registers e.g. for performance testing. |
Jim Stichnoth | 70d0a05 | 2014-11-14 15:53:46 -0800 | [diff] [blame] | 217 | void TargetLowering::regAlloc(RegAllocKind Kind) { |
Jim Stichnoth | 8363a06 | 2014-10-07 10:02:38 -0700 | [diff] [blame] | 218 | TimerMarker T(TimerStack::TT_regAlloc, Func); |
Jim Stichnoth | d97c7df | 2014-06-04 11:57:08 -0700 | [diff] [blame] | 219 | LinearScan LinearScan(Func); |
| 220 | RegSetMask RegInclude = RegSet_None; |
| 221 | RegSetMask RegExclude = RegSet_None; |
| 222 | RegInclude |= RegSet_CallerSave; |
| 223 | RegInclude |= RegSet_CalleeSave; |
Jim Stichnoth | d97c7df | 2014-06-04 11:57:08 -0700 | [diff] [blame] | 224 | if (hasFramePointer()) |
| 225 | RegExclude |= RegSet_FramePointer; |
Jim Stichnoth | 70d0a05 | 2014-11-14 15:53:46 -0800 | [diff] [blame] | 226 | LinearScan.init(Kind); |
Jim Stichnoth | d97c7df | 2014-06-04 11:57:08 -0700 | [diff] [blame] | 227 | llvm::SmallBitVector RegMask = getRegisterSet(RegInclude, RegExclude); |
Jan Voung | 1f47ad0 | 2015-03-20 15:01:26 -0700 | [diff] [blame] | 228 | LinearScan.scan(RegMask, Ctx->getFlags().shouldRandomizeRegAlloc()); |
Jim Stichnoth | d97c7df | 2014-06-04 11:57:08 -0700 | [diff] [blame] | 229 | } |
| 230 | |
Jan Voung | b3401d2 | 2015-05-18 09:38:21 -0700 | [diff] [blame] | 231 | void TargetLowering::inferTwoAddress() { |
| 232 | // Find two-address non-SSA instructions where Dest==Src0, and set |
| 233 | // the DestNonKillable flag to keep liveness analysis consistent. |
| 234 | for (auto Inst = Context.getCur(), E = Context.getNext(); Inst != E; ++Inst) { |
| 235 | if (Inst->isDeleted()) |
| 236 | continue; |
| 237 | if (Variable *Dest = Inst->getDest()) { |
| 238 | // TODO(stichnot): We may need to consider all source |
| 239 | // operands, not just the first one, if using 3-address |
| 240 | // instructions. |
| 241 | if (Inst->getSrcSize() > 0 && Inst->getSrc(0) == Dest) |
| 242 | Inst->setDestNonKillable(); |
| 243 | } |
| 244 | } |
| 245 | } |
| 246 | |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 247 | void TargetLowering::sortVarsByAlignment(VarList &Dest, |
| 248 | const VarList &Source) const { |
| 249 | Dest = Source; |
| 250 | // Instead of std::sort, we could do a bucket sort with log2(alignment) |
| 251 | // as the buckets, if performance is an issue. |
| 252 | std::sort(Dest.begin(), Dest.end(), |
| 253 | [this](const Variable *V1, const Variable *V2) { |
Jim Stichnoth | 8e6bf6e | 2015-06-03 15:58:12 -0700 | [diff] [blame] | 254 | return typeWidthInBytesOnStack(V1->getType()) > |
| 255 | typeWidthInBytesOnStack(V2->getType()); |
| 256 | }); |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 257 | } |
| 258 | |
// Computes the parameters needed to lay out stack slots for spilled
// variables: the list of spilled variables sorted by alignment, the
// set of physical registers in use, the sizes of the globals and
// overall spill areas, and the required alignments.  TargetVarHook
// lets the target claim a variable (returning true) so it is excluded
// from the generic accounting here.
void TargetLowering::getVarStackSlotParams(
    VarList &SortedSpilledVariables, llvm::SmallBitVector &RegsUsed,
    size_t *GlobalsSize, size_t *SpillAreaSizeBytes,
    uint32_t *SpillAreaAlignmentBytes, uint32_t *LocalsSlotsAlignmentBytes,
    std::function<bool(Variable *)> TargetVarHook) {
  const VariablesMetadata *VMetadata = Func->getVMetadata();
  // Mark every variable that appears as a dest or source operand of a
  // live instruction; unreferenced variables get no stack slot.
  llvm::BitVector IsVarReferenced(Func->getNumVariables());
  for (CfgNode *Node : Func->getNodes()) {
    for (Inst &Inst : Node->getInsts()) {
      if (Inst.isDeleted())
        continue;
      if (const Variable *Var = Inst.getDest())
        IsVarReferenced[Var->getIndex()] = true;
      for (SizeT I = 0; I < Inst.getSrcSize(); ++I) {
        Operand *Src = Inst.getSrc(I);
        SizeT NumVars = Src->getNumVars();
        for (SizeT J = 0; J < NumVars; ++J) {
          const Variable *Var = Src->getVar(J);
          IsVarReferenced[Var->getIndex()] = true;
        }
      }
    }
  }

  // If SimpleCoalescing is false, each variable without a register
  // gets its own unique stack slot, which leads to large stack
  // frames. If SimpleCoalescing is true, then each "global" variable
  // without a register gets its own slot, but "local" variable slots
  // are reused across basic blocks. E.g., if A and B are local to
  // block 1 and C is local to block 2, then C may share a slot with A or B.
  //
  // We cannot coalesce stack slots if this function calls a "returns twice"
  // function. In that case, basic blocks may be revisited, and variables
  // local to those basic blocks are actually live until after the
  // called function returns a second time.
  const bool SimpleCoalescing = !callsReturnsTwice();

  std::vector<size_t> LocalsSize(Func->getNumNodes());
  const VarList &Variables = Func->getVariables();
  VarList SpilledVariables;
  for (Variable *Var : Variables) {
    if (Var->hasReg()) {
      RegsUsed[Var->getRegNum()] = true;
      continue;
    }
    // An argument either does not need a stack slot (if passed in a
    // register) or already has one (if passed on the stack).
    if (Var->getIsArg())
      continue;
    // An unreferenced variable doesn't need a stack slot.
    if (!IsVarReferenced[Var->getIndex()])
      continue;
    // Check a target-specific variable (it may end up sharing stack slots)
    // and not need accounting here.
    if (TargetVarHook(Var))
      continue;
    SpilledVariables.push_back(Var);
  }

  SortedSpilledVariables.reserve(SpilledVariables.size());
  sortVarsByAlignment(SortedSpilledVariables, SpilledVariables);

  // Accumulate sizes.  Local (single-block) variables coalesce per
  // node: the spill area only needs to be as large as the biggest
  // single node's locals.
  for (Variable *Var : SortedSpilledVariables) {
    size_t Increment = typeWidthInBytesOnStack(Var->getType());
    // We have sorted by alignment, so the first variable we encounter that
    // is located in each area determines the max alignment for the area.
    if (!*SpillAreaAlignmentBytes)
      *SpillAreaAlignmentBytes = Increment;
    if (SimpleCoalescing && VMetadata->isTracked(Var)) {
      if (VMetadata->isMultiBlock(Var)) {
        *GlobalsSize += Increment;
      } else {
        SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex();
        LocalsSize[NodeIndex] += Increment;
        if (LocalsSize[NodeIndex] > *SpillAreaSizeBytes)
          *SpillAreaSizeBytes = LocalsSize[NodeIndex];
        if (!*LocalsSlotsAlignmentBytes)
          *LocalsSlotsAlignmentBytes = Increment;
      }
    } else {
      *SpillAreaSizeBytes += Increment;
    }
  }
}
| 343 | |
| 344 | void TargetLowering::alignStackSpillAreas(uint32_t SpillAreaStartOffset, |
| 345 | uint32_t SpillAreaAlignmentBytes, |
| 346 | size_t GlobalsSize, |
| 347 | uint32_t LocalsSlotsAlignmentBytes, |
| 348 | uint32_t *SpillAreaPaddingBytes, |
| 349 | uint32_t *LocalsSlotsPaddingBytes) { |
| 350 | if (SpillAreaAlignmentBytes) { |
| 351 | uint32_t PaddingStart = SpillAreaStartOffset; |
| 352 | uint32_t SpillAreaStart = |
| 353 | Utils::applyAlignment(PaddingStart, SpillAreaAlignmentBytes); |
| 354 | *SpillAreaPaddingBytes = SpillAreaStart - PaddingStart; |
| 355 | } |
| 356 | |
| 357 | // If there are separate globals and locals areas, make sure the |
| 358 | // locals area is aligned by padding the end of the globals area. |
| 359 | if (LocalsSlotsAlignmentBytes) { |
| 360 | uint32_t GlobalsAndSubsequentPaddingSize = GlobalsSize; |
| 361 | GlobalsAndSubsequentPaddingSize = |
| 362 | Utils::applyAlignment(GlobalsSize, LocalsSlotsAlignmentBytes); |
| 363 | *LocalsSlotsPaddingBytes = GlobalsAndSubsequentPaddingSize - GlobalsSize; |
| 364 | } |
| 365 | } |
| 366 | |
// Assigns a stack offset to each spilled variable.  Multi-block
// ("global") variables are packed at the start of the spill area;
// single-block locals are packed per node after the globals area (so
// locals from different nodes can share the same offsets).  With a
// frame pointer, offsets grow negatively from the frame pointer;
// otherwise they are expressed relative to the bottom of the spill
// area.
void TargetLowering::assignVarStackSlots(VarList &SortedSpilledVariables,
                                         size_t SpillAreaPaddingBytes,
                                         size_t SpillAreaSizeBytes,
                                         size_t GlobalsAndSubsequentPaddingSize,
                                         bool UsesFramePointer) {
  const VariablesMetadata *VMetadata = Func->getVMetadata();
  // Running top of the globals area, and the offset for the next slot.
  size_t GlobalsSpaceUsed = SpillAreaPaddingBytes;
  size_t NextStackOffset = SpillAreaPaddingBytes;
  // Per-node running size of each node's locals.
  std::vector<size_t> LocalsSize(Func->getNumNodes());
  // Coalescing is disabled when a "returns twice" call is present;
  // see the comment in getVarStackSlotParams().
  const bool SimpleCoalescing = !callsReturnsTwice();
  for (Variable *Var : SortedSpilledVariables) {
    size_t Increment = typeWidthInBytesOnStack(Var->getType());
    if (SimpleCoalescing && VMetadata->isTracked(Var)) {
      if (VMetadata->isMultiBlock(Var)) {
        GlobalsSpaceUsed += Increment;
        NextStackOffset = GlobalsSpaceUsed;
      } else {
        // Locals restart after the (padded) globals area per node.
        SizeT NodeIndex = VMetadata->getLocalUseNode(Var)->getIndex();
        LocalsSize[NodeIndex] += Increment;
        NextStackOffset = SpillAreaPaddingBytes +
                          GlobalsAndSubsequentPaddingSize +
                          LocalsSize[NodeIndex];
      }
    } else {
      NextStackOffset += Increment;
    }
    if (UsesFramePointer)
      Var->setStackOffset(-NextStackOffset);
    else
      Var->setStackOffset(SpillAreaSizeBytes - NextStackOffset);
  }
}
| 399 | |
Jan Voung | b36ad9b | 2015-04-21 17:01:49 -0700 | [diff] [blame] | 400 | InstCall *TargetLowering::makeHelperCall(const IceString &Name, Variable *Dest, |
| 401 | SizeT MaxSrcs) { |
| 402 | const bool HasTailCall = false; |
| 403 | Constant *CallTarget = Ctx->getConstantExternSym(Name); |
| 404 | InstCall *Call = |
| 405 | InstCall::create(Func, MaxSrcs, Dest, CallTarget, HasTailCall); |
| 406 | return Call; |
| 407 | } |
| 408 | |
Jan Voung | 76bb0be | 2015-05-14 09:26:19 -0700 | [diff] [blame] | 409 | void TargetLowering::emitWithoutPrefix(const ConstantRelocatable *C) const { |
| 410 | if (!ALLOW_DUMP) |
| 411 | return; |
| 412 | Ostream &Str = Ctx->getStrEmit(); |
| 413 | if (C->getSuppressMangling()) |
| 414 | Str << C->getName(); |
| 415 | else |
| 416 | Str << Ctx->mangleName(C->getName()); |
| 417 | RelocOffsetT Offset = C->getOffset(); |
| 418 | if (Offset) { |
| 419 | if (Offset > 0) |
| 420 | Str << "+"; |
| 421 | Str << Offset; |
| 422 | } |
| 423 | } |
| 424 | |
| 425 | void TargetLowering::emit(const ConstantRelocatable *C) const { |
| 426 | if (!ALLOW_DUMP) |
| 427 | return; |
| 428 | Ostream &Str = Ctx->getStrEmit(); |
| 429 | Str << getConstantPrefix(); |
| 430 | emitWithoutPrefix(C); |
| 431 | } |
| 432 | |
// Creates the target-specific data lowering object for the target
// architecture recorded in the global flags.  SZTargets.def expands
// SUBZERO_TARGET into one "if" test per enabled target; an
// unsupported target is a fatal condition (llvm_unreachable), with
// the trailing return only to satisfy the compiler.
std::unique_ptr<TargetDataLowering>
TargetDataLowering::createLowering(GlobalContext *Ctx) {
  TargetArch Target = Ctx->getFlags().getTargetArch();
#define SUBZERO_TARGET(X)                                                      \
  if (Target == Target_##X)                                                    \
    return std::unique_ptr<TargetDataLowering>(TargetData##X::create(Ctx));
#include "llvm/Config/SZTargets.def"

  llvm_unreachable("Unsupported target data lowering");
  return nullptr;
}
| 444 | |
Jan Voung | 72984d8 | 2015-01-29 14:42:38 -0800 | [diff] [blame] | 445 | TargetDataLowering::~TargetDataLowering() {} |
Jan Voung | 839c4ce | 2014-07-28 15:19:43 -0700 | [diff] [blame] | 446 | |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 447 | } // end of namespace Ice |