//===- subzero/src/IceTargetLoweringARM32.cpp - ARM32 lowering ------------===//
//
// The Subzero Code Generator
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the TargetLoweringARM32 class, which consists almost
// entirely of the lowering sequence for each high-level instruction.
//
//===----------------------------------------------------------------------===//

#include "llvm/Support/MathExtras.h"

#include "IceCfg.h"
#include "IceCfgNode.h"
#include "IceClFlags.h"
#include "IceDefs.h"
#include "IceELFObjectWriter.h"
#include "IceGlobalInits.h"
#include "IceInstARM32.h"
#include "IceLiveness.h"
#include "IceOperand.h"
#include "IceRegistersARM32.h"
#include "IceTargetLoweringARM32.def"
#include "IceTargetLoweringARM32.h"
#include "IceUtils.h"

namespace Ice {

namespace {
void UnimplementedError(const ClFlags &Flags) {
  if (!Flags.getSkipUnimplemented()) {
    // Use llvm_unreachable instead of report_fatal_error, since it gives
    // better stack traces.
    llvm_unreachable("Not yet implemented");
    abort();
  }
}
} // end of anonymous namespace

TargetARM32::TargetARM32(Cfg *Func)
    : TargetLowering(Func), UsesFramePointer(false) {
  // TODO: Don't initialize IntegerRegisters and friends every time.
  // Instead, initialize in some sort of static initializer for the
  // class.
  llvm::SmallBitVector IntegerRegisters(RegARM32::Reg_NUM);
  llvm::SmallBitVector FloatRegisters(RegARM32::Reg_NUM);
  llvm::SmallBitVector VectorRegisters(RegARM32::Reg_NUM);
  llvm::SmallBitVector InvalidRegisters(RegARM32::Reg_NUM);
  ScratchRegs.resize(RegARM32::Reg_NUM);
#define X(val, encode, name, scratch, preserved, stackptr, frameptr, isInt,   \
          isFP)                                                                \
  IntegerRegisters[RegARM32::val] = isInt;                                     \
  FloatRegisters[RegARM32::val] = isFP;                                        \
  VectorRegisters[RegARM32::val] = isFP;                                       \
  ScratchRegs[RegARM32::val] = scratch;
  REGARM32_TABLE;
#undef X
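  // As an illustration only (the real rows live in IceTargetLoweringARM32.def,
  // so the fields in this sample row are assumptions), a table entry such as
  //   X(Reg_r0, 0, "r0", 1 /*scratch*/, 0, 0, 0, 1 /*isInt*/, 0 /*isFP*/)
  // expands under the X macro above to
  //   IntegerRegisters[RegARM32::Reg_r0] = 1;
  //   FloatRegisters[RegARM32::Reg_r0] = 0;
  //   VectorRegisters[RegARM32::Reg_r0] = 0;
  //   ScratchRegs[RegARM32::Reg_r0] = 1;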
  TypeToRegisterSet[IceType_void] = InvalidRegisters;
  TypeToRegisterSet[IceType_i1] = IntegerRegisters;
  TypeToRegisterSet[IceType_i8] = IntegerRegisters;
  TypeToRegisterSet[IceType_i16] = IntegerRegisters;
  TypeToRegisterSet[IceType_i32] = IntegerRegisters;
  TypeToRegisterSet[IceType_i64] = IntegerRegisters;
  TypeToRegisterSet[IceType_f32] = FloatRegisters;
  TypeToRegisterSet[IceType_f64] = FloatRegisters;
  TypeToRegisterSet[IceType_v4i1] = VectorRegisters;
  TypeToRegisterSet[IceType_v8i1] = VectorRegisters;
  TypeToRegisterSet[IceType_v16i1] = VectorRegisters;
  TypeToRegisterSet[IceType_v16i8] = VectorRegisters;
  TypeToRegisterSet[IceType_v8i16] = VectorRegisters;
  TypeToRegisterSet[IceType_v4i32] = VectorRegisters;
  TypeToRegisterSet[IceType_v4f32] = VectorRegisters;
}

void TargetARM32::translateO2() {
  TimerMarker T(TimerStack::TT_O2, Func);

  // TODO(stichnot): share passes with X86?
  // https://code.google.com/p/nativeclient/issues/detail?id=4094

  if (!Ctx->getFlags().getPhiEdgeSplit()) {
    // Lower Phi instructions.
    Func->placePhiLoads();
    if (Func->hasError())
      return;
    Func->placePhiStores();
    if (Func->hasError())
      return;
    Func->deletePhis();
    if (Func->hasError())
      return;
    Func->dump("After Phi lowering");
  }

  // Address mode optimization.
  Func->getVMetadata()->init(VMK_SingleDefs);
  Func->doAddressOpt();

  // Argument lowering.
  Func->doArgLowering();

  // Target lowering. This requires liveness analysis for some parts
  // of the lowering decisions, such as compare/branch fusing. If
  // non-lightweight liveness analysis is used, the instructions need
  // to be renumbered first. TODO: This renumbering should only be
  // necessary if we're actually calculating live intervals, which we
  // only do for register allocation.
  Func->renumberInstructions();
  if (Func->hasError())
    return;

  // TODO: It should be sufficient to use the fastest liveness
  // calculation, i.e. livenessLightweight(). However, for some
  // reason that slows down the rest of the translation. Investigate.
  Func->liveness(Liveness_Basic);
  if (Func->hasError())
    return;
  Func->dump("After ARM32 address mode opt");

  Func->genCode();
  if (Func->hasError())
    return;
  Func->dump("After ARM32 codegen");

  // Register allocation. This requires instruction renumbering and
  // full liveness analysis.
  Func->renumberInstructions();
  if (Func->hasError())
    return;
  Func->liveness(Liveness_Intervals);
  if (Func->hasError())
    return;
  // Validate the live range computations. The expensive validation
  // call is deliberately only made when assertions are enabled.
  assert(Func->validateLiveness());
  // The post-codegen dump is done here, after liveness analysis and
  // associated cleanup, to make the dump cleaner and more useful.
  Func->dump("After initial ARM32 codegen");
  Func->getVMetadata()->init(VMK_All);
  regAlloc(RAK_Global);
  if (Func->hasError())
    return;
  Func->dump("After linear scan regalloc");

  if (Ctx->getFlags().getPhiEdgeSplit()) {
    Func->advancedPhiLowering();
    Func->dump("After advanced Phi lowering");
  }

  // Stack frame mapping.
  Func->genFrame();
  if (Func->hasError())
    return;
  Func->dump("After stack frame mapping");

  Func->contractEmptyNodes();
  Func->reorderNodes();

  // Branch optimization. This needs to be done just before code
  // emission. In particular, no transformations that insert or
  // reorder CfgNodes should be done after branch optimization. We go
  // ahead and do it before nop insertion to reduce the amount of work
  // needed for searching for opportunities.
  Func->doBranchOpt();
  Func->dump("After branch optimization");

  // Nop insertion.
  if (Ctx->getFlags().shouldDoNopInsertion()) {
    Func->doNopInsertion();
  }
}

void TargetARM32::translateOm1() {
  TimerMarker T(TimerStack::TT_Om1, Func);

  // TODO: share passes with X86?

  Func->placePhiLoads();
  if (Func->hasError())
    return;
  Func->placePhiStores();
  if (Func->hasError())
    return;
  Func->deletePhis();
  if (Func->hasError())
    return;
  Func->dump("After Phi lowering");

  Func->doArgLowering();

  Func->genCode();
  if (Func->hasError())
    return;
  Func->dump("After initial ARM32 codegen");

  regAlloc(RAK_InfOnly);
  if (Func->hasError())
    return;
  Func->dump("After regalloc of infinite-weight variables");

  Func->genFrame();
  if (Func->hasError())
    return;
  Func->dump("After stack frame mapping");

  // Nop insertion.
  if (Ctx->getFlags().shouldDoNopInsertion()) {
    Func->doNopInsertion();
  }
}

bool TargetARM32::doBranchOpt(Inst *I, const CfgNode *NextNode) {
  (void)I;
  (void)NextNode;
  UnimplementedError(Func->getContext()->getFlags());
  return false;
}

IceString TargetARM32::RegNames[] = {
#define X(val, encode, name, scratch, preserved, stackptr, frameptr, isInt,   \
          isFP)                                                                \
  name,
    REGARM32_TABLE
#undef X
};

IceString TargetARM32::getRegName(SizeT RegNum, Type Ty) const {
  assert(RegNum < RegARM32::Reg_NUM);
  (void)Ty;
  return RegNames[RegNum];
}

Variable *TargetARM32::getPhysicalRegister(SizeT RegNum, Type Ty) {
  if (Ty == IceType_void)
    Ty = IceType_i32;
  if (PhysicalRegisters[Ty].empty())
    PhysicalRegisters[Ty].resize(RegARM32::Reg_NUM);
  assert(RegNum < PhysicalRegisters[Ty].size());
  Variable *Reg = PhysicalRegisters[Ty][RegNum];
  if (Reg == nullptr) {
    Reg = Func->makeVariable(Ty);
    Reg->setRegNum(RegNum);
    PhysicalRegisters[Ty][RegNum] = Reg;
    // Specially mark SP and LR as "arguments" so that they are considered
    // live upon function entry.
    if (RegNum == RegARM32::Reg_sp || RegNum == RegARM32::Reg_lr) {
      Func->addImplicitArg(Reg);
      Reg->setIgnoreLiveness();
    }
  }
  return Reg;
}

void TargetARM32::emitVariable(const Variable *Var) const {
  Ostream &Str = Ctx->getStrEmit();
  if (Var->hasReg()) {
    Str << getRegName(Var->getRegNum(), Var->getType());
    return;
  }
  if (Var->getWeight().isInf()) {
    llvm::report_fatal_error(
        "Infinite-weight Variable has no register assigned");
  }
  int32_t Offset = Var->getStackOffset();
  if (!hasFramePointer())
    Offset += getStackAdjustment();
  // TODO(jvoung): Handle out of range. Perhaps we need a scratch register
  // to materialize a larger offset.
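  // Illustrative note only (not what this code does yet): such an offset
  // would typically be built in a scratch register, e.g. with a movw/movt
  // pair, and then used with register-offset addressing along the lines of
  //   ldr r0, [fp, ip]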
  const bool SignExt = false;
  if (!OperandARM32Mem::canHoldOffset(Var->getType(), SignExt, Offset)) {
    llvm::report_fatal_error("Illegal stack offset");
  }
  const Type FrameSPTy = IceType_i32;
  Str << "[" << getRegName(getFrameOrStackReg(), FrameSPTy) << ", " << Offset
      << "]";
}

void TargetARM32::lowerArguments() {
  UnimplementedError(Func->getContext()->getFlags());
}

Type TargetARM32::stackSlotType() { return IceType_i32; }

void TargetARM32::addProlog(CfgNode *Node) {
  (void)Node;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::addEpilog(CfgNode *Node) {
  (void)Node;
  UnimplementedError(Func->getContext()->getFlags());
}

llvm::SmallBitVector TargetARM32::getRegisterSet(RegSetMask Include,
                                                 RegSetMask Exclude) const {
  llvm::SmallBitVector Registers(RegARM32::Reg_NUM);

#define X(val, encode, name, scratch, preserved, stackptr, frameptr, isInt,   \
          isFP)                                                                \
  if (scratch && (Include & RegSet_CallerSave))                                \
    Registers[RegARM32::val] = true;                                           \
  if (preserved && (Include & RegSet_CalleeSave))                              \
    Registers[RegARM32::val] = true;                                           \
  if (stackptr && (Include & RegSet_StackPointer))                             \
    Registers[RegARM32::val] = true;                                           \
  if (frameptr && (Include & RegSet_FramePointer))                             \
    Registers[RegARM32::val] = true;                                           \
  if (scratch && (Exclude & RegSet_CallerSave))                                \
    Registers[RegARM32::val] = false;                                          \
  if (preserved && (Exclude & RegSet_CalleeSave))                              \
    Registers[RegARM32::val] = false;                                          \
  if (stackptr && (Exclude & RegSet_StackPointer))                             \
    Registers[RegARM32::val] = false;                                          \
  if (frameptr && (Exclude & RegSet_FramePointer))                             \
    Registers[RegARM32::val] = false;

  REGARM32_TABLE

#undef X

  return Registers;
}

void TargetARM32::lowerAlloca(const InstAlloca *Inst) {
  UsesFramePointer = true;
  // Conservatively require the stack to be aligned. Some stack
  // adjustment operations implemented below assume that the stack is
  // aligned before the alloca. All the alloca code ensures that the
  // stack alignment is preserved after the alloca. The stack alignment
  // restriction can be relaxed in some cases.
  NeedsStackAlignment = true;
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerArithmetic(const InstArithmetic *Inst) {
  switch (Inst->getOp()) {
  case InstArithmetic::_num:
    llvm_unreachable("Unknown arithmetic operator");
    break;
  case InstArithmetic::Add:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::And:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Or:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Xor:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Sub:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Mul:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Shl:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Lshr:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Ashr:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Udiv:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Sdiv:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Urem:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Srem:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Fadd:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Fsub:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Fmul:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Fdiv:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Frem:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
}

void TargetARM32::lowerAssign(const InstAssign *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerBr(const InstBr *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerCall(const InstCall *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerCast(const InstCast *Inst) {
  InstCast::OpKind CastKind = Inst->getCastKind();
  switch (CastKind) {
  default:
    Func->setError("Cast type not supported");
    return;
  case InstCast::Sext: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Zext: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Trunc: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Fptrunc:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstCast::Fpext: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Fptosi:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstCast::Fptoui:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstCast::Sitofp:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstCast::Uitofp: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Bitcast: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  }
}

void TargetARM32::lowerExtractElement(const InstExtractElement *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerFcmp(const InstFcmp *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerIcmp(const InstIcmp *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerInsertElement(const InstInsertElement *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerIntrinsicCall(const InstIntrinsicCall *Instr) {
  switch (Intrinsics::IntrinsicID ID = Instr->getIntrinsicInfo().ID) {
  case Intrinsics::AtomicCmpxchg: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::AtomicFence:
    UnimplementedError(Func->getContext()->getFlags());
    return;
  case Intrinsics::AtomicFenceAll:
    // NOTE: FenceAll should prevent any load/store from being moved
    // across the fence (both atomic and non-atomic). The InstARM32Mfence
    // instruction is currently marked coarsely as "HasSideEffects".
    UnimplementedError(Func->getContext()->getFlags());
    return;
  case Intrinsics::AtomicIsLockFree: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::AtomicLoad: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::AtomicRMW:
    UnimplementedError(Func->getContext()->getFlags());
    return;
  case Intrinsics::AtomicStore: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Bswap: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Ctpop: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Ctlz: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Cttz: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Fabs: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Longjmp: {
    InstCall *Call = makeHelperCall(H_call_longjmp, nullptr, 2);
    Call->addArg(Instr->getArg(0));
    Call->addArg(Instr->getArg(1));
    lowerCall(Call);
    return;
  }
  case Intrinsics::Memcpy: {
    // In the future, we could potentially emit an inline memcpy/memset, etc.
    // for intrinsic calls with a known length.
    InstCall *Call = makeHelperCall(H_call_memcpy, nullptr, 3);
    Call->addArg(Instr->getArg(0));
    Call->addArg(Instr->getArg(1));
    Call->addArg(Instr->getArg(2));
    lowerCall(Call);
    return;
  }
  case Intrinsics::Memmove: {
    InstCall *Call = makeHelperCall(H_call_memmove, nullptr, 3);
    Call->addArg(Instr->getArg(0));
    Call->addArg(Instr->getArg(1));
    Call->addArg(Instr->getArg(2));
    lowerCall(Call);
    return;
  }
  case Intrinsics::Memset: {
    // The value operand needs to be extended to a stack slot size
    // because the PNaCl ABI requires arguments to be at least 32 bits
    // wide.
    Operand *ValOp = Instr->getArg(1);
    assert(ValOp->getType() == IceType_i8);
    Variable *ValExt = Func->makeVariable(stackSlotType());
    lowerCast(InstCast::create(Func, InstCast::Zext, ValExt, ValOp));
    InstCall *Call = makeHelperCall(H_call_memset, nullptr, 3);
    Call->addArg(Instr->getArg(0));
    Call->addArg(ValExt);
    Call->addArg(Instr->getArg(2));
    lowerCall(Call);
    return;
  }
  case Intrinsics::NaClReadTP: {
    if (Ctx->getFlags().getUseSandboxing()) {
      UnimplementedError(Func->getContext()->getFlags());
    } else {
      InstCall *Call = makeHelperCall(H_call_read_tp, Instr->getDest(), 0);
      lowerCall(Call);
    }
    return;
  }
  case Intrinsics::Setjmp: {
    InstCall *Call = makeHelperCall(H_call_setjmp, Instr->getDest(), 1);
    Call->addArg(Instr->getArg(0));
    lowerCall(Call);
    return;
  }
  case Intrinsics::Sqrt: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Stacksave: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Stackrestore: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Trap:
    UnimplementedError(Func->getContext()->getFlags());
    return;
  case Intrinsics::UnknownIntrinsic:
    Func->setError("Should not be lowering UnknownIntrinsic");
    return;
  }
  return;
}

void TargetARM32::lowerLoad(const InstLoad *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::doAddressOptLoad() {
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::randomlyInsertNop(float Probability) {
  RandomNumberGeneratorWrapper RNG(Ctx->getRNG());
  if (RNG.getTrueWithProbability(Probability)) {
    UnimplementedError(Func->getContext()->getFlags());
  }
}

void TargetARM32::lowerPhi(const InstPhi * /*Inst*/) {
  Func->setError("Phi found in regular instruction list");
}

void TargetARM32::lowerRet(const InstRet *Inst) {
  Variable *Reg = nullptr;
  if (Inst->hasRetValue()) {
    UnimplementedError(Func->getContext()->getFlags());
  }
  // Add a ret instruction even if sandboxing is enabled, because
  // addEpilog explicitly looks for a ret instruction as a marker for
  // where to insert the frame removal instructions.
  // addEpilog is responsible for restoring the "lr" register as needed
  // prior to this ret instruction.
  _ret(getPhysicalRegister(RegARM32::Reg_lr), Reg);
  // Add a fake use of sp to make sure sp stays alive for the entire
  // function. Otherwise post-call sp adjustments get dead-code
  // eliminated. TODO: Are there more places where the fake use
  // should be inserted? E.g. "void f(int n){while(1) g(n);}" may not
  // have a ret instruction.
  Variable *SP = Func->getTarget()->getPhysicalRegister(RegARM32::Reg_sp);
  Context.insert(InstFakeUse::create(Func, SP));
}

void TargetARM32::lowerSelect(const InstSelect *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerStore(const InstStore *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::doAddressOptStore() {
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerSwitch(const InstSwitch *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerUnreachable(const InstUnreachable * /*Inst*/) {
  llvm_unreachable("Not yet implemented");
}

// Turn an i64 Phi instruction into a pair of i32 Phi instructions, to
// preserve the integrity of liveness analysis. Undef values are also
// turned into zeroes, since loOperand() and hiOperand() don't expect
// Undef input.
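// As an illustrative sketch of the intended rewrite (the names below are
// hypothetical), a Phi such as
//   %v = phi i64 [ %a, %B1 ], [ undef, %B2 ]
// would become
//   %v.lo = phi i32 [ lo(%a), %B1 ], [ 0, %B2 ]
//   %v.hi = phi i32 [ hi(%a), %B1 ], [ 0, %B2 ]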
void TargetARM32::prelowerPhis() {
  UnimplementedError(Func->getContext()->getFlags());
}

// Lower the pre-ordered list of assignments into mov instructions.
// Also has to do some ad-hoc register allocation as necessary.
void TargetARM32::lowerPhiAssignments(CfgNode *Node,
                                      const AssignList &Assignments) {
  (void)Node;
  (void)Assignments;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::postLower() {
  if (Ctx->getFlags().getOptLevel() == Opt_m1)
    return;
  // Find two-address non-SSA instructions where Dest==Src0, and set
  // the DestNonKillable flag to keep liveness analysis consistent.
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::makeRandomRegisterPermutation(
    llvm::SmallVectorImpl<int32_t> &Permutation,
    const llvm::SmallBitVector &ExcludeRegisters) const {
  (void)Permutation;
  (void)ExcludeRegisters;
  UnimplementedError(Func->getContext()->getFlags());
}

/* TODO(jvoung): avoid duplicate symbols with multiple targets.
void ConstantUndef::emitWithoutDollar(GlobalContext *) const {
  llvm_unreachable("Not expecting to emitWithoutDollar undef");
}

void ConstantUndef::emit(GlobalContext *) const {
  llvm_unreachable("undef value encountered by emitter.");
}
*/

TargetDataARM32::TargetDataARM32(GlobalContext *Ctx)
    : TargetDataLowering(Ctx) {}

void TargetDataARM32::lowerGlobal(const VariableDeclaration &Var) const {
  (void)Var;
  UnimplementedError(Ctx->getFlags());
}

void TargetDataARM32::lowerGlobals(
    std::unique_ptr<VariableDeclarationList> Vars) const {
  switch (Ctx->getFlags().getOutFileType()) {
  case FT_Elf: {
    ELFObjectWriter *Writer = Ctx->getObjectWriter();
    Writer->writeDataSection(*Vars, llvm::ELF::R_ARM_ABS32);
  } break;
  case FT_Asm:
  case FT_Iasm: {
    const IceString &TranslateOnly = Ctx->getFlags().getTranslateOnly();
    OstreamLocker L(Ctx);
    for (const VariableDeclaration *Var : *Vars) {
      if (GlobalContext::matchSymbolName(Var->getName(), TranslateOnly)) {
        lowerGlobal(*Var);
      }
    }
  } break;
  }
}

void TargetDataARM32::lowerConstants() const {
  if (Ctx->getFlags().getDisableTranslation())
    return;
  UnimplementedError(Ctx->getFlags());
}

} // end of namespace Ice