//===- subzero/src/IceTargetLoweringARM32.cpp - ARM32 lowering ------------===//
//
// The Subzero Code Generator
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the TargetLoweringARM32 class, which consists almost
// entirely of the lowering sequence for each high-level instruction.
//
//===----------------------------------------------------------------------===//

#include "llvm/Support/MathExtras.h"

#include "IceCfg.h"
#include "IceCfgNode.h"
#include "IceClFlags.h"
#include "IceDefs.h"
#include "IceELFObjectWriter.h"
#include "IceGlobalInits.h"
#include "IceInstARM32.h"
#include "IceLiveness.h"
#include "IceOperand.h"
#include "IceRegistersARM32.h"
#include "IceTargetLoweringARM32.def"
#include "IceTargetLoweringARM32.h"
#include "IceUtils.h"

namespace Ice {

namespace {
void UnimplementedError(const ClFlags &Flags) {
  if (!Flags.getSkipUnimplemented()) {
    // Use llvm_unreachable instead of report_fatal_error, since it gives
    // better stack traces.
    llvm_unreachable("Not yet implemented");
    abort();
  }
}
} // end of anonymous namespace

TargetARM32::TargetARM32(Cfg *Func)
    : TargetLowering(Func), UsesFramePointer(false) {
  // TODO: Don't initialize IntegerRegisters and friends every time.
  // Instead, initialize in some sort of static initializer for the
  // class.
  llvm::SmallBitVector IntegerRegisters(RegARM32::Reg_NUM);
  llvm::SmallBitVector FloatRegisters(RegARM32::Reg_NUM);
  llvm::SmallBitVector VectorRegisters(RegARM32::Reg_NUM);
  llvm::SmallBitVector InvalidRegisters(RegARM32::Reg_NUM);
  ScratchRegs.resize(RegARM32::Reg_NUM);
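  // Each REGARM32_TABLE entry supplies the register's attribute flags; the
  // X-macro below classifies every register into the integer, float, and
  // vector sets and records whether it is a caller-save scratch register.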
#define X(val, encode, name, scratch, preserved, stackptr, frameptr, isInt,   \
          isFP)                                                               \
  IntegerRegisters[RegARM32::val] = isInt;                                    \
  FloatRegisters[RegARM32::val] = isFP;                                       \
  VectorRegisters[RegARM32::val] = isFP;                                      \
  ScratchRegs[RegARM32::val] = scratch;
  REGARM32_TABLE;
#undef X
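  // Map each IR type to the registers that may hold it. Vector types share
  // the floating-point register file here, which matches ARM's layout where
  // the NEON Q registers alias the VFP S/D registers.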
  TypeToRegisterSet[IceType_void] = InvalidRegisters;
  TypeToRegisterSet[IceType_i1] = IntegerRegisters;
  TypeToRegisterSet[IceType_i8] = IntegerRegisters;
  TypeToRegisterSet[IceType_i16] = IntegerRegisters;
  TypeToRegisterSet[IceType_i32] = IntegerRegisters;
  TypeToRegisterSet[IceType_i64] = IntegerRegisters;
  TypeToRegisterSet[IceType_f32] = FloatRegisters;
  TypeToRegisterSet[IceType_f64] = FloatRegisters;
  TypeToRegisterSet[IceType_v4i1] = VectorRegisters;
  TypeToRegisterSet[IceType_v8i1] = VectorRegisters;
  TypeToRegisterSet[IceType_v16i1] = VectorRegisters;
  TypeToRegisterSet[IceType_v16i8] = VectorRegisters;
  TypeToRegisterSet[IceType_v8i16] = VectorRegisters;
  TypeToRegisterSet[IceType_v4i32] = VectorRegisters;
  TypeToRegisterSet[IceType_v4f32] = VectorRegisters;
}

void TargetARM32::translateO2() {
  TimerMarker T(TimerStack::TT_O2, Func);

  // TODO(stichnot): share passes with X86?
  // https://code.google.com/p/nativeclient/issues/detail?id=4094

  if (!Ctx->getFlags().getPhiEdgeSplit()) {
    // Lower Phi instructions.
    Func->placePhiLoads();
    if (Func->hasError())
      return;
    Func->placePhiStores();
    if (Func->hasError())
      return;
    Func->deletePhis();
    if (Func->hasError())
      return;
    Func->dump("After Phi lowering");
  }

  // Address mode optimization.
  Func->getVMetadata()->init(VMK_SingleDefs);
  Func->doAddressOpt();

  // Argument lowering
  Func->doArgLowering();

  // Target lowering. This requires liveness analysis for some parts
  // of the lowering decisions, such as compare/branch fusing. If
  // non-lightweight liveness analysis is used, the instructions need
  // to be renumbered first. TODO: This renumbering should only be
  // necessary if we're actually calculating live intervals, which we
  // only do for register allocation.
  Func->renumberInstructions();
  if (Func->hasError())
    return;

  // TODO: It should be sufficient to use the fastest liveness
  // calculation, i.e. livenessLightweight(). However, for some
  // reason that slows down the rest of the translation. Investigate.
  Func->liveness(Liveness_Basic);
  if (Func->hasError())
    return;
  Func->dump("After ARM32 address mode opt");

  Func->genCode();
  if (Func->hasError())
    return;
  Func->dump("After ARM32 codegen");

  // Register allocation. This requires instruction renumbering and
  // full liveness analysis.
  Func->renumberInstructions();
  if (Func->hasError())
    return;
  Func->liveness(Liveness_Intervals);
  if (Func->hasError())
    return;
  // Validate the live range computations. The expensive validation
  // call is deliberately only made when assertions are enabled.
  assert(Func->validateLiveness());
  // The post-codegen dump is done here, after liveness analysis and
  // associated cleanup, to make the dump cleaner and more useful.
  Func->dump("After initial ARM32 codegen");
  Func->getVMetadata()->init(VMK_All);
  regAlloc(RAK_Global);
  if (Func->hasError())
    return;
  Func->dump("After linear scan regalloc");

  if (Ctx->getFlags().getPhiEdgeSplit()) {
    Func->advancedPhiLowering();
    Func->dump("After advanced Phi lowering");
  }

  // Stack frame mapping.
  Func->genFrame();
  if (Func->hasError())
    return;
  Func->dump("After stack frame mapping");

  Func->contractEmptyNodes();
  Func->reorderNodes();

  // Branch optimization. This needs to be done just before code
  // emission. In particular, no transformations that insert or
  // reorder CfgNodes should be done after branch optimization. We go
  // ahead and do it before nop insertion to reduce the amount of work
  // needed when searching for optimization opportunities.
  Func->doBranchOpt();
  Func->dump("After branch optimization");

  // Nop insertion
  if (Ctx->getFlags().shouldDoNopInsertion()) {
    Func->doNopInsertion();
  }
}

void TargetARM32::translateOm1() {
  TimerMarker T(TimerStack::TT_Om1, Func);

  // TODO: share passes with X86?

  Func->placePhiLoads();
  if (Func->hasError())
    return;
  Func->placePhiStores();
  if (Func->hasError())
    return;
  Func->deletePhis();
  if (Func->hasError())
    return;
  Func->dump("After Phi lowering");

  Func->doArgLowering();

  Func->genCode();
  if (Func->hasError())
    return;
  Func->dump("After initial ARM32 codegen");

  regAlloc(RAK_InfOnly);
  if (Func->hasError())
    return;
  Func->dump("After regalloc of infinite-weight variables");

  Func->genFrame();
  if (Func->hasError())
    return;
  Func->dump("After stack frame mapping");

  // Nop insertion
  if (Ctx->getFlags().shouldDoNopInsertion()) {
    Func->doNopInsertion();
  }
}

bool TargetARM32::doBranchOpt(Inst *I, const CfgNode *NextNode) {
  (void)I;
  (void)NextNode;
  UnimplementedError(Func->getContext()->getFlags());
  return false;
}

IceString TargetARM32::RegNames[] = {
#define X(val, encode, name, scratch, preserved, stackptr, frameptr, isInt,   \
          isFP)                                                               \
  name,
    REGARM32_TABLE
#undef X
};

IceString TargetARM32::getRegName(SizeT RegNum, Type Ty) const {
  assert(RegNum < RegARM32::Reg_NUM);
  (void)Ty;
  return RegNames[RegNum];
}

Variable *TargetARM32::getPhysicalRegister(SizeT RegNum, Type Ty) {
  if (Ty == IceType_void)
    Ty = IceType_i32;
  if (PhysicalRegisters[Ty].empty())
    PhysicalRegisters[Ty].resize(RegARM32::Reg_NUM);
  assert(RegNum < PhysicalRegisters[Ty].size());
  Variable *Reg = PhysicalRegisters[Ty][RegNum];
  if (Reg == nullptr) {
    Reg = Func->makeVariable(Ty);
    Reg->setRegNum(RegNum);
    PhysicalRegisters[Ty][RegNum] = Reg;
    // Specially mark SP and LR as "arguments" so that they are considered
    // live upon function entry.
    if (RegNum == RegARM32::Reg_sp || RegNum == RegARM32::Reg_lr) {
      Func->addImplicitArg(Reg);
      Reg->setIgnoreLiveness();
    }
  }
  return Reg;
}

void TargetARM32::emitVariable(const Variable *Var) const {
  Ostream &Str = Ctx->getStrEmit();
  if (Var->hasReg()) {
    Str << getRegName(Var->getRegNum(), Var->getType());
    return;
  }
  if (Var->getWeight().isInf()) {
    llvm::report_fatal_error(
        "Infinite-weight Variable has no register assigned");
  }
  int32_t Offset = Var->getStackOffset();
  if (!hasFramePointer())
    Offset += getStackAdjustment();
  // TODO(jvoung): Handle out of range. Perhaps we need a scratch register
  // to materialize a larger offset.
  const bool SignExt = false;
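  // (For reference, the admissible immediate offset range on ARM32 depends on
  // the access width: roughly +/-4095 for 32-bit ldr/str, +/-255 for the
  // halfword and signed-byte forms, and +/-1020 for VFP loads/stores;
  // canHoldOffset is expected to account for this per type.)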
  if (!OperandARM32Mem::canHoldOffset(Var->getType(), SignExt, Offset)) {
    llvm::report_fatal_error("Illegal stack offset");
  }
  const Type FrameSPTy = IceType_i32;
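  // Emits a register-indirect stack operand such as "[sp, 16]" or "[fp, -8]".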
  Str << "[" << getRegName(getFrameOrStackReg(), FrameSPTy) << ", " << Offset
      << "]";
}

void TargetARM32::lowerArguments() {
  UnimplementedError(Func->getContext()->getFlags());
}

Type TargetARM32::stackSlotType() { return IceType_i32; }

void TargetARM32::addProlog(CfgNode *Node) {
  (void)Node;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::addEpilog(CfgNode *Node) {
  (void)Node;
  UnimplementedError(Func->getContext()->getFlags());
}

llvm::SmallBitVector TargetARM32::getRegisterSet(RegSetMask Include,
                                                 RegSetMask Exclude) const {
  llvm::SmallBitVector Registers(RegARM32::Reg_NUM);

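  // Include filters are applied first and Exclude filters second, so a
  // register matching both an Include and an Exclude set ends up excluded.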
#define X(val, encode, name, scratch, preserved, stackptr, frameptr, isInt,   \
          isFP)                                                               \
  if (scratch && (Include & RegSet_CallerSave))                               \
    Registers[RegARM32::val] = true;                                          \
  if (preserved && (Include & RegSet_CalleeSave))                             \
    Registers[RegARM32::val] = true;                                          \
  if (stackptr && (Include & RegSet_StackPointer))                            \
    Registers[RegARM32::val] = true;                                          \
  if (frameptr && (Include & RegSet_FramePointer))                            \
    Registers[RegARM32::val] = true;                                          \
  if (scratch && (Exclude & RegSet_CallerSave))                               \
    Registers[RegARM32::val] = false;                                         \
  if (preserved && (Exclude & RegSet_CalleeSave))                             \
    Registers[RegARM32::val] = false;                                         \
  if (stackptr && (Exclude & RegSet_StackPointer))                            \
    Registers[RegARM32::val] = false;                                         \
  if (frameptr && (Exclude & RegSet_FramePointer))                            \
    Registers[RegARM32::val] = false;

  REGARM32_TABLE

#undef X

  return Registers;
}

void TargetARM32::lowerAlloca(const InstAlloca *Inst) {
  UsesFramePointer = true;
  // Conservatively require the stack to be aligned. Some stack
  // adjustment operations implemented below assume that the stack is
  // aligned before the alloca. All the alloca code ensures that the
  // stack alignment is preserved after the alloca. The stack alignment
  // restriction can be relaxed in some cases.
  NeedsStackAlignment = true;
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerArithmetic(const InstArithmetic *Inst) {
  switch (Inst->getOp()) {
  case InstArithmetic::_num:
    llvm_unreachable("Unknown arithmetic operator");
    break;
  case InstArithmetic::Add:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::And:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Or:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Xor:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Sub:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Mul:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Shl:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Lshr:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Ashr:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Udiv:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Sdiv:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Urem:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Srem:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Fadd:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Fsub:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Fmul:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Fdiv:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Frem:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
}

void TargetARM32::lowerAssign(const InstAssign *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerBr(const InstBr *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerCall(const InstCall *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerCast(const InstCast *Inst) {
  InstCast::OpKind CastKind = Inst->getCastKind();
  switch (CastKind) {
  default:
    Func->setError("Cast type not supported");
    return;
  case InstCast::Sext: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Zext: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Trunc: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Fptrunc:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstCast::Fpext: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Fptosi:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstCast::Fptoui:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstCast::Sitofp:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstCast::Uitofp: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Bitcast: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  }
}

void TargetARM32::lowerExtractElement(const InstExtractElement *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerFcmp(const InstFcmp *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerIcmp(const InstIcmp *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerInsertElement(const InstInsertElement *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerIntrinsicCall(const InstIntrinsicCall *Instr) {
  switch (Intrinsics::IntrinsicID ID = Instr->getIntrinsicInfo().ID) {
  case Intrinsics::AtomicCmpxchg: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::AtomicFence:
    UnimplementedError(Func->getContext()->getFlags());
    return;
  case Intrinsics::AtomicFenceAll:
    // NOTE: FenceAll should prevent any load/store from being moved
    // across the fence (both atomic and non-atomic). The InstARM32Mfence
    // instruction is currently marked coarsely as "HasSideEffects".
    UnimplementedError(Func->getContext()->getFlags());
    return;
  case Intrinsics::AtomicIsLockFree: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::AtomicLoad: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::AtomicRMW:
    UnimplementedError(Func->getContext()->getFlags());
    return;
  case Intrinsics::AtomicStore: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Bswap: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Ctpop: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Ctlz: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Cttz: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Fabs: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Longjmp: {
    InstCall *Call = makeHelperCall(H_call_longjmp, nullptr, 2);
    Call->addArg(Instr->getArg(0));
    Call->addArg(Instr->getArg(1));
    lowerCall(Call);
    return;
  }
  case Intrinsics::Memcpy: {
    // In the future, we could potentially emit an inline memcpy/memset, etc.
    // for intrinsic calls w/ a known length.
    InstCall *Call = makeHelperCall(H_call_memcpy, nullptr, 3);
    Call->addArg(Instr->getArg(0));
    Call->addArg(Instr->getArg(1));
    Call->addArg(Instr->getArg(2));
    lowerCall(Call);
    return;
  }
  case Intrinsics::Memmove: {
    InstCall *Call = makeHelperCall(H_call_memmove, nullptr, 3);
    Call->addArg(Instr->getArg(0));
    Call->addArg(Instr->getArg(1));
    Call->addArg(Instr->getArg(2));
    lowerCall(Call);
    return;
  }
  case Intrinsics::Memset: {
    // The value operand needs to be extended to a stack slot size
    // because the PNaCl ABI requires arguments to be at least 32 bits
    // wide.
    Operand *ValOp = Instr->getArg(1);
    assert(ValOp->getType() == IceType_i8);
    Variable *ValExt = Func->makeVariable(stackSlotType());
    lowerCast(InstCast::create(Func, InstCast::Zext, ValExt, ValOp));
    InstCall *Call = makeHelperCall(H_call_memset, nullptr, 3);
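    // The helper takes (dest, value, size), mirroring the C library memset
    // signature, with the i8 value passed as the zero-extended ValExt.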
    Call->addArg(Instr->getArg(0));
    Call->addArg(ValExt);
    Call->addArg(Instr->getArg(2));
    lowerCall(Call);
    return;
  }
  case Intrinsics::NaClReadTP: {
    if (Ctx->getFlags().getUseSandboxing()) {
      UnimplementedError(Func->getContext()->getFlags());
    } else {
      InstCall *Call = makeHelperCall(H_call_read_tp, Instr->getDest(), 0);
      lowerCall(Call);
    }
    return;
  }
  case Intrinsics::Setjmp: {
    InstCall *Call = makeHelperCall(H_call_setjmp, Instr->getDest(), 1);
    Call->addArg(Instr->getArg(0));
    lowerCall(Call);
    return;
  }
  case Intrinsics::Sqrt: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Stacksave: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Stackrestore: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Trap:
    UnimplementedError(Func->getContext()->getFlags());
    return;
  case Intrinsics::UnknownIntrinsic:
    Func->setError("Should not be lowering UnknownIntrinsic");
    return;
  }
  return;
}

void TargetARM32::lowerLoad(const InstLoad *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::doAddressOptLoad() {
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::randomlyInsertNop(float Probability) {
  RandomNumberGeneratorWrapper RNG(Ctx->getRNG());
  if (RNG.getTrueWithProbability(Probability)) {
    UnimplementedError(Func->getContext()->getFlags());
  }
}

void TargetARM32::lowerPhi(const InstPhi * /*Inst*/) {
  Func->setError("Phi found in regular instruction list");
}

void TargetARM32::lowerRet(const InstRet *Inst) {
  Variable *Reg = nullptr;
  if (Inst->hasRetValue()) {
    UnimplementedError(Func->getContext()->getFlags());
  }
  // Add a ret instruction even if sandboxing is enabled, because
  // addEpilog explicitly looks for a ret instruction as a marker for
  // where to insert the frame removal instructions.
  // addEpilog is responsible for restoring the "lr" register as needed
  // prior to this ret instruction.
  _ret(getPhysicalRegister(RegARM32::Reg_lr), Reg);
  // Add a fake use of sp to make sure sp stays alive for the entire
  // function. Otherwise post-call sp adjustments get dead-code
  // eliminated. TODO: Are there more places where the fake use
  // should be inserted? E.g. "void f(int n){while(1) g(n);}" may not
  // have a ret instruction.
  Variable *SP = Func->getTarget()->getPhysicalRegister(RegARM32::Reg_sp);
  Context.insert(InstFakeUse::create(Func, SP));
}

void TargetARM32::lowerSelect(const InstSelect *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerStore(const InstStore *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::doAddressOptStore() {
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerSwitch(const InstSwitch *Inst) {
  (void)Inst;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::lowerUnreachable(const InstUnreachable * /*Inst*/) {
  llvm_unreachable("Not yet implemented");
}

// Turn an i64 Phi instruction into a pair of i32 Phi instructions, to
// preserve integrity of liveness analysis. Undef values are also
// turned into zeroes, since loOperand() and hiOperand() don't expect
// Undef input.
void TargetARM32::prelowerPhis() {
  UnimplementedError(Func->getContext()->getFlags());
}

// Lower the pre-ordered list of assignments into mov instructions.
// Also has to do some ad-hoc register allocation as necessary.
void TargetARM32::lowerPhiAssignments(CfgNode *Node,
                                      const AssignList &Assignments) {
  (void)Node;
  (void)Assignments;
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::postLower() {
  if (Ctx->getFlags().getOptLevel() == Opt_m1)
    return;
  // Find two-address non-SSA instructions where Dest==Src0, and set
  // the DestNonKillable flag to keep liveness analysis consistent.
  UnimplementedError(Func->getContext()->getFlags());
}

void TargetARM32::makeRandomRegisterPermutation(
    llvm::SmallVectorImpl<int32_t> &Permutation,
    const llvm::SmallBitVector &ExcludeRegisters) const {
  (void)Permutation;
  (void)ExcludeRegisters;
  UnimplementedError(Func->getContext()->getFlags());
}

/* TODO(jvoung): avoid duplicate symbols with multiple targets.
void ConstantUndef::emitWithoutDollar(GlobalContext *) const {
  llvm_unreachable("Not expecting to emitWithoutDollar undef");
}

void ConstantUndef::emit(GlobalContext *) const {
  llvm_unreachable("undef value encountered by emitter.");
}
*/

TargetDataARM32::TargetDataARM32(GlobalContext *Ctx)
    : TargetDataLowering(Ctx) {}

void TargetDataARM32::lowerGlobal(const VariableDeclaration &Var) const {
  (void)Var;
  UnimplementedError(Ctx->getFlags());
}

void TargetDataARM32::lowerGlobals(
    std::unique_ptr<VariableDeclarationList> Vars) const {
  switch (Ctx->getFlags().getOutFileType()) {
  case FT_Elf: {
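    // ELF output: write the data section directly, using the 32-bit absolute
    // relocation type (R_ARM_ABS32) for relocatable initializers.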
    ELFObjectWriter *Writer = Ctx->getObjectWriter();
    Writer->writeDataSection(*Vars, llvm::ELF::R_ARM_ABS32);
  } break;
  case FT_Asm:
  case FT_Iasm: {
    const IceString &TranslateOnly = Ctx->getFlags().getTranslateOnly();
    OstreamLocker L(Ctx);
    for (const VariableDeclaration *Var : *Vars) {
      if (GlobalContext::matchSymbolName(Var->getName(), TranslateOnly)) {
        lowerGlobal(*Var);
      }
    }
  } break;
  }
}

void TargetDataARM32::lowerConstants() const {
  if (Ctx->getFlags().getDisableTranslation())
    return;
  UnimplementedError(Ctx->getFlags());
}

} // end of namespace Ice