blob: 99c992827475a222248838f4f07d062e42dd367f [file] [log] [blame]
Jim Stichnoth6da4cef2015-06-11 13:26:33 -07001//===- subzero/src/IceTargetLoweringMIPS32.cpp - MIPS32 lowering ----------===//
2//
3// The Subzero Code Generator
4//
5// This file is distributed under the University of Illinois Open Source
6// License. See LICENSE.TXT for details.
7//
8//===----------------------------------------------------------------------===//
Andrew Scull9612d322015-07-06 14:53:25 -07009///
10/// \file
11/// This file implements the TargetLoweringMIPS32 class, which consists almost
12/// entirely of the lowering sequence for each high-level instruction.
13///
Jim Stichnoth6da4cef2015-06-11 13:26:33 -070014//===----------------------------------------------------------------------===//
15
John Porto67f8de92015-06-25 10:14:17 -070016#include "IceTargetLoweringMIPS32.h"
Jim Stichnoth6da4cef2015-06-11 13:26:33 -070017
18#include "IceCfg.h"
19#include "IceCfgNode.h"
20#include "IceClFlags.h"
21#include "IceDefs.h"
22#include "IceELFObjectWriter.h"
23#include "IceGlobalInits.h"
24#include "IceInstMIPS32.h"
25#include "IceLiveness.h"
26#include "IceOperand.h"
27#include "IceRegistersMIPS32.h"
28#include "IceTargetLoweringMIPS32.def"
Jim Stichnoth6da4cef2015-06-11 13:26:33 -070029#include "IceUtils.h"
John Porto67f8de92015-06-25 10:14:17 -070030#include "llvm/Support/MathExtras.h"
Jim Stichnoth6da4cef2015-06-11 13:26:33 -070031
32namespace Ice {
33
Reed Kotlerd00d48d2015-07-08 09:49:07 -070034namespace {
35void UnimplementedError(const ClFlags &Flags) {
36 if (!Flags.getSkipUnimplemented()) {
37 // Use llvm_unreachable instead of report_fatal_error, which gives better
38 // stack traces.
39 llvm_unreachable("Not yet implemented");
40 abort();
41 }
42}
43} // end of anonymous namespace
44
/// Construct the MIPS32 target lowering. Builds the per-type register sets
/// and the scratch-register mask from the register table in
/// IceTargetLoweringMIPS32.def.
TargetMIPS32::TargetMIPS32(Cfg *Func) : TargetLowering(Func) {
  // TODO: Don't initialize IntegerRegisters and friends every time.
  // Instead, initialize in some sort of static initializer for the
  // class.
  llvm::SmallBitVector IntegerRegisters(RegMIPS32::Reg_NUM);
  llvm::SmallBitVector FloatRegisters(RegMIPS32::Reg_NUM);
  llvm::SmallBitVector VectorRegisters(RegMIPS32::Reg_NUM);
  llvm::SmallBitVector InvalidRegisters(RegMIPS32::Reg_NUM);
  ScratchRegs.resize(RegMIPS32::Reg_NUM);
// Populate one bit per register from the table. Note that the vector set is
// aliased to the FP registers (isFP) — there is no separate vector register
// attribute in the table.
#define X(val, encode, name, scratch, preserved, stackptr, frameptr, isInt,    \
          isFP)                                                                \
  IntegerRegisters[RegMIPS32::val] = isInt;                                    \
  FloatRegisters[RegMIPS32::val] = isFP;                                       \
  VectorRegisters[RegMIPS32::val] = isFP;                                      \
  ScratchRegs[RegMIPS32::val] = scratch;
  REGMIPS32_TABLE;
#undef X
  // Map each IceType to the register class that can hold it. void gets the
  // (empty) invalid set.
  TypeToRegisterSet[IceType_void] = InvalidRegisters;
  TypeToRegisterSet[IceType_i1] = IntegerRegisters;
  TypeToRegisterSet[IceType_i8] = IntegerRegisters;
  TypeToRegisterSet[IceType_i16] = IntegerRegisters;
  TypeToRegisterSet[IceType_i32] = IntegerRegisters;
  TypeToRegisterSet[IceType_i64] = IntegerRegisters;
  TypeToRegisterSet[IceType_f32] = FloatRegisters;
  TypeToRegisterSet[IceType_f64] = FloatRegisters;
  TypeToRegisterSet[IceType_v4i1] = VectorRegisters;
  TypeToRegisterSet[IceType_v8i1] = VectorRegisters;
  TypeToRegisterSet[IceType_v16i1] = VectorRegisters;
  TypeToRegisterSet[IceType_v16i8] = VectorRegisters;
  TypeToRegisterSet[IceType_v8i16] = VectorRegisters;
  TypeToRegisterSet[IceType_v4i32] = VectorRegisters;
  TypeToRegisterSet[IceType_v4f32] = VectorRegisters;
}
78
/// Run the -O2 translation pipeline: Phi lowering, address-mode optimization,
/// argument lowering, target code generation, liveness-based register
/// allocation, stack frame mapping, branch optimization, and optional nop
/// insertion. Bails out as soon as any pass records an error on the Cfg.
void TargetMIPS32::translateO2() {
  TimerMarker T(TimerStack::TT_O2, Func);

  // TODO(stichnot): share passes with X86?
  // https://code.google.com/p/nativeclient/issues/detail?id=4094

  if (!Ctx->getFlags().getPhiEdgeSplit()) {
    // Lower Phi instructions.
    Func->placePhiLoads();
    if (Func->hasError())
      return;
    Func->placePhiStores();
    if (Func->hasError())
      return;
    Func->deletePhis();
    if (Func->hasError())
      return;
    Func->dump("After Phi lowering");
  }

  // Address mode optimization.
  Func->getVMetadata()->init(VMK_SingleDefs);
  Func->doAddressOpt();

  // Argument lowering
  Func->doArgLowering();

  // Target lowering. This requires liveness analysis for some parts
  // of the lowering decisions, such as compare/branch fusing. If
  // non-lightweight liveness analysis is used, the instructions need
  // to be renumbered first. TODO: This renumbering should only be
  // necessary if we're actually calculating live intervals, which we
  // only do for register allocation.
  Func->renumberInstructions();
  if (Func->hasError())
    return;

  // TODO: It should be sufficient to use the fastest liveness
  // calculation, i.e. livenessLightweight(). However, for some
  // reason that slows down the rest of the translation. Investigate.
  Func->liveness(Liveness_Basic);
  if (Func->hasError())
    return;
  Func->dump("After MIPS32 address mode opt");

  Func->genCode();
  if (Func->hasError())
    return;
  Func->dump("After MIPS32 codegen");

  // Register allocation. This requires instruction renumbering and
  // full liveness analysis.
  Func->renumberInstructions();
  if (Func->hasError())
    return;
  Func->liveness(Liveness_Intervals);
  if (Func->hasError())
    return;
  // Validate the live range computations. The expensive validation
  // call is deliberately only made when assertions are enabled.
  assert(Func->validateLiveness());
  // The post-codegen dump is done here, after liveness analysis and
  // associated cleanup, to make the dump cleaner and more useful.
  Func->dump("After initial MIPS32 codegen");
  Func->getVMetadata()->init(VMK_All);
  regAlloc(RAK_Global);
  if (Func->hasError())
    return;
  Func->dump("After linear scan regalloc");

  if (Ctx->getFlags().getPhiEdgeSplit()) {
    Func->advancedPhiLowering();
    Func->dump("After advanced Phi lowering");
  }

  // Stack frame mapping.
  Func->genFrame();
  if (Func->hasError())
    return;
  Func->dump("After stack frame mapping");

  Func->contractEmptyNodes();
  Func->reorderNodes();

  // Branch optimization. This needs to be done just before code
  // emission. In particular, no transformations that insert or
  // reorder CfgNodes should be done after branch optimization. We go
  // ahead and do it before nop insertion to reduce the amount of work
  // needed for searching for opportunities.
  Func->doBranchOpt();
  Func->dump("After branch optimization");

  // Nop insertion
  if (Ctx->getFlags().shouldDoNopInsertion()) {
    Func->doNopInsertion();
  }
}
176
/// Run the minimal -Om1 translation pipeline: Phi lowering, argument
/// lowering, code generation, register allocation restricted to
/// infinite-weight variables, stack frame mapping, and optional nop
/// insertion. Bails out as soon as any pass records an error on the Cfg.
void TargetMIPS32::translateOm1() {
  TimerMarker T(TimerStack::TT_Om1, Func);

  // TODO: share passes with X86?

  Func->placePhiLoads();
  if (Func->hasError())
    return;
  Func->placePhiStores();
  if (Func->hasError())
    return;
  Func->deletePhis();
  if (Func->hasError())
    return;
  Func->dump("After Phi lowering");

  Func->doArgLowering();

  Func->genCode();
  if (Func->hasError())
    return;
  Func->dump("After initial MIPS32 codegen");

  regAlloc(RAK_InfOnly);
  if (Func->hasError())
    return;
  Func->dump("After regalloc of infinite-weight variables");

  Func->genFrame();
  if (Func->hasError())
    return;
  Func->dump("After stack frame mapping");

  // Nop insertion
  if (Ctx->getFlags().shouldDoNopInsertion()) {
    Func->doNopInsertion();
  }
}
215
216bool TargetMIPS32::doBranchOpt(Inst *I, const CfgNode *NextNode) {
217 (void)I;
218 (void)NextNode;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700219 UnimplementedError(Func->getContext()->getFlags());
220 return false;
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700221}
222
/// Return the printable name for the given physical register number. The
/// name table is generated from the register table in the .def file, so its
/// order matches the RegMIPS32 enumeration. Ty is currently ignored — the
/// register name does not vary by type here. Note that RegNum is only
/// bounds-checked by the assert in debug builds.
IceString TargetMIPS32::getRegName(SizeT RegNum, Type Ty) const {
  assert(RegNum < RegMIPS32::Reg_NUM);
  (void)Ty;
  static const char *RegNames[] = {
#define X(val, encode, name, scratch, preserved, stackptr, frameptr, isInt,    \
          isFP)                                                                \
  name,
      REGMIPS32_TABLE
#undef X
  };
  return RegNames[RegNum];
}
235
236Variable *TargetMIPS32::getPhysicalRegister(SizeT RegNum, Type Ty) {
237 if (Ty == IceType_void)
238 Ty = IceType_i32;
239 if (PhysicalRegisters[Ty].empty())
240 PhysicalRegisters[Ty].resize(RegMIPS32::Reg_NUM);
241 assert(RegNum < PhysicalRegisters[Ty].size());
242 Variable *Reg = PhysicalRegisters[Ty][RegNum];
243 if (Reg == nullptr) {
244 Reg = Func->makeVariable(Ty);
245 Reg->setRegNum(RegNum);
246 PhysicalRegisters[Ty][RegNum] = Reg;
247 // Specially mark SP as an "argument" so that it is considered
248 // live upon function entry.
249 if (RegNum == RegMIPS32::Reg_SP) {
250 Func->addImplicitArg(Reg);
251 Reg->setIgnoreLiveness();
252 }
253 }
254 return Reg;
255}
256
257void TargetMIPS32::emitVariable(const Variable *Var) const {
258 Ostream &Str = Ctx->getStrEmit();
259 (void)Var;
260 (void)Str;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700261 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700262}
263
264void TargetMIPS32::lowerArguments() {
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700265 VarList &Args = Func->getArgs();
266 if (Args.size() > 0)
267 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700268}
269
// Stack slots are always addressed as 32-bit words on MIPS32.
Type TargetMIPS32::stackSlotType() { return IceType_i32; }
271
272void TargetMIPS32::addProlog(CfgNode *Node) {
273 (void)Node;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700274 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700275}
276
277void TargetMIPS32::addEpilog(CfgNode *Node) {
278 (void)Node;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700279 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700280}
281
/// Build the bit vector of physical registers selected by the Include mask
/// and not removed by the Exclude mask. Each register-table attribute
/// (caller-save/scratch, callee-save/preserved, stack pointer, frame
/// pointer) is tested independently; Exclude is applied after Include, so
/// exclusion wins when a register matches both masks.
llvm::SmallBitVector TargetMIPS32::getRegisterSet(RegSetMask Include,
                                                  RegSetMask Exclude) const {
  llvm::SmallBitVector Registers(RegMIPS32::Reg_NUM);

#define X(val, encode, name, scratch, preserved, stackptr, frameptr, isInt,    \
          isFP)                                                                \
  if (scratch && (Include & RegSet_CallerSave))                                \
    Registers[RegMIPS32::val] = true;                                          \
  if (preserved && (Include & RegSet_CalleeSave))                              \
    Registers[RegMIPS32::val] = true;                                          \
  if (stackptr && (Include & RegSet_StackPointer))                             \
    Registers[RegMIPS32::val] = true;                                          \
  if (frameptr && (Include & RegSet_FramePointer))                             \
    Registers[RegMIPS32::val] = true;                                          \
  if (scratch && (Exclude & RegSet_CallerSave))                                \
    Registers[RegMIPS32::val] = false;                                         \
  if (preserved && (Exclude & RegSet_CalleeSave))                              \
    Registers[RegMIPS32::val] = false;                                         \
  if (stackptr && (Exclude & RegSet_StackPointer))                             \
    Registers[RegMIPS32::val] = false;                                         \
  if (frameptr && (Exclude & RegSet_FramePointer))                             \
    Registers[RegMIPS32::val] = false;

  REGMIPS32_TABLE

#undef X

  return Registers;
}
311
312void TargetMIPS32::lowerAlloca(const InstAlloca *Inst) {
313 UsesFramePointer = true;
314 // Conservatively require the stack to be aligned. Some stack
315 // adjustment operations implemented below assume that the stack is
316 // aligned before the alloca. All the alloca code ensures that the
317 // stack alignment is preserved after the alloca. The stack alignment
318 // restriction can be relaxed in some cases.
319 NeedsStackAlignment = true;
320 (void)Inst;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700321 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700322}
323
/// Lower an arithmetic instruction. No MIPS32 arithmetic lowering exists
/// yet: every opcode reports an unimplemented error. The switch is written
/// out case-by-case so that each opcode has a dedicated spot for its future
/// lowering sequence.
void TargetMIPS32::lowerArithmetic(const InstArithmetic *Inst) {
  switch (Inst->getOp()) {
  case InstArithmetic::_num:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Add:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::And:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Or:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Xor:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Sub:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Mul:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Shl:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Lshr:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Ashr:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Udiv:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Sdiv:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Urem:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Srem:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Fadd:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Fsub:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Fmul:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Fdiv:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstArithmetic::Frem:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
}
385
386void TargetMIPS32::lowerAssign(const InstAssign *Inst) {
387 (void)Inst;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700388 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700389}
390
391void TargetMIPS32::lowerBr(const InstBr *Inst) {
392 (void)Inst;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700393 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700394}
395
396void TargetMIPS32::lowerCall(const InstCall *Inst) {
397 (void)Inst;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700398 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700399}
400
/// Lower a cast instruction. No cast lowering exists for MIPS32 yet: every
/// supported cast kind reports an unimplemented error, and unknown kinds
/// record an error on the Cfg. The switch is kept case-by-case so each cast
/// kind has a dedicated spot for its future lowering sequence.
void TargetMIPS32::lowerCast(const InstCast *Inst) {
  InstCast::OpKind CastKind = Inst->getCastKind();
  switch (CastKind) {
  default:
    Func->setError("Cast type not supported");
    return;
  case InstCast::Sext: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Zext: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Trunc: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Fptrunc:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstCast::Fpext: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Fptosi:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstCast::Fptoui:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstCast::Sitofp:
    UnimplementedError(Func->getContext()->getFlags());
    break;
  case InstCast::Uitofp: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  case InstCast::Bitcast: {
    UnimplementedError(Func->getContext()->getFlags());
    break;
  }
  }
}
445
446void TargetMIPS32::lowerExtractElement(const InstExtractElement *Inst) {
447 (void)Inst;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700448 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700449}
450
451void TargetMIPS32::lowerFcmp(const InstFcmp *Inst) {
452 (void)Inst;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700453 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700454}
455
456void TargetMIPS32::lowerIcmp(const InstIcmp *Inst) {
457 (void)Inst;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700458 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700459}
460
461void TargetMIPS32::lowerInsertElement(const InstInsertElement *Inst) {
462 (void)Inst;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700463 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700464}
465
/// Lower an intrinsic call. Intrinsics that map directly to runtime helpers
/// (longjmp, memcpy, memmove, memset, setjmp, and non-sandboxed read_tp) are
/// lowered to helper calls here; everything else is not yet implemented and
/// reports an unimplemented error.
void TargetMIPS32::lowerIntrinsicCall(const InstIntrinsicCall *Instr) {
  switch (Intrinsics::IntrinsicID ID = Instr->getIntrinsicInfo().ID) {
  case Intrinsics::AtomicCmpxchg: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::AtomicFence:
    UnimplementedError(Func->getContext()->getFlags());
    return;
  case Intrinsics::AtomicFenceAll:
    // NOTE: FenceAll should prevent and load/store from being moved
    // across the fence (both atomic and non-atomic). The InstMIPS32Mfence
    // instruction is currently marked coarsely as "HasSideEffects".
    UnimplementedError(Func->getContext()->getFlags());
    return;
  case Intrinsics::AtomicIsLockFree: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::AtomicLoad: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::AtomicRMW:
    UnimplementedError(Func->getContext()->getFlags());
    return;
  case Intrinsics::AtomicStore: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Bswap: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Ctpop: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Ctlz: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Cttz: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Fabs: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Longjmp: {
    // longjmp(env, val) -> helper call with both arguments forwarded.
    InstCall *Call = makeHelperCall(H_call_longjmp, nullptr, 2);
    Call->addArg(Instr->getArg(0));
    Call->addArg(Instr->getArg(1));
    lowerCall(Call);
    return;
  }
  case Intrinsics::Memcpy: {
    // In the future, we could potentially emit an inline memcpy/memset, etc.
    // for intrinsic calls w/ a known length.
    InstCall *Call = makeHelperCall(H_call_memcpy, nullptr, 3);
    Call->addArg(Instr->getArg(0));
    Call->addArg(Instr->getArg(1));
    Call->addArg(Instr->getArg(2));
    lowerCall(Call);
    return;
  }
  case Intrinsics::Memmove: {
    InstCall *Call = makeHelperCall(H_call_memmove, nullptr, 3);
    Call->addArg(Instr->getArg(0));
    Call->addArg(Instr->getArg(1));
    Call->addArg(Instr->getArg(2));
    lowerCall(Call);
    return;
  }
  case Intrinsics::Memset: {
    // The value operand needs to be extended to a stack slot size
    // because the PNaCl ABI requires arguments to be at least 32 bits
    // wide.
    Operand *ValOp = Instr->getArg(1);
    assert(ValOp->getType() == IceType_i8);
    Variable *ValExt = Func->makeVariable(stackSlotType());
    lowerCast(InstCast::create(Func, InstCast::Zext, ValExt, ValOp));
    InstCall *Call = makeHelperCall(H_call_memset, nullptr, 3);
    Call->addArg(Instr->getArg(0));
    Call->addArg(ValExt);
    Call->addArg(Instr->getArg(2));
    lowerCall(Call);
    return;
  }
  case Intrinsics::NaClReadTP: {
    // Sandboxed thread-pointer access is not implemented; the non-sandboxed
    // case goes through the read_tp helper.
    if (Ctx->getFlags().getUseSandboxing()) {
      UnimplementedError(Func->getContext()->getFlags());
    } else {
      InstCall *Call = makeHelperCall(H_call_read_tp, Instr->getDest(), 0);
      lowerCall(Call);
    }
    return;
  }
  case Intrinsics::Setjmp: {
    InstCall *Call = makeHelperCall(H_call_setjmp, Instr->getDest(), 1);
    Call->addArg(Instr->getArg(0));
    lowerCall(Call);
    return;
  }
  case Intrinsics::Sqrt: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Stacksave: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Stackrestore: {
    UnimplementedError(Func->getContext()->getFlags());
    return;
  }
  case Intrinsics::Trap:
    UnimplementedError(Func->getContext()->getFlags());
    return;
  case Intrinsics::UnknownIntrinsic:
    Func->setError("Should not be lowering UnknownIntrinsic");
    return;
  }
  return;
}
592
593void TargetMIPS32::lowerLoad(const InstLoad *Inst) {
594 (void)Inst;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700595 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700596}
597
// Address-mode optimization for loads. Not implemented for MIPS32 yet.
void TargetMIPS32::doAddressOptLoad() {
  UnimplementedError(Func->getContext()->getFlags());
}
601
602void TargetMIPS32::randomlyInsertNop(float Probability) {
603 RandomNumberGeneratorWrapper RNG(Ctx->getRNG());
604 if (RNG.getTrueWithProbability(Probability)) {
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700605 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700606 }
607}
608
// Phi instructions are expected to have been lowered/pre-lowered before
// regular lowering runs, so encountering one here is a Cfg error.
void TargetMIPS32::lowerPhi(const InstPhi * /*Inst*/) {
  Func->setError("Phi found in regular instruction list");
}
612
613void TargetMIPS32::lowerRet(const InstRet *Inst) {
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700614 Variable *Reg = nullptr;
615 if (Inst->hasRetValue())
616 UnimplementedError(Func->getContext()->getFlags());
617 _ret(getPhysicalRegister(RegMIPS32::Reg_RA), Reg);
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700618}
619
620void TargetMIPS32::lowerSelect(const InstSelect *Inst) {
621 (void)Inst;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700622 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700623}
624
625void TargetMIPS32::lowerStore(const InstStore *Inst) {
626 (void)Inst;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700627 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700628}
629
// Address-mode optimization for stores. Not implemented for MIPS32 yet.
void TargetMIPS32::doAddressOptStore() {
  UnimplementedError(Func->getContext()->getFlags());
}
633
634void TargetMIPS32::lowerSwitch(const InstSwitch *Inst) {
635 (void)Inst;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700636 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700637}
638
// Lower an unreachable instruction. Not implemented for MIPS32 yet.
void TargetMIPS32::lowerUnreachable(const InstUnreachable * /*Inst*/) {
  UnimplementedError(Func->getContext()->getFlags());
}
642
// Turn an i64 Phi instruction into a pair of i32 Phi instructions, to
// preserve integrity of liveness analysis. Undef values are also
// turned into zeroes, since loOperand() and hiOperand() don't expect
// Undef input.
// Not implemented for MIPS32 yet; reports an unimplemented error.
void TargetMIPS32::prelowerPhis() {
  UnimplementedError(Func->getContext()->getFlags());
}
650
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700651void TargetMIPS32::postLower() {
652 if (Ctx->getFlags().getOptLevel() == Opt_m1)
653 return;
654 // Find two-address non-SSA instructions where Dest==Src0, and set
655 // the DestNonKillable flag to keep liveness analysis consistent.
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700656 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700657}
658
659void TargetMIPS32::makeRandomRegisterPermutation(
660 llvm::SmallVectorImpl<int32_t> &Permutation,
661 const llvm::SmallBitVector &ExcludeRegisters) const {
662 (void)Permutation;
663 (void)ExcludeRegisters;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700664 UnimplementedError(Func->getContext()->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700665}
666
667/* TODO(jvoung): avoid duplicate symbols with multiple targets.
668void ConstantUndef::emitWithoutDollar(GlobalContext *) const {
669 llvm_unreachable("Not expecting to emitWithoutDollar undef");
670}
671
672void ConstantUndef::emit(GlobalContext *) const {
673 llvm_unreachable("undef value encountered by emitter.");
674}
675*/
676
// Construct the MIPS32 data lowering; all state lives in the base class.
TargetDataMIPS32::TargetDataMIPS32(GlobalContext *Ctx)
    : TargetDataLowering(Ctx) {}
679
/// Emit the given global variable declarations, dispatching on the output
/// file type. ELF output goes straight to the object writer's data section
/// (with MIPS GLOB_DAT relocations); textual assembly output emits each
/// global individually, filtered by the -translate-only flag.
void TargetDataMIPS32::lowerGlobals(const VariableDeclarationList &Vars,
                                    const IceString &SectionSuffix) {
  switch (Ctx->getFlags().getOutFileType()) {
  case FT_Elf: {
    ELFObjectWriter *Writer = Ctx->getObjectWriter();
    Writer->writeDataSection(Vars, llvm::ELF::R_MIPS_GLOB_DAT, SectionSuffix);
  } break;
  case FT_Asm:
  case FT_Iasm: {
    const IceString &TranslateOnly = Ctx->getFlags().getTranslateOnly();
    // Hold the stream lock for the duration of the textual emission.
    OstreamLocker L(Ctx);
    for (const VariableDeclaration *Var : Vars) {
      if (GlobalContext::matchSymbolName(Var->getName(), TranslateOnly)) {
        emitGlobal(*Var, SectionSuffix);
      }
    }
  } break;
  }
}
699
John Porto0f86d032015-06-15 07:44:27 -0700700void TargetDataMIPS32::lowerConstants() {
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700701 if (Ctx->getFlags().getDisableTranslation())
702 return;
Reed Kotlerd00d48d2015-07-08 09:49:07 -0700703 UnimplementedError(Ctx->getFlags());
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700704}
705
// Construct the MIPS32 header lowering; all state lives in the base class.
TargetHeaderMIPS32::TargetHeaderMIPS32(GlobalContext *Ctx)
    : TargetHeaderLowering(Ctx) {}
708
Jim Stichnoth6da4cef2015-06-11 13:26:33 -0700709} // end of namespace Ice