Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 1 | //===- subzero/src/IceTargetLowering.h - Lowering interface -----*- C++ -*-===// |
| 2 | // |
| 3 | // The Subzero Code Generator |
| 4 | // |
| 5 | // This file is distributed under the University of Illinois Open Source |
| 6 | // License. See LICENSE.TXT for details. |
| 7 | // |
| 8 | //===----------------------------------------------------------------------===// |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 9 | /// |
| 10 | /// \file |
Jim Stichnoth | 92a6e5b | 2015-12-02 16:52:44 -0800 | [diff] [blame] | 11 | /// \brief Declares the TargetLowering, LoweringContext, and TargetDataLowering |
| 12 | /// classes. |
| 13 | /// |
| 14 | /// TargetLowering is an abstract class used to drive the translation/lowering |
| 15 | /// process. LoweringContext maintains a context for lowering each instruction, |
| 16 | /// offering conveniences such as iterating over non-deleted instructions. |
| 17 | /// TargetDataLowering is an abstract class used to drive the lowering/emission |
| 18 | /// of global initializers, external global declarations, and internal constant |
| 19 | /// pools. |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 20 | /// |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 21 | //===----------------------------------------------------------------------===// |
| 22 | |
| 23 | #ifndef SUBZERO_SRC_ICETARGETLOWERING_H |
| 24 | #define SUBZERO_SRC_ICETARGETLOWERING_H |
| 25 | |
John Porto | e82b560 | 2016-02-24 15:58:55 -0800 | [diff] [blame] | 26 | #include "IceBitVector.h" |
| 27 | #include "IceCfgNode.h" |
Manasij Mukherjee | 7cd926d | 2016-08-04 12:33:23 -0700 | [diff] [blame] | 28 | #include "IceDefs.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 29 | #include "IceInst.h" // for the names of the Inst subtypes |
Jan Voung | 76bb0be | 2015-05-14 09:26:19 -0700 | [diff] [blame] | 30 | #include "IceOperand.h" |
Manasij Mukherjee | 7cd926d | 2016-08-04 12:33:23 -0700 | [diff] [blame] | 31 | #include "IceRegAlloc.h" |
Jim Stichnoth | a18cc9c | 2014-09-30 19:10:22 -0700 | [diff] [blame] | 32 | #include "IceTypes.h" |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 33 | |
John Porto | 1d937a8 | 2015-12-17 06:19:34 -0800 | [diff] [blame] | 34 | #include <utility> |
| 35 | |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 36 | namespace Ice { |
| 37 | |
// UnimplementedError is defined as a macro (rather than a helper function) so
// that the error is reported with the actual file/line of the expansion site,
// giving usable line numbers in the resulting diagnostic. It is a no-op when
// the --skip-unimplemented flag is set.
#define UnimplementedError(Flags)                                              \
  do {                                                                         \
    if (!static_cast<const ClFlags &>(Flags).getSkipUnimplemented()) {         \
      /* Use llvm_unreachable instead of report_fatal_error, which gives       \
         better stack traces. */                                               \
      llvm_unreachable("Not yet implemented");                                 \
      abort();                                                                 \
    }                                                                          \
  } while (0)
| 49 | |
// UnimplementedLoweringError is similar in style to UnimplementedError. Given
// a TargetLowering object pointer and an Inst pointer, when skipping
// unimplemented lowerings it adds appropriate FakeDef and FakeUse
// instructions (via addFakeDefUses) to try to maintain liveness consistency;
// otherwise it aborts, reporting the unimplemented instruction's name.
#define UnimplementedLoweringError(Target, Instr)                              \
  do {                                                                         \
    if (getFlags().getSkipUnimplemented()) {                                   \
      (Target)->addFakeDefUses(Instr);                                         \
    } else {                                                                   \
      /* Use llvm_unreachable instead of report_fatal_error, which gives       \
         better stack traces. */                                               \
      llvm_unreachable(                                                        \
          (std::string("Not yet implemented: ") + Instr->getInstName())        \
              .c_str());                                                       \
      abort();                                                                 \
    }                                                                          \
  } while (0)
| 66 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 67 | /// LoweringContext makes it easy to iterate through non-deleted instructions in |
| 68 | /// a node, and insert new (lowered) instructions at the current point. Along |
| 69 | /// with the instruction list container and associated iterators, it holds the |
| 70 | /// current node, which is needed when inserting new instructions in order to |
| 71 | /// track whether variables are used as single-block or multi-block. |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 72 | class LoweringContext { |
Jim Stichnoth | 7b451a9 | 2014-10-15 14:39:23 -0700 | [diff] [blame] | 73 | LoweringContext(const LoweringContext &) = delete; |
| 74 | LoweringContext &operator=(const LoweringContext &) = delete; |
| 75 | |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 76 | public: |
Jim Stichnoth | eafb56c | 2015-06-22 10:35:22 -0700 | [diff] [blame] | 77 | LoweringContext() = default; |
| 78 | ~LoweringContext() = default; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 79 | void init(CfgNode *Node); |
| 80 | Inst *getNextInst() const { |
| 81 | if (Next == End) |
Jim Stichnoth | ae95320 | 2014-12-20 06:17:49 -0800 | [diff] [blame] | 82 | return nullptr; |
Jim Stichnoth | f5fdd23 | 2016-05-09 12:24:36 -0700 | [diff] [blame] | 83 | return iteratorToInst(Next); |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 84 | } |
Jan Voung | c820ddf | 2014-07-29 14:38:51 -0700 | [diff] [blame] | 85 | Inst *getNextInst(InstList::iterator &Iter) const { |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 86 | advanceForward(Iter); |
Jan Voung | c820ddf | 2014-07-29 14:38:51 -0700 | [diff] [blame] | 87 | if (Iter == End) |
Jim Stichnoth | ae95320 | 2014-12-20 06:17:49 -0800 | [diff] [blame] | 88 | return nullptr; |
Jim Stichnoth | f5fdd23 | 2016-05-09 12:24:36 -0700 | [diff] [blame] | 89 | return iteratorToInst(Iter); |
Jan Voung | c820ddf | 2014-07-29 14:38:51 -0700 | [diff] [blame] | 90 | } |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 91 | CfgNode *getNode() const { return Node; } |
| 92 | bool atEnd() const { return Cur == End; } |
| 93 | InstList::iterator getCur() const { return Cur; } |
Jim Stichnoth | 5d2fa0c | 2014-12-01 09:30:55 -0800 | [diff] [blame] | 94 | InstList::iterator getNext() const { return Next; } |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 95 | InstList::iterator getEnd() const { return End; } |
Jim Stichnoth | 8cfeb69 | 2016-02-05 09:50:02 -0800 | [diff] [blame] | 96 | void insert(Inst *Instr); |
John Porto | 1d937a8 | 2015-12-17 06:19:34 -0800 | [diff] [blame] | 97 | template <typename Inst, typename... Args> Inst *insert(Args &&... A) { |
| 98 | auto *New = Inst::create(Node->getCfg(), std::forward<Args>(A)...); |
| 99 | insert(New); |
| 100 | return New; |
| 101 | } |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 102 | Inst *getLastInserted() const; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 103 | void advanceCur() { Cur = Next; } |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 104 | void advanceNext() { advanceForward(Next); } |
Jim Stichnoth | a3f57b9 | 2015-07-30 12:46:04 -0700 | [diff] [blame] | 105 | void setCur(InstList::iterator C) { Cur = C; } |
| 106 | void setNext(InstList::iterator N) { Next = N; } |
Jim Stichnoth | 336f6c4 | 2014-10-30 15:01:31 -0700 | [diff] [blame] | 107 | void rewind(); |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 108 | void setInsertPoint(const InstList::iterator &Position) { Next = Position; } |
Jim Stichnoth | 318f4cd | 2015-10-01 21:02:37 -0700 | [diff] [blame] | 109 | void availabilityReset(); |
| 110 | void availabilityUpdate(); |
| 111 | Variable *availabilityGet(Operand *Src) const; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 112 | |
| 113 | private: |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 114 | /// Node is the argument to Inst::updateVars(). |
Jim Stichnoth | eafb56c | 2015-06-22 10:35:22 -0700 | [diff] [blame] | 115 | CfgNode *Node = nullptr; |
| 116 | Inst *LastInserted = nullptr; |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 117 | /// Cur points to the current instruction being considered. It is guaranteed |
| 118 | /// to point to a non-deleted instruction, or to be End. |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 119 | InstList::iterator Cur; |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 120 | /// Next doubles as a pointer to the next valid instruction (if any), and the |
| 121 | /// new-instruction insertion point. It is also updated for the caller in case |
| 122 | /// the lowering consumes more than one high-level instruction. It is |
| 123 | /// guaranteed to point to a non-deleted instruction after Cur, or to be End. |
| 124 | // TODO: Consider separating the notion of "next valid instruction" and "new |
| 125 | // instruction insertion point", to avoid confusion when previously-deleted |
| 126 | // instructions come between the two points. |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 127 | InstList::iterator Next; |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 128 | /// Begin is a copy of Insts.begin(), used if iterators are moved backward. |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 129 | InstList::iterator Begin; |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 130 | /// End is a copy of Insts.end(), used if Next needs to be advanced. |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 131 | InstList::iterator End; |
Jim Stichnoth | 318f4cd | 2015-10-01 21:02:37 -0700 | [diff] [blame] | 132 | /// LastDest and LastSrc capture the parameters of the last "Dest=Src" simple |
| 133 | /// assignment inserted (provided Src is a variable). This is used for simple |
| 134 | /// availability analysis. |
| 135 | Variable *LastDest = nullptr; |
| 136 | Variable *LastSrc = nullptr; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 137 | |
Jan Voung | c820ddf | 2014-07-29 14:38:51 -0700 | [diff] [blame] | 138 | void skipDeleted(InstList::iterator &I) const; |
Jan Voung | e6e497d | 2014-07-30 10:06:03 -0700 | [diff] [blame] | 139 | void advanceForward(InstList::iterator &I) const; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 140 | }; |
| 141 | |
Jan Voung | 28068ad | 2015-07-31 12:58:46 -0700 | [diff] [blame] | 142 | /// A helper class to advance the LoweringContext at each loop iteration. |
| 143 | class PostIncrLoweringContext { |
| 144 | PostIncrLoweringContext() = delete; |
| 145 | PostIncrLoweringContext(const PostIncrLoweringContext &) = delete; |
| 146 | PostIncrLoweringContext &operator=(const PostIncrLoweringContext &) = delete; |
| 147 | |
| 148 | public: |
| 149 | explicit PostIncrLoweringContext(LoweringContext &Context) |
| 150 | : Context(Context) {} |
| 151 | ~PostIncrLoweringContext() { |
| 152 | Context.advanceCur(); |
| 153 | Context.advanceNext(); |
| 154 | } |
| 155 | |
| 156 | private: |
| 157 | LoweringContext &Context; |
| 158 | }; |
| 159 | |
John Porto | 53611e2 | 2015-12-30 07:30:10 -0800 | [diff] [blame] | 160 | /// TargetLowering is the base class for all backends in Subzero. In addition to |
| 161 | /// implementing the abstract methods in this class, each concrete target must |
| 162 | /// also implement a named constructor in its own namespace. For instance, for |
| 163 | /// X8632 we have: |
| 164 | /// |
| 165 | /// namespace X8632 { |
| 166 | /// void createTargetLowering(Cfg *Func); |
| 167 | /// } |
class TargetLowering {
  TargetLowering() = delete;
  TargetLowering(const TargetLowering &) = delete;
  TargetLowering &operator=(const TargetLowering &) = delete;

public:
  /// One-time initialization shared by all targets.
  static void staticInit(GlobalContext *Ctx);
  // Each target must define a public static method:
  // static void staticInit(GlobalContext *Ctx);
  /// Returns whether the given constant is a candidate for pooling.
  static bool shouldBePooled(const class Constant *C);
  /// Returns the target's pointer Type.
  static Type getPointerType();

  /// Factory method that dispatches to the requested target's named
  /// constructor.
  static std::unique_ptr<TargetLowering> createLowering(TargetArch Target,
                                                        Cfg *Func);

  virtual std::unique_ptr<Assembler> createAssembler() const = 0;
| 184 | |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 185 | void translate() { |
Jim Stichnoth | dd6dcfa | 2016-04-18 12:52:09 -0700 | [diff] [blame] | 186 | switch (Func->getOptLevel()) { |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 187 | case Opt_m1: |
| 188 | translateOm1(); |
| 189 | break; |
| 190 | case Opt_0: |
| 191 | translateO0(); |
| 192 | break; |
| 193 | case Opt_1: |
| 194 | translateO1(); |
| 195 | break; |
| 196 | case Opt_2: |
| 197 | translateO2(); |
| 198 | break; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 199 | } |
| 200 | } |
  // Default translateX() implementations, used when a target does not override
  // the corresponding optimization level; each one records an error on the
  // Cfg.
  virtual void translateOm1() {
    Func->setError("Target doesn't specify Om1 lowering steps.");
  }
  virtual void translateO0() {
    Func->setError("Target doesn't specify O0 lowering steps.");
  }
  virtual void translateO1() {
    Func->setError("Target doesn't specify O1 lowering steps.");
  }
  virtual void translateO2() {
    Func->setError("Target doesn't specify O2 lowering steps.");
  }

  /// Generates calls to intrinsics for operations the Target can't handle.
  void genTargetHelperCalls();
  /// Tries to do address mode optimization on a single instruction.
  void doAddressOpt();
  /// Randomly insert NOPs.
  void doNopInsertion(RandomNumberGenerator &RNG);
  /// Lowers a single non-Phi instruction.
  void lower();
  /// Inserts and lowers a single high-level instruction at a specific insertion
  /// point.
  void lowerInst(CfgNode *Node, InstList::iterator Next, InstHighLevel *Instr);
  /// Does preliminary lowering of the set of Phi instructions in the current
  /// node. The main intention is to do what's needed to keep the unlowered Phi
  /// instructions consistent with the lowered non-Phi instructions, e.g. to
  /// lower 64-bit operands on a 32-bit target.
  virtual void prelowerPhis() {}
  /// Tries to do branch optimization on a single instruction. Returns true if
  /// some optimization was done.
  virtual bool doBranchOpt(Inst * /*I*/, const CfgNode * /*NextNode*/) {
    return false;
  }
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 235 | |
  /// Returns the number of physical registers the target defines.
  virtual SizeT getNumRegisters() const = 0;
  /// Returns a variable pre-colored to the specified physical register. This is
  /// generally used to get very direct access to the register such as in the
  /// prolog or epilog or for marking scratch registers as killed by a call. If
  /// a Type is not provided, a target-specific default type is used.
  virtual Variable *getPhysicalRegister(RegNumT RegNum,
                                        Type Ty = IceType_void) = 0;
  /// Returns a printable name for the register.
  virtual const char *getRegName(RegNumT RegNum, Type Ty) const = 0;

  // Frame/stack layout queries, implemented per target.
  virtual bool hasFramePointer() const { return false; }
  virtual void setHasFramePointer() = 0;
  virtual RegNumT getStackReg() const = 0;
  virtual RegNumT getFrameReg() const = 0;
  virtual RegNumT getFrameOrStackReg() const = 0;
  virtual size_t typeWidthInBytesOnStack(Type Ty) const = 0;
  virtual uint32_t getStackAlignment() const = 0;
  virtual bool needsStackPointerAlignment() const { return false; }
  virtual void reserveFixedAllocaArea(size_t Size, size_t Align) = 0;
  virtual int32_t getFrameFixedAllocaOffset() const = 0;
  virtual uint32_t maxOutArgsSizeBytes() const { return 0; }
  // Addressing relative to frame pointer differs in MIPS compared to X86/ARM
  // since MIPS decrements its stack pointer prior to saving it in the frame
  // pointer register.
  virtual uint32_t getFramePointerOffset(uint32_t CurrentOffset,
                                         uint32_t Size) const {
    return -(CurrentOffset + Size);
  }
  /// Return whether a 64-bit Variable should be split into a Variable64On32.
  virtual bool shouldSplitToVariable64On32(Type Ty) const = 0;

  /// Return whether a Vector Variable should be split into a VariableVecOn32.
  /// The default is to never split.
  virtual bool shouldSplitToVariableVecOn32(Type Ty) const {
    (void)Ty;
    return false;
  }

  bool hasComputedFrame() const { return HasComputedFrame; }
  /// Returns true if this function calls a function that has the "returns
  /// twice" attribute.
  bool callsReturnsTwice() const { return CallsReturnsTwice; }
  void setCallsReturnsTwice(bool RetTwice) { CallsReturnsTwice = RetTwice; }
  // Monotonic counters for generating unique label and jump-table numbers.
  SizeT makeNextLabelNumber() { return NextLabelNumber++; }
  SizeT makeNextJumpTableNumber() { return NextJumpTableNumber++; }
  LoweringContext &getContext() { return Context; }
  Cfg *getFunc() const { return Func; }
  GlobalContext *getGlobalContext() const { return Ctx; }

  /// Bitmask flags used to select subsets of the target's register set.
  enum RegSet {
    RegSet_None = 0,
    RegSet_CallerSave = 1 << 0,
    RegSet_CalleeSave = 1 << 1,
    RegSet_StackPointer = 1 << 2,
    RegSet_FramePointer = 1 << 3,
    RegSet_All = ~RegSet_None
  };
  using RegSetMask = uint32_t;
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 293 | |
  /// Returns the registers in Include but not in Exclude (RegSet masks).
  virtual SmallBitVector getRegisterSet(RegSetMask Include,
                                        RegSetMask Exclude) const = 0;
  /// Get the set of physical registers available for the specified Variable's
  /// register class, applying register restrictions from the command line.
  virtual const SmallBitVector &
  getRegistersForVariable(const Variable *Var) const = 0;
  /// Get the set of *all* physical registers available for the specified
  /// Variable's register class, *not* applying register restrictions from the
  /// command line.
  virtual const SmallBitVector &
  getAllRegistersForVariable(const Variable *Var) const = 0;
  virtual const SmallBitVector &getAliasesForRegister(RegNumT) const = 0;

  /// Runs register allocation of the given kind over the Cfg.
  void regAlloc(RegAllocKind Kind);
  void postRegallocSplitting(const SmallBitVector &RegMask);

  virtual void
  makeRandomRegisterPermutation(llvm::SmallVectorImpl<RegNumT> &Permutation,
                                const SmallBitVector &ExcludeRegisters,
                                uint64_t Salt) const = 0;

  /// Get the minimum number of clusters required for a jump table to be
  /// considered.
  virtual SizeT getMinJumpTableSize() const = 0;
  virtual void emitJumpTable(const Cfg *Func,
                             const InstJumpTable *JumpTable) const = 0;

  virtual void emitVariable(const Variable *Var) const = 0;

  void emitWithoutPrefix(const ConstantRelocatable *CR,
                         const char *Suffix = "") const;

  // Target-specific emission of each constant kind.
  virtual void emit(const ConstantInteger32 *C) const = 0;
  virtual void emit(const ConstantInteger64 *C) const = 0;
  virtual void emit(const ConstantFloat *C) const = 0;
  virtual void emit(const ConstantDouble *C) const = 0;
  virtual void emit(const ConstantUndef *C) const = 0;
  virtual void emit(const ConstantRelocatable *CR) const = 0;

  /// Performs target-specific argument lowering.
  virtual void lowerArguments() = 0;

  virtual void initNodeForLowering(CfgNode *) {}
  virtual void addProlog(CfgNode *Node) = 0;
  virtual void addEpilog(CfgNode *Node) = 0;

  /// Create a properly-typed "mov" instruction. This is primarily for local
  /// variable splitting. The default implementation is fatal; targets must
  /// override it before the feature can be used.
  virtual Inst *createLoweredMove(Variable *Dest, Variable *SrcVar) {
    // TODO(stichnot): make pure virtual by implementing for all targets
    (void)Dest;
    (void)SrcVar;
    llvm::report_fatal_error("createLoweredMove() unimplemented");
    return nullptr;
  }

  virtual ~TargetLowering() = default;
private:
  // This control variable is used by AutoBundle (RAII-style bundle
  // locking/unlocking) to prevent nested bundles.
  bool AutoBundling = false;

  /// This indicates whether we are in the genTargetHelperCalls phase, and
  /// therefore can do things like scalarization.
  bool GeneratingTargetHelpers = false;

  // _bundle_lock(), and _bundle_unlock(), were made private to force subtargets
  // to use the AutoBundle helper.
  void
  _bundle_lock(InstBundleLock::Option BundleOption = InstBundleLock::Opt_None) {
    Context.insert<InstBundleLock>(BundleOption);
  }
  void _bundle_unlock() { Context.insert<InstBundleUnlock>(); }

protected:
  /// AutoBundle provides RAII-style bundling. Sub-targets are expected to use
  /// it when emitting NaCl Bundles to ensure proper bundle_unlocking, and
  /// prevent nested bundles.
  ///
  /// AutoBundle objects will emit a _bundle_lock during construction (but only
  /// if sandboxed code generation was requested), and a bundle_unlock() during
  /// destruction. By carefully scoping objects of this type, Subtargets can
  /// ensure proper bundle emission.
  class AutoBundle {
    AutoBundle() = delete;
    AutoBundle(const AutoBundle &) = delete;
    AutoBundle &operator=(const AutoBundle &) = delete;

  public:
    explicit AutoBundle(TargetLowering *Target, InstBundleLock::Option Option =
                                                    InstBundleLock::Opt_None);
    ~AutoBundle();

  private:
    TargetLowering *const Target;
    const bool NeedSandboxing;
  };
| 392 | |
  explicit TargetLowering(Cfg *Func);
  // Applies command line filters to TypeToRegisterSet array.
  static void filterTypeToRegisterSet(
      GlobalContext *Ctx, int32_t NumRegs, SmallBitVector TypeToRegisterSet[],
      size_t TypeToRegisterSetSize,
      std::function<std::string(RegNumT)> getRegName,
      std::function<const char *(RegClass)> getRegClassName);
  // Per-instruction lowering hooks, one per high-level instruction kind. Each
  // concrete target must implement all of the pure-virtual ones.
  virtual void lowerAlloca(const InstAlloca *Instr) = 0;
  virtual void lowerArithmetic(const InstArithmetic *Instr) = 0;
  virtual void lowerAssign(const InstAssign *Instr) = 0;
  virtual void lowerBr(const InstBr *Instr) = 0;
  virtual void lowerBreakpoint(const InstBreakpoint *Instr) = 0;
  virtual void lowerCall(const InstCall *Instr) = 0;
  virtual void lowerCast(const InstCast *Instr) = 0;
  virtual void lowerFcmp(const InstFcmp *Instr) = 0;
  virtual void lowerExtractElement(const InstExtractElement *Instr) = 0;
  virtual void lowerIcmp(const InstIcmp *Instr) = 0;
  virtual void lowerInsertElement(const InstInsertElement *Instr) = 0;
  virtual void lowerIntrinsicCall(const InstIntrinsicCall *Instr) = 0;
  virtual void lowerLoad(const InstLoad *Instr) = 0;
  virtual void lowerPhi(const InstPhi *Instr) = 0;
  virtual void lowerRet(const InstRet *Instr) = 0;
  virtual void lowerSelect(const InstSelect *Instr) = 0;
  virtual void lowerShuffleVector(const InstShuffleVector *Instr) = 0;
  virtual void lowerStore(const InstStore *Instr) = 0;
  virtual void lowerSwitch(const InstSwitch *Instr) = 0;
  virtual void lowerUnreachable(const InstUnreachable *Instr) = 0;
  virtual void lowerOther(const Inst *Instr);

  virtual void genTargetHelperCallFor(Inst *Instr) = 0;
  virtual uint32_t getCallStackArgumentsSizeBytes(const InstCall *Instr) = 0;

  /// Opportunity to modify other instructions to help Address Optimization.
  /// The doAddressOpt* hooks default to no-ops.
  virtual void doAddressOptOther() {}
  virtual void doAddressOptLoad() {}
  virtual void doAddressOptStore() {}
  virtual void doAddressOptLoadSubVector() {}
  virtual void doAddressOptStoreSubVector() {}
  virtual void doMockBoundsCheck(Operand *) {}
  virtual void randomlyInsertNop(float Probability,
                                 RandomNumberGenerator &RNG) = 0;
  /// This gives the target an opportunity to post-process the lowered expansion
  /// before returning.
  virtual void postLower() {}

  /// When the SkipUnimplemented flag is set, addFakeDefUses() gets invoked by
  /// the UnimplementedLoweringError macro to insert fake uses of all the
  /// instruction variables and a fake def of the instruction dest, in order to
  /// preserve integrity of liveness analysis.
  void addFakeDefUses(const Inst *Instr);

  /// Find (non-SSA) instructions where the Dest variable appears in some source
  /// operand, and set the IsDestRedefined flag. This keeps liveness analysis
  /// consistent.
  void markRedefinitions();
Jan Voung | b3401d2 | 2015-05-18 09:38:21 -0700 | [diff] [blame] | 448 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 449 | /// Make a pass over the Cfg to determine which variables need stack slots and |
| 450 | /// place them in a sorted list (SortedSpilledVariables). Among those, vars, |
| 451 | /// classify the spill variables as local to the basic block vs global |
| 452 | /// (multi-block) in order to compute the parameters GlobalsSize and |
| 453 | /// SpillAreaSizeBytes (represents locals or general vars if the coalescing of |
| 454 | /// locals is disallowed) along with alignments required for variables in each |
| 455 | /// area. We rely on accurate VMetadata in order to classify a variable as |
| 456 | /// global vs local (otherwise the variable is conservatively global). The |
| 457 | /// in-args should be initialized to 0. |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 458 | /// |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 459 | /// This is only a pre-pass and the actual stack slot assignment is handled |
| 460 | /// separately. |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 461 | /// |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 462 | /// There may be target-specific Variable types, which will be handled by |
| 463 | /// TargetVarHook. If the TargetVarHook returns true, then the variable is |
| 464 | /// skipped and not considered with the rest of the spilled variables. |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 465 | void getVarStackSlotParams(VarList &SortedSpilledVariables, |
John Porto | e82b560 | 2016-02-24 15:58:55 -0800 | [diff] [blame] | 466 | SmallBitVector &RegsUsed, size_t *GlobalsSize, |
| 467 | size_t *SpillAreaSizeBytes, |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 468 | uint32_t *SpillAreaAlignmentBytes, |
| 469 | uint32_t *LocalsSlotsAlignmentBytes, |
| 470 | std::function<bool(Variable *)> TargetVarHook); |
| 471 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 472 | /// Calculate the amount of padding needed to align the local and global areas |
| 473 | /// to the required alignment. This assumes the globals/locals layout used by |
| 474 | /// getVarStackSlotParams and assignVarStackSlots. |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 475 | void alignStackSpillAreas(uint32_t SpillAreaStartOffset, |
| 476 | uint32_t SpillAreaAlignmentBytes, |
| 477 | size_t GlobalsSize, |
| 478 | uint32_t LocalsSlotsAlignmentBytes, |
| 479 | uint32_t *SpillAreaPaddingBytes, |
| 480 | uint32_t *LocalsSlotsPaddingBytes); |
| 481 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 482 | /// Make a pass through the SortedSpilledVariables and actually assign stack |
| 483 | /// slots. SpillAreaPaddingBytes takes into account stack alignment padding. |
| 484 | /// The SpillArea starts after that amount of padding. This matches the scheme |
| 485 | /// in getVarStackSlotParams, where there may be a separate multi-block global |
| 486 | /// var spill area and a local var spill area. |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 487 | void assignVarStackSlots(VarList &SortedSpilledVariables, |
| 488 | size_t SpillAreaPaddingBytes, |
| 489 | size_t SpillAreaSizeBytes, |
| 490 | size_t GlobalsAndSubsequentPaddingSize, |
| 491 | bool UsesFramePointer); |
| 492 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 493 | /// Sort the variables in Source based on required alignment. The variables |
| 494 | /// with the largest alignment need are placed in the front of the Dest list. |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 495 | void sortVarsByAlignment(VarList &Dest, const VarList &Source) const; |
| 496 | |
Karl Schimpf | 20070e8 | 2016-03-17 13:30:13 -0700 | [diff] [blame] | 497 | InstCall *makeHelperCall(RuntimeHelper FuncID, Variable *Dest, SizeT MaxSrcs); |
Jan Voung | b36ad9b | 2015-04-21 17:01:49 -0700 | [diff] [blame] | 498 | |
Jim Stichnoth | 230d4101 | 2015-09-25 17:40:32 -0700 | [diff] [blame] | 499 | void _set_dest_redefined() { Context.getLastInserted()->setDestRedefined(); } |
Jan Voung | 0fa6c5a | 2015-06-01 11:04:04 -0700 | [diff] [blame] | 500 | |
Andrew Scull | cfa628b | 2015-08-20 14:23:05 -0700 | [diff] [blame] | 501 | bool shouldOptimizeMemIntrins(); |
| 502 | |
Eric Holk | cfc2553 | 2016-02-09 17:47:58 -0800 | [diff] [blame] | 503 | void scalarizeArithmetic(InstArithmetic::OpKind K, Variable *Dest, |
| 504 | Operand *Src0, Operand *Src1); |
| 505 | |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 506 | /// Generalizes scalarizeArithmetic to support other instruction types. |
| 507 | /// |
Eric Holk | d6cf6b3 | 2016-02-17 11:09:48 -0800 | [diff] [blame] | 508 | /// insertScalarInstruction is a function-like object with signature |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 509 | /// (Variable *Dest, Variable *Src0, Variable *Src1) -> Instr *. |
Eric Holk | d6cf6b3 | 2016-02-17 11:09:48 -0800 | [diff] [blame] | 510 | template <typename... Operands, |
| 511 | typename F = std::function<Inst *(Variable *, Operands *...)>> |
| 512 | void scalarizeInstruction(Variable *Dest, F insertScalarInstruction, |
| 513 | Operands *... Srcs) { |
| 514 | assert(GeneratingTargetHelpers && |
| 515 | "scalarizeInstruction called during incorrect phase"); |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 516 | const Type DestTy = Dest->getType(); |
| 517 | assert(isVectorType(DestTy)); |
| 518 | const Type DestElementTy = typeElementType(DestTy); |
| 519 | const SizeT NumElements = typeNumElements(DestTy); |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 520 | |
| 521 | Variable *T = Func->makeVariable(DestTy); |
Jaydeep Patil | 958ddb7 | 2016-10-03 07:52:48 -0700 | [diff] [blame] | 522 | if (auto *VarVecOn32 = llvm::dyn_cast<VariableVecOn32>(T)) { |
| 523 | VarVecOn32->initVecElement(Func); |
Jaydeep Patil | 3a01f33 | 2016-10-17 06:33:50 -0700 | [diff] [blame] | 524 | auto *Undef = ConstantUndef::create(Ctx, DestTy); |
| 525 | Context.insert<InstAssign>(T, Undef); |
| 526 | } else { |
| 527 | Context.insert<InstFakeDef>(T); |
Jaydeep Patil | 958ddb7 | 2016-10-03 07:52:48 -0700 | [diff] [blame] | 528 | } |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 529 | |
Eric Holk | d6cf6b3 | 2016-02-17 11:09:48 -0800 | [diff] [blame] | 530 | for (SizeT I = 0; I < NumElements; ++I) { |
| 531 | auto *Index = Ctx->getConstantInt32(I); |
| 532 | |
Takuto Ikuta | 9911aca | 2018-11-03 10:43:22 +0000 | [diff] [blame] | 533 | auto makeExtractThunk = [this, Index, NumElements](Operand *Src) { |
| 534 | return [this, Index, NumElements, Src]() { |
Takuto Ikuta | 4169b31 | 2018-11-05 23:32:02 +0900 | [diff] [blame] | 535 | (void)NumElements; |
Eric Holk | d6cf6b3 | 2016-02-17 11:09:48 -0800 | [diff] [blame] | 536 | assert(typeNumElements(Src->getType()) == NumElements); |
| 537 | |
| 538 | const auto ElementTy = typeElementType(Src->getType()); |
| 539 | auto *Op = Func->makeVariable(ElementTy); |
| 540 | Context.insert<InstExtractElement>(Op, Src, Index); |
| 541 | return Op; |
| 542 | }; |
| 543 | }; |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 544 | |
| 545 | // Perform the operation as a scalar operation. |
Eric Holk | d6cf6b3 | 2016-02-17 11:09:48 -0800 | [diff] [blame] | 546 | auto *Res = Func->makeVariable(DestElementTy); |
| 547 | auto *Arith = applyToThunkedArgs(insertScalarInstruction, Res, |
| 548 | makeExtractThunk(Srcs)...); |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 549 | genTargetHelperCallFor(Arith); |
| 550 | |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 551 | Variable *DestT = Func->makeVariable(DestTy); |
| 552 | Context.insert<InstInsertElement>(DestT, T, Res, Index); |
| 553 | T = DestT; |
| 554 | } |
| 555 | Context.insert<InstAssign>(Dest, T); |
| 556 | } |
| 557 | |
Eric Holk | d6cf6b3 | 2016-02-17 11:09:48 -0800 | [diff] [blame] | 558 | // applyToThunkedArgs is used by scalarizeInstruction. Ideally, we would just |
| 559 | // call insertScalarInstruction(Res, Srcs...), but C++ does not specify |
| 560 | // evaluation order which means this leads to an unpredictable final |
| 561 | // output. Instead, we wrap each of the Srcs in a thunk and these |
| 562 | // applyToThunkedArgs functions apply the thunks in a well defined order so we |
| 563 | // still get well-defined output. |
| 564 | Inst *applyToThunkedArgs( |
| 565 | std::function<Inst *(Variable *, Variable *)> insertScalarInstruction, |
| 566 | Variable *Res, std::function<Variable *()> thunk0) { |
| 567 | auto *Src0 = thunk0(); |
| 568 | return insertScalarInstruction(Res, Src0); |
| 569 | } |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 570 | |
Eric Holk | d6cf6b3 | 2016-02-17 11:09:48 -0800 | [diff] [blame] | 571 | Inst * |
| 572 | applyToThunkedArgs(std::function<Inst *(Variable *, Variable *, Variable *)> |
| 573 | insertScalarInstruction, |
| 574 | Variable *Res, std::function<Variable *()> thunk0, |
| 575 | std::function<Variable *()> thunk1) { |
| 576 | auto *Src0 = thunk0(); |
| 577 | auto *Src1 = thunk1(); |
| 578 | return insertScalarInstruction(Res, Src0, Src1); |
| 579 | } |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 580 | |
Eric Holk | d6cf6b3 | 2016-02-17 11:09:48 -0800 | [diff] [blame] | 581 | Inst *applyToThunkedArgs( |
| 582 | std::function<Inst *(Variable *, Variable *, Variable *, Variable *)> |
| 583 | insertScalarInstruction, |
| 584 | Variable *Res, std::function<Variable *()> thunk0, |
| 585 | std::function<Variable *()> thunk1, std::function<Variable *()> thunk2) { |
| 586 | auto *Src0 = thunk0(); |
| 587 | auto *Src1 = thunk1(); |
| 588 | auto *Src2 = thunk2(); |
| 589 | return insertScalarInstruction(Res, Src0, Src1, Src2); |
Eric Holk | cc69fa2 | 2016-02-10 13:07:06 -0800 | [diff] [blame] | 590 | } |
| 591 | |
John Porto | ac2388c | 2016-01-22 07:10:56 -0800 | [diff] [blame] | 592 | /// SandboxType enumerates all possible sandboxing strategies that |
| 593 | enum SandboxType { |
| 594 | ST_None, |
| 595 | ST_NaCl, |
| 596 | ST_Nonsfi, |
| 597 | }; |
| 598 | |
| 599 | static SandboxType determineSandboxTypeFromFlags(const ClFlags &Flags); |
| 600 | |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 601 | Cfg *Func; |
| 602 | GlobalContext *Ctx; |
Jim Stichnoth | eafb56c | 2015-06-22 10:35:22 -0700 | [diff] [blame] | 603 | bool HasComputedFrame = false; |
| 604 | bool CallsReturnsTwice = false; |
Jim Stichnoth | eafb56c | 2015-06-22 10:35:22 -0700 | [diff] [blame] | 605 | SizeT NextLabelNumber = 0; |
Andrew Scull | 86df4e9 | 2015-07-30 13:54:44 -0700 | [diff] [blame] | 606 | SizeT NextJumpTableNumber = 0; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 607 | LoweringContext Context; |
John Porto | ac2388c | 2016-01-22 07:10:56 -0800 | [diff] [blame] | 608 | const SandboxType SandboxingType = ST_None; |
Jim Stichnoth | 9738a9e | 2015-02-23 16:39:06 -0800 | [diff] [blame] | 609 | |
Jim Stichnoth | 8ff4b28 | 2016-01-04 15:39:06 -0800 | [diff] [blame] | 610 | const static constexpr char *H_getIP_prefix = "__Sz_getIP_"; |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 611 | }; |
| 612 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 613 | /// TargetDataLowering is used for "lowering" data including initializers for |
| 614 | /// global variables, and the internal constant pools. It is separated out from |
| 615 | /// TargetLowering because it does not require a Cfg. |
Jan Voung | 72984d8 | 2015-01-29 14:42:38 -0800 | [diff] [blame] | 616 | class TargetDataLowering { |
| 617 | TargetDataLowering() = delete; |
| 618 | TargetDataLowering(const TargetDataLowering &) = delete; |
| 619 | TargetDataLowering &operator=(const TargetDataLowering &) = delete; |
Jim Stichnoth | 7b451a9 | 2014-10-15 14:39:23 -0700 | [diff] [blame] | 620 | |
Jim Stichnoth | de4ca71 | 2014-06-29 08:13:48 -0700 | [diff] [blame] | 621 | public: |
Jim Stichnoth | bbca754 | 2015-02-11 16:08:31 -0800 | [diff] [blame] | 622 | static std::unique_ptr<TargetDataLowering> createLowering(GlobalContext *Ctx); |
Jan Voung | 72984d8 | 2015-01-29 14:42:38 -0800 | [diff] [blame] | 623 | virtual ~TargetDataLowering(); |
Jan Voung | 839c4ce | 2014-07-28 15:19:43 -0700 | [diff] [blame] | 624 | |
John Porto | 8b1a705 | 2015-06-17 13:20:08 -0700 | [diff] [blame] | 625 | virtual void lowerGlobals(const VariableDeclarationList &Vars, |
Jim Stichnoth | 467ffe5 | 2016-03-29 15:01:06 -0700 | [diff] [blame] | 626 | const std::string &SectionSuffix) = 0; |
John Porto | 0f86d03 | 2015-06-15 07:44:27 -0700 | [diff] [blame] | 627 | virtual void lowerConstants() = 0; |
Andrew Scull | 86df4e9 | 2015-07-30 13:54:44 -0700 | [diff] [blame] | 628 | virtual void lowerJumpTables() = 0; |
Jaydeep Patil | 3da9f65 | 2016-11-03 22:54:06 -0700 | [diff] [blame] | 629 | virtual void emitTargetRODataSections() {} |
Jim Stichnoth | de4ca71 | 2014-06-29 08:13:48 -0700 | [diff] [blame] | 630 | |
| 631 | protected: |
John Porto | 8b1a705 | 2015-06-17 13:20:08 -0700 | [diff] [blame] | 632 | void emitGlobal(const VariableDeclaration &Var, |
Jim Stichnoth | 467ffe5 | 2016-03-29 15:01:06 -0700 | [diff] [blame] | 633 | const std::string &SectionSuffix); |
Jan Voung | 58eea4d | 2015-06-15 15:11:56 -0700 | [diff] [blame] | 634 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 635 | /// For now, we assume .long is the right directive for emitting 4 byte emit |
| 636 | /// global relocations. However, LLVM MIPS usually uses .4byte instead. |
Andrew Scull | 9612d32 | 2015-07-06 14:53:25 -0700 | [diff] [blame] | 637 | /// Perhaps there is some difference when the location is unaligned. |
John Porto | 8b1a705 | 2015-06-17 13:20:08 -0700 | [diff] [blame] | 638 | static const char *getEmit32Directive() { return ".long"; } |
Jan Voung | 58eea4d | 2015-06-15 15:11:56 -0700 | [diff] [blame] | 639 | |
Jim Stichnoth | c6ead20 | 2015-02-24 09:30:30 -0800 | [diff] [blame] | 640 | explicit TargetDataLowering(GlobalContext *Ctx) : Ctx(Ctx) {} |
Jim Stichnoth | de4ca71 | 2014-06-29 08:13:48 -0700 | [diff] [blame] | 641 | GlobalContext *Ctx; |
Jim Stichnoth | de4ca71 | 2014-06-29 08:13:48 -0700 | [diff] [blame] | 642 | }; |
| 643 | |
Andrew Scull | 57e1268 | 2015-09-16 11:30:19 -0700 | [diff] [blame] | 644 | /// TargetHeaderLowering is used to "lower" the header of an output file. It |
| 645 | /// writes out the target-specific header attributes. E.g., for ARM this writes |
| 646 | /// out the build attributes (float ABI, etc.). |
Jan Voung | fb79284 | 2015-06-11 15:27:50 -0700 | [diff] [blame] | 647 | class TargetHeaderLowering { |
| 648 | TargetHeaderLowering() = delete; |
| 649 | TargetHeaderLowering(const TargetHeaderLowering &) = delete; |
| 650 | TargetHeaderLowering &operator=(const TargetHeaderLowering &) = delete; |
| 651 | |
| 652 | public: |
| 653 | static std::unique_ptr<TargetHeaderLowering> |
| 654 | createLowering(GlobalContext *Ctx); |
| 655 | virtual ~TargetHeaderLowering(); |
| 656 | |
| 657 | virtual void lower() {} |
| 658 | |
| 659 | protected: |
| 660 | explicit TargetHeaderLowering(GlobalContext *Ctx) : Ctx(Ctx) {} |
| 661 | GlobalContext *Ctx; |
| 662 | }; |
| 663 | |
Jim Stichnoth | 5bc2b1d | 2014-05-22 13:38:48 -0700 | [diff] [blame] | 664 | } // end of namespace Ice |
| 665 | |
| 666 | #endif // SUBZERO_SRC_ICETARGETLOWERING_H |