| /*===- TableGen'erated file -------------------------------------*- C++ -*-===*\ |
| |* *| |
| |* "Fast" Instruction Selector for the AArch64 target *| |
| |* *| |
| |* Automatically generated file, do not edit! *| |
| |* *| |
| \*===----------------------------------------------------------------------===*/ |
| |
| |
| // FastEmit Immediate Predicate functions. |
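// These are derived from the ImmLeaf immediate predicates in the target's
// .td files; the generated immediate emitters (fastEmit_*_i) call them to
// check that a constant is encodable before committing to an instruction.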
static bool Predicate_imm0_31(int64_t Imm) {
  return ((uint64_t)Imm) < 32;
}
static bool Predicate_imm0_63(int64_t Imm) {
  return ((uint64_t)Imm) < 64;
}
static bool Predicate_imm32_0_31(int64_t Imm) {
  return ((uint64_t)Imm) < 32;
}
static bool Predicate_tbz_imm0_31_diag(int64_t Imm) {
  return ((uint32_t)Imm) < 32;
}
static bool Predicate_tbz_imm32_63(int64_t Imm) {
  return (((uint32_t)Imm) > 31) && (((uint32_t)Imm) < 64);
}
| static bool Predicate_VectorIndexD(int64_t Imm) { |
| return ((uint64_t)Imm) < 2; |
| } |
| static bool Predicate_VectorIndexS(int64_t Imm) { |
| return ((uint64_t)Imm) < 4; |
| } |
| static bool Predicate_VectorIndexH(int64_t Imm) { |
| return ((uint64_t)Imm) < 8; |
| } |
| static bool Predicate_VectorIndexB(int64_t Imm) { |
| return ((uint64_t)Imm) < 16; |
| } |
| static bool Predicate_VectorIndex0(int64_t Imm) { |
| return ((uint64_t)Imm) == 0; |
| } |
static bool Predicate_imm0_255(int64_t Imm) {
  return ((uint32_t)Imm) < 256;
}
static bool Predicate_vecshiftL64(int64_t Imm) {
  return ((uint32_t)Imm) < 64;
}
static bool Predicate_vecshiftL32(int64_t Imm) {
  return ((uint32_t)Imm) < 32;
}
static bool Predicate_vecshiftR64(int64_t Imm) {
  return (((uint32_t)Imm) > 0) && (((uint32_t)Imm) < 65);
}
static bool Predicate_vecshiftL8(int64_t Imm) {
  return ((uint32_t)Imm) < 8;
}
static bool Predicate_vecshiftL16(int64_t Imm) {
  return ((uint32_t)Imm) < 16;
}
static bool Predicate_vecshiftR8(int64_t Imm) {
  return (((uint32_t)Imm) > 0) && (((uint32_t)Imm) < 9);
}
static bool Predicate_vecshiftR16(int64_t Imm) {
  return (((uint32_t)Imm) > 0) && (((uint32_t)Imm) < 17);
}
static bool Predicate_vecshiftR32(int64_t Imm) {
  return (((uint32_t)Imm) > 0) && (((uint32_t)Imm) < 33);
}
| static bool Predicate_simm8_32b(int64_t Imm) { |
| return Imm >= -128 && Imm < 128; |
| } |
| static bool Predicate_simm8_64b(int64_t Imm) { |
| return Imm >= -128 && Imm < 128; |
| } |
| static bool Predicate_uimm8_32b(int64_t Imm) { |
| return Imm >= 0 && Imm < 256; |
| } |
| static bool Predicate_uimm8_64b(int64_t Imm) { |
| return Imm >= 0 && Imm < 256; |
| } |
| static bool Predicate_simm6_32b(int64_t Imm) { |
| return Imm >= -32 && Imm < 32; |
| } |
| |
| |
| // FastEmit functions for AArch64ISD::THREAD_POINTER. |
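// MOVbaseTLS is a pseudo that materializes the thread pointer (normally the
// TPIDR_EL0 system register) into a GPR64; it takes no operands and only
// produces an i64 result.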
| |
| unsigned fastEmit_AArch64ISD_THREAD_POINTER_MVT_i64_(MVT RetVT) { |
| if (RetVT.SimpleTy != MVT::i64) |
| return 0; |
| return fastEmitInst_(AArch64::MOVbaseTLS, &AArch64::GPR64RegClass); |
| } |
| |
| unsigned fastEmit_AArch64ISD_THREAD_POINTER_(MVT VT, MVT RetVT) { |
| switch (VT.SimpleTy) { |
| case MVT::i64: return fastEmit_AArch64ISD_THREAD_POINTER_MVT_i64_(RetVT); |
| default: return 0; |
| } |
| } |
| |
| // Top-level FastEmit function. |
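// Dispatch is three-tiered: switch on the target opcode here, then on the
// operand VT in the per-opcode function, then check RetVT in the leaf.
// A return value of 0 at any level means "no match", and the caller falls
// back to the slower SelectionDAG path.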
| |
| unsigned fastEmit_(MVT VT, MVT RetVT, unsigned Opcode) override { |
| switch (Opcode) { |
| case AArch64ISD::THREAD_POINTER: return fastEmit_AArch64ISD_THREAD_POINTER_(VT, RetVT); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::CALL. |
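// When straight-line-speculation hardening of BLR is enabled, the call is
// emitted as BLRNoIP on the GPR64noip class (which excludes the
// intra-procedure-call registers X16/X17) so it can later be rewritten into
// a hardened thunk call; otherwise a plain BLR is used.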
| |
| unsigned fastEmit_AArch64ISD_CALL_MVT_i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::isVoid) |
| return 0; |
| if (( MF->getSubtarget<AArch64Subtarget>().hardenSlsBlr() )) { |
| return fastEmitInst_r(AArch64::BLRNoIP, &AArch64::GPR64noipRegClass, Op0); |
| } |
| if (( !MF->getSubtarget<AArch64Subtarget>().hardenSlsBlr() )) { |
| return fastEmitInst_r(AArch64::BLR, &AArch64::GPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CALL_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i64: return fastEmit_AArch64ISD_CALL_MVT_i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::CMEQz. |
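// CMEQz and the CMGEz/CMGTz/CMLEz/CMLTz variants below select the NEON
// integer compare-against-zero instructions: each lane is compared with 0
// and the result lane is set to all-ones or all-zero. 64-bit vectors live
// in FPR64, 128-bit vectors in FPR128.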
| |
| unsigned fastEmit_AArch64ISD_CMEQz_MVT_v8i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMEQv8i8rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMEQz_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMEQv16i8rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMEQz_MVT_v4i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMEQv4i16rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMEQz_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMEQv8i16rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMEQz_MVT_v2i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMEQv2i32rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMEQz_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMEQv4i32rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMEQz_MVT_v1i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMEQv1i64rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMEQz_MVT_v2i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMEQv2i64rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMEQz_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_AArch64ISD_CMEQz_MVT_v8i8_r(RetVT, Op0); |
| case MVT::v16i8: return fastEmit_AArch64ISD_CMEQz_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v4i16: return fastEmit_AArch64ISD_CMEQz_MVT_v4i16_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_AArch64ISD_CMEQz_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v2i32: return fastEmit_AArch64ISD_CMEQz_MVT_v2i32_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_AArch64ISD_CMEQz_MVT_v4i32_r(RetVT, Op0); |
| case MVT::v1i64: return fastEmit_AArch64ISD_CMEQz_MVT_v1i64_r(RetVT, Op0); |
| case MVT::v2i64: return fastEmit_AArch64ISD_CMEQz_MVT_v2i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::CMGEz. |
| |
| unsigned fastEmit_AArch64ISD_CMGEz_MVT_v8i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGEv8i8rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGEz_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGEv16i8rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGEz_MVT_v4i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGEv4i16rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGEz_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGEv8i16rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGEz_MVT_v2i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGEv2i32rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGEz_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGEv4i32rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGEz_MVT_v1i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGEv1i64rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGEz_MVT_v2i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGEv2i64rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGEz_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_AArch64ISD_CMGEz_MVT_v8i8_r(RetVT, Op0); |
| case MVT::v16i8: return fastEmit_AArch64ISD_CMGEz_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v4i16: return fastEmit_AArch64ISD_CMGEz_MVT_v4i16_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_AArch64ISD_CMGEz_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v2i32: return fastEmit_AArch64ISD_CMGEz_MVT_v2i32_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_AArch64ISD_CMGEz_MVT_v4i32_r(RetVT, Op0); |
| case MVT::v1i64: return fastEmit_AArch64ISD_CMGEz_MVT_v1i64_r(RetVT, Op0); |
| case MVT::v2i64: return fastEmit_AArch64ISD_CMGEz_MVT_v2i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::CMGTz. |
| |
| unsigned fastEmit_AArch64ISD_CMGTz_MVT_v8i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGTv8i8rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGTz_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGTv16i8rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGTz_MVT_v4i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGTv4i16rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGTz_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGTv8i16rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGTz_MVT_v2i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGTv2i32rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGTz_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGTv4i32rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGTz_MVT_v1i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGTv1i64rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGTz_MVT_v2i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMGTv2i64rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMGTz_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_AArch64ISD_CMGTz_MVT_v8i8_r(RetVT, Op0); |
| case MVT::v16i8: return fastEmit_AArch64ISD_CMGTz_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v4i16: return fastEmit_AArch64ISD_CMGTz_MVT_v4i16_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_AArch64ISD_CMGTz_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v2i32: return fastEmit_AArch64ISD_CMGTz_MVT_v2i32_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_AArch64ISD_CMGTz_MVT_v4i32_r(RetVT, Op0); |
| case MVT::v1i64: return fastEmit_AArch64ISD_CMGTz_MVT_v1i64_r(RetVT, Op0); |
| case MVT::v2i64: return fastEmit_AArch64ISD_CMGTz_MVT_v2i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::CMLEz. |
| |
| unsigned fastEmit_AArch64ISD_CMLEz_MVT_v8i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLEv8i8rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLEz_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLEv16i8rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLEz_MVT_v4i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLEv4i16rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLEz_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLEv8i16rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLEz_MVT_v2i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLEv2i32rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLEz_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLEv4i32rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLEz_MVT_v1i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLEv1i64rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLEz_MVT_v2i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLEv2i64rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLEz_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_AArch64ISD_CMLEz_MVT_v8i8_r(RetVT, Op0); |
| case MVT::v16i8: return fastEmit_AArch64ISD_CMLEz_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v4i16: return fastEmit_AArch64ISD_CMLEz_MVT_v4i16_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_AArch64ISD_CMLEz_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v2i32: return fastEmit_AArch64ISD_CMLEz_MVT_v2i32_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_AArch64ISD_CMLEz_MVT_v4i32_r(RetVT, Op0); |
| case MVT::v1i64: return fastEmit_AArch64ISD_CMLEz_MVT_v1i64_r(RetVT, Op0); |
| case MVT::v2i64: return fastEmit_AArch64ISD_CMLEz_MVT_v2i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::CMLTz. |
| |
| unsigned fastEmit_AArch64ISD_CMLTz_MVT_v8i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLTv8i8rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLTz_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLTv16i8rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLTz_MVT_v4i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLTv4i16rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLTz_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLTv8i16rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLTz_MVT_v2i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLTv2i32rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLTz_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLTv4i32rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLTz_MVT_v1i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLTv1i64rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLTz_MVT_v2i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::CMLTv2i64rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_CMLTz_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_AArch64ISD_CMLTz_MVT_v8i8_r(RetVT, Op0); |
| case MVT::v16i8: return fastEmit_AArch64ISD_CMLTz_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v4i16: return fastEmit_AArch64ISD_CMLTz_MVT_v4i16_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_AArch64ISD_CMLTz_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v2i32: return fastEmit_AArch64ISD_CMLTz_MVT_v2i32_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_AArch64ISD_CMLTz_MVT_v4i32_r(RetVT, Op0); |
| case MVT::v1i64: return fastEmit_AArch64ISD_CMLTz_MVT_v1i64_r(RetVT, Op0); |
| case MVT::v2i64: return fastEmit_AArch64ISD_CMLTz_MVT_v2i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::DUP. |
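// DUP broadcasts a general-purpose register value into every lane of the
// result vector; in this table, i64 sources only produce v2i64 (via
// DUPv2i64gpr).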
| |
| unsigned fastEmit_AArch64ISD_DUP_MVT_i32_MVT_v8i8_r(unsigned Op0) { |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::DUPv8i8gpr, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_DUP_MVT_i32_MVT_v16i8_r(unsigned Op0) { |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::DUPv16i8gpr, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_DUP_MVT_i32_MVT_v4i16_r(unsigned Op0) { |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::DUPv4i16gpr, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_DUP_MVT_i32_MVT_v8i16_r(unsigned Op0) { |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::DUPv8i16gpr, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_DUP_MVT_i32_MVT_v2i32_r(unsigned Op0) { |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::DUPv2i32gpr, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_DUP_MVT_i32_MVT_v4i32_r(unsigned Op0) { |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::DUPv4i32gpr, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_DUP_MVT_i32_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_AArch64ISD_DUP_MVT_i32_MVT_v8i8_r(Op0); |
| case MVT::v16i8: return fastEmit_AArch64ISD_DUP_MVT_i32_MVT_v16i8_r(Op0); |
| case MVT::v4i16: return fastEmit_AArch64ISD_DUP_MVT_i32_MVT_v4i16_r(Op0); |
| case MVT::v8i16: return fastEmit_AArch64ISD_DUP_MVT_i32_MVT_v8i16_r(Op0); |
| case MVT::v2i32: return fastEmit_AArch64ISD_DUP_MVT_i32_MVT_v2i32_r(Op0); |
| case MVT::v4i32: return fastEmit_AArch64ISD_DUP_MVT_i32_MVT_v4i32_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_AArch64ISD_DUP_MVT_i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::DUPv2i64gpr, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_DUP_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i32: return fastEmit_AArch64ISD_DUP_MVT_i32_r(RetVT, Op0); |
| case MVT::i64: return fastEmit_AArch64ISD_DUP_MVT_i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::FCMEQz. |
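// The floating-point compare-against-zero nodes (FCMEQz/FCMGEz/FCMGTz/
// FCMLEz/FCMLTz) return an integer mask vector with the same lane geometry
// as the FP input (e.g. v4f16 -> v4i16); the fp16 forms additionally
// require the FullFP16 feature.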
| |
| unsigned fastEmit_AArch64ISD_FCMEQz_MVT_v4f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasFullFP16()) && (Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMEQv4i16rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMEQz_MVT_v8f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasFullFP16()) && (Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMEQv8i16rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMEQz_MVT_v2f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMEQv2i32rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMEQz_MVT_v4f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMEQv4i32rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMEQz_MVT_v1f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMEQv1i64rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMEQz_MVT_v2f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMEQv2i64rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMEQz_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v4f16: return fastEmit_AArch64ISD_FCMEQz_MVT_v4f16_r(RetVT, Op0); |
| case MVT::v8f16: return fastEmit_AArch64ISD_FCMEQz_MVT_v8f16_r(RetVT, Op0); |
| case MVT::v2f32: return fastEmit_AArch64ISD_FCMEQz_MVT_v2f32_r(RetVT, Op0); |
| case MVT::v4f32: return fastEmit_AArch64ISD_FCMEQz_MVT_v4f32_r(RetVT, Op0); |
| case MVT::v1f64: return fastEmit_AArch64ISD_FCMEQz_MVT_v1f64_r(RetVT, Op0); |
| case MVT::v2f64: return fastEmit_AArch64ISD_FCMEQz_MVT_v2f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::FCMGEz. |
| |
| unsigned fastEmit_AArch64ISD_FCMGEz_MVT_v4f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasFullFP16()) && (Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMGEv4i16rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMGEz_MVT_v8f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasFullFP16()) && (Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMGEv8i16rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMGEz_MVT_v2f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMGEv2i32rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMGEz_MVT_v4f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMGEv4i32rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMGEz_MVT_v1f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMGEv1i64rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMGEz_MVT_v2f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMGEv2i64rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMGEz_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v4f16: return fastEmit_AArch64ISD_FCMGEz_MVT_v4f16_r(RetVT, Op0); |
| case MVT::v8f16: return fastEmit_AArch64ISD_FCMGEz_MVT_v8f16_r(RetVT, Op0); |
| case MVT::v2f32: return fastEmit_AArch64ISD_FCMGEz_MVT_v2f32_r(RetVT, Op0); |
| case MVT::v4f32: return fastEmit_AArch64ISD_FCMGEz_MVT_v4f32_r(RetVT, Op0); |
| case MVT::v1f64: return fastEmit_AArch64ISD_FCMGEz_MVT_v1f64_r(RetVT, Op0); |
| case MVT::v2f64: return fastEmit_AArch64ISD_FCMGEz_MVT_v2f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::FCMGTz. |
| |
| unsigned fastEmit_AArch64ISD_FCMGTz_MVT_v4f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasFullFP16()) && (Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMGTv4i16rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMGTz_MVT_v8f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasFullFP16()) && (Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMGTv8i16rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMGTz_MVT_v2f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMGTv2i32rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMGTz_MVT_v4f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMGTv4i32rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMGTz_MVT_v1f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMGTv1i64rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMGTz_MVT_v2f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMGTv2i64rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMGTz_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v4f16: return fastEmit_AArch64ISD_FCMGTz_MVT_v4f16_r(RetVT, Op0); |
| case MVT::v8f16: return fastEmit_AArch64ISD_FCMGTz_MVT_v8f16_r(RetVT, Op0); |
| case MVT::v2f32: return fastEmit_AArch64ISD_FCMGTz_MVT_v2f32_r(RetVT, Op0); |
| case MVT::v4f32: return fastEmit_AArch64ISD_FCMGTz_MVT_v4f32_r(RetVT, Op0); |
| case MVT::v1f64: return fastEmit_AArch64ISD_FCMGTz_MVT_v1f64_r(RetVT, Op0); |
| case MVT::v2f64: return fastEmit_AArch64ISD_FCMGTz_MVT_v2f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::FCMLEz. |
| |
| unsigned fastEmit_AArch64ISD_FCMLEz_MVT_v4f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasFullFP16()) && (Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMLEv4i16rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMLEz_MVT_v8f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasFullFP16()) && (Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMLEv8i16rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMLEz_MVT_v2f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMLEv2i32rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMLEz_MVT_v4f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMLEv4i32rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMLEz_MVT_v1f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMLEv1i64rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMLEz_MVT_v2f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMLEv2i64rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMLEz_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v4f16: return fastEmit_AArch64ISD_FCMLEz_MVT_v4f16_r(RetVT, Op0); |
| case MVT::v8f16: return fastEmit_AArch64ISD_FCMLEz_MVT_v8f16_r(RetVT, Op0); |
| case MVT::v2f32: return fastEmit_AArch64ISD_FCMLEz_MVT_v2f32_r(RetVT, Op0); |
| case MVT::v4f32: return fastEmit_AArch64ISD_FCMLEz_MVT_v4f32_r(RetVT, Op0); |
| case MVT::v1f64: return fastEmit_AArch64ISD_FCMLEz_MVT_v1f64_r(RetVT, Op0); |
| case MVT::v2f64: return fastEmit_AArch64ISD_FCMLEz_MVT_v2f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::FCMLTz. |
| |
| unsigned fastEmit_AArch64ISD_FCMLTz_MVT_v4f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasFullFP16()) && (Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMLTv4i16rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMLTz_MVT_v8f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasFullFP16()) && (Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMLTv8i16rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMLTz_MVT_v2f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMLTv2i32rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMLTz_MVT_v4f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMLTv4i32rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMLTz_MVT_v1f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMLTv1i64rz, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMLTz_MVT_v2f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::FCMLTv2i64rz, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FCMLTz_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v4f16: return fastEmit_AArch64ISD_FCMLTz_MVT_v4f16_r(RetVT, Op0); |
| case MVT::v8f16: return fastEmit_AArch64ISD_FCMLTz_MVT_v8f16_r(RetVT, Op0); |
| case MVT::v2f32: return fastEmit_AArch64ISD_FCMLTz_MVT_v2f32_r(RetVT, Op0); |
| case MVT::v4f32: return fastEmit_AArch64ISD_FCMLTz_MVT_v4f32_r(RetVT, Op0); |
| case MVT::v1f64: return fastEmit_AArch64ISD_FCMLTz_MVT_v1f64_r(RetVT, Op0); |
| case MVT::v2f64: return fastEmit_AArch64ISD_FCMLTz_MVT_v2f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::FRECPE. |
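// FRECPE (and FRSQRTE below) select the reciprocal and reciprocal-square-
// root estimate instructions, typically used to seed Newton-Raphson
// refinement. The scalable-vector (nxv*) forms map to the unpredicated SVE
// FRECPE_ZZ_*/FRSQRTE_ZZ_* instructions and require SVE or SME.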
| |
| unsigned fastEmit_AArch64ISD_FRECPE_MVT_f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f32) |
| return 0; |
| return fastEmitInst_r(AArch64::FRECPEv1i32, &AArch64::FPR32RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRECPE_MVT_f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f64) |
| return 0; |
| return fastEmitInst_r(AArch64::FRECPEv1i64, &AArch64::FPR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRECPE_MVT_v2f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2f32) |
| return 0; |
| return fastEmitInst_r(AArch64::FRECPEv2f32, &AArch64::FPR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRECPE_MVT_v4f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4f32) |
| return 0; |
| return fastEmitInst_r(AArch64::FRECPEv4f32, &AArch64::FPR128RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRECPE_MVT_v1f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1f64) |
| return 0; |
| return fastEmitInst_r(AArch64::FRECPEv1i64, &AArch64::FPR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRECPE_MVT_v2f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2f64) |
| return 0; |
| return fastEmitInst_r(AArch64::FRECPEv2f64, &AArch64::FPR128RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRECPE_MVT_nxv8f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv8f16) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::FRECPE_ZZ_H, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRECPE_MVT_nxv4f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv4f32) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::FRECPE_ZZ_S, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRECPE_MVT_nxv2f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv2f64) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::FRECPE_ZZ_D, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRECPE_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f32: return fastEmit_AArch64ISD_FRECPE_MVT_f32_r(RetVT, Op0); |
| case MVT::f64: return fastEmit_AArch64ISD_FRECPE_MVT_f64_r(RetVT, Op0); |
| case MVT::v2f32: return fastEmit_AArch64ISD_FRECPE_MVT_v2f32_r(RetVT, Op0); |
| case MVT::v4f32: return fastEmit_AArch64ISD_FRECPE_MVT_v4f32_r(RetVT, Op0); |
| case MVT::v1f64: return fastEmit_AArch64ISD_FRECPE_MVT_v1f64_r(RetVT, Op0); |
| case MVT::v2f64: return fastEmit_AArch64ISD_FRECPE_MVT_v2f64_r(RetVT, Op0); |
| case MVT::nxv8f16: return fastEmit_AArch64ISD_FRECPE_MVT_nxv8f16_r(RetVT, Op0); |
| case MVT::nxv4f32: return fastEmit_AArch64ISD_FRECPE_MVT_nxv4f32_r(RetVT, Op0); |
| case MVT::nxv2f64: return fastEmit_AArch64ISD_FRECPE_MVT_nxv2f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::FRSQRTE. |
| |
| unsigned fastEmit_AArch64ISD_FRSQRTE_MVT_f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f32) |
| return 0; |
| return fastEmitInst_r(AArch64::FRSQRTEv1i32, &AArch64::FPR32RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRSQRTE_MVT_f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f64) |
| return 0; |
| return fastEmitInst_r(AArch64::FRSQRTEv1i64, &AArch64::FPR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRSQRTE_MVT_v2f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2f32) |
| return 0; |
| return fastEmitInst_r(AArch64::FRSQRTEv2f32, &AArch64::FPR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRSQRTE_MVT_v4f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4f32) |
| return 0; |
| return fastEmitInst_r(AArch64::FRSQRTEv4f32, &AArch64::FPR128RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRSQRTE_MVT_v1f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1f64) |
| return 0; |
| return fastEmitInst_r(AArch64::FRSQRTEv1i64, &AArch64::FPR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRSQRTE_MVT_v2f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2f64) |
| return 0; |
| return fastEmitInst_r(AArch64::FRSQRTEv2f64, &AArch64::FPR128RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRSQRTE_MVT_nxv8f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv8f16) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::FRSQRTE_ZZ_H, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRSQRTE_MVT_nxv4f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv4f32) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::FRSQRTE_ZZ_S, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRSQRTE_MVT_nxv2f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv2f64) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::FRSQRTE_ZZ_D, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_FRSQRTE_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f32: return fastEmit_AArch64ISD_FRSQRTE_MVT_f32_r(RetVT, Op0); |
| case MVT::f64: return fastEmit_AArch64ISD_FRSQRTE_MVT_f64_r(RetVT, Op0); |
| case MVT::v2f32: return fastEmit_AArch64ISD_FRSQRTE_MVT_v2f32_r(RetVT, Op0); |
| case MVT::v4f32: return fastEmit_AArch64ISD_FRSQRTE_MVT_v4f32_r(RetVT, Op0); |
| case MVT::v1f64: return fastEmit_AArch64ISD_FRSQRTE_MVT_v1f64_r(RetVT, Op0); |
| case MVT::v2f64: return fastEmit_AArch64ISD_FRSQRTE_MVT_v2f64_r(RetVT, Op0); |
| case MVT::nxv8f16: return fastEmit_AArch64ISD_FRSQRTE_MVT_nxv8f16_r(RetVT, Op0); |
| case MVT::nxv4f32: return fastEmit_AArch64ISD_FRSQRTE_MVT_nxv4f32_r(RetVT, Op0); |
| case MVT::nxv2f64: return fastEmit_AArch64ISD_FRSQRTE_MVT_nxv2f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::OBSCURE_COPY. |
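// OBSCURE_COPY appears to be an SME-related pseudo: it lowers to an
// ordinary register copy but is opaque to the optimizer, so the copy
// cannot be folded away (note the hasSME() guard).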
| |
| unsigned fastEmit_AArch64ISD_OBSCURE_COPY_MVT_i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i64) |
| return 0; |
| if ((Subtarget->hasSME())) { |
| return fastEmitInst_r(AArch64::OBSCURE_COPY, &AArch64::GPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_OBSCURE_COPY_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i64: return fastEmit_AArch64ISD_OBSCURE_COPY_MVT_i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::REV16. |
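// REV16/REV32/REV64 reverse the order of the narrower elements inside each
// 16-, 32- or 64-bit container; the f16/bf16 forms below reuse the i16
// instruction encodings, since only lane movement matters.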
| |
| unsigned fastEmit_AArch64ISD_REV16_MVT_v8i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::REV16v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV16_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::REV16v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV16_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_AArch64ISD_REV16_MVT_v8i8_r(RetVT, Op0); |
| case MVT::v16i8: return fastEmit_AArch64ISD_REV16_MVT_v16i8_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::REV32. |
| |
| unsigned fastEmit_AArch64ISD_REV32_MVT_v8i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::REV32v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV32_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::REV32v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV32_MVT_v4i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::REV32v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV32_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::REV32v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV32_MVT_v4f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4f16) |
| return 0; |
| return fastEmitInst_r(AArch64::REV32v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV32_MVT_v8f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8f16) |
| return 0; |
| return fastEmitInst_r(AArch64::REV32v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV32_MVT_v4bf16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4bf16) |
| return 0; |
| return fastEmitInst_r(AArch64::REV32v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV32_MVT_v8bf16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8bf16) |
| return 0; |
| return fastEmitInst_r(AArch64::REV32v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV32_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_AArch64ISD_REV32_MVT_v8i8_r(RetVT, Op0); |
| case MVT::v16i8: return fastEmit_AArch64ISD_REV32_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v4i16: return fastEmit_AArch64ISD_REV32_MVT_v4i16_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_AArch64ISD_REV32_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v4f16: return fastEmit_AArch64ISD_REV32_MVT_v4f16_r(RetVT, Op0); |
| case MVT::v8f16: return fastEmit_AArch64ISD_REV32_MVT_v8f16_r(RetVT, Op0); |
| case MVT::v4bf16: return fastEmit_AArch64ISD_REV32_MVT_v4bf16_r(RetVT, Op0); |
| case MVT::v8bf16: return fastEmit_AArch64ISD_REV32_MVT_v8bf16_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::REV64. |
| |
| unsigned fastEmit_AArch64ISD_REV64_MVT_v8i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::REV64v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV64_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::REV64v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV64_MVT_v4i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV64_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::REV64v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV64_MVT_v2i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::REV64v2i32, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV64_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::REV64v4i32, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV64_MVT_v4f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4f16) |
| return 0; |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV64_MVT_v8f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8f16) |
| return 0; |
| return fastEmitInst_r(AArch64::REV64v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV64_MVT_v4bf16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4bf16) |
| return 0; |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV64_MVT_v8bf16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8bf16) |
| return 0; |
| return fastEmitInst_r(AArch64::REV64v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV64_MVT_v2f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2f32) |
| return 0; |
| return fastEmitInst_r(AArch64::REV64v2i32, &AArch64::FPR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV64_MVT_v4f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4f32) |
| return 0; |
| return fastEmitInst_r(AArch64::REV64v4i32, &AArch64::FPR128RegClass, Op0); |
| } |
| |
| unsigned fastEmit_AArch64ISD_REV64_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_AArch64ISD_REV64_MVT_v8i8_r(RetVT, Op0); |
| case MVT::v16i8: return fastEmit_AArch64ISD_REV64_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v4i16: return fastEmit_AArch64ISD_REV64_MVT_v4i16_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_AArch64ISD_REV64_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v2i32: return fastEmit_AArch64ISD_REV64_MVT_v2i32_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_AArch64ISD_REV64_MVT_v4i32_r(RetVT, Op0); |
| case MVT::v4f16: return fastEmit_AArch64ISD_REV64_MVT_v4f16_r(RetVT, Op0); |
| case MVT::v8f16: return fastEmit_AArch64ISD_REV64_MVT_v8f16_r(RetVT, Op0); |
| case MVT::v4bf16: return fastEmit_AArch64ISD_REV64_MVT_v4bf16_r(RetVT, Op0); |
| case MVT::v8bf16: return fastEmit_AArch64ISD_REV64_MVT_v8bf16_r(RetVT, Op0); |
| case MVT::v2f32: return fastEmit_AArch64ISD_REV64_MVT_v2f32_r(RetVT, Op0); |
| case MVT::v4f32: return fastEmit_AArch64ISD_REV64_MVT_v4f32_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::SADDLP. |
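// SADDLP (and its unsigned counterpart UADDLP below) is a pairwise
// add-long: adjacent element pairs are summed into elements twice as wide,
// halving the lane count (e.g. v8i8 -> v4i16).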
| |
| unsigned fastEmit_AArch64ISD_SADDLP_MVT_v8i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::SADDLPv8i8_v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SADDLP_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::SADDLPv16i8_v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SADDLP_MVT_v4i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::SADDLPv4i16_v2i32, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SADDLP_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::SADDLPv8i16_v4i32, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SADDLP_MVT_v2i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::SADDLPv2i32_v1i64, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SADDLP_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::SADDLPv4i32_v2i64, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SADDLP_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_AArch64ISD_SADDLP_MVT_v8i8_r(RetVT, Op0); |
| case MVT::v16i8: return fastEmit_AArch64ISD_SADDLP_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v4i16: return fastEmit_AArch64ISD_SADDLP_MVT_v4i16_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_AArch64ISD_SADDLP_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v2i32: return fastEmit_AArch64ISD_SADDLP_MVT_v2i32_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_AArch64ISD_SADDLP_MVT_v4i32_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::SITOF. |
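// SITOF/UITOF convert an integer already held in an FP/SIMD register to
// floating point, selecting the scalar SCVTF/UCVTF variants; the f16 forms
// require FullFP16.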
| |
| unsigned fastEmit_AArch64ISD_SITOF_MVT_f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f16) |
| return 0; |
| if ((Subtarget->hasFullFP16()) && (Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::SCVTFv1i16, &AArch64::FPR16RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SITOF_MVT_f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::SCVTFv1i32, &AArch64::FPR32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SITOF_MVT_f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::SCVTFv1i64, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SITOF_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f16: return fastEmit_AArch64ISD_SITOF_MVT_f16_r(RetVT, Op0); |
| case MVT::f32: return fastEmit_AArch64ISD_SITOF_MVT_f32_r(RetVT, Op0); |
| case MVT::f64: return fastEmit_AArch64ISD_SITOF_MVT_f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::SUNPKHI. |
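// SUNPKHI/SUNPKLO (and UUNPKHI/UUNPKLO below) are SVE unpacks: they sign-
// or zero-extend the high or low half of the source elements into the
// next wider element type (e.g. nxv16i8 -> nxv8i16).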
| |
| unsigned fastEmit_AArch64ISD_SUNPKHI_MVT_nxv16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv8i16) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::SUNPKHI_ZZ_H, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SUNPKHI_MVT_nxv8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv4i32) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::SUNPKHI_ZZ_S, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SUNPKHI_MVT_nxv4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv2i64) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::SUNPKHI_ZZ_D, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SUNPKHI_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::nxv16i8: return fastEmit_AArch64ISD_SUNPKHI_MVT_nxv16i8_r(RetVT, Op0); |
| case MVT::nxv8i16: return fastEmit_AArch64ISD_SUNPKHI_MVT_nxv8i16_r(RetVT, Op0); |
| case MVT::nxv4i32: return fastEmit_AArch64ISD_SUNPKHI_MVT_nxv4i32_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::SUNPKLO. |
| |
| unsigned fastEmit_AArch64ISD_SUNPKLO_MVT_nxv16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv8i16) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::SUNPKLO_ZZ_H, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SUNPKLO_MVT_nxv8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv4i32) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::SUNPKLO_ZZ_S, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SUNPKLO_MVT_nxv4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv2i64) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::SUNPKLO_ZZ_D, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_SUNPKLO_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::nxv16i8: return fastEmit_AArch64ISD_SUNPKLO_MVT_nxv16i8_r(RetVT, Op0); |
| case MVT::nxv8i16: return fastEmit_AArch64ISD_SUNPKLO_MVT_nxv8i16_r(RetVT, Op0); |
| case MVT::nxv4i32: return fastEmit_AArch64ISD_SUNPKLO_MVT_nxv4i32_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::UADDLP. |
| |
| unsigned fastEmit_AArch64ISD_UADDLP_MVT_v8i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::UADDLPv8i8_v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UADDLP_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::UADDLPv16i8_v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UADDLP_MVT_v4i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::UADDLPv4i16_v2i32, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UADDLP_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::UADDLPv8i16_v4i32, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UADDLP_MVT_v2i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::UADDLPv2i32_v1i64, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UADDLP_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::UADDLPv4i32_v2i64, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UADDLP_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_AArch64ISD_UADDLP_MVT_v8i8_r(RetVT, Op0); |
| case MVT::v16i8: return fastEmit_AArch64ISD_UADDLP_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v4i16: return fastEmit_AArch64ISD_UADDLP_MVT_v4i16_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_AArch64ISD_UADDLP_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v2i32: return fastEmit_AArch64ISD_UADDLP_MVT_v2i32_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_AArch64ISD_UADDLP_MVT_v4i32_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::UITOF. |
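| // UITOF converts an unsigned integer value already held in a SIMD&FP |
| // register to floating point via the scalar UCVTF variants; the f16 form |
| // additionally requires the FullFP16 extension. |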
| |
| unsigned fastEmit_AArch64ISD_UITOF_MVT_f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f16) |
| return 0; |
| if ((Subtarget->hasFullFP16()) && (Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::UCVTFv1i16, &AArch64::FPR16RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UITOF_MVT_f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::UCVTFv1i32, &AArch64::FPR32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UITOF_MVT_f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::UCVTFv1i64, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UITOF_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f16: return fastEmit_AArch64ISD_UITOF_MVT_f16_r(RetVT, Op0); |
| case MVT::f32: return fastEmit_AArch64ISD_UITOF_MVT_f32_r(RetVT, Op0); |
| case MVT::f64: return fastEmit_AArch64ISD_UITOF_MVT_f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::UUNPKHI. |
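| // UUNPKHI (unsigned unpack high) zero-extends the high-half elements of an |
| // SVE Z register into lanes of twice the width (nxv16i8 -> nxv8i16, etc.). |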
| |
| unsigned fastEmit_AArch64ISD_UUNPKHI_MVT_nxv16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv8i16) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::UUNPKHI_ZZ_H, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UUNPKHI_MVT_nxv8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv4i32) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::UUNPKHI_ZZ_S, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UUNPKHI_MVT_nxv4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv2i64) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::UUNPKHI_ZZ_D, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UUNPKHI_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::nxv16i8: return fastEmit_AArch64ISD_UUNPKHI_MVT_nxv16i8_r(RetVT, Op0); |
| case MVT::nxv8i16: return fastEmit_AArch64ISD_UUNPKHI_MVT_nxv8i16_r(RetVT, Op0); |
| case MVT::nxv4i32: return fastEmit_AArch64ISD_UUNPKHI_MVT_nxv4i32_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for AArch64ISD::UUNPKLO. |
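| // UUNPKLO is the low-half counterpart of UUNPKHI above: it zero-extends |
| // the low-half elements of the source Z register into double-width lanes. |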
| |
| unsigned fastEmit_AArch64ISD_UUNPKLO_MVT_nxv16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv8i16) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::UUNPKLO_ZZ_H, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UUNPKLO_MVT_nxv8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv4i32) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::UUNPKLO_ZZ_S, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UUNPKLO_MVT_nxv4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::nxv2i64) |
| return 0; |
| if ((Subtarget->hasSVEorSME())) { |
| return fastEmitInst_r(AArch64::UUNPKLO_ZZ_D, &AArch64::ZPRRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_AArch64ISD_UUNPKLO_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::nxv16i8: return fastEmit_AArch64ISD_UUNPKLO_MVT_nxv16i8_r(RetVT, Op0); |
| case MVT::nxv8i16: return fastEmit_AArch64ISD_UUNPKLO_MVT_nxv8i16_r(RetVT, Op0); |
| case MVT::nxv4i32: return fastEmit_AArch64ISD_UUNPKLO_MVT_nxv4i32_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::ABS. |
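| // Integer absolute value. With the CSSC extension the scalar GPR forms |
| // ABSWr/ABSXr are emitted; without it the i64 case falls back to the NEON |
| // scalar ABSv1i64, and the vector cases use the per-width NEON ABS forms. |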
| |
| unsigned fastEmit_ISD_ABS_MVT_i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i32) |
| return 0; |
| if ((Subtarget->hasCSSC())) { |
| return fastEmitInst_r(AArch64::ABSWr, &AArch64::GPR32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i64) |
| return 0; |
| if ((!Subtarget->hasCSSC())) { |
| return fastEmitInst_r(AArch64::ABSv1i64, &AArch64::FPR64RegClass, Op0); |
| } |
| if ((Subtarget->hasCSSC())) { |
| return fastEmitInst_r(AArch64::ABSXr, &AArch64::GPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v8i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::ABSv8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i8) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::ABSv16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v4i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::ABSv4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::ABSv8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v2i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::ABSv2i32, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::ABSv4i32, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v1i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v1i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::ABSv1i64, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v2i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasNEON())) { |
| return fastEmitInst_r(AArch64::ABSv2i64, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i32: return fastEmit_ISD_ABS_MVT_i32_r(RetVT, Op0); |
| case MVT::i64: return fastEmit_ISD_ABS_MVT_i64_r(RetVT, Op0); |
| case MVT::v8i8: return fastEmit_ISD_ABS_MVT_v8i8_r(RetVT, Op0); |
| case MVT::v16i8: return fastEmit_ISD_ABS_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v4i16: return fastEmit_ISD_ABS_MVT_v4i16_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_ISD_ABS_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v2i32: return fastEmit_ISD_ABS_MVT_v2i32_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_ISD_ABS_MVT_v4i32_r(RetVT, Op0); |
| case MVT::v1i64: return fastEmit_ISD_ABS_MVT_v1i64_r(RetVT, Op0); |
| case MVT::v2i64: return fastEmit_ISD_ABS_MVT_v2i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::BITCAST. |
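| // On big-endian targets a vector bitcast is not a plain register copy: the |
| // patterns below emit a REV16/REV32/REV64 to reorder lanes between element |
| // sizes, dispatching first on the source MVT and then on the destination |
| // RetVT. On little-endian targets no instruction is needed, hence the |
| // (!Subtarget->isLittleEndian()) guard on every helper here. |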
| |
| unsigned fastEmit_ISD_BITCAST_MVT_f64_MVT_v8i8_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_f64_MVT_v4i16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_f64_MVT_v2i32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v2i32, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_f64_MVT_v4f16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_f64_MVT_v4bf16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_f64_MVT_v2f32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v2i32, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_f64_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_ISD_BITCAST_MVT_f64_MVT_v8i8_r(Op0); |
| case MVT::v4i16: return fastEmit_ISD_BITCAST_MVT_f64_MVT_v4i16_r(Op0); |
| case MVT::v2i32: return fastEmit_ISD_BITCAST_MVT_f64_MVT_v2i32_r(Op0); |
| case MVT::v4f16: return fastEmit_ISD_BITCAST_MVT_f64_MVT_v4f16_r(Op0); |
| case MVT::v4bf16: return fastEmit_ISD_BITCAST_MVT_f64_MVT_v4bf16_r(Op0); |
| case MVT::v2f32: return fastEmit_ISD_BITCAST_MVT_f64_MVT_v2f32_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i8_MVT_f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v4i16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV16v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v2i32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v1i64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v4f16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV16v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v4bf16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV16v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v2f32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v1f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i8_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::f64: return fastEmit_ISD_BITCAST_MVT_v8i8_MVT_f64_r(Op0); |
| case MVT::v4i16: return fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v4i16_r(Op0); |
| case MVT::v2i32: return fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v2i32_r(Op0); |
| case MVT::v1i64: return fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v1i64_r(Op0); |
| case MVT::v4f16: return fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v4f16_r(Op0); |
| case MVT::v4bf16: return fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v4bf16_r(Op0); |
| case MVT::v2f32: return fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v2f32_r(Op0); |
| case MVT::v1f64: return fastEmit_ISD_BITCAST_MVT_v8i8_MVT_v1f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v8i16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV16v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v4i32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v2i64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v8f16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV16v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v8bf16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV16v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v4f32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v2f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v8i16: return fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v8i16_r(Op0); |
| case MVT::v4i32: return fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v4i32_r(Op0); |
| case MVT::v2i64: return fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v2i64_r(Op0); |
| case MVT::v8f16: return fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v8f16_r(Op0); |
| case MVT::v8bf16: return fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v8bf16_r(Op0); |
| case MVT::v4f32: return fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v4f32_r(Op0); |
| case MVT::v2f64: return fastEmit_ISD_BITCAST_MVT_v16i8_MVT_v2f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i16_MVT_f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i16_MVT_v8i8_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV16v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i16_MVT_v2i32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i16_MVT_v1i64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i16_MVT_v2f32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i16_MVT_v1f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i16_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::f64: return fastEmit_ISD_BITCAST_MVT_v4i16_MVT_f64_r(Op0); |
| case MVT::v8i8: return fastEmit_ISD_BITCAST_MVT_v4i16_MVT_v8i8_r(Op0); |
| case MVT::v2i32: return fastEmit_ISD_BITCAST_MVT_v4i16_MVT_v2i32_r(Op0); |
| case MVT::v1i64: return fastEmit_ISD_BITCAST_MVT_v4i16_MVT_v1i64_r(Op0); |
| case MVT::v2f32: return fastEmit_ISD_BITCAST_MVT_v4i16_MVT_v2f32_r(Op0); |
| case MVT::v1f64: return fastEmit_ISD_BITCAST_MVT_v4i16_MVT_v1f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i16_MVT_v16i8_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV16v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i16_MVT_v4i32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i16_MVT_v2i64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i16_MVT_v4f32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i16_MVT_v2f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v16i8: return fastEmit_ISD_BITCAST_MVT_v8i16_MVT_v16i8_r(Op0); |
| case MVT::v4i32: return fastEmit_ISD_BITCAST_MVT_v8i16_MVT_v4i32_r(Op0); |
| case MVT::v2i64: return fastEmit_ISD_BITCAST_MVT_v8i16_MVT_v2i64_r(Op0); |
| case MVT::v4f32: return fastEmit_ISD_BITCAST_MVT_v8i16_MVT_v4f32_r(Op0); |
| case MVT::v2f64: return fastEmit_ISD_BITCAST_MVT_v8i16_MVT_v2f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i32_MVT_f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v2i32, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i32_MVT_v8i8_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i32_MVT_v4i16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i32_MVT_v1i64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v2i32, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i32_MVT_v4f16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i32_MVT_v4bf16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i32_MVT_v1f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v2i32, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i32_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::f64: return fastEmit_ISD_BITCAST_MVT_v2i32_MVT_f64_r(Op0); |
| case MVT::v8i8: return fastEmit_ISD_BITCAST_MVT_v2i32_MVT_v8i8_r(Op0); |
| case MVT::v4i16: return fastEmit_ISD_BITCAST_MVT_v2i32_MVT_v4i16_r(Op0); |
| case MVT::v1i64: return fastEmit_ISD_BITCAST_MVT_v2i32_MVT_v1i64_r(Op0); |
| case MVT::v4f16: return fastEmit_ISD_BITCAST_MVT_v2i32_MVT_v4f16_r(Op0); |
| case MVT::v4bf16: return fastEmit_ISD_BITCAST_MVT_v2i32_MVT_v4bf16_r(Op0); |
| case MVT::v1f64: return fastEmit_ISD_BITCAST_MVT_v2i32_MVT_v1f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i32_MVT_v16i8_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i32_MVT_v8i16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i32_MVT_v2i64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i32, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i32_MVT_v8f16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i32_MVT_v8bf16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i32_MVT_v2f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i32, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v16i8: return fastEmit_ISD_BITCAST_MVT_v4i32_MVT_v16i8_r(Op0); |
| case MVT::v8i16: return fastEmit_ISD_BITCAST_MVT_v4i32_MVT_v8i16_r(Op0); |
| case MVT::v2i64: return fastEmit_ISD_BITCAST_MVT_v4i32_MVT_v2i64_r(Op0); |
| case MVT::v8f16: return fastEmit_ISD_BITCAST_MVT_v4i32_MVT_v8f16_r(Op0); |
| case MVT::v8bf16: return fastEmit_ISD_BITCAST_MVT_v4i32_MVT_v8bf16_r(Op0); |
| case MVT::v2f64: return fastEmit_ISD_BITCAST_MVT_v4i32_MVT_v2f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v1i64_MVT_v8i8_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v1i64_MVT_v4i16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v1i64_MVT_v2i32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v2i32, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v1i64_MVT_v4f16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v1i64_MVT_v4bf16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v1i64_MVT_v2f32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v2i32, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v1i64_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v8i8: return fastEmit_ISD_BITCAST_MVT_v1i64_MVT_v8i8_r(Op0); |
| case MVT::v4i16: return fastEmit_ISD_BITCAST_MVT_v1i64_MVT_v4i16_r(Op0); |
| case MVT::v2i32: return fastEmit_ISD_BITCAST_MVT_v1i64_MVT_v2i32_r(Op0); |
| case MVT::v4f16: return fastEmit_ISD_BITCAST_MVT_v1i64_MVT_v4f16_r(Op0); |
| case MVT::v4bf16: return fastEmit_ISD_BITCAST_MVT_v1i64_MVT_v4bf16_r(Op0); |
| case MVT::v2f32: return fastEmit_ISD_BITCAST_MVT_v1i64_MVT_v2f32_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i64_MVT_v16i8_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i64_MVT_v8i16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i64_MVT_v4i32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i32, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i64_MVT_v8f16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i64_MVT_v8bf16_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i64_MVT_v4f32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i32, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v2i64_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v16i8: return fastEmit_ISD_BITCAST_MVT_v2i64_MVT_v16i8_r(Op0); |
| case MVT::v8i16: return fastEmit_ISD_BITCAST_MVT_v2i64_MVT_v8i16_r(Op0); |
| case MVT::v4i32: return fastEmit_ISD_BITCAST_MVT_v2i64_MVT_v4i32_r(Op0); |
| case MVT::v8f16: return fastEmit_ISD_BITCAST_MVT_v2i64_MVT_v8f16_r(Op0); |
| case MVT::v8bf16: return fastEmit_ISD_BITCAST_MVT_v2i64_MVT_v8bf16_r(Op0); |
| case MVT::v4f32: return fastEmit_ISD_BITCAST_MVT_v2i64_MVT_v4f32_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4f16_MVT_f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4f16_MVT_v8i8_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV16v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4f16_MVT_v2i32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4f16_MVT_v1i64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4f16_MVT_v2f32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4f16_MVT_v1f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4f16_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::f64: return fastEmit_ISD_BITCAST_MVT_v4f16_MVT_f64_r(Op0); |
| case MVT::v8i8: return fastEmit_ISD_BITCAST_MVT_v4f16_MVT_v8i8_r(Op0); |
| case MVT::v2i32: return fastEmit_ISD_BITCAST_MVT_v4f16_MVT_v2i32_r(Op0); |
| case MVT::v1i64: return fastEmit_ISD_BITCAST_MVT_v4f16_MVT_v1i64_r(Op0); |
| case MVT::v2f32: return fastEmit_ISD_BITCAST_MVT_v4f16_MVT_v2f32_r(Op0); |
| case MVT::v1f64: return fastEmit_ISD_BITCAST_MVT_v4f16_MVT_v1f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8f16_MVT_v16i8_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV16v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8f16_MVT_v4i32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8f16_MVT_v2i64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8f16_MVT_v4f32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8f16_MVT_v2f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8f16_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v16i8: return fastEmit_ISD_BITCAST_MVT_v8f16_MVT_v16i8_r(Op0); |
| case MVT::v4i32: return fastEmit_ISD_BITCAST_MVT_v8f16_MVT_v4i32_r(Op0); |
| case MVT::v2i64: return fastEmit_ISD_BITCAST_MVT_v8f16_MVT_v2i64_r(Op0); |
| case MVT::v4f32: return fastEmit_ISD_BITCAST_MVT_v8f16_MVT_v4f32_r(Op0); |
| case MVT::v2f64: return fastEmit_ISD_BITCAST_MVT_v8f16_MVT_v2f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4bf16_MVT_f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4bf16_MVT_v8i8_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV16v8i8, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4bf16_MVT_v2i32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4bf16_MVT_v1i64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4bf16_MVT_v2f32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4bf16_MVT_v1f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v4i16, &AArch64::FPR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v4bf16_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::f64: return fastEmit_ISD_BITCAST_MVT_v4bf16_MVT_f64_r(Op0); |
| case MVT::v8i8: return fastEmit_ISD_BITCAST_MVT_v4bf16_MVT_v8i8_r(Op0); |
| case MVT::v2i32: return fastEmit_ISD_BITCAST_MVT_v4bf16_MVT_v2i32_r(Op0); |
| case MVT::v1i64: return fastEmit_ISD_BITCAST_MVT_v4bf16_MVT_v1i64_r(Op0); |
| case MVT::v2f32: return fastEmit_ISD_BITCAST_MVT_v4bf16_MVT_v2f32_r(Op0); |
| case MVT::v1f64: return fastEmit_ISD_BITCAST_MVT_v4bf16_MVT_v1f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8bf16_MVT_v16i8_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV16v16i8, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8bf16_MVT_v4i32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8bf16_MVT_v2i64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8bf16_MVT_v4f32_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV32v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_v8bf16_MVT_v2f64_r(unsigned Op0) { |
| if ((!Subtarget->isLittleEndian())) { |
| return fastEmitInst_r(AArch64::REV64v8i16, &AArch64::FPR128RegClass, Op0); |
| } |
| return 0; |
| } |