| /*===- TableGen'erated file -------------------------------------*- C++ -*-===*\ |
| |* *| |
| |* "Fast" Instruction Selector for the X86 target *| |
| |* *| |
| |* Automatically generated file, do not edit! *| |
| |* *| |
| \*===----------------------------------------------------------------------===*/ |
| |
| |
| // FastEmit Immediate Predicate functions. |
| static bool Predicate_i16immSExt8(int64_t Imm) { |
| return isInt<8>(Imm); |
| } |
| static bool Predicate_i32immSExt8(int64_t Imm) { |
| return isInt<8>(Imm); |
| } |
| static bool Predicate_i64immSExt8(int64_t Imm) { |
| return isInt<8>(Imm); |
| } |
| static bool Predicate_i64immSExt32(int64_t Imm) { |
| return isInt<32>(Imm); |
| } |
| static bool Predicate_AndMask64(int64_t Imm) { |
| |
| return isMask_64(Imm) && !isUInt<32>(Imm); |
| |
| } |
| static bool Predicate_BTRMask64(int64_t Imm) { |
| |
| return !isUInt<32>(Imm) && !isInt<32>(Imm) && isPowerOf2_64(~Imm); |
| |
| } |
| static bool Predicate_BTCBTSMask64(int64_t Imm) { |
| |
| return !isInt<32>(Imm) && isPowerOf2_64(Imm); |
| |
| } |
| |
| |
| // FastEmit functions for ISD::ABS. |
| |
| unsigned fastEmit_ISD_ABS_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i8) |
| return 0; |
| if ((Subtarget->hasBWI()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPABSBZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| if ((Subtarget->hasSSSE3() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::PABSBrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX() || !Subtarget->hasBWI())) { |
| return fastEmitInst_r(X86::VPABSBrr, &X86::VR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v32i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v32i8) |
| return 0; |
| if ((Subtarget->hasBWI()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPABSBZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| if ((Subtarget->hasAVX2()) && (!Subtarget->hasVLX() || !Subtarget->hasBWI())) { |
| return fastEmitInst_r(X86::VPABSBYrr, &X86::VR256RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v64i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v64i8) |
| return 0; |
| if ((Subtarget->hasBWI())) { |
| return fastEmitInst_r(X86::VPABSBZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasBWI()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPABSWZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| if ((Subtarget->hasSSSE3() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::PABSWrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX() || !Subtarget->hasBWI())) { |
| return fastEmitInst_r(X86::VPABSWrr, &X86::VR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v16i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i16) |
| return 0; |
| if ((Subtarget->hasBWI()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPABSWZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| if ((Subtarget->hasAVX2()) && (!Subtarget->hasVLX() || !Subtarget->hasBWI())) { |
| return fastEmitInst_r(X86::VPABSWYrr, &X86::VR256RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v32i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v32i16) |
| return 0; |
| if ((Subtarget->hasBWI())) { |
| return fastEmitInst_r(X86::VPABSWZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPABSDZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| if ((Subtarget->hasSSSE3() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::PABSDrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPABSDrr, &X86::VR128RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v8i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i32) |
| return 0; |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPABSDZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| if ((Subtarget->hasAVX2()) && (!Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPABSDYrr, &X86::VR256RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v16i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i32) |
| return 0; |
| if ((Subtarget->hasAVX512())) { |
| return fastEmitInst_r(X86::VPABSDZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v2i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPABSQZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v4i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i64) |
| return 0; |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPABSQZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_MVT_v8i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i64) |
| return 0; |
| if ((Subtarget->hasAVX512())) { |
| return fastEmitInst_r(X86::VPABSQZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_ABS_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v16i8: return fastEmit_ISD_ABS_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v32i8: return fastEmit_ISD_ABS_MVT_v32i8_r(RetVT, Op0); |
| case MVT::v64i8: return fastEmit_ISD_ABS_MVT_v64i8_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_ISD_ABS_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v16i16: return fastEmit_ISD_ABS_MVT_v16i16_r(RetVT, Op0); |
| case MVT::v32i16: return fastEmit_ISD_ABS_MVT_v32i16_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_ISD_ABS_MVT_v4i32_r(RetVT, Op0); |
| case MVT::v8i32: return fastEmit_ISD_ABS_MVT_v8i32_r(RetVT, Op0); |
| case MVT::v16i32: return fastEmit_ISD_ABS_MVT_v16i32_r(RetVT, Op0); |
| case MVT::v2i64: return fastEmit_ISD_ABS_MVT_v2i64_r(RetVT, Op0); |
| case MVT::v4i64: return fastEmit_ISD_ABS_MVT_v4i64_r(RetVT, Op0); |
| case MVT::v8i64: return fastEmit_ISD_ABS_MVT_v8i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::ANY_EXTEND. |
| |
| unsigned fastEmit_ISD_ANY_EXTEND_MVT_i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i32) |
| return 0; |
| return fastEmitInst_r(X86::MOVZX32rr8, &X86::GR32RegClass, Op0); |
| } |
| |
| unsigned fastEmit_ISD_ANY_EXTEND_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i8: return fastEmit_ISD_ANY_EXTEND_MVT_i8_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::BITCAST. |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f32) |
| return 0; |
| if ((Subtarget->hasAVX512())) { |
| return fastEmitInst_r(X86::VMOVDI2SSZrr, &X86::FR32XRegClass, Op0); |
| } |
| if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::MOVDI2SSrr, &X86::FR32RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) { |
| return fastEmitInst_r(X86::VMOVDI2SSrr, &X86::FR32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_i64_MVT_f64_r(unsigned Op0) { |
| if ((Subtarget->hasAVX512())) { |
| return fastEmitInst_r(X86::VMOV64toSDZrr, &X86::FR64XRegClass, Op0); |
| } |
| if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::MOV64toSDrr, &X86::FR64RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) { |
| return fastEmitInst_r(X86::VMOV64toSDrr, &X86::FR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_i64_MVT_x86mmx_r(unsigned Op0) { |
| if ((Subtarget->hasMMX()) && (Subtarget->is64Bit())) { |
| return fastEmitInst_r(X86::MMX_MOVD64to64rr, &X86::VR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_i64_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::f64: return fastEmit_ISD_BITCAST_MVT_i64_MVT_f64_r(Op0); |
| case MVT::x86mmx: return fastEmit_ISD_BITCAST_MVT_i64_MVT_x86mmx_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i32) |
| return 0; |
| if ((Subtarget->hasAVX512())) { |
| return fastEmitInst_r(X86::VMOVSS2DIZrr, &X86::GR32RegClass, Op0); |
| } |
| if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::MOVSS2DIrr, &X86::GR32RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) { |
| return fastEmitInst_r(X86::VMOVSS2DIrr, &X86::GR32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_f64_MVT_i64_r(unsigned Op0) { |
| if ((Subtarget->hasAVX512())) { |
| return fastEmitInst_r(X86::VMOVSDto64Zrr, &X86::GR64RegClass, Op0); |
| } |
| if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::MOVSDto64rr, &X86::GR64RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) { |
| return fastEmitInst_r(X86::VMOVSDto64rr, &X86::GR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_f64_MVT_x86mmx_r(unsigned Op0) { |
| if ((Subtarget->hasMMX()) && (Subtarget->hasSSE2())) { |
| return fastEmitInst_r(X86::MMX_MOVFR642Qrr, &X86::VR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_f64_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::i64: return fastEmit_ISD_BITCAST_MVT_f64_MVT_i64_r(Op0); |
| case MVT::x86mmx: return fastEmit_ISD_BITCAST_MVT_f64_MVT_x86mmx_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_x86mmx_MVT_i64_r(unsigned Op0) { |
| if ((Subtarget->hasMMX()) && (Subtarget->is64Bit())) { |
| return fastEmitInst_r(X86::MMX_MOVD64from64rr, &X86::GR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_x86mmx_MVT_f64_r(unsigned Op0) { |
| if ((Subtarget->hasMMX()) && (Subtarget->hasSSE2())) { |
| return fastEmitInst_r(X86::MMX_MOVQ2FR64rr, &X86::FR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_MVT_x86mmx_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::i64: return fastEmit_ISD_BITCAST_MVT_x86mmx_MVT_i64_r(Op0); |
| case MVT::f64: return fastEmit_ISD_BITCAST_MVT_x86mmx_MVT_f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_BITCAST_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i32: return fastEmit_ISD_BITCAST_MVT_i32_r(RetVT, Op0); |
| case MVT::i64: return fastEmit_ISD_BITCAST_MVT_i64_r(RetVT, Op0); |
| case MVT::f32: return fastEmit_ISD_BITCAST_MVT_f32_r(RetVT, Op0); |
| case MVT::f64: return fastEmit_ISD_BITCAST_MVT_f64_r(RetVT, Op0); |
| case MVT::x86mmx: return fastEmit_ISD_BITCAST_MVT_x86mmx_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::BRIND. |
| |
| unsigned fastEmit_ISD_BRIND_MVT_i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::isVoid) |
| return 0; |
| if ((!Subtarget->is64Bit())) { |
| return fastEmitInst_r(X86::JMP16r, &X86::GR16RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BRIND_MVT_i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::isVoid) |
| return 0; |
| if ((!Subtarget->is64Bit())) { |
| return fastEmitInst_r(X86::JMP32r, &X86::GR32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BRIND_MVT_i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::isVoid) |
| return 0; |
| if ((Subtarget->is64Bit())) { |
| return fastEmitInst_r(X86::JMP64r, &X86::GR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_BRIND_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i16: return fastEmit_ISD_BRIND_MVT_i16_r(RetVT, Op0); |
| case MVT::i32: return fastEmit_ISD_BRIND_MVT_i32_r(RetVT, Op0); |
| case MVT::i64: return fastEmit_ISD_BRIND_MVT_i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::BSWAP. |
| |
| unsigned fastEmit_ISD_BSWAP_MVT_i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i32) |
| return 0; |
| return fastEmitInst_r(X86::BSWAP32r, &X86::GR32RegClass, Op0); |
| } |
| |
| unsigned fastEmit_ISD_BSWAP_MVT_i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i64) |
| return 0; |
| return fastEmitInst_r(X86::BSWAP64r, &X86::GR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_ISD_BSWAP_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i32: return fastEmit_ISD_BSWAP_MVT_i32_r(RetVT, Op0); |
| case MVT::i64: return fastEmit_ISD_BSWAP_MVT_i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::CTLZ. |
| |
| unsigned fastEmit_ISD_CTLZ_MVT_i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i16) |
| return 0; |
| if ((Subtarget->hasLZCNT())) { |
| return fastEmitInst_r(X86::LZCNT16rr, &X86::GR16RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTLZ_MVT_i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i32) |
| return 0; |
| if ((Subtarget->hasLZCNT())) { |
| return fastEmitInst_r(X86::LZCNT32rr, &X86::GR32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTLZ_MVT_i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i64) |
| return 0; |
| if ((Subtarget->hasLZCNT())) { |
| return fastEmitInst_r(X86::LZCNT64rr, &X86::GR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTLZ_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasCDI()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPLZCNTDZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTLZ_MVT_v8i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i32) |
| return 0; |
| if ((Subtarget->hasCDI()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPLZCNTDZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTLZ_MVT_v16i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i32) |
| return 0; |
| if ((Subtarget->hasCDI())) { |
| return fastEmitInst_r(X86::VPLZCNTDZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTLZ_MVT_v2i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasCDI()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPLZCNTQZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTLZ_MVT_v4i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i64) |
| return 0; |
| if ((Subtarget->hasCDI()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPLZCNTQZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTLZ_MVT_v8i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i64) |
| return 0; |
| if ((Subtarget->hasCDI())) { |
| return fastEmitInst_r(X86::VPLZCNTQZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTLZ_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i16: return fastEmit_ISD_CTLZ_MVT_i16_r(RetVT, Op0); |
| case MVT::i32: return fastEmit_ISD_CTLZ_MVT_i32_r(RetVT, Op0); |
| case MVT::i64: return fastEmit_ISD_CTLZ_MVT_i64_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_ISD_CTLZ_MVT_v4i32_r(RetVT, Op0); |
| case MVT::v8i32: return fastEmit_ISD_CTLZ_MVT_v8i32_r(RetVT, Op0); |
| case MVT::v16i32: return fastEmit_ISD_CTLZ_MVT_v16i32_r(RetVT, Op0); |
| case MVT::v2i64: return fastEmit_ISD_CTLZ_MVT_v2i64_r(RetVT, Op0); |
| case MVT::v4i64: return fastEmit_ISD_CTLZ_MVT_v4i64_r(RetVT, Op0); |
| case MVT::v8i64: return fastEmit_ISD_CTLZ_MVT_v8i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::CTPOP. |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i16) |
| return 0; |
| if ((Subtarget->hasPOPCNT())) { |
| return fastEmitInst_r(X86::POPCNT16rr, &X86::GR16RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i32) |
| return 0; |
| if ((Subtarget->hasPOPCNT())) { |
| return fastEmitInst_r(X86::POPCNT32rr, &X86::GR32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i64) |
| return 0; |
| if ((Subtarget->hasPOPCNT())) { |
| return fastEmitInst_r(X86::POPCNT64rr, &X86::GR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i8) |
| return 0; |
| if ((Subtarget->hasBITALG()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPOPCNTBZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_v32i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v32i8) |
| return 0; |
| if ((Subtarget->hasBITALG()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPOPCNTBZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_v64i8_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v64i8) |
| return 0; |
| if ((Subtarget->hasBITALG())) { |
| return fastEmitInst_r(X86::VPOPCNTBZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i16) |
| return 0; |
| if ((Subtarget->hasBITALG()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPOPCNTWZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_v16i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i16) |
| return 0; |
| if ((Subtarget->hasBITALG()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPOPCNTWZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_v32i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v32i16) |
| return 0; |
| if ((Subtarget->hasBITALG())) { |
| return fastEmitInst_r(X86::VPOPCNTWZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i32) |
| return 0; |
| if ((Subtarget->hasVLX()) && (Subtarget->hasVPOPCNTDQ())) { |
| return fastEmitInst_r(X86::VPOPCNTDZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_v8i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i32) |
| return 0; |
| if ((Subtarget->hasVLX()) && (Subtarget->hasVPOPCNTDQ())) { |
| return fastEmitInst_r(X86::VPOPCNTDZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_v16i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16i32) |
| return 0; |
| if ((Subtarget->hasVPOPCNTDQ())) { |
| return fastEmitInst_r(X86::VPOPCNTDZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_v2i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasVLX()) && (Subtarget->hasVPOPCNTDQ())) { |
| return fastEmitInst_r(X86::VPOPCNTQZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_v4i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4i64) |
| return 0; |
| if ((Subtarget->hasVLX()) && (Subtarget->hasVPOPCNTDQ())) { |
| return fastEmitInst_r(X86::VPOPCNTQZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_MVT_v8i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8i64) |
| return 0; |
| if ((Subtarget->hasVPOPCNTDQ())) { |
| return fastEmitInst_r(X86::VPOPCNTQZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTPOP_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i16: return fastEmit_ISD_CTPOP_MVT_i16_r(RetVT, Op0); |
| case MVT::i32: return fastEmit_ISD_CTPOP_MVT_i32_r(RetVT, Op0); |
| case MVT::i64: return fastEmit_ISD_CTPOP_MVT_i64_r(RetVT, Op0); |
| case MVT::v16i8: return fastEmit_ISD_CTPOP_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v32i8: return fastEmit_ISD_CTPOP_MVT_v32i8_r(RetVT, Op0); |
| case MVT::v64i8: return fastEmit_ISD_CTPOP_MVT_v64i8_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_ISD_CTPOP_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v16i16: return fastEmit_ISD_CTPOP_MVT_v16i16_r(RetVT, Op0); |
| case MVT::v32i16: return fastEmit_ISD_CTPOP_MVT_v32i16_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_ISD_CTPOP_MVT_v4i32_r(RetVT, Op0); |
| case MVT::v8i32: return fastEmit_ISD_CTPOP_MVT_v8i32_r(RetVT, Op0); |
| case MVT::v16i32: return fastEmit_ISD_CTPOP_MVT_v16i32_r(RetVT, Op0); |
| case MVT::v2i64: return fastEmit_ISD_CTPOP_MVT_v2i64_r(RetVT, Op0); |
| case MVT::v4i64: return fastEmit_ISD_CTPOP_MVT_v4i64_r(RetVT, Op0); |
| case MVT::v8i64: return fastEmit_ISD_CTPOP_MVT_v8i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::CTTZ. |
| |
| unsigned fastEmit_ISD_CTTZ_MVT_i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i16) |
| return 0; |
| if ((Subtarget->hasBMI())) { |
| return fastEmitInst_r(X86::TZCNT16rr, &X86::GR16RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTTZ_MVT_i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i32) |
| return 0; |
| if ((Subtarget->hasBMI())) { |
| return fastEmitInst_r(X86::TZCNT32rr, &X86::GR32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTTZ_MVT_i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i64) |
| return 0; |
| if ((Subtarget->hasBMI())) { |
| return fastEmitInst_r(X86::TZCNT64rr, &X86::GR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_CTTZ_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i16: return fastEmit_ISD_CTTZ_MVT_i16_r(RetVT, Op0); |
| case MVT::i32: return fastEmit_ISD_CTTZ_MVT_i32_r(RetVT, Op0); |
| case MVT::i64: return fastEmit_ISD_CTTZ_MVT_i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::CTTZ_ZERO_UNDEF. |
| |
| unsigned fastEmit_ISD_CTTZ_ZERO_UNDEF_MVT_i16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i16) |
| return 0; |
| return fastEmitInst_r(X86::BSF16rr, &X86::GR16RegClass, Op0); |
| } |
| |
| unsigned fastEmit_ISD_CTTZ_ZERO_UNDEF_MVT_i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i32) |
| return 0; |
| return fastEmitInst_r(X86::BSF32rr, &X86::GR32RegClass, Op0); |
| } |
| |
| unsigned fastEmit_ISD_CTTZ_ZERO_UNDEF_MVT_i64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::i64) |
| return 0; |
| return fastEmitInst_r(X86::BSF64rr, &X86::GR64RegClass, Op0); |
| } |
| |
| unsigned fastEmit_ISD_CTTZ_ZERO_UNDEF_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i16: return fastEmit_ISD_CTTZ_ZERO_UNDEF_MVT_i16_r(RetVT, Op0); |
| case MVT::i32: return fastEmit_ISD_CTTZ_ZERO_UNDEF_MVT_i32_r(RetVT, Op0); |
| case MVT::i64: return fastEmit_ISD_CTTZ_ZERO_UNDEF_MVT_i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::FABS. |
| |
| unsigned fastEmit_ISD_FABS_MVT_f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f32) |
| return 0; |
| if ((!Subtarget->hasSSE1())) { |
| return fastEmitInst_r(X86::ABS_Fp32, &X86::RFP32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FABS_MVT_f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f64) |
| return 0; |
| if ((!Subtarget->hasSSE2())) { |
| return fastEmitInst_r(X86::ABS_Fp64, &X86::RFP64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FABS_MVT_f80_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f80) |
| return 0; |
| if ((Subtarget->hasX87())) { |
| return fastEmitInst_r(X86::ABS_Fp80, &X86::RFP80RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FABS_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f32: return fastEmit_ISD_FABS_MVT_f32_r(RetVT, Op0); |
| case MVT::f64: return fastEmit_ISD_FABS_MVT_f64_r(RetVT, Op0); |
| case MVT::f80: return fastEmit_ISD_FABS_MVT_f80_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::FNEG. |
| |
| unsigned fastEmit_ISD_FNEG_MVT_f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f32) |
| return 0; |
| if ((!Subtarget->hasSSE1())) { |
| return fastEmitInst_r(X86::CHS_Fp32, &X86::RFP32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FNEG_MVT_f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f64) |
| return 0; |
| if ((!Subtarget->hasSSE2())) { |
| return fastEmitInst_r(X86::CHS_Fp64, &X86::RFP64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FNEG_MVT_f80_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f80) |
| return 0; |
| if ((Subtarget->hasX87())) { |
| return fastEmitInst_r(X86::CHS_Fp80, &X86::RFP80RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FNEG_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f32: return fastEmit_ISD_FNEG_MVT_f32_r(RetVT, Op0); |
| case MVT::f64: return fastEmit_ISD_FNEG_MVT_f64_r(RetVT, Op0); |
| case MVT::f80: return fastEmit_ISD_FNEG_MVT_f80_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::FP_EXTEND. |
| |
| unsigned fastEmit_ISD_FP_EXTEND_MVT_f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f64) |
| return 0; |
| if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::CVTSS2SDrr, &X86::FR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FP_EXTEND_MVT_v8f16_MVT_v8f32_r(unsigned Op0) { |
| if ((Subtarget->hasFP16()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VCVTPH2PSXZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FP_EXTEND_MVT_v8f16_MVT_v8f64_r(unsigned Op0) { |
| if ((Subtarget->hasFP16())) { |
| return fastEmitInst_r(X86::VCVTPH2PDZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FP_EXTEND_MVT_v8f16_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v8f32: return fastEmit_ISD_FP_EXTEND_MVT_v8f16_MVT_v8f32_r(Op0); |
| case MVT::v8f64: return fastEmit_ISD_FP_EXTEND_MVT_v8f16_MVT_v8f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_FP_EXTEND_MVT_v16f16_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v16f32) |
| return 0; |
| if ((Subtarget->hasFP16())) { |
| return fastEmitInst_r(X86::VCVTPH2PSXZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FP_EXTEND_MVT_v4f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v4f64) |
| return 0; |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VCVTPS2PDZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VCVTPS2PDYrr, &X86::VR256RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FP_EXTEND_MVT_v8f32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v8f64) |
| return 0; |
| if ((Subtarget->hasAVX512())) { |
| return fastEmitInst_r(X86::VCVTPS2PDZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FP_EXTEND_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f32: return fastEmit_ISD_FP_EXTEND_MVT_f32_r(RetVT, Op0); |
| case MVT::v8f16: return fastEmit_ISD_FP_EXTEND_MVT_v8f16_r(RetVT, Op0); |
| case MVT::v16f16: return fastEmit_ISD_FP_EXTEND_MVT_v16f16_r(RetVT, Op0); |
| case MVT::v4f32: return fastEmit_ISD_FP_EXTEND_MVT_v4f32_r(RetVT, Op0); |
| case MVT::v8f32: return fastEmit_ISD_FP_EXTEND_MVT_v8f32_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::FP_ROUND. |
| |
| unsigned fastEmit_ISD_FP_ROUND_MVT_f64_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::f32) |
| return 0; |
| if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::CVTSD2SSrr, &X86::FR32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FP_ROUND_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f64: return fastEmit_ISD_FP_ROUND_MVT_f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::FP_TO_SINT. |
| |
| unsigned fastEmit_ISD_FP_TO_SINT_MVT_f16_MVT_i32_r(unsigned Op0) { |
| if ((Subtarget->hasFP16())) { |
| return fastEmitInst_r(X86::VCVTTSH2SIZrr, &X86::GR32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_FP_TO_SINT_MVT_f16_MVT_i64_r(unsigned Op0) { |
| if ((Subtarget->hasFP16())) { |
| return fastEmitInst_r(X86::VCVTTSH2SI64Zrr, &X86::GR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
// NOTE(review): TableGen-generated selector table — regenerate rather than
// hand-edit. Each leaf helper tries subtarget-feature predicates in priority
// order and returns the emitted vreg, or 0 if no pattern applies.

// FP_TO_SINT from f16: dispatch on the requested integer return type.
unsigned fastEmit_ISD_FP_TO_SINT_MVT_f16_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return fastEmit_ISD_FP_TO_SINT_MVT_f16_MVT_i32_r(Op0);
  case MVT::i64: return fastEmit_ISD_FP_TO_SINT_MVT_f16_MVT_i64_r(Op0);
  default: return 0;
  }
}

// f32 -> i32: AVX-512 encoding first, then SSE1-only, then AVX-without-AVX-512.
unsigned fastEmit_ISD_FP_TO_SINT_MVT_f32_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSS2SIZrr, &X86::GR32RegClass, Op0);
  }
  if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTTSS2SIrr, &X86::GR32RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSS2SIrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}

// f32 -> i64: same predicate priority as the i32 variant, 64-bit destination.
unsigned fastEmit_ISD_FP_TO_SINT_MVT_f32_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSS2SI64Zrr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTTSS2SI64rr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSS2SI64rr, &X86::GR64RegClass, Op0);
  }
  return 0;
}

// FP_TO_SINT from f32: dispatch on the requested integer return type.
unsigned fastEmit_ISD_FP_TO_SINT_MVT_f32_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return fastEmit_ISD_FP_TO_SINT_MVT_f32_MVT_i32_r(Op0);
  case MVT::i64: return fastEmit_ISD_FP_TO_SINT_MVT_f32_MVT_i64_r(Op0);
  default: return 0;
  }
}

// f64 -> i32: SSE2 is the baseline feature for the scalar-double form.
unsigned fastEmit_ISD_FP_TO_SINT_MVT_f64_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSD2SIZrr, &X86::GR32RegClass, Op0);
  }
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTTSD2SIrr, &X86::GR32RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSD2SIrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}

// f64 -> i64: same predicate priority, 64-bit destination register class.
unsigned fastEmit_ISD_FP_TO_SINT_MVT_f64_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSD2SI64Zrr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTTSD2SI64rr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSD2SI64rr, &X86::GR64RegClass, Op0);
  }
  return 0;
}

// FP_TO_SINT from f64: dispatch on the requested integer return type.
unsigned fastEmit_ISD_FP_TO_SINT_MVT_f64_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return fastEmit_ISD_FP_TO_SINT_MVT_f64_MVT_i32_r(Op0);
  case MVT::i64: return fastEmit_ISD_FP_TO_SINT_MVT_f64_MVT_i64_r(Op0);
  default: return 0;
  }
}

// v4f64 -> v4i32 only; the AVX (non-VLX) encoding is the sole pattern here.
unsigned fastEmit_ISD_FP_TO_SINT_MVT_v4f64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v4i32)
    return 0;
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTTPD2DQYrr, &X86::VR128RegClass, Op0);
  }
  return 0;
}

// Top-level FP_TO_SINT entry: dispatch on the source value type.
unsigned fastEmit_ISD_FP_TO_SINT_r(MVT VT, MVT RetVT, unsigned Op0) {
  switch (VT.SimpleTy) {
  case MVT::f16: return fastEmit_ISD_FP_TO_SINT_MVT_f16_r(RetVT, Op0);
  case MVT::f32: return fastEmit_ISD_FP_TO_SINT_MVT_f32_r(RetVT, Op0);
  case MVT::f64: return fastEmit_ISD_FP_TO_SINT_MVT_f64_r(RetVT, Op0);
  case MVT::v4f64: return fastEmit_ISD_FP_TO_SINT_MVT_v4f64_r(RetVT, Op0);
  default: return 0;
  }
}
| |
| // FastEmit functions for ISD::FP_TO_UINT. |
| |
// NOTE(review): TableGen-generated — regenerate rather than hand-edit.
// Unsigned conversions only have AVX-512-family encodings (VCVTT*2USI*), so
// every leaf below guards on a single AVX-512/FP16 predicate.

// f16 -> u32: requires AVX512-FP16.
unsigned fastEmit_ISD_FP_TO_UINT_MVT_f16_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VCVTTSH2USIZrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}

// f16 -> u64: requires AVX512-FP16.
unsigned fastEmit_ISD_FP_TO_UINT_MVT_f16_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VCVTTSH2USI64Zrr, &X86::GR64RegClass, Op0);
  }
  return 0;
}

// FP_TO_UINT from f16: dispatch on the requested integer return type.
unsigned fastEmit_ISD_FP_TO_UINT_MVT_f16_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return fastEmit_ISD_FP_TO_UINT_MVT_f16_MVT_i32_r(Op0);
  case MVT::i64: return fastEmit_ISD_FP_TO_UINT_MVT_f16_MVT_i64_r(Op0);
  default: return 0;
  }
}

// f32 -> u32: requires AVX-512.
unsigned fastEmit_ISD_FP_TO_UINT_MVT_f32_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSS2USIZrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}

// f32 -> u64: requires AVX-512.
unsigned fastEmit_ISD_FP_TO_UINT_MVT_f32_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSS2USI64Zrr, &X86::GR64RegClass, Op0);
  }
  return 0;
}

// FP_TO_UINT from f32: dispatch on the requested integer return type.
unsigned fastEmit_ISD_FP_TO_UINT_MVT_f32_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return fastEmit_ISD_FP_TO_UINT_MVT_f32_MVT_i32_r(Op0);
  case MVT::i64: return fastEmit_ISD_FP_TO_UINT_MVT_f32_MVT_i64_r(Op0);
  default: return 0;
  }
}

// f64 -> u32: requires AVX-512.
unsigned fastEmit_ISD_FP_TO_UINT_MVT_f64_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSD2USIZrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}

// f64 -> u64: requires AVX-512.
unsigned fastEmit_ISD_FP_TO_UINT_MVT_f64_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSD2USI64Zrr, &X86::GR64RegClass, Op0);
  }
  return 0;
}

// FP_TO_UINT from f64: dispatch on the requested integer return type.
unsigned fastEmit_ISD_FP_TO_UINT_MVT_f64_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return fastEmit_ISD_FP_TO_UINT_MVT_f64_MVT_i32_r(Op0);
  case MVT::i64: return fastEmit_ISD_FP_TO_UINT_MVT_f64_MVT_i64_r(Op0);
  default: return 0;
  }
}

// Top-level FP_TO_UINT entry: dispatch on the source value type.
unsigned fastEmit_ISD_FP_TO_UINT_r(MVT VT, MVT RetVT, unsigned Op0) {
  switch (VT.SimpleTy) {
  case MVT::f16: return fastEmit_ISD_FP_TO_UINT_MVT_f16_r(RetVT, Op0);
  case MVT::f32: return fastEmit_ISD_FP_TO_UINT_MVT_f32_r(RetVT, Op0);
  case MVT::f64: return fastEmit_ISD_FP_TO_UINT_MVT_f64_r(RetVT, Op0);
  default: return 0;
  }
}
| |
| // FastEmit functions for ISD::FSQRT. |
| |
// NOTE(review): TableGen-generated — regenerate rather than hand-edit.
// FSQRT is type-preserving, so each helper first checks RetVT == source VT,
// then selects an encoding by subtarget feature in priority order.

// Scalar f32 sqrt: SSE1 form, or x87 when SSE1 is unavailable.
unsigned fastEmit_ISD_FSQRT_MVT_f32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::f32)
    return 0;
  if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::SQRTSSr, &X86::FR32RegClass, Op0);
  }
  if ((!Subtarget->hasSSE1())) {
    return fastEmitInst_r(X86::SQRT_Fp32, &X86::RFP32RegClass, Op0);
  }
  return 0;
}

// Scalar f64 sqrt: SSE2 form, or x87 when SSE2 is unavailable.
unsigned fastEmit_ISD_FSQRT_MVT_f64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::f64)
    return 0;
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::SQRTSDr, &X86::FR64RegClass, Op0);
  }
  if ((!Subtarget->hasSSE2())) {
    return fastEmitInst_r(X86::SQRT_Fp64, &X86::RFP64RegClass, Op0);
  }
  return 0;
}

// f80 sqrt: x87 only.
unsigned fastEmit_ISD_FSQRT_MVT_f80_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::f80)
    return 0;
  if ((Subtarget->hasX87())) {
    return fastEmitInst_r(X86::SQRT_Fp80, &X86::RFP80RegClass, Op0);
  }
  return 0;
}

// v8f16 sqrt: AVX512-FP16 + VLX (128-bit EVEX form).
unsigned fastEmit_ISD_FSQRT_MVT_v8f16_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v8f16)
    return 0;
  if ((Subtarget->hasFP16()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPHZ128r, &X86::VR128XRegClass, Op0);
  }
  return 0;
}

// v16f16 sqrt: AVX512-FP16 + VLX (256-bit EVEX form).
unsigned fastEmit_ISD_FSQRT_MVT_v16f16_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v16f16)
    return 0;
  if ((Subtarget->hasFP16()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPHZ256r, &X86::VR256XRegClass, Op0);
  }
  return 0;
}

// v32f16 sqrt: AVX512-FP16 (512-bit form, no VLX needed).
unsigned fastEmit_ISD_FSQRT_MVT_v32f16_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v32f16)
    return 0;
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VSQRTPHZr, &X86::VR512RegClass, Op0);
  }
  return 0;
}

// v4f32 sqrt: EVEX (VLX), legacy SSE1, or VEX (AVX without VLX).
unsigned fastEmit_ISD_FSQRT_MVT_v4f32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v4f32)
    return 0;
  if ((Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPSZ128r, &X86::VR128XRegClass, Op0);
  }
  if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::SQRTPSr, &X86::VR128RegClass, Op0);
  }
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPSr, &X86::VR128RegClass, Op0);
  }
  return 0;
}

// v8f32 sqrt: EVEX (VLX) or VEX (AVX without VLX).
unsigned fastEmit_ISD_FSQRT_MVT_v8f32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v8f32)
    return 0;
  if ((Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPSZ256r, &X86::VR256XRegClass, Op0);
  }
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPSYr, &X86::VR256RegClass, Op0);
  }
  return 0;
}

// v16f32 sqrt: AVX-512 512-bit form.
unsigned fastEmit_ISD_FSQRT_MVT_v16f32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v16f32)
    return 0;
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VSQRTPSZr, &X86::VR512RegClass, Op0);
  }
  return 0;
}

// v2f64 sqrt: EVEX (VLX), legacy SSE2, or VEX (AVX without VLX).
unsigned fastEmit_ISD_FSQRT_MVT_v2f64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v2f64)
    return 0;
  if ((Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPDZ128r, &X86::VR128XRegClass, Op0);
  }
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::SQRTPDr, &X86::VR128RegClass, Op0);
  }
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPDr, &X86::VR128RegClass, Op0);
  }
  return 0;
}

// v4f64 sqrt: EVEX (VLX) or VEX (AVX without VLX).
unsigned fastEmit_ISD_FSQRT_MVT_v4f64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v4f64)
    return 0;
  if ((Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPDZ256r, &X86::VR256XRegClass, Op0);
  }
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPDYr, &X86::VR256RegClass, Op0);
  }
  return 0;
}

// v8f64 sqrt: AVX-512 512-bit form.
unsigned fastEmit_ISD_FSQRT_MVT_v8f64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v8f64)
    return 0;
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VSQRTPDZr, &X86::VR512RegClass, Op0);
  }
  return 0;
}

// Top-level FSQRT entry: dispatch on the source value type.
unsigned fastEmit_ISD_FSQRT_r(MVT VT, MVT RetVT, unsigned Op0) {
  switch (VT.SimpleTy) {
  case MVT::f32: return fastEmit_ISD_FSQRT_MVT_f32_r(RetVT, Op0);
  case MVT::f64: return fastEmit_ISD_FSQRT_MVT_f64_r(RetVT, Op0);
  case MVT::f80: return fastEmit_ISD_FSQRT_MVT_f80_r(RetVT, Op0);
  case MVT::v8f16: return fastEmit_ISD_FSQRT_MVT_v8f16_r(RetVT, Op0);
  case MVT::v16f16: return fastEmit_ISD_FSQRT_MVT_v16f16_r(RetVT, Op0);
  case MVT::v32f16: return fastEmit_ISD_FSQRT_MVT_v32f16_r(RetVT, Op0);
  case MVT::v4f32: return fastEmit_ISD_FSQRT_MVT_v4f32_r(RetVT, Op0);
  case MVT::v8f32: return fastEmit_ISD_FSQRT_MVT_v8f32_r(RetVT, Op0);
  case MVT::v16f32: return fastEmit_ISD_FSQRT_MVT_v16f32_r(RetVT, Op0);
  case MVT::v2f64: return fastEmit_ISD_FSQRT_MVT_v2f64_r(RetVT, Op0);
  case MVT::v4f64: return fastEmit_ISD_FSQRT_MVT_v4f64_r(RetVT, Op0);
  case MVT::v8f64: return fastEmit_ISD_FSQRT_MVT_v8f64_r(RetVT, Op0);
  default: return 0;
  }
}
| |
| // FastEmit functions for ISD::LLRINT. |
| |
// NOTE(review): TableGen-generated — regenerate rather than hand-edit.
// LLRINT always produces i64 here; helpers reject any other RetVT.

// f32 -> i64 via CVTSS2SI64: AVX-512, then SSE1-only, then AVX encoding.
unsigned fastEmit_ISD_LLRINT_MVT_f32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::i64)
    return 0;
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTSS2SI64Zrr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTSS2SI64rr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTSS2SI64rr, &X86::GR64RegClass, Op0);
  }
  return 0;
}

// f64 -> i64 via CVTSD2SI64: AVX-512, then SSE2-only, then AVX encoding.
unsigned fastEmit_ISD_LLRINT_MVT_f64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::i64)
    return 0;
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTSD2SI64Zrr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTSD2SI64rr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTSD2SI64rr, &X86::GR64RegClass, Op0);
  }
  return 0;
}

// Top-level LLRINT entry: dispatch on the source value type.
unsigned fastEmit_ISD_LLRINT_r(MVT VT, MVT RetVT, unsigned Op0) {
  switch (VT.SimpleTy) {
  case MVT::f32: return fastEmit_ISD_LLRINT_MVT_f32_r(RetVT, Op0);
  case MVT::f64: return fastEmit_ISD_LLRINT_MVT_f64_r(RetVT, Op0);
  default: return 0;
  }
}
| |
| // FastEmit functions for ISD::LRINT. |
| |
// NOTE(review): TableGen-generated — regenerate rather than hand-edit.
// LRINT uses the rounding (non-truncating) CVTSS2SI/CVTSD2SI family.

// f32 -> i32: AVX-512, then SSE1-only, then AVX encoding.
unsigned fastEmit_ISD_LRINT_MVT_f32_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTSS2SIZrr, &X86::GR32RegClass, Op0);
  }
  if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTSS2SIrr, &X86::GR32RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTSS2SIrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}

// f32 -> i64: same priority, 64-bit destination.
unsigned fastEmit_ISD_LRINT_MVT_f32_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTSS2SI64Zrr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTSS2SI64rr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTSS2SI64rr, &X86::GR64RegClass, Op0);
  }
  return 0;
}

// LRINT from f32: dispatch on the requested integer return type.
unsigned fastEmit_ISD_LRINT_MVT_f32_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return fastEmit_ISD_LRINT_MVT_f32_MVT_i32_r(Op0);
  case MVT::i64: return fastEmit_ISD_LRINT_MVT_f32_MVT_i64_r(Op0);
  default: return 0;
  }
}

// f64 -> i32: AVX-512, then SSE2-only, then AVX encoding.
unsigned fastEmit_ISD_LRINT_MVT_f64_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTSD2SIZrr, &X86::GR32RegClass, Op0);
  }
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTSD2SIrr, &X86::GR32RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTSD2SIrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}

// f64 -> i64: same priority, 64-bit destination.
unsigned fastEmit_ISD_LRINT_MVT_f64_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTSD2SI64Zrr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTSD2SI64rr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTSD2SI64rr, &X86::GR64RegClass, Op0);
  }
  return 0;
}

// LRINT from f64: dispatch on the requested integer return type.
unsigned fastEmit_ISD_LRINT_MVT_f64_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return fastEmit_ISD_LRINT_MVT_f64_MVT_i32_r(Op0);
  case MVT::i64: return fastEmit_ISD_LRINT_MVT_f64_MVT_i64_r(Op0);
  default: return 0;
  }
}

// Top-level LRINT entry: dispatch on the source value type.
unsigned fastEmit_ISD_LRINT_r(MVT VT, MVT RetVT, unsigned Op0) {
  switch (VT.SimpleTy) {
  case MVT::f32: return fastEmit_ISD_LRINT_MVT_f32_r(RetVT, Op0);
  case MVT::f64: return fastEmit_ISD_LRINT_MVT_f64_r(RetVT, Op0);
  default: return 0;
  }
}
| |
| // FastEmit functions for ISD::SCALAR_TO_VECTOR. |
| |
// NOTE(review): TableGen-generated — regenerate rather than hand-edit.
// SCALAR_TO_VECTOR: move a GPR into element 0 of an XMM register.

// i32 -> v4i32 via MOVD: EVEX, legacy SSE2, or VEX encoding.
unsigned fastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v4i32)
    return 0;
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VMOVDI2PDIZrr, &X86::VR128XRegClass, Op0);
  }
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::MOVDI2PDIrr, &X86::VR128RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VMOVDI2PDIrr, &X86::VR128RegClass, Op0);
  }
  return 0;
}

// i64 -> v2i64 via MOVQ: EVEX, legacy SSE2, or VEX encoding.
unsigned fastEmit_ISD_SCALAR_TO_VECTOR_MVT_i64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v2i64)
    return 0;
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VMOV64toPQIZrr, &X86::VR128XRegClass, Op0);
  }
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::MOV64toPQIrr, &X86::VR128RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VMOV64toPQIrr, &X86::VR128RegClass, Op0);
  }
  return 0;
}

// Top-level SCALAR_TO_VECTOR entry: dispatch on the scalar source type.
unsigned fastEmit_ISD_SCALAR_TO_VECTOR_r(MVT VT, MVT RetVT, unsigned Op0) {
  switch (VT.SimpleTy) {
  case MVT::i32: return fastEmit_ISD_SCALAR_TO_VECTOR_MVT_i32_r(RetVT, Op0);
  case MVT::i64: return fastEmit_ISD_SCALAR_TO_VECTOR_MVT_i64_r(RetVT, Op0);
  default: return 0;
  }
}
| |
| // FastEmit functions for ISD::SIGN_EXTEND. |
| |
// NOTE(review): TableGen-generated — regenerate rather than hand-edit.
// Scalar extends use MOVSX; vXi1 sources expand AVX-512 mask registers via
// VPMOVM2*; wider vector sources use the VPMOVSX family.

// i8 -> i32 sign extend (unconditional — MOVSX32rr8 is baseline x86).
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i32_r(unsigned Op0) {
  return fastEmitInst_r(X86::MOVSX32rr8, &X86::GR32RegClass, Op0);
}

// i8 -> i64 sign extend.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i64_r(unsigned Op0) {
  return fastEmitInst_r(X86::MOVSX64rr8, &X86::GR64RegClass, Op0);
}

// Sign extend from i8: dispatch on the destination type.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_i8_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return fastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i32_r(Op0);
  case MVT::i64: return fastEmit_ISD_SIGN_EXTEND_MVT_i8_MVT_i64_r(Op0);
  default: return 0;
  }
}

// i16 -> i32 sign extend.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_i16_MVT_i32_r(unsigned Op0) {
  return fastEmitInst_r(X86::MOVSX32rr16, &X86::GR32RegClass, Op0);
}

// i16 -> i64 sign extend.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_i16_MVT_i64_r(unsigned Op0) {
  return fastEmitInst_r(X86::MOVSX64rr16, &X86::GR64RegClass, Op0);
}

// Sign extend from i16: dispatch on the destination type.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_i16_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::i32: return fastEmit_ISD_SIGN_EXTEND_MVT_i16_MVT_i32_r(Op0);
  case MVT::i64: return fastEmit_ISD_SIGN_EXTEND_MVT_i16_MVT_i64_r(Op0);
  default: return 0;
  }
}

// i32 -> i64 sign extend: only valid in 64-bit mode.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_i32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::i64)
    return 0;
  if ((Subtarget->is64Bit())) {
    return fastEmitInst_r(X86::MOVSX64rr32, &X86::GR64RegClass, Op0);
  }
  return 0;
}

// v2i1 mask -> v2i64: DQI + VLX mask-to-vector expansion.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v2i1_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v2i64)
    return 0;
  if ((Subtarget->hasDQI()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVM2QZ128rr, &X86::VR128XRegClass, Op0);
  }
  return 0;
}

// v4i1 mask -> v4i32: DQI + VLX.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v4i1_MVT_v4i32_r(unsigned Op0) {
  if ((Subtarget->hasDQI()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVM2DZ128rr, &X86::VR128XRegClass, Op0);
  }
  return 0;
}

// v4i1 mask -> v4i64: DQI + VLX.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v4i1_MVT_v4i64_r(unsigned Op0) {
  if ((Subtarget->hasDQI()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVM2QZ256rr, &X86::VR256XRegClass, Op0);
  }
  return 0;
}

// Sign extend from v4i1: dispatch on the destination vector type.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v4i1_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::v4i32: return fastEmit_ISD_SIGN_EXTEND_MVT_v4i1_MVT_v4i32_r(Op0);
  case MVT::v4i64: return fastEmit_ISD_SIGN_EXTEND_MVT_v4i1_MVT_v4i64_r(Op0);
  default: return 0;
  }
}

// v8i1 mask -> v8i16: BWI + VLX.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v8i1_MVT_v8i16_r(unsigned Op0) {
  if ((Subtarget->hasBWI()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVM2WZ128rr, &X86::VR128XRegClass, Op0);
  }
  return 0;
}

// v8i1 mask -> v8i32: DQI + VLX.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v8i1_MVT_v8i32_r(unsigned Op0) {
  if ((Subtarget->hasDQI()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVM2DZ256rr, &X86::VR256XRegClass, Op0);
  }
  return 0;
}

// v8i1 mask -> v8i64: DQI (512-bit form, no VLX needed).
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v8i1_MVT_v8i64_r(unsigned Op0) {
  if ((Subtarget->hasDQI())) {
    return fastEmitInst_r(X86::VPMOVM2QZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}

// Sign extend from v8i1: dispatch on the destination vector type.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v8i1_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::v8i16: return fastEmit_ISD_SIGN_EXTEND_MVT_v8i1_MVT_v8i16_r(Op0);
  case MVT::v8i32: return fastEmit_ISD_SIGN_EXTEND_MVT_v8i1_MVT_v8i32_r(Op0);
  case MVT::v8i64: return fastEmit_ISD_SIGN_EXTEND_MVT_v8i1_MVT_v8i64_r(Op0);
  default: return 0;
  }
}

// v16i1 mask -> v16i8: BWI + VLX.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v16i1_MVT_v16i8_r(unsigned Op0) {
  if ((Subtarget->hasBWI()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVM2BZ128rr, &X86::VR128XRegClass, Op0);
  }
  return 0;
}

// v16i1 mask -> v16i16: BWI + VLX.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v16i1_MVT_v16i16_r(unsigned Op0) {
  if ((Subtarget->hasBWI()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVM2WZ256rr, &X86::VR256XRegClass, Op0);
  }
  return 0;
}

// v16i1 mask -> v16i32: DQI (512-bit form).
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v16i1_MVT_v16i32_r(unsigned Op0) {
  if ((Subtarget->hasDQI())) {
    return fastEmitInst_r(X86::VPMOVM2DZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}

// Sign extend from v16i1: dispatch on the destination vector type.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v16i1_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::v16i8: return fastEmit_ISD_SIGN_EXTEND_MVT_v16i1_MVT_v16i8_r(Op0);
  case MVT::v16i16: return fastEmit_ISD_SIGN_EXTEND_MVT_v16i1_MVT_v16i16_r(Op0);
  case MVT::v16i32: return fastEmit_ISD_SIGN_EXTEND_MVT_v16i1_MVT_v16i32_r(Op0);
  default: return 0;
  }
}

// v32i1 mask -> v32i8: BWI + VLX.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v32i1_MVT_v32i8_r(unsigned Op0) {
  if ((Subtarget->hasBWI()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVM2BZ256rr, &X86::VR256XRegClass, Op0);
  }
  return 0;
}

// v32i1 mask -> v32i16: BWI (512-bit form).
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v32i1_MVT_v32i16_r(unsigned Op0) {
  if ((Subtarget->hasBWI())) {
    return fastEmitInst_r(X86::VPMOVM2WZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}

// Sign extend from v32i1: dispatch on the destination vector type.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v32i1_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::v32i8: return fastEmit_ISD_SIGN_EXTEND_MVT_v32i1_MVT_v32i8_r(Op0);
  case MVT::v32i16: return fastEmit_ISD_SIGN_EXTEND_MVT_v32i1_MVT_v32i16_r(Op0);
  default: return 0;
  }
}

// v64i1 mask -> v64i8: BWI (512-bit form).
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v64i1_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v64i8)
    return 0;
  if ((Subtarget->hasBWI())) {
    return fastEmitInst_r(X86::VPMOVM2BZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}

// v16i8 -> v16i16 widening: VEX (AVX2 when EVEX unavailable) or EVEX (BWI+VLX).
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v16i8_MVT_v16i16_r(unsigned Op0) {
  if ((Subtarget->hasAVX2()) && (!Subtarget->hasVLX() || !Subtarget->hasBWI())) {
    return fastEmitInst_r(X86::VPMOVSXBWYrr, &X86::VR256RegClass, Op0);
  }
  if ((Subtarget->hasBWI()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVSXBWZ256rr, &X86::VR256XRegClass, Op0);
  }
  return 0;
}

// v16i8 -> v16i32 widening: AVX-512 512-bit form.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v16i8_MVT_v16i32_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VPMOVSXBDZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}

// Sign extend from v16i8: dispatch on the destination vector type.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v16i8_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::v16i16: return fastEmit_ISD_SIGN_EXTEND_MVT_v16i8_MVT_v16i16_r(Op0);
  case MVT::v16i32: return fastEmit_ISD_SIGN_EXTEND_MVT_v16i8_MVT_v16i32_r(Op0);
  default: return 0;
  }
}

// v32i8 -> v32i16 widening: BWI (512-bit form).
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v32i8_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v32i16)
    return 0;
  if ((Subtarget->hasBWI())) {
    return fastEmitInst_r(X86::VPMOVSXBWZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}

// v8i16 -> v8i32 widening: VEX (AVX2, no VLX) or EVEX (AVX-512 + VLX).
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v8i16_MVT_v8i32_r(unsigned Op0) {
  if ((Subtarget->hasAVX2()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVSXWDYrr, &X86::VR256RegClass, Op0);
  }
  if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVSXWDZ256rr, &X86::VR256XRegClass, Op0);
  }
  return 0;
}

// v8i16 -> v8i64 widening: AVX-512 512-bit form.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v8i16_MVT_v8i64_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VPMOVSXWQZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}

// Sign extend from v8i16: dispatch on the destination vector type.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v8i16_r(MVT RetVT, unsigned Op0) {
  switch (RetVT.SimpleTy) {
  case MVT::v8i32: return fastEmit_ISD_SIGN_EXTEND_MVT_v8i16_MVT_v8i32_r(Op0);
  case MVT::v8i64: return fastEmit_ISD_SIGN_EXTEND_MVT_v8i16_MVT_v8i64_r(Op0);
  default: return 0;
  }
}

// v16i16 -> v16i32 widening: AVX-512 512-bit form.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v16i16_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v16i32)
    return 0;
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VPMOVSXWDZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}

// v4i32 -> v4i64 widening: VEX (AVX2, no VLX) or EVEX (AVX-512 + VLX).
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v4i32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v4i64)
    return 0;
  if ((Subtarget->hasAVX2()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVSXDQYrr, &X86::VR256RegClass, Op0);
  }
  if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VPMOVSXDQZ256rr, &X86::VR256XRegClass, Op0);
  }
  return 0;
}

// v8i32 -> v8i64 widening: AVX-512 512-bit form.
unsigned fastEmit_ISD_SIGN_EXTEND_MVT_v8i32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v8i64)
    return 0;
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VPMOVSXDQZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}

// Top-level SIGN_EXTEND entry: dispatch on the source value type.
unsigned fastEmit_ISD_SIGN_EXTEND_r(MVT VT, MVT RetVT, unsigned Op0) {
  switch (VT.SimpleTy) {
  case MVT::i8: return fastEmit_ISD_SIGN_EXTEND_MVT_i8_r(RetVT, Op0);
  case MVT::i16: return fastEmit_ISD_SIGN_EXTEND_MVT_i16_r(RetVT, Op0);
  case MVT::i32: return fastEmit_ISD_SIGN_EXTEND_MVT_i32_r(RetVT, Op0);
  case MVT::v2i1: return fastEmit_ISD_SIGN_EXTEND_MVT_v2i1_r(RetVT, Op0);
  case MVT::v4i1: return fastEmit_ISD_SIGN_EXTEND_MVT_v4i1_r(RetVT, Op0);
  case MVT::v8i1: return fastEmit_ISD_SIGN_EXTEND_MVT_v8i1_r(RetVT, Op0);
  case MVT::v16i1: return fastEmit_ISD_SIGN_EXTEND_MVT_v16i1_r(RetVT, Op0);
  case MVT::v32i1: return fastEmit_ISD_SIGN_EXTEND_MVT_v32i1_r(RetVT, Op0);
  case MVT::v64i1: return fastEmit_ISD_SIGN_EXTEND_MVT_v64i1_r(RetVT, Op0);
  case MVT::v16i8: return fastEmit_ISD_SIGN_EXTEND_MVT_v16i8_r(RetVT, Op0);
  case MVT::v32i8: return fastEmit_ISD_SIGN_EXTEND_MVT_v32i8_r(RetVT, Op0);
  case MVT::v8i16: return fastEmit_ISD_SIGN_EXTEND_MVT_v8i16_r(RetVT, Op0);
  case MVT::v16i16: return fastEmit_ISD_SIGN_EXTEND_MVT_v16i16_r(RetVT, Op0);
  case MVT::v4i32: return fastEmit_ISD_SIGN_EXTEND_MVT_v4i32_r(RetVT, Op0);
  case MVT::v8i32: return fastEmit_ISD_SIGN_EXTEND_MVT_v8i32_r(RetVT, Op0);
  default: return 0;
  }
}
| |
| // FastEmit functions for ISD::SIGN_EXTEND_VECTOR_INREG. |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_MVT_v8i16_r(unsigned Op0) { |
| if ((Subtarget->hasSSE41() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::PMOVSXBWrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX() || !Subtarget->hasBWI())) { |
| return fastEmitInst_r(X86::VPMOVSXBWrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasBWI()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXBWZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_MVT_v4i32_r(unsigned Op0) { |
| if ((Subtarget->hasSSE41() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::PMOVSXBDrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXBDrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXBDZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_MVT_v8i32_r(unsigned Op0) { |
| if ((Subtarget->hasAVX2()) && (!Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXBDYrr, &X86::VR256RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXBDZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_MVT_v2i64_r(unsigned Op0) { |
| if ((Subtarget->hasSSE41() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::PMOVSXBQrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXBQrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXBQZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_MVT_v4i64_r(unsigned Op0) { |
| if ((Subtarget->hasAVX2()) && (!Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXBQYrr, &X86::VR256RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXBQZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_MVT_v8i64_r(unsigned Op0) { |
| if ((Subtarget->hasAVX512())) { |
| return fastEmitInst_r(X86::VPMOVSXBQZrr, &X86::VR512RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v8i16: return fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_MVT_v8i16_r(Op0); |
| case MVT::v4i32: return fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_MVT_v4i32_r(Op0); |
| case MVT::v8i32: return fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_MVT_v8i32_r(Op0); |
| case MVT::v2i64: return fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_MVT_v2i64_r(Op0); |
| case MVT::v4i64: return fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_MVT_v4i64_r(Op0); |
| case MVT::v8i64: return fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_MVT_v8i64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v8i16_MVT_v4i32_r(unsigned Op0) { |
| if ((Subtarget->hasSSE41() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::PMOVSXWDrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXWDrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXWDZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v8i16_MVT_v2i64_r(unsigned Op0) { |
| if ((Subtarget->hasSSE41() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::PMOVSXWQrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXWQrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXWQZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v8i16_MVT_v4i64_r(unsigned Op0) { |
| if ((Subtarget->hasAVX2()) && (!Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXWQYrr, &X86::VR256RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXWQZ256rr, &X86::VR256XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v8i16_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v4i32: return fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v8i16_MVT_v4i32_r(Op0); |
| case MVT::v2i64: return fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v8i16_MVT_v2i64_r(Op0); |
| case MVT::v4i64: return fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v8i16_MVT_v4i64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| if (RetVT.SimpleTy != MVT::v2i64) |
| return 0; |
| if ((Subtarget->hasSSE41() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::PMOVSXDQrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXDQrr, &X86::VR128RegClass, Op0); |
| } |
| if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) { |
| return fastEmitInst_r(X86::VPMOVSXDQZ128rr, &X86::VR128XRegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::v16i8: return fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v16i8_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_ISD_SIGN_EXTEND_VECTOR_INREG_MVT_v4i32_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::SINT_TO_FP. |
| |
| unsigned fastEmit_ISD_SINT_TO_FP_MVT_i32_MVT_f32_r(unsigned Op0) { |
| if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::CVTSI2SSrr, &X86::FR32RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_SINT_TO_FP_MVT_i32_MVT_f64_r(unsigned Op0) { |
| if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) { |
| return fastEmitInst_r(X86::CVTSI2SDrr, &X86::FR64RegClass, Op0); |
| } |
| return 0; |
| } |
| |
| unsigned fastEmit_ISD_SINT_TO_FP_MVT_i32_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::f32: return fastEmit_ISD_SINT_TO_FP_MVT_i32_MVT_f32_r(Op0); |
| case MVT::f64: return fastEmit_ISD_SINT_TO_FP_MVT_i32_MVT_f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
// SINT_TO_FP: scalar i64 -> f32 (legacy SSE only, REX.W form).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_i64_MVT_f32_r(unsigned Op0) {
  if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTSI642SSrr, &X86::FR32RegClass, Op0);
  }
  return 0;
}
| |
// SINT_TO_FP: scalar i64 -> f64 (legacy SSE2 only, REX.W form).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_i64_MVT_f64_r(unsigned Op0) {
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTSI642SDrr, &X86::FR64RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_SINT_TO_FP_MVT_i64_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::f32: return fastEmit_ISD_SINT_TO_FP_MVT_i64_MVT_f32_r(Op0); |
| case MVT::f64: return fastEmit_ISD_SINT_TO_FP_MVT_i64_MVT_f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
// SINT_TO_FP: v8i16 -> v8f16 (requires AVX512-FP16 + VLX for the 128-bit form).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v8i16_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v8f16)
    return 0;
  if ((Subtarget->hasFP16()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTW2PHZ128rr, &X86::VR128XRegClass, Op0);
  }
  return 0;
}
| |
// SINT_TO_FP: v16i16 -> v16f16 (requires AVX512-FP16 + VLX for the 256-bit form).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v16i16_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v16f16)
    return 0;
  if ((Subtarget->hasFP16()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTW2PHZ256rr, &X86::VR256XRegClass, Op0);
  }
  return 0;
}
| |
// SINT_TO_FP: v32i16 -> v32f16 (512-bit form needs only AVX512-FP16, no VLX).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v32i16_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v32f16)
    return 0;
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VCVTW2PHZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}
| |
// SINT_TO_FP: v4i32 -> v4f32. Branches are priority-ordered: prefer the
// EVEX (VLX) encoding, then legacy SSE2, then the VEX encoding.
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v4i32_MVT_v4f32_r(unsigned Op0) {
  if ((Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTDQ2PSZ128rr, &X86::VR128XRegClass, Op0);
  }
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTDQ2PSrr, &X86::VR128RegClass, Op0);
  }
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTDQ2PSrr, &X86::VR128RegClass, Op0);
  }
  return 0;
}
| |
// SINT_TO_FP: v4i32 -> v4f64 (widening). EVEX (VLX) form first, else VEX.
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v4i32_MVT_v4f64_r(unsigned Op0) {
  if ((Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTDQ2PDZ256rr, &X86::VR256XRegClass, Op0);
  }
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTDQ2PDYrr, &X86::VR256RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_SINT_TO_FP_MVT_v4i32_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v4f32: return fastEmit_ISD_SINT_TO_FP_MVT_v4i32_MVT_v4f32_r(Op0); |
| case MVT::v4f64: return fastEmit_ISD_SINT_TO_FP_MVT_v4i32_MVT_v4f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
// SINT_TO_FP: v8i32 -> v8f16 (narrowing; 128-bit result, hence VR128X).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v8i32_MVT_v8f16_r(unsigned Op0) {
  if ((Subtarget->hasFP16()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTDQ2PHZ256rr, &X86::VR128XRegClass, Op0);
  }
  return 0;
}
| |
// SINT_TO_FP: v8i32 -> v8f32. EVEX (VLX) form first, else VEX.
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v8i32_MVT_v8f32_r(unsigned Op0) {
  if ((Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTDQ2PSZ256rr, &X86::VR256XRegClass, Op0);
  }
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTDQ2PSYrr, &X86::VR256RegClass, Op0);
  }
  return 0;
}
| |
// SINT_TO_FP: v8i32 -> v8f64 (widening to 512-bit; needs AVX512F).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v8i32_MVT_v8f64_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTDQ2PDZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_SINT_TO_FP_MVT_v8i32_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v8f16: return fastEmit_ISD_SINT_TO_FP_MVT_v8i32_MVT_v8f16_r(Op0); |
| case MVT::v8f32: return fastEmit_ISD_SINT_TO_FP_MVT_v8i32_MVT_v8f32_r(Op0); |
| case MVT::v8f64: return fastEmit_ISD_SINT_TO_FP_MVT_v8i32_MVT_v8f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
// SINT_TO_FP: v16i32 -> v16f16 (narrowing; 256-bit result, hence VR256X).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v16i32_MVT_v16f16_r(unsigned Op0) {
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VCVTDQ2PHZrr, &X86::VR256XRegClass, Op0);
  }
  return 0;
}
| |
// SINT_TO_FP: v16i32 -> v16f32 (512-bit; needs AVX512F).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v16i32_MVT_v16f32_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTDQ2PSZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_SINT_TO_FP_MVT_v16i32_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v16f16: return fastEmit_ISD_SINT_TO_FP_MVT_v16i32_MVT_v16f16_r(Op0); |
| case MVT::v16f32: return fastEmit_ISD_SINT_TO_FP_MVT_v16i32_MVT_v16f32_r(Op0); |
| default: return 0; |
| } |
| } |
| |
// SINT_TO_FP: v2i64 -> v2f64 (64-bit-element conversion needs AVX512DQ + VLX).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v2i64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v2f64)
    return 0;
  if ((Subtarget->hasDQI()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTQQ2PDZ128rr, &X86::VR128XRegClass, Op0);
  }
  return 0;
}
| |
// SINT_TO_FP: v4i64 -> v4f32 (narrowing; 128-bit result, hence VR128X).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v4i64_MVT_v4f32_r(unsigned Op0) {
  if ((Subtarget->hasDQI()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTQQ2PSZ256rr, &X86::VR128XRegClass, Op0);
  }
  return 0;
}
| |
// SINT_TO_FP: v4i64 -> v4f64 (needs AVX512DQ + VLX).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v4i64_MVT_v4f64_r(unsigned Op0) {
  if ((Subtarget->hasDQI()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTQQ2PDZ256rr, &X86::VR256XRegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_SINT_TO_FP_MVT_v4i64_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v4f32: return fastEmit_ISD_SINT_TO_FP_MVT_v4i64_MVT_v4f32_r(Op0); |
| case MVT::v4f64: return fastEmit_ISD_SINT_TO_FP_MVT_v4i64_MVT_v4f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
// SINT_TO_FP: v8i64 -> v8f16 (narrowing; 128-bit result, hence VR128X).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v8i64_MVT_v8f16_r(unsigned Op0) {
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VCVTQQ2PHZrr, &X86::VR128XRegClass, Op0);
  }
  return 0;
}
| |
// SINT_TO_FP: v8i64 -> v8f32 (narrowing; 256-bit result; needs AVX512DQ).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v8i64_MVT_v8f32_r(unsigned Op0) {
  if ((Subtarget->hasDQI())) {
    return fastEmitInst_r(X86::VCVTQQ2PSZrr, &X86::VR256XRegClass, Op0);
  }
  return 0;
}
| |
// SINT_TO_FP: v8i64 -> v8f64 (512-bit; needs AVX512DQ).
unsigned fastEmit_ISD_SINT_TO_FP_MVT_v8i64_MVT_v8f64_r(unsigned Op0) {
  if ((Subtarget->hasDQI())) {
    return fastEmitInst_r(X86::VCVTQQ2PDZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_SINT_TO_FP_MVT_v8i64_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v8f16: return fastEmit_ISD_SINT_TO_FP_MVT_v8i64_MVT_v8f16_r(Op0); |
| case MVT::v8f32: return fastEmit_ISD_SINT_TO_FP_MVT_v8i64_MVT_v8f32_r(Op0); |
| case MVT::v8f64: return fastEmit_ISD_SINT_TO_FP_MVT_v8i64_MVT_v8f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_SINT_TO_FP_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::i32: return fastEmit_ISD_SINT_TO_FP_MVT_i32_r(RetVT, Op0); |
| case MVT::i64: return fastEmit_ISD_SINT_TO_FP_MVT_i64_r(RetVT, Op0); |
| case MVT::v8i16: return fastEmit_ISD_SINT_TO_FP_MVT_v8i16_r(RetVT, Op0); |
| case MVT::v16i16: return fastEmit_ISD_SINT_TO_FP_MVT_v16i16_r(RetVT, Op0); |
| case MVT::v32i16: return fastEmit_ISD_SINT_TO_FP_MVT_v32i16_r(RetVT, Op0); |
| case MVT::v4i32: return fastEmit_ISD_SINT_TO_FP_MVT_v4i32_r(RetVT, Op0); |
| case MVT::v8i32: return fastEmit_ISD_SINT_TO_FP_MVT_v8i32_r(RetVT, Op0); |
| case MVT::v16i32: return fastEmit_ISD_SINT_TO_FP_MVT_v16i32_r(RetVT, Op0); |
| case MVT::v2i64: return fastEmit_ISD_SINT_TO_FP_MVT_v2i64_r(RetVT, Op0); |
| case MVT::v4i64: return fastEmit_ISD_SINT_TO_FP_MVT_v4i64_r(RetVT, Op0); |
| case MVT::v8i64: return fastEmit_ISD_SINT_TO_FP_MVT_v8i64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::STRICT_FP_EXTEND. |
| |
// STRICT_FP_EXTEND: scalar f32 -> f64 (legacy SSE2 only).
unsigned fastEmit_ISD_STRICT_FP_EXTEND_MVT_f32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::f64)
    return 0;
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTSS2SDrr, &X86::FR64RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FP_EXTEND: v8f16 -> v8f32 (needs AVX512-FP16 + VLX).
unsigned fastEmit_ISD_STRICT_FP_EXTEND_MVT_v8f16_MVT_v8f32_r(unsigned Op0) {
  if ((Subtarget->hasFP16()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTPH2PSXZ256rr, &X86::VR256XRegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FP_EXTEND: v8f16 -> v8f64 (512-bit result; needs AVX512-FP16).
unsigned fastEmit_ISD_STRICT_FP_EXTEND_MVT_v8f16_MVT_v8f64_r(unsigned Op0) {
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VCVTPH2PDZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_STRICT_FP_EXTEND_MVT_v8f16_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::v8f32: return fastEmit_ISD_STRICT_FP_EXTEND_MVT_v8f16_MVT_v8f32_r(Op0); |
| case MVT::v8f64: return fastEmit_ISD_STRICT_FP_EXTEND_MVT_v8f16_MVT_v8f64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
// STRICT_FP_EXTEND: v16f16 -> v16f32 (512-bit result; needs AVX512-FP16).
unsigned fastEmit_ISD_STRICT_FP_EXTEND_MVT_v16f16_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v16f32)
    return 0;
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VCVTPH2PSXZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FP_EXTEND: v4f32 -> v4f64. EVEX (AVX512+VLX) form first, else VEX.
unsigned fastEmit_ISD_STRICT_FP_EXTEND_MVT_v4f32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v4f64)
    return 0;
  if ((Subtarget->hasAVX512()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTPS2PDZ256rr, &X86::VR256XRegClass, Op0);
  }
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTPS2PDYrr, &X86::VR256RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FP_EXTEND: v8f32 -> v8f64 (512-bit result; needs AVX512F).
unsigned fastEmit_ISD_STRICT_FP_EXTEND_MVT_v8f32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v8f64)
    return 0;
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTPS2PDZrr, &X86::VR512RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_STRICT_FP_EXTEND_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f32: return fastEmit_ISD_STRICT_FP_EXTEND_MVT_f32_r(RetVT, Op0); |
| case MVT::v8f16: return fastEmit_ISD_STRICT_FP_EXTEND_MVT_v8f16_r(RetVT, Op0); |
| case MVT::v16f16: return fastEmit_ISD_STRICT_FP_EXTEND_MVT_v16f16_r(RetVT, Op0); |
| case MVT::v4f32: return fastEmit_ISD_STRICT_FP_EXTEND_MVT_v4f32_r(RetVT, Op0); |
| case MVT::v8f32: return fastEmit_ISD_STRICT_FP_EXTEND_MVT_v8f32_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::STRICT_FP_ROUND. |
| |
// STRICT_FP_ROUND: scalar f64 -> f32 (legacy SSE2 only).
unsigned fastEmit_ISD_STRICT_FP_ROUND_MVT_f64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::f32)
    return 0;
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTSD2SSrr, &X86::FR32RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_STRICT_FP_ROUND_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f64: return fastEmit_ISD_STRICT_FP_ROUND_MVT_f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::STRICT_FP_TO_SINT. |
| |
// STRICT_FP_TO_SINT: f16 -> i32, truncating (needs AVX512-FP16).
unsigned fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f16_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VCVTTSH2SIZrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FP_TO_SINT: f16 -> i64, truncating (needs AVX512-FP16).
unsigned fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f16_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VCVTTSH2SI64Zrr, &X86::GR64RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f16_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::i32: return fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f16_MVT_i32_r(Op0); |
| case MVT::i64: return fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f16_MVT_i64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
// STRICT_FP_TO_SINT: f32 -> i32, truncating. Priority: EVEX, legacy SSE, VEX.
unsigned fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f32_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSS2SIZrr, &X86::GR32RegClass, Op0);
  }
  if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTTSS2SIrr, &X86::GR32RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSS2SIrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FP_TO_SINT: f32 -> i64, truncating. Priority: EVEX, legacy SSE, VEX.
unsigned fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f32_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSS2SI64Zrr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTTSS2SI64rr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSS2SI64rr, &X86::GR64RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f32_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::i32: return fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f32_MVT_i32_r(Op0); |
| case MVT::i64: return fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f32_MVT_i64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
// STRICT_FP_TO_SINT: f64 -> i32, truncating. Priority: EVEX, legacy SSE2, VEX.
unsigned fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f64_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSD2SIZrr, &X86::GR32RegClass, Op0);
  }
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTTSD2SIrr, &X86::GR32RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSD2SIrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FP_TO_SINT: f64 -> i64, truncating. Priority: EVEX, legacy SSE2, VEX.
unsigned fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f64_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSD2SI64Zrr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::CVTTSD2SI64rr, &X86::GR64RegClass, Op0);
  }
  if ((Subtarget->hasAVX() && !Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSD2SI64rr, &X86::GR64RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f64_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::i32: return fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f64_MVT_i32_r(Op0); |
| case MVT::i64: return fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f64_MVT_i64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
// STRICT_FP_TO_SINT: v4f64 -> v4i32, truncating (VEX-only path; the EVEX
// form is handled elsewhere when VLX is available).
unsigned fastEmit_ISD_STRICT_FP_TO_SINT_MVT_v4f64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v4i32)
    return 0;
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VCVTTPD2DQYrr, &X86::VR128RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_STRICT_FP_TO_SINT_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f16: return fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f16_r(RetVT, Op0); |
| case MVT::f32: return fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f32_r(RetVT, Op0); |
| case MVT::f64: return fastEmit_ISD_STRICT_FP_TO_SINT_MVT_f64_r(RetVT, Op0); |
| case MVT::v4f64: return fastEmit_ISD_STRICT_FP_TO_SINT_MVT_v4f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::STRICT_FP_TO_UINT. |
| |
// STRICT_FP_TO_UINT: f16 -> u32, truncating (needs AVX512-FP16).
unsigned fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f16_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VCVTTSH2USIZrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FP_TO_UINT: f16 -> u64, truncating (needs AVX512-FP16).
unsigned fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f16_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VCVTTSH2USI64Zrr, &X86::GR64RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f16_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::i32: return fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f16_MVT_i32_r(Op0); |
| case MVT::i64: return fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f16_MVT_i64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
// STRICT_FP_TO_UINT: f32 -> u32, truncating. Unsigned scalar conversions
// exist only in AVX-512; no SSE/VEX fallback.
unsigned fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f32_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSS2USIZrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FP_TO_UINT: f32 -> u64, truncating (AVX-512 only).
unsigned fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f32_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSS2USI64Zrr, &X86::GR64RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f32_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::i32: return fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f32_MVT_i32_r(Op0); |
| case MVT::i64: return fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f32_MVT_i64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
// STRICT_FP_TO_UINT: f64 -> u32, truncating (AVX-512 only).
unsigned fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f64_MVT_i32_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSD2USIZrr, &X86::GR32RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FP_TO_UINT: f64 -> u64, truncating (AVX-512 only).
unsigned fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f64_MVT_i64_r(unsigned Op0) {
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VCVTTSD2USI64Zrr, &X86::GR64RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f64_r(MVT RetVT, unsigned Op0) { |
| switch (RetVT.SimpleTy) { |
| case MVT::i32: return fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f64_MVT_i32_r(Op0); |
| case MVT::i64: return fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f64_MVT_i64_r(Op0); |
| default: return 0; |
| } |
| } |
| |
| unsigned fastEmit_ISD_STRICT_FP_TO_UINT_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f16: return fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f16_r(RetVT, Op0); |
| case MVT::f32: return fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f32_r(RetVT, Op0); |
| case MVT::f64: return fastEmit_ISD_STRICT_FP_TO_UINT_MVT_f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::STRICT_FSQRT. |
| |
// STRICT_FSQRT: scalar f32. Legacy SSE form first; without SSE1 fall back to
// the x87 pseudo operating on RFP32.
unsigned fastEmit_ISD_STRICT_FSQRT_MVT_f32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::f32)
    return 0;
  if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::SQRTSSr, &X86::FR32RegClass, Op0);
  }
  if ((!Subtarget->hasSSE1())) {
    return fastEmitInst_r(X86::SQRT_Fp32, &X86::RFP32RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FSQRT: scalar f64. Legacy SSE2 form first; without SSE2 fall back
// to the x87 pseudo operating on RFP64.
unsigned fastEmit_ISD_STRICT_FSQRT_MVT_f64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::f64)
    return 0;
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::SQRTSDr, &X86::FR64RegClass, Op0);
  }
  if ((!Subtarget->hasSSE2())) {
    return fastEmitInst_r(X86::SQRT_Fp64, &X86::RFP64RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FSQRT: f80 — x87 only.
unsigned fastEmit_ISD_STRICT_FSQRT_MVT_f80_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::f80)
    return 0;
  if ((Subtarget->hasX87())) {
    return fastEmitInst_r(X86::SQRT_Fp80, &X86::RFP80RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FSQRT: v8f16 (needs AVX512-FP16 + VLX).
unsigned fastEmit_ISD_STRICT_FSQRT_MVT_v8f16_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v8f16)
    return 0;
  if ((Subtarget->hasFP16()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPHZ128r, &X86::VR128XRegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FSQRT: v16f16 (needs AVX512-FP16 + VLX).
unsigned fastEmit_ISD_STRICT_FSQRT_MVT_v16f16_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v16f16)
    return 0;
  if ((Subtarget->hasFP16()) && (Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPHZ256r, &X86::VR256XRegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FSQRT: v32f16 (512-bit; needs only AVX512-FP16).
unsigned fastEmit_ISD_STRICT_FSQRT_MVT_v32f16_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v32f16)
    return 0;
  if ((Subtarget->hasFP16())) {
    return fastEmitInst_r(X86::VSQRTPHZr, &X86::VR512RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FSQRT: v4f32. Priority: EVEX (VLX), legacy SSE, VEX.
unsigned fastEmit_ISD_STRICT_FSQRT_MVT_v4f32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v4f32)
    return 0;
  if ((Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPSZ128r, &X86::VR128XRegClass, Op0);
  }
  if ((Subtarget->hasSSE1() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::SQRTPSr, &X86::VR128RegClass, Op0);
  }
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPSr, &X86::VR128RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FSQRT: v8f32. EVEX (VLX) form first, else VEX.
unsigned fastEmit_ISD_STRICT_FSQRT_MVT_v8f32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v8f32)
    return 0;
  if ((Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPSZ256r, &X86::VR256XRegClass, Op0);
  }
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPSYr, &X86::VR256RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FSQRT: v16f32 (512-bit; needs AVX512F).
unsigned fastEmit_ISD_STRICT_FSQRT_MVT_v16f32_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v16f32)
    return 0;
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VSQRTPSZr, &X86::VR512RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FSQRT: v2f64. Priority: EVEX (VLX), legacy SSE2, VEX.
unsigned fastEmit_ISD_STRICT_FSQRT_MVT_v2f64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v2f64)
    return 0;
  if ((Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPDZ128r, &X86::VR128XRegClass, Op0);
  }
  if ((Subtarget->hasSSE2() && !Subtarget->hasAVX())) {
    return fastEmitInst_r(X86::SQRTPDr, &X86::VR128RegClass, Op0);
  }
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPDr, &X86::VR128RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FSQRT: v4f64. EVEX (VLX) form first, else VEX.
unsigned fastEmit_ISD_STRICT_FSQRT_MVT_v4f64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v4f64)
    return 0;
  if ((Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPDZ256r, &X86::VR256XRegClass, Op0);
  }
  if ((Subtarget->hasAVX()) && (!Subtarget->hasVLX())) {
    return fastEmitInst_r(X86::VSQRTPDYr, &X86::VR256RegClass, Op0);
  }
  return 0;
}
| |
// STRICT_FSQRT: v8f64 (512-bit; needs AVX512F).
unsigned fastEmit_ISD_STRICT_FSQRT_MVT_v8f64_r(MVT RetVT, unsigned Op0) {
  if (RetVT.SimpleTy != MVT::v8f64)
    return 0;
  if ((Subtarget->hasAVX512())) {
    return fastEmitInst_r(X86::VSQRTPDZr, &X86::VR512RegClass, Op0);
  }
  return 0;
}
| |
| unsigned fastEmit_ISD_STRICT_FSQRT_r(MVT VT, MVT RetVT, unsigned Op0) { |
| switch (VT.SimpleTy) { |
| case MVT::f32: return fastEmit_ISD_STRICT_FSQRT_MVT_f32_r(RetVT, Op0); |
| case MVT::f64: return fastEmit_ISD_STRICT_FSQRT_MVT_f64_r(RetVT, Op0); |
| case MVT::f80: return fastEmit_ISD_STRICT_FSQRT_MVT_f80_r(RetVT, Op0); |
| case MVT::v8f16: return fastEmit_ISD_STRICT_FSQRT_MVT_v8f16_r(RetVT, Op0); |
| case MVT::v16f16: return fastEmit_ISD_STRICT_FSQRT_MVT_v16f16_r(RetVT, Op0); |
| case MVT::v32f16: return fastEmit_ISD_STRICT_FSQRT_MVT_v32f16_r(RetVT, Op0); |
| case MVT::v4f32: return fastEmit_ISD_STRICT_FSQRT_MVT_v4f32_r(RetVT, Op0); |
| case MVT::v8f32: return fastEmit_ISD_STRICT_FSQRT_MVT_v8f32_r(RetVT, Op0); |
| case MVT::v16f32: return fastEmit_ISD_STRICT_FSQRT_MVT_v16f32_r(RetVT, Op0); |
| case MVT::v2f64: return fastEmit_ISD_STRICT_FSQRT_MVT_v2f64_r(RetVT, Op0); |
| case MVT::v4f64: return fastEmit_ISD_STRICT_FSQRT_MVT_v4f64_r(RetVT, Op0); |
| case MVT::v8f64: return fastEmit_ISD_STRICT_FSQRT_MVT_v8f64_r(RetVT, Op0); |
| default: return 0; |
| } |
| } |
| |
| // FastEmit functions for ISD::STRICT_SINT_TO_FP. |
|