; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi | FileCheck %s --check-prefixes=CHECK,BMI1
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+bmi,+bmi2 | FileCheck %s --check-prefixes=CHECK,BMI2

declare i64 @llvm.x86.bmi.bextr.64(i64, i64)

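; BEXTR's control operand encodes the start bit in bits 7:0 and the field
; length in bits 15:8; the immediates in the tests below decode that way.

; A direct call to the intrinsic should select bextrq, with the control
; already in a register.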
define i64 @bextr64(i64 %x, i64 %y) {
; CHECK-LABEL: bextr64:
; CHECK:       # %bb.0:
; CHECK-NEXT:    bextrq %rsi, %rdi, %rax
; CHECK-NEXT:    retq
  %tmp = tail call i64 @llvm.x86.bmi.bextr.64(i64 %x, i64 %y)
  ret i64 %tmp
}

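; (%x >> 4) & 4095 is a 12-bit field starting at bit 4, i.e. control value
; 0xC04 (start = 4, length = 12). The field lies entirely in the low 32 bits,
; so the 32-bit bextrl form suffices.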
define i64 @bextr64b(i64 %x) uwtable ssp {
; CHECK-LABEL: bextr64b:
; CHECK:       # %bb.0:
; CHECK-NEXT:    movl $3076, %eax # imm = 0xC04
; CHECK-NEXT:    bextrl %eax, %edi, %eax
; CHECK-NEXT:    retq
  %1 = lshr i64 %x, 4
  %2 = and i64 %1, 4095
  ret i64 %2
}

; Make sure we still use the AH subreg trick to extract bits 15:8.
define i64 @bextr64_subreg(i64 %x) uwtable ssp {
; CHECK-LABEL: bextr64_subreg:
; CHECK:       # %bb.0:
; CHECK-NEXT:    movq %rdi, %rax
; CHECK-NEXT:    movzbl %ah, %eax
; CHECK-NEXT:    retq
  %1 = lshr i64 %x, 8
  %2 = and i64 %1, 255
  ret i64 %2
}

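; Same 12-bit field as bextr64b, but the source comes from memory; the load
; should fold into bextrl's memory operand.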
define i64 @bextr64b_load(i64* %x) {
; CHECK-LABEL: bextr64b_load:
; CHECK:       # %bb.0:
; CHECK-NEXT:    movl $3076, %eax # imm = 0xC04
; CHECK-NEXT:    bextrl %eax, (%rdi), %eax
; CHECK-NEXT:    retq
  %1 = load i64, i64* %x, align 8
  %2 = lshr i64 %1, 4
  %3 = and i64 %2, 4095
  ret i64 %3
}

; PR34042
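; The control operand arrives as i32 and is sign-extended to i64; bextrq
; should still be selected, leaving only a sub-register kill annotation.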
define i64 @bextr64c(i64 %x, i32 %y) {
; CHECK-LABEL: bextr64c:
; CHECK:       # %bb.0:
; CHECK-NEXT:    # kill: def $esi killed $esi def $rsi
; CHECK-NEXT:    bextrq %rsi, %rdi, %rax
; CHECK-NEXT:    retq
  %tmp0 = sext i32 %y to i64
  %tmp1 = tail call i64 @llvm.x86.bmi.bextr.64(i64 %x, i64 %tmp0)
  ret i64 %tmp1
}

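; (%a >> 2) & 0x1FFFFFFFF is a 33-bit field starting at bit 2, i.e. control
; value 0x2102 (start = 2, length = 33). The field is wider than 32 bits, so
; the 64-bit bextrq is required; since bextr takes its control in a register,
; the immediate is materialized with movl first.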
define i64 @bextr64d(i64 %a) {
; CHECK-LABEL: bextr64d:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    movl $8450, %eax # imm = 0x2102
; CHECK-NEXT:    bextrq %rax, %rdi, %rax
; CHECK-NEXT:    retq
entry:
  %shr = lshr i64 %a, 2
  %and = and i64 %shr, 8589934591
  ret i64 %and
}

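; Negative test: 0x1FFFFFFFE is not a mask of contiguous low bits (bit 0 is
; clear), so the shift-and-mask must not be combined into a bextr.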
define i64 @non_bextr64(i64 %x) {
; CHECK-LABEL: non_bextr64:
; CHECK:       # %bb.0: # %entry
; CHECK-NEXT:    shrq $2, %rdi
; CHECK-NEXT:    movabsq $8589934590, %rax # imm = 0x1FFFFFFFE
; CHECK-NEXT:    andq %rdi, %rax
; CHECK-NEXT:    retq
entry:
  %shr = lshr i64 %x, 2
  %and = and i64 %shr, 8589934590
  ret i64 %and
}