|  | ; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py | 
|  | ; RUN: llc < %s -mtriple=i686-unknown -mattr=+sse2 | FileCheck %s --check-prefix=X32-SSE | 
|  | ; RUN: llc < %s -mtriple=i686-unknown -mattr=+avx | FileCheck %s --check-prefix=X32-AVX | 
|  | ; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+sse2 | FileCheck %s --check-prefix=X64-SSE | 
|  | ; RUN: llc < %s -mtriple=x86_64-unknown -mattr=+avx | FileCheck %s --check-prefix=X64-AVX | 
|  |  | 
; PR29079: uitofp of an i32 vector whose lanes are masked to known-non-negative
; values should lower to the signed conversion instructions (cvtdq2ps/cvtdq2pd)
; rather than the general unsigned-conversion expansion.
|  |  | 
; The mask constants (max 65595) are all < 2^31, so every lane of %and has its
; sign bit clear. uitofp of a known-non-negative i32 is equivalent to sitofp,
; which lets the backend emit the single signed-conversion instruction
; (cvtdq2ps / vcvtdq2ps) instead of the multi-instruction unsigned sequence.
define <4 x float> @mask_ucvt_4i32_4f32(<4 x i32> %a) {
; X32-SSE-LABEL: mask_ucvt_4i32_4f32:
; X32-SSE:       # %bb.0:
; X32-SSE-NEXT:    andps {{\.LCPI.*}}, %xmm0
; X32-SSE-NEXT:    cvtdq2ps %xmm0, %xmm0
; X32-SSE-NEXT:    retl
;
; X32-AVX-LABEL: mask_ucvt_4i32_4f32:
; X32-AVX:       # %bb.0:
; X32-AVX-NEXT:    vandps {{\.LCPI.*}}, %xmm0, %xmm0
; X32-AVX-NEXT:    vcvtdq2ps %xmm0, %xmm0
; X32-AVX-NEXT:    retl
;
; X64-SSE-LABEL: mask_ucvt_4i32_4f32:
; X64-SSE:       # %bb.0:
; X64-SSE-NEXT:    andps {{.*}}(%rip), %xmm0
; X64-SSE-NEXT:    cvtdq2ps %xmm0, %xmm0
; X64-SSE-NEXT:    retq
;
; X64-AVX-LABEL: mask_ucvt_4i32_4f32:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vandps {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT:    vcvtdq2ps %xmm0, %xmm0
; X64-AVX-NEXT:    retq
  %and = and <4 x i32> %a, <i32 127, i32 255, i32 4095, i32 65595>
  %cvt = uitofp <4 x i32> %and to <4 x float>
  ret <4 x float> %cvt
}
|  |  | 
; Same non-negative-mask premise as above, but converting to <4 x double>.
; SSE2 has no int->double conversion that widens to two registers at once, so
; the expected lowering splits the vector: convert the low two lanes, shuffle
; the high two lanes down (pshufd [2,3,0,1]), convert those, and return the
; result pair in xmm0/xmm1. AVX converts all four lanes with one vcvtdq2pd
; into a ymm register.
define <4 x double> @mask_ucvt_4i32_4f64(<4 x i32> %a) {
; X32-SSE-LABEL: mask_ucvt_4i32_4f64:
; X32-SSE:       # %bb.0:
; X32-SSE-NEXT:    pand {{\.LCPI.*}}, %xmm0
; X32-SSE-NEXT:    cvtdq2pd %xmm0, %xmm2
; X32-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X32-SSE-NEXT:    cvtdq2pd %xmm0, %xmm1
; X32-SSE-NEXT:    movaps %xmm2, %xmm0
; X32-SSE-NEXT:    retl
;
; X32-AVX-LABEL: mask_ucvt_4i32_4f64:
; X32-AVX:       # %bb.0:
; X32-AVX-NEXT:    vandps {{\.LCPI.*}}, %xmm0, %xmm0
; X32-AVX-NEXT:    vcvtdq2pd %xmm0, %ymm0
; X32-AVX-NEXT:    retl
;
; X64-SSE-LABEL: mask_ucvt_4i32_4f64:
; X64-SSE:       # %bb.0:
; X64-SSE-NEXT:    pand {{.*}}(%rip), %xmm0
; X64-SSE-NEXT:    cvtdq2pd %xmm0, %xmm2
; X64-SSE-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[2,3,0,1]
; X64-SSE-NEXT:    cvtdq2pd %xmm0, %xmm1
; X64-SSE-NEXT:    movaps %xmm2, %xmm0
; X64-SSE-NEXT:    retq
;
; X64-AVX-LABEL: mask_ucvt_4i32_4f64:
; X64-AVX:       # %bb.0:
; X64-AVX-NEXT:    vandps {{.*}}(%rip), %xmm0, %xmm0
; X64-AVX-NEXT:    vcvtdq2pd %xmm0, %ymm0
; X64-AVX-NEXT:    retq
  %and = and <4 x i32> %a, <i32 127, i32 255, i32 4095, i32 65595>
  %cvt = uitofp <4 x i32> %and to <4 x double>
  ret <4 x double> %cvt
}