; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -fast-isel -mtriple=x86_64-unknown-unknown -mattr=+tbm | FileCheck %s --check-prefix=X64

; NOTE: This should use IR equivalent to what is generated by clang/test/CodeGen/tbm-builtins.c

; BEXTRI with control 0xF01: extract 15 bits starting at bit 1.
define i64 @test__bextri_u64(i64 %a0) {
; X64-LABEL: test__bextri_u64:
; X64:       # %bb.0:
; X64-NEXT:    bextrq $3841, %rdi, %rax # imm = 0xF01
; X64-NEXT:    retq
  %1 = call i64 @llvm.x86.tbm.bextri.u64(i64 %a0, i64 3841)
  ret i64 %1
}

; BLCFILL pattern: x & (x + 1).
define i64 @test__blcfill_u64(i64 %a0) {
; X64-LABEL: test__blcfill_u64:
; X64:       # %bb.0:
; X64-NEXT:    leaq 1(%rdi), %rax
; X64-NEXT:    andq %rdi, %rax
; X64-NEXT:    retq
  %1 = add i64 %a0, 1
  %2 = and i64 %a0, %1
  ret i64 %2
}

; BLCI pattern: x | ~(x + 1).
define i64 @test__blci_u64(i64 %a0) {
; X64-LABEL: test__blci_u64:
; X64:       # %bb.0:
; X64-NEXT:    leaq 1(%rdi), %rax
; X64-NEXT:    xorq $-1, %rax
; X64-NEXT:    orq %rdi, %rax
; X64-NEXT:    retq
  %1 = add i64 %a0, 1
  %2 = xor i64 %1, -1
  %3 = or i64 %a0, %2
  ret i64 %3
}

; BLCIC pattern: ~x & (x + 1).
define i64 @test__blcic_u64(i64 %a0) {
; X64-LABEL: test__blcic_u64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    movq %rdi, %rcx
; X64-NEXT:    xorq $-1, %rcx
; X64-NEXT:    addq $1, %rax
; X64-NEXT:    andq %rcx, %rax
; X64-NEXT:    retq
  %1 = xor i64 %a0, -1
  %2 = add i64 %a0, 1
  %3 = and i64 %1, %2
  ret i64 %3
}

; BLCMSK pattern: x ^ (x + 1).
define i64 @test__blcmsk_u64(i64 %a0) {
; X64-LABEL: test__blcmsk_u64:
; X64:       # %bb.0:
; X64-NEXT:    leaq 1(%rdi), %rax
; X64-NEXT:    xorq %rdi, %rax
; X64-NEXT:    retq
  %1 = add i64 %a0, 1
  %2 = xor i64 %a0, %1
  ret i64 %2
}

; BLCS pattern: x | (x + 1).
define i64 @test__blcs_u64(i64 %a0) {
; X64-LABEL: test__blcs_u64:
; X64:       # %bb.0:
; X64-NEXT:    leaq 1(%rdi), %rax
; X64-NEXT:    orq %rdi, %rax
; X64-NEXT:    retq
  %1 = add i64 %a0, 1
  %2 = or i64 %a0, %1
  ret i64 %2
}

; BLSFILL pattern: x | (x - 1).
define i64 @test__blsfill_u64(i64 %a0) {
; X64-LABEL: test__blsfill_u64:
; X64:       # %bb.0:
; X64-NEXT:    leaq -1(%rdi), %rax
; X64-NEXT:    orq %rdi, %rax
; X64-NEXT:    retq
  %1 = sub i64 %a0, 1
  %2 = or i64 %a0, %1
  ret i64 %2
}

; BLSIC pattern: ~x | (x - 1).
define i64 @test__blsic_u64(i64 %a0) {
; X64-LABEL: test__blsic_u64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    movq %rdi, %rcx
; X64-NEXT:    xorq $-1, %rcx
; X64-NEXT:    subq $1, %rax
; X64-NEXT:    orq %rcx, %rax
; X64-NEXT:    retq
  %1 = xor i64 %a0, -1
  %2 = sub i64 %a0, 1
  %3 = or i64 %1, %2
  ret i64 %3
}

; T1MSKC pattern: ~x | (x + 1).
define i64 @test__t1mskc_u64(i64 %a0) {
; X64-LABEL: test__t1mskc_u64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    movq %rdi, %rcx
; X64-NEXT:    xorq $-1, %rcx
; X64-NEXT:    addq $1, %rax
; X64-NEXT:    orq %rcx, %rax
; X64-NEXT:    retq
  %1 = xor i64 %a0, -1
  %2 = add i64 %a0, 1
  %3 = or i64 %1, %2
  ret i64 %3
}

; TZMSK pattern: ~x & (x - 1).
define i64 @test__tzmsk_u64(i64 %a0) {
; X64-LABEL: test__tzmsk_u64:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    movq %rdi, %rcx
; X64-NEXT:    xorq $-1, %rcx
; X64-NEXT:    subq $1, %rax
; X64-NEXT:    andq %rcx, %rax
; X64-NEXT:    retq
  %1 = xor i64 %a0, -1
  %2 = sub i64 %a0, 1
  %3 = and i64 %1, %2
  ret i64 %3
}

declare i64 @llvm.x86.tbm.bextri.u64(i64, i64)