; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i386-unknown-unknown | FileCheck %s --check-prefix=X32
; RUN: llc < %s -mtriple=x86_64-unknown-unknown | FileCheck %s --check-prefix=X64

define i32 @t1(i32 %t, i32 %val) nounwind {
; X32-LABEL: t1:
; X32:       # %bb.0:
; X32-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    shll %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: t1:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %eax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shll %cl, %eax
; X64-NEXT:    retq
  %shamt = and i32 %t, 31
  %res = shl i32 %val, %shamt
  ret i32 %res
}

define i32 @t2(i32 %t, i32 %val) nounwind {
; X32-LABEL: t2:
; X32:       # %bb.0:
; X32-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    shll %cl, %eax
; X32-NEXT:    retl
;
; X64-LABEL: t2:
; X64:       # %bb.0:
; X64-NEXT:    movl %esi, %eax
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    shll %cl, %eax
; X64-NEXT:    retq
  %shamt = and i32 %t, 63
  %res = shl i32 %val, %shamt
  ret i32 %res
}

@X = internal global i16 0

define void @t3(i16 %t) nounwind {
; X32-LABEL: t3:
; X32:       # %bb.0:
; X32-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    sarw %cl, X
; X32-NEXT:    retl
;
; X64-LABEL: t3:
; X64:       # %bb.0:
; X64-NEXT:    movl %edi, %ecx
; X64-NEXT:    # kill: def $cl killed $cl killed $ecx
; X64-NEXT:    sarw %cl, {{.*}}(%rip)
; X64-NEXT:    retq
  %shamt = and i16 %t, 31
  %tmp = load i16, i16* @X
  %tmp1 = ashr i16 %tmp, %shamt
  store i16 %tmp1, i16* @X
  ret void
}

define i64 @t4(i64 %t, i64 %val) nounwind {
; X32-LABEL: t4:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB3_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB3_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: t4:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rax
; X64-NEXT:    movq %rdi, %rcx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %shamt = and i64 %t, 63
  %res = lshr i64 %val, %shamt
  ret i64 %res
}

define i64 @t5(i64 %t, i64 %val) nounwind {
; X32-LABEL: t5:
; X32:       # %bb.0:
; X32-NEXT:    pushl %esi
; X32-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    shrl %cl, %edx
; X32-NEXT:    shrdl %cl, %esi, %eax
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB4_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %edx, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:  .LBB4_2:
; X32-NEXT:    popl %esi
; X32-NEXT:    retl
;
; X64-LABEL: t5:
; X64:       # %bb.0:
; X64-NEXT:    movq %rsi, %rax
; X64-NEXT:    movq %rdi, %rcx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, %rax
; X64-NEXT:    retq
  %shamt = and i64 %t, 191
  %res = lshr i64 %val, %shamt
  ret i64 %res
}

define void @t5ptr(i64 %t, i64* %ptr) nounwind {
; X32-LABEL: t5ptr:
; X32:       # %bb.0:
; X32-NEXT:    pushl %edi
; X32-NEXT:    pushl %esi
; X32-NEXT:    movb {{[0-9]+}}(%esp), %cl
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    movl (%eax), %edx
; X32-NEXT:    movl 4(%eax), %edi
; X32-NEXT:    movl %edi, %esi
; X32-NEXT:    shrl %cl, %esi
; X32-NEXT:    shrdl %cl, %edi, %edx
; X32-NEXT:    testb $32, %cl
; X32-NEXT:    je .LBB5_2
; X32-NEXT:  # %bb.1:
; X32-NEXT:    movl %esi, %edx
; X32-NEXT:    xorl %esi, %esi
; X32-NEXT:  .LBB5_2:
; X32-NEXT:    movl %edx, (%eax)
; X32-NEXT:    movl %esi, 4(%eax)
; X32-NEXT:    popl %esi
; X32-NEXT:    popl %edi
; X32-NEXT:    retl
;
; X64-LABEL: t5ptr:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rcx
; X64-NEXT:    # kill: def $cl killed $cl killed $rcx
; X64-NEXT:    shrq %cl, (%rsi)
; X64-NEXT:    retq
  %shamt = and i64 %t, 191
  %tmp = load i64, i64* %ptr
  %tmp1 = lshr i64 %tmp, %shamt
  store i64 %tmp1, i64* %ptr
  ret void
}


; rdar://11866926
define i64 @t6(i64 %key, i64* nocapture %val) nounwind {
; X32-LABEL: t6:
; X32:       # %bb.0:
; X32-NEXT:    pushl %edi
; X32-NEXT:    pushl %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X32-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    shrdl $3, %eax, %esi
; X32-NEXT:    movl %eax, %edi
; X32-NEXT:    shrl $3, %edi
; X32-NEXT:    movl (%ecx), %eax
; X32-NEXT:    movl 4(%ecx), %edx
; X32-NEXT:    addl $-1, %eax
; X32-NEXT:    adcl $-1, %edx
; X32-NEXT:    andl %esi, %eax
; X32-NEXT:    andl %edi, %edx
; X32-NEXT:    popl %esi
; X32-NEXT:    popl %edi
; X32-NEXT:    retl
;
; X64-LABEL: t6:
; X64:       # %bb.0:
; X64-NEXT:    shrq $3, %rdi
; X64-NEXT:    movq (%rsi), %rax
; X64-NEXT:    decq %rax
; X64-NEXT:    andq %rdi, %rax
; X64-NEXT:    retq
  %shr = lshr i64 %key, 3
  %1 = load i64, i64* %val, align 8
  %sub = add i64 %1, 2305843009213693951
  %and = and i64 %sub, %shr
  ret i64 %and
}

define i64 @big_mask_constant(i64 %x) nounwind {
; X32-LABEL: big_mask_constant:
; X32:       # %bb.0:
; X32-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X32-NEXT:    andl $4, %eax
; X32-NEXT:    shll $25, %eax
; X32-NEXT:    xorl %edx, %edx
; X32-NEXT:    retl
;
; X64-LABEL: big_mask_constant:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    shrq $7, %rax
; X64-NEXT:    andl $134217728, %eax # imm = 0x8000000
; X64-NEXT:    retq
  %and = and i64 %x, 17179869184 ; 0x400000000
  %sh = lshr i64 %and, 7
  ret i64 %sh
}