; NOTE: Assertions have been autogenerated by utils/update_llc_test_checks.py
; RUN: llc < %s -mtriple=i686-unknown-unknown -mattr=+sse2 | FileCheck %s --check-prefixes=X86
; RUN: llc < %s -mtriple=x86_64-unknown-unknown -mattr=+sse2 | FileCheck %s --check-prefixes=X64

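; Scalar i128 addition lowers to plain add/adc carry chains on both targets.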
define i128 @add_i128(i128 %x, i128 %y) nounwind {
; X86-LABEL: add_i128:
; X86:       # %bb.0:
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X86-NEXT:    addl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %edi
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addl $1, %esi
; X86-NEXT:    adcl $0, %edi
; X86-NEXT:    adcl $0, %edx
; X86-NEXT:    adcl $0, %ecx
; X86-NEXT:    movl %esi, (%eax)
; X86-NEXT:    movl %edi, 4(%eax)
; X86-NEXT:    movl %edx, 8(%eax)
; X86-NEXT:    movl %ecx, 12(%eax)
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    retl $4
;
; X64-LABEL: add_i128:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    addq %rdx, %rax
; X64-NEXT:    adcq %rcx, %rsi
; X64-NEXT:    addq $1, %rax
; X64-NEXT:    adcq $0, %rsi
; X64-NEXT:    movq %rsi, %rdx
; X64-NEXT:    retq
  %t0 = add i128 %x, 1
  %t1 = add i128 %y, %t0
  ret i128 %t1
}

; PR42486: <1 x i128> should add like a scalar i128; the X64 lowering below
; still round-trips the high half through XMM registers.
define <1 x i128> @add_v1i128(<1 x i128> %x, <1 x i128> %y) nounwind {
; X86-LABEL: add_v1i128:
; X86:       # %bb.0:
; X86-NEXT:    pushl %edi
; X86-NEXT:    pushl %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %eax
; X86-NEXT:    movl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    movl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    movl {{[0-9]+}}(%esp), %edi
; X86-NEXT:    addl {{[0-9]+}}(%esp), %esi
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %edi
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %edx
; X86-NEXT:    adcl {{[0-9]+}}(%esp), %ecx
; X86-NEXT:    addl $1, %esi
; X86-NEXT:    adcl $0, %edi
; X86-NEXT:    adcl $0, %edx
; X86-NEXT:    adcl $0, %ecx
; X86-NEXT:    movl %esi, (%eax)
; X86-NEXT:    movl %edi, 4(%eax)
; X86-NEXT:    movl %edx, 8(%eax)
; X86-NEXT:    movl %ecx, 12(%eax)
; X86-NEXT:    popl %esi
; X86-NEXT:    popl %edi
; X86-NEXT:    retl $4
;
; X64-LABEL: add_v1i128:
; X64:       # %bb.0:
; X64-NEXT:    movq %rdi, %rax
; X64-NEXT:    addq %rdx, %rax
; X64-NEXT:    adcq %rcx, %rsi
; X64-NEXT:    movq %rax, %xmm0
; X64-NEXT:    movq %rsi, %xmm1
; X64-NEXT:    punpcklqdq {{.*#+}} xmm0 = xmm0[0],xmm1[0]
; X64-NEXT:    pshufd {{.*#+}} xmm0 = xmm0[2,3,2,3]
; X64-NEXT:    movq %xmm0, %rdx
; X64-NEXT:    addq $1, %rax
; X64-NEXT:    adcq $0, %rdx
; X64-NEXT:    retq
  %t0 = add <1 x i128> %x, <i128 1>
  %t1 = add <1 x i128> %y, %t0
  ret <1 x i128> %t1
}
