# NOTE: Assertions have been autogenerated by utils/update_mir_test_checks.py
# RUN: llc -mtriple aarch64-apple-ios -run-pass=legalizer -verify-machineinstrs %s -o - | FileCheck %s
--- |
  target datalayout = "e-m:o-i64:64-i128:128-n32:64-S128"
  target triple = "aarch64-apple-ios"

  define void @udiv_test(i128* %v1ptr, i128* %v2ptr) { ret void }

  define void @sdiv_test(i128* %v1ptr, i128* %v2ptr) { ret void }

...
---
# 128-bit unsigned division is not legal on AArch64, so the legalizer must
# lower G_UDIV on s128 into a libcall to __udivti3: each s128 operand is
# unmerged into two s64 halves passed in $x0/$x1 and $x2/$x3, and the s128
# result is re-merged from the $x0/$x1 return registers.
name:            udiv_test
alignment:       4
tracksRegLiveness: true
liveins:
  - { reg: '$x0' }
  - { reg: '$x1' }
machineFunctionInfo: {}
body:             |
  bb.1 (%ir-block.0):
    liveins: $x0, $x1

    ; CHECK-LABEL: name: udiv_test
    ; CHECK: liveins: $x0, $x1
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[LOAD:%[0-9]+]]:_(s128) = G_LOAD [[COPY]](p0) :: (load 16 from %ir.v1ptr)
    ; CHECK: [[LOAD1:%[0-9]+]]:_(s128) = G_LOAD [[COPY1]](p0) :: (load 16 from %ir.v2ptr)
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[LOAD]](s128)
    ; CHECK: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[LOAD1]](s128)
    ; CHECK: $x0 = COPY [[UV]](s64)
    ; CHECK: $x1 = COPY [[UV1]](s64)
    ; CHECK: $x2 = COPY [[UV2]](s64)
    ; CHECK: $x3 = COPY [[UV3]](s64)
    ; CHECK: BL &__udivti3, csr_darwin_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2, implicit $x3, implicit-def $x0, implicit-def $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[COPY2]](s64), [[COPY3]](s64)
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: G_STORE [[MV]](s128), [[COPY]](p0) :: (store 16 into %ir.v1ptr)
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s128) = G_LOAD %0(p0) :: (load 16 from %ir.v1ptr)
    %3:_(s128) = G_LOAD %1(p0) :: (load 16 from %ir.v2ptr)
    %4:_(s128) = G_UDIV %2, %3
    G_STORE %4(s128), %0(p0) :: (store 16 into %ir.v1ptr)
    RET_ReallyLR

...
---
# Same lowering as udiv_test, but for signed division: G_SDIV on s128 is
# legalized into a libcall to __divti3 with the operands split across
# $x0-$x3 and the result rebuilt from $x0/$x1.
name:            sdiv_test
alignment:       4
tracksRegLiveness: true
liveins:
  - { reg: '$x0' }
  - { reg: '$x1' }
machineFunctionInfo: {}
body:             |
  bb.1 (%ir-block.0):
    liveins: $x0, $x1

    ; CHECK-LABEL: name: sdiv_test
    ; CHECK: liveins: $x0, $x1
    ; CHECK: [[COPY:%[0-9]+]]:_(p0) = COPY $x0
    ; CHECK: [[COPY1:%[0-9]+]]:_(p0) = COPY $x1
    ; CHECK: [[LOAD:%[0-9]+]]:_(s128) = G_LOAD [[COPY]](p0) :: (load 16 from %ir.v1ptr)
    ; CHECK: [[LOAD1:%[0-9]+]]:_(s128) = G_LOAD [[COPY1]](p0) :: (load 16 from %ir.v2ptr)
    ; CHECK: ADJCALLSTACKDOWN 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: [[UV:%[0-9]+]]:_(s64), [[UV1:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[LOAD]](s128)
    ; CHECK: [[UV2:%[0-9]+]]:_(s64), [[UV3:%[0-9]+]]:_(s64) = G_UNMERGE_VALUES [[LOAD1]](s128)
    ; CHECK: $x0 = COPY [[UV]](s64)
    ; CHECK: $x1 = COPY [[UV1]](s64)
    ; CHECK: $x2 = COPY [[UV2]](s64)
    ; CHECK: $x3 = COPY [[UV3]](s64)
    ; CHECK: BL &__divti3, csr_darwin_aarch64_aapcs, implicit-def $lr, implicit $sp, implicit $x0, implicit $x1, implicit $x2, implicit $x3, implicit-def $x0, implicit-def $x1
    ; CHECK: [[COPY2:%[0-9]+]]:_(s64) = COPY $x0
    ; CHECK: [[COPY3:%[0-9]+]]:_(s64) = COPY $x1
    ; CHECK: [[MV:%[0-9]+]]:_(s128) = G_MERGE_VALUES [[COPY2]](s64), [[COPY3]](s64)
    ; CHECK: ADJCALLSTACKUP 0, 0, implicit-def $sp, implicit $sp
    ; CHECK: G_STORE [[MV]](s128), [[COPY]](p0) :: (store 16 into %ir.v1ptr)
    ; CHECK: RET_ReallyLR
    %0:_(p0) = COPY $x0
    %1:_(p0) = COPY $x1
    %2:_(s128) = G_LOAD %0(p0) :: (load 16 from %ir.v1ptr)
    %3:_(s128) = G_LOAD %1(p0) :: (load 16 from %ir.v2ptr)
    %4:_(s128) = G_SDIV %2, %3
    G_STORE %4(s128), %0(p0) :: (store 16 into %ir.v1ptr)
    RET_ReallyLR

...
