// REQUIRES: aarch64-registered-target
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - -emit-llvm %s 2>&1 | FileCheck %s
// RUN: %clang_cc1 -DSVE_OVERLOADED_FORMS -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - -emit-llvm %s 2>&1 | FileCheck %s
// RUN: %clang_cc1 -triple aarch64-none-linux-gnu -target-feature +sve -fallow-half-arguments-and-returns -S -O1 -Werror -o - %s >/dev/null 2>%t
// RUN: FileCheck --check-prefix=ASM --allow-empty %s <%t

// If this check fails please read test/CodeGen/aarch64-sve-intrinsics/README for instructions on how to resolve it.
// ASM-NOT: warning
9 #include <arm_sve.h>
10
11 #ifdef SVE_OVERLOADED_FORMS
12 // A simple used,unused... macro, long enough to represent any SVE builtin.
13 #define SVE_ACLE_FUNC(A1,A2_UNUSED,A3,A4_UNUSED) A1##A3
14 #else
15 #define SVE_ACLE_FUNC(A1,A2,A3,A4) A1##A2##A3##A4
16 #endif
17
void test_svst1h_s32(svbool_t pg, int16_t *base, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_s32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i16(<vscale x 4 x i16> %[[DATA]], <vscale x 4 x i1> %[[PG]], i16* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h,_s32,,)(pg, base, data);
}

void test_svst1h_s64(svbool_t pg, int16_t *base, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i16(<vscale x 2 x i16> %[[DATA]], <vscale x 2 x i1> %[[PG]], i16* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h,_s64,,)(pg, base, data);
}

void test_svst1h_u32(svbool_t pg, uint16_t *base, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_u32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i16(<vscale x 4 x i16> %[[DATA]], <vscale x 4 x i1> %[[PG]], i16* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h,_u32,,)(pg, base, data);
}

void test_svst1h_u64(svbool_t pg, uint16_t *base, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i16(<vscale x 2 x i16> %[[DATA]], <vscale x 2 x i1> %[[PG]], i16* %base)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h,_u64,,)(pg, base, data);
}

void test_svst1h_vnum_s32(svbool_t pg, int16_t *base, int64_t vnum, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_vnum_s32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i16* %base to <vscale x 4 x i16>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x i16>, <vscale x 4 x i16>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i16(<vscale x 4 x i16> %[[DATA]], <vscale x 4 x i1> %[[PG]], i16* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_vnum,_s32,,)(pg, base, vnum, data);
}

void test_svst1h_vnum_s64(svbool_t pg, int16_t *base, int64_t vnum, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_vnum_s64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i16* %base to <vscale x 2 x i16>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i16>, <vscale x 2 x i16>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i16(<vscale x 2 x i16> %[[DATA]], <vscale x 2 x i1> %[[PG]], i16* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_vnum,_s64,,)(pg, base, vnum, data);
}

void test_svst1h_vnum_u32(svbool_t pg, uint16_t *base, int64_t vnum, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_vnum_u32
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i16* %base to <vscale x 4 x i16>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 4 x i16>, <vscale x 4 x i16>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv4i16(<vscale x 4 x i16> %[[DATA]], <vscale x 4 x i1> %[[PG]], i16* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_vnum,_u32,,)(pg, base, vnum, data);
}

void test_svst1h_vnum_u64(svbool_t pg, uint16_t *base, int64_t vnum, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_vnum_u64
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[BASE:.*]] = bitcast i16* %base to <vscale x 2 x i16>*
  // CHECK-DAG: %[[DATA:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[GEP:.*]] = getelementptr <vscale x 2 x i16>, <vscale x 2 x i16>* %[[BASE]], i64 %vnum, i64 0
  // CHECK: call void @llvm.aarch64.sve.st1.nxv2i16(<vscale x 2 x i16> %[[DATA]], <vscale x 2 x i1> %[[PG]], i16* %[[GEP]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_vnum,_u64,,)(pg, base, vnum, data);
}

void test_svst1h_scatter_u32base_s32(svbool_t pg, svuint32_t bases, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32base_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u32base,,_s32)(pg, bases, data);
}

void test_svst1h_scatter_u64base_s64(svbool_t pg, svuint64_t bases, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64base_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u64base,,_s64)(pg, bases, data);
}

void test_svst1h_scatter_u32base_u32(svbool_t pg, svuint32_t bases, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32base_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u32base,,_u32)(pg, bases, data);
}

void test_svst1h_scatter_u64base_u64(svbool_t pg, svuint64_t bases, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64base_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 0)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u64base,,_u64)(pg, bases, data);
}

void test_svst1h_scatter_s32offset_s32(svbool_t pg, int16_t *base, svint32_t offsets, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s32offset_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s32,offset,_s32)(pg, base, offsets, data);
}

void test_svst1h_scatter_s64offset_s64(svbool_t pg, int16_t *base, svint64_t offsets, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s64offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s64,offset,_s64)(pg, base, offsets, data);
}

void test_svst1h_scatter_s32offset_u32(svbool_t pg, uint16_t *base, svint32_t offsets, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s32offset_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s32,offset,_u32)(pg, base, offsets, data);
}

void test_svst1h_scatter_s64offset_u64(svbool_t pg, uint16_t *base, svint64_t offsets, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s64offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s64,offset,_u64)(pg, base, offsets, data);
}

void test_svst1h_scatter_u32offset_s32(svbool_t pg, int16_t *base, svuint32_t offsets, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32offset_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u32,offset,_s32)(pg, base, offsets, data);
}

void test_svst1h_scatter_u64offset_s64(svbool_t pg, int16_t *base, svuint64_t offsets, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u64,offset,_s64)(pg, base, offsets, data);
}

void test_svst1h_scatter_u32offset_u32(svbool_t pg, uint16_t *base, svuint32_t offsets, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32offset_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u32,offset,_u32)(pg, base, offsets, data);
}

void test_svst1h_scatter_u64offset_u64(svbool_t pg, uint16_t *base, svuint64_t offsets, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %offsets)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u64,offset,_u64)(pg, base, offsets, data);
}

void test_svst1h_scatter_u32base_offset_s32(svbool_t pg, svuint32_t bases, int64_t offset, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32base_offset_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u32base,_offset,_s32)(pg, bases, offset, data);
}

void test_svst1h_scatter_u64base_offset_s64(svbool_t pg, svuint64_t bases, int64_t offset, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64base_offset_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u64base,_offset,_s64)(pg, bases, offset, data);
}

void test_svst1h_scatter_u32base_offset_u32(svbool_t pg, svuint32_t bases, int64_t offset, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32base_offset_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u32base,_offset,_u32)(pg, bases, offset, data);
}

void test_svst1h_scatter_u64base_offset_u64(svbool_t pg, svuint64_t bases, int64_t offset, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64base_offset_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %offset)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u64base,_offset,_u64)(pg, bases, offset, data);
}

void test_svst1h_scatter_s32index_s32(svbool_t pg, int16_t *base, svint32_t indices, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s32index_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.index.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s32,index,_s32)(pg, base, indices, data);
}

void test_svst1h_scatter_s64index_s64(svbool_t pg, int16_t *base, svint64_t indices, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s64index_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s64,index,_s64)(pg, base, indices, data);
}

void test_svst1h_scatter_s32index_u32(svbool_t pg, uint16_t *base, svint32_t indices, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s32index_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.sxtw.index.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s32,index,_u32)(pg, base, indices, data);
}

void test_svst1h_scatter_s64index_u64(svbool_t pg, uint16_t *base, svint64_t indices, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_s64index_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,s64,index,_u64)(pg, base, indices, data);
}

void test_svst1h_scatter_u32index_s32(svbool_t pg, int16_t *base, svuint32_t indices, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32index_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.index.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u32,index,_s32)(pg, base, indices, data);
}

void test_svst1h_scatter_u64index_s64(svbool_t pg, int16_t *base, svuint64_t indices, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64index_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u64,index,_s64)(pg, base, indices, data);
}

void test_svst1h_scatter_u32index_u32(svbool_t pg, uint16_t *base, svuint32_t indices, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32index_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.uxtw.index.nxv4i16(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], i16* %base, <vscale x 4 x i32> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u32,index,_u32)(pg, base, indices, data);
}

void test_svst1h_scatter_u64index_u64(svbool_t pg, uint16_t *base, svuint64_t indices, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64index_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.index.nxv2i16(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], i16* %base, <vscale x 2 x i64> %indices)
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter_,u64,index,_u64)(pg, base, indices, data);
}

void test_svst1h_scatter_u32base_index_s32(svbool_t pg, svuint32_t bases, int64_t index, svint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32base_index_s32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 1
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u32base,_index,_s32)(pg, bases, index, data);
}

void test_svst1h_scatter_u64base_index_s64(svbool_t pg, svuint64_t bases, int64_t index, svint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64base_index_s64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 1
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u64base,_index,_s64)(pg, bases, index, data);
}

void test_svst1h_scatter_u32base_index_u32(svbool_t pg, svuint32_t bases, int64_t index, svuint32_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u32base_index_u32
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 4 x i32> %data to <vscale x 4 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 4 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv4i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 1
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv4i16.nxv4i32(<vscale x 4 x i16> %[[TRUNC]], <vscale x 4 x i1> %[[PG]], <vscale x 4 x i32> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u32base,_index,_u32)(pg, bases, index, data);
}

void test_svst1h_scatter_u64base_index_u64(svbool_t pg, svuint64_t bases, int64_t index, svuint64_t data)
{
  // CHECK-LABEL: test_svst1h_scatter_u64base_index_u64
  // CHECK-DAG: %[[TRUNC:.*]] = trunc <vscale x 2 x i64> %data to <vscale x 2 x i16>
  // CHECK-DAG: %[[PG:.*]] = call <vscale x 2 x i1> @llvm.aarch64.sve.convert.from.svbool.nxv2i1(<vscale x 16 x i1> %pg)
  // CHECK-DAG: %[[SHL:.*]] = shl i64 %index, 1
  // CHECK: call void @llvm.aarch64.sve.st1.scatter.scalar.offset.nxv2i16.nxv2i64(<vscale x 2 x i16> %[[TRUNC]], <vscale x 2 x i1> %[[PG]], <vscale x 2 x i64> %bases, i64 %[[SHL]])
  // CHECK: ret void
  return SVE_ACLE_FUNC(svst1h_scatter,_u64base,_index,_u64)(pg, bases, index, data);
}
