; RUN: llc -mtriple=aarch64--linux-gnu -mattr=+sve < %s 2>%t | FileCheck %s
; RUN: FileCheck --check-prefix=WARN --allow-empty %s <%t

; If this check fails please read test/CodeGen/AArch64/README for instructions on how to resolve it.
; WARN-NOT: warning

;
; Masked Loads
;
; Ensure a masked load followed by a zext is lowered to the unsigned
; zero-extending LD1 form (e.g. ld1b) rather than a sign-extending one.
;

define <vscale x 2 x i64> @masked_zload_nxv2i8(<vscale x 2 x i8>* %src, <vscale x 2 x i1> %mask) {
; CHECK-LABEL: masked_zload_nxv2i8:
; CHECK-NOT: ld1sb
; CHECK: ld1b { [[IN:z[0-9]+]].d }, [[PG:p[0-9]+]]/z, [x0]
; CHECK-NEXT: ret
  %load = call <vscale x 2 x i8> @llvm.masked.load.nxv2i8(<vscale x 2 x i8>* %src, i32 1, <vscale x 2 x i1> %mask, <vscale x 2 x i8> undef)
  %ext = zext <vscale x 2 x i8> %load to <vscale x 2 x i64>
  ret <vscale x 2 x i64> %ext
}

define <vscale x 2 x i64> @masked_zload_nxv2i16(<vscale x 2 x i16>* %src, <vscale x 2 x i1> %mask) {
; CHECK-LABEL: masked_zload_nxv2i16:
; CHECK-NOT: ld1sh
; CHECK: ld1h { [[IN:z[0-9]+]].d }, [[PG:p[0-9]+]]/z, [x0]
; CHECK-NEXT: ret
  %load = call <vscale x 2 x i16> @llvm.masked.load.nxv2i16(<vscale x 2 x i16>* %src, i32 1, <vscale x 2 x i1> %mask, <vscale x 2 x i16> undef)
  %ext = zext <vscale x 2 x i16> %load to <vscale x 2 x i64>
  ret <vscale x 2 x i64> %ext
}

define <vscale x 2 x i64> @masked_zload_nxv2i32(<vscale x 2 x i32>* %src, <vscale x 2 x i1> %mask) {
; CHECK-LABEL: masked_zload_nxv2i32:
; CHECK-NOT: ld1sw
; CHECK: ld1w { [[IN:z[0-9]+]].d }, [[PG:p[0-9]+]]/z, [x0]
; CHECK-NEXT: ret
  %load = call <vscale x 2 x i32> @llvm.masked.load.nxv2i32(<vscale x 2 x i32>* %src, i32 1, <vscale x 2 x i1> %mask, <vscale x 2 x i32> undef)
  %ext = zext <vscale x 2 x i32> %load to <vscale x 2 x i64>
  ret <vscale x 2 x i64> %ext
}

define <vscale x 4 x i32> @masked_zload_nxv4i8(<vscale x 4 x i8>* %src, <vscale x 4 x i1> %mask) {
; CHECK-LABEL: masked_zload_nxv4i8:
; CHECK-NOT: ld1sb
; CHECK: ld1b { [[IN:z[0-9]+]].s }, [[PG:p[0-9]+]]/z, [x0]
; CHECK-NEXT: ret
  %load = call <vscale x 4 x i8> @llvm.masked.load.nxv4i8(<vscale x 4 x i8>* %src, i32 1, <vscale x 4 x i1> %mask, <vscale x 4 x i8> undef)
  %ext = zext <vscale x 4 x i8> %load to <vscale x 4 x i32>
  ret <vscale x 4 x i32> %ext
}

define <vscale x 4 x i32> @masked_zload_nxv4i16(<vscale x 4 x i16>* %src, <vscale x 4 x i1> %mask) {
; CHECK-LABEL: masked_zload_nxv4i16:
; CHECK-NOT: ld1sh
; CHECK: ld1h { [[IN:z[0-9]+]].s }, [[PG:p[0-9]+]]/z, [x0]
; CHECK-NEXT: ret
  %load = call <vscale x 4 x i16> @llvm.masked.load.nxv4i16(<vscale x 4 x i16>* %src, i32 1, <vscale x 4 x i1> %mask, <vscale x 4 x i16> undef)
  %ext = zext <vscale x 4 x i16> %load to <vscale x 4 x i32>
  ret <vscale x 4 x i32> %ext
}

define <vscale x 8 x i16> @masked_zload_nxv8i8(<vscale x 8 x i8>* %src, <vscale x 8 x i1> %mask) {
; CHECK-LABEL: masked_zload_nxv8i8:
; CHECK-NOT: ld1sb
; CHECK: ld1b { [[IN:z[0-9]+]].h }, [[PG:p[0-9]+]]/z, [x0]
; CHECK-NEXT: ret
  %load = call <vscale x 8 x i8> @llvm.masked.load.nxv8i8(<vscale x 8 x i8>* %src, i32 1, <vscale x 8 x i1> %mask, <vscale x 8 x i8> undef)
  %ext = zext <vscale x 8 x i8> %load to <vscale x 8 x i16>
  ret <vscale x 8 x i16> %ext
}

declare <vscale x 2 x i8> @llvm.masked.load.nxv2i8(<vscale x 2 x i8>*, i32, <vscale x 2 x i1>, <vscale x 2 x i8>)
declare <vscale x 2 x i16> @llvm.masked.load.nxv2i16(<vscale x 2 x i16>*, i32, <vscale x 2 x i1>, <vscale x 2 x i16>)
declare <vscale x 2 x i32> @llvm.masked.load.nxv2i32(<vscale x 2 x i32>*, i32, <vscale x 2 x i1>, <vscale x 2 x i32>)
declare <vscale x 4 x i8> @llvm.masked.load.nxv4i8(<vscale x 4 x i8>*, i32, <vscale x 4 x i1>, <vscale x 4 x i8>)
declare <vscale x 4 x i16> @llvm.masked.load.nxv4i16(<vscale x 4 x i16>*, i32, <vscale x 4 x i1>, <vscale x 4 x i16>)
declare <vscale x 8 x i8> @llvm.masked.load.nxv8i8(<vscale x 8 x i8>*, i32, <vscale x 8 x i1>, <vscale x 8 x i8>)