; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -instcombine -S < %s | FileCheck %s

target datalayout = "e-m:e-p:64:64:64-i64:64-f80:128-n8:16:32:64-S128"

define i32 @test_load_cast_combine_tbaa(float* %ptr) {
; Ensure (cast (load (...))) -> (load (cast (...))) preserves TBAA.
; CHECK-LABEL: @test_load_cast_combine_tbaa(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = bitcast float* [[PTR:%.*]] to i32*
; CHECK-NEXT:    [[L1:%.*]] = load i32, i32* [[TMP0]], align 4, [[TBAA0:!tbaa !.*]]
; CHECK-NEXT:    ret i32 [[L1]]
;
entry:
  %l = load float, float* %ptr, !tbaa !0
  %c = bitcast float %l to i32
  ret i32 %c
}

define i32 @test_load_cast_combine_noalias(float* %ptr) {
; Ensure (cast (load (...))) -> (load (cast (...))) preserves no-alias metadata.
; CHECK-LABEL: @test_load_cast_combine_noalias(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = bitcast float* [[PTR:%.*]] to i32*
; CHECK-NEXT:    [[L1:%.*]] = load i32, i32* [[TMP0]], align 4, !alias.scope !3, !noalias !4
; CHECK-NEXT:    ret i32 [[L1]]
;
entry:
  %l = load float, float* %ptr, !alias.scope !3, !noalias !4
  %c = bitcast float %l to i32
  ret i32 %c
}

define float @test_load_cast_combine_range(i32* %ptr) {
; Ensure (cast (load (...))) -> (load (cast (...))) drops range metadata. It
; would be nice to preserve or update it somehow but this is hard when moving
; between types.
; CHECK-LABEL: @test_load_cast_combine_range(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = bitcast i32* [[PTR:%.*]] to float*
; CHECK-NEXT:    [[L1:%.*]] = load float, float* [[TMP0]], align 4
; CHECK-NEXT:    ret float [[L1]]
;
entry:
  %l = load i32, i32* %ptr, !range !5
  %c = bitcast i32 %l to float
  ret float %c
}

define i32 @test_load_cast_combine_invariant(float* %ptr) {
; Ensure (cast (load (...))) -> (load (cast (...))) preserves invariant metadata.
; CHECK-LABEL: @test_load_cast_combine_invariant(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = bitcast float* [[PTR:%.*]] to i32*
; CHECK-NEXT:    [[L1:%.*]] = load i32, i32* [[TMP0]], align 4, !invariant.load !7
; CHECK-NEXT:    ret i32 [[L1]]
;
entry:
  %l = load float, float* %ptr, !invariant.load !6
  %c = bitcast float %l to i32
  ret i32 %c
}

define i32 @test_load_cast_combine_nontemporal(float* %ptr) {
; Ensure (cast (load (...))) -> (load (cast (...))) preserves nontemporal
; metadata.
; CHECK-LABEL: @test_load_cast_combine_nontemporal(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = bitcast float* [[PTR:%.*]] to i32*
; CHECK-NEXT:    [[L1:%.*]] = load i32, i32* [[TMP0]], align 4, !nontemporal !8
; CHECK-NEXT:    ret i32 [[L1]]
;
entry:
  %l = load float, float* %ptr, !nontemporal !7
  %c = bitcast float %l to i32
  ret i32 %c
}

define i8* @test_load_cast_combine_align(i32** %ptr) {
; Ensure (cast (load (...))) -> (load (cast (...))) preserves align
; metadata.
; CHECK-LABEL: @test_load_cast_combine_align(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = bitcast i32** [[PTR:%.*]] to i8**
; CHECK-NEXT:    [[L1:%.*]] = load i8*, i8** [[TMP0]], align 8, !align !9
; CHECK-NEXT:    ret i8* [[L1]]
;
entry:
  %l = load i32*, i32** %ptr, !align !8
  %c = bitcast i32* %l to i8*
  ret i8* %c
}

define i8* @test_load_cast_combine_deref(i32** %ptr) {
; Ensure (cast (load (...))) -> (load (cast (...))) preserves dereferenceable
; metadata.
; CHECK-LABEL: @test_load_cast_combine_deref(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = bitcast i32** [[PTR:%.*]] to i8**
; CHECK-NEXT:    [[L1:%.*]] = load i8*, i8** [[TMP0]], align 8, !dereferenceable !9
; CHECK-NEXT:    ret i8* [[L1]]
;
entry:
  %l = load i32*, i32** %ptr, !dereferenceable !8
  %c = bitcast i32* %l to i8*
  ret i8* %c
}

define i8* @test_load_cast_combine_deref_or_null(i32** %ptr) {
; Ensure (cast (load (...))) -> (load (cast (...))) preserves
; dereferenceable_or_null metadata.
; CHECK-LABEL: @test_load_cast_combine_deref_or_null(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[TMP0:%.*]] = bitcast i32** [[PTR:%.*]] to i8**
; CHECK-NEXT:    [[L1:%.*]] = load i8*, i8** [[TMP0]], align 8, !dereferenceable_or_null !9
; CHECK-NEXT:    ret i8* [[L1]]
;
entry:
  %l = load i32*, i32** %ptr, !dereferenceable_or_null !8
  %c = bitcast i32* %l to i8*
  ret i8* %c
}

define void @test_load_cast_combine_loop(float* %src, i32* %dst, i32 %n) {
; Ensure (cast (load (...))) -> (load (cast (...))) preserves loop access
; metadata.
; CHECK-LABEL: @test_load_cast_combine_loop(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    br label [[LOOP:%.*]]
; CHECK:       loop:
; CHECK-NEXT:    [[I:%.*]] = phi i32 [ 0, [[ENTRY:%.*]] ], [ [[I_NEXT:%.*]], [[LOOP]] ]
; CHECK-NEXT:    [[TMP0:%.*]] = sext i32 [[I]] to i64
; CHECK-NEXT:    [[SRC_GEP:%.*]] = getelementptr inbounds float, float* [[SRC:%.*]], i64 [[TMP0]]
; CHECK-NEXT:    [[TMP1:%.*]] = sext i32 [[I]] to i64
; CHECK-NEXT:    [[DST_GEP:%.*]] = getelementptr inbounds i32, i32* [[DST:%.*]], i64 [[TMP1]]
; CHECK-NEXT:    [[TMP2:%.*]] = bitcast float* [[SRC_GEP]] to i32*
; CHECK-NEXT:    [[L1:%.*]] = load i32, i32* [[TMP2]], align 4, !llvm.access.group !6
; CHECK-NEXT:    store i32 [[L1]], i32* [[DST_GEP]], align 4
; CHECK-NEXT:    [[I_NEXT]] = add i32 [[I]], 1
; CHECK-NEXT:    [[CMP:%.*]] = icmp slt i32 [[I_NEXT]], [[N:%.*]]
; CHECK-NEXT:    br i1 [[CMP]], label [[LOOP]], label [[EXIT:%.*]], [[LOOP1:!llvm.loop !.*]]
; CHECK:       exit:
; CHECK-NEXT:    ret void
;
entry:
  br label %loop

loop:
  %i = phi i32 [ 0, %entry ], [ %i.next, %loop ]
  %src.gep = getelementptr inbounds float, float* %src, i32 %i
  %dst.gep = getelementptr inbounds i32, i32* %dst, i32 %i
  %l = load float, float* %src.gep, !llvm.access.group !9
  %c = bitcast float %l to i32
  store i32 %c, i32* %dst.gep
  %i.next = add i32 %i, 1
  %cmp = icmp slt i32 %i.next, %n
  br i1 %cmp, label %loop, label %exit, !llvm.loop !1

exit:
  ret void
}

define void @test_load_cast_combine_nonnull(float** %ptr) {
; Ensure the !nonnull metadata on the load survives instcombine (the CHECK
; lines show the load kept, renumbered to !7, while the gep index is
; canonicalized to i64).
; CHECK-LABEL: @test_load_cast_combine_nonnull(
; CHECK-NEXT:  entry:
; CHECK-NEXT:    [[P:%.*]] = load float*, float** [[PTR:%.*]], align 8, !nonnull !7
; CHECK-NEXT:    [[GEP:%.*]] = getelementptr float*, float** [[PTR]], i64 42
; CHECK-NEXT:    store float* [[P]], float** [[GEP]], align 8
; CHECK-NEXT:    ret void
;
entry:
  %p = load float*, float** %ptr, !nonnull !6
  %gep = getelementptr float*, float** %ptr, i32 42
  store float* %p, float** %gep
  ret void
}

; Metadata nodes referenced by the tests above: TBAA (!0-!2), alias scopes
; (!3-!4), !range (!5), plus the small payloads used by the invariant.load /
; nonnull (!6), nontemporal (!7), align / dereferenceable (!8), and
; llvm.access.group (!9) checks.
!0 = !{!1, !1, i64 0}
!1 = !{!"scalar type", !2}
!2 = !{!"root"}
!3 = distinct !{!3, !4}
!4 = distinct !{!4, !{!"llvm.loop.parallel_accesses", !9}}
!5 = !{i32 0, i32 42}
!6 = !{}
!7 = !{i32 1}
!8 = !{i64 8}
!9 = distinct !{}
