; NOTE: Assertions have been autogenerated by utils/update_test_checks.py
; RUN: opt -instcombine %s -S | FileCheck %s

@block = global [64 x [8192 x i8]] zeroinitializer, align 1

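; Constant GEPs with vector indices are folded so that every constant index is
; in range: an index that overflows an inner array dimension (e.g. 8193 into
; [8192 x i8]) wraps to 1 and carries +1 into the next outer dimension, as the
; folded constants in the CHECK lines below show.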
define <2 x i8*> @vectorindex1() {
; CHECK-LABEL: @vectorindex1(
; CHECK-NEXT:    ret <2 x i8*> getelementptr inbounds ([64 x [8192 x i8]], [64 x [8192 x i8]]* @block, <2 x i64> zeroinitializer, <2 x i64> <i64 1, i64 2>, <2 x i64> zeroinitializer)
;
  %1 = getelementptr inbounds [64 x [8192 x i8]], [64 x [8192 x i8]]* @block, i64 0, <2 x i64> <i64 0, i64 1>, i64 8192
  ret <2 x i8*> %1
}

define <2 x i8*> @vectorindex2() {
; CHECK-LABEL: @vectorindex2(
; CHECK-NEXT:    ret <2 x i8*> getelementptr inbounds ([64 x [8192 x i8]], [64 x [8192 x i8]]* @block, <2 x i64> zeroinitializer, <2 x i64> <i64 1, i64 2>, <2 x i64> <i64 8191, i64 1>)
;
  %1 = getelementptr inbounds [64 x [8192 x i8]], [64 x [8192 x i8]]* @block, i64 0, i64 1, <2 x i64> <i64 8191, i64 8193>
  ret <2 x i8*> %1
}

define <2 x i8*> @vectorindex3() {
; CHECK-LABEL: @vectorindex3(
; CHECK-NEXT:    ret <2 x i8*> getelementptr inbounds ([64 x [8192 x i8]], [64 x [8192 x i8]]* @block, <2 x i64> zeroinitializer, <2 x i64> <i64 0, i64 2>, <2 x i64> <i64 8191, i64 1>)
;
  %1 = getelementptr inbounds [64 x [8192 x i8]], [64 x [8192 x i8]]* @block, i64 0, <2 x i64> <i64 0, i64 1>, <2 x i64> <i64 8191, i64 8193>
  ret <2 x i8*> %1
}

; Negative test - datalayout's alloc size for the 2 types must match.
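; [7 x i32] has an alloc size of 28 bytes, while <7 x i32> is padded up to its
; natural power-of-two alignment (32 bytes with the default datalayout), so the
; bitcast cannot be folded into the GEP.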

define i32* @bitcast_vec_to_array_gep(<7 x i32>* %x, i64 %y, i64 %z) {
; CHECK-LABEL: @bitcast_vec_to_array_gep(
; CHECK-NEXT:    [[ARR_PTR:%.*]] = bitcast <7 x i32>* [[X:%.*]] to [7 x i32]*
; CHECK-NEXT:    [[GEP:%.*]] = getelementptr [7 x i32], [7 x i32]* [[ARR_PTR]], i64 [[Y:%.*]], i64 [[Z:%.*]]
; CHECK-NEXT:    ret i32* [[GEP]]
;
  %arr_ptr = bitcast <7 x i32>* %x to [7 x i32]*
  %gep = getelementptr [7 x i32], [7 x i32]* %arr_ptr, i64 %y, i64 %z
  ret i32* %gep
}

; Negative test - datalayout's alloc size for the 2 types must match.

define i32* @bitcast_array_to_vec_gep([3 x i32]* %x, i64 %y, i64 %z) {
; CHECK-LABEL: @bitcast_array_to_vec_gep(
; CHECK-NEXT:    [[VEC_PTR:%.*]] = bitcast [3 x i32]* [[X:%.*]] to <3 x i32>*
; CHECK-NEXT:    [[GEP:%.*]] = getelementptr inbounds <3 x i32>, <3 x i32>* [[VEC_PTR]], i64 [[Y:%.*]], i64 [[Z:%.*]]
; CHECK-NEXT:    ret i32* [[GEP]]
;
  %vec_ptr = bitcast [3 x i32]* %x to <3 x i32>*
  %gep = getelementptr inbounds <3 x i32>, <3 x i32>* %vec_ptr, i64 %y, i64 %z
  ret i32* %gep
}

; Sizes and types match - safe to remove bitcast.
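; Both [4 x i32] and <4 x i32> have a 16-byte alloc size, so the GEP can be
; rewritten in terms of the original vector type and the bitcast removed.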

define i32* @bitcast_vec_to_array_gep_matching_alloc_size(<4 x i32>* %x, i64 %y, i64 %z) {
; CHECK-LABEL: @bitcast_vec_to_array_gep_matching_alloc_size(
; CHECK-NEXT:    [[GEP:%.*]] = getelementptr <4 x i32>, <4 x i32>* [[X:%.*]], i64 [[Y:%.*]], i64 [[Z:%.*]]
; CHECK-NEXT:    ret i32* [[GEP]]
;
  %arr_ptr = bitcast <4 x i32>* %x to [4 x i32]*
  %gep = getelementptr [4 x i32], [4 x i32]* %arr_ptr, i64 %y, i64 %z
  ret i32* %gep
}

; Sizes and types match - safe to remove bitcast.

define i32* @bitcast_array_to_vec_gep_matching_alloc_size([4 x i32]* %x, i64 %y, i64 %z) {
; CHECK-LABEL: @bitcast_array_to_vec_gep_matching_alloc_size(
; CHECK-NEXT:    [[GEP:%.*]] = getelementptr inbounds [4 x i32], [4 x i32]* [[X:%.*]], i64 [[Y:%.*]], i64 [[Z:%.*]]
; CHECK-NEXT:    ret i32* [[GEP]]
;
  %vec_ptr = bitcast [4 x i32]* %x to <4 x i32>*
  %gep = getelementptr inbounds <4 x i32>, <4 x i32>* %vec_ptr, i64 %y, i64 %z
  ret i32* %gep
}

; Negative test - datalayout's alloc size for the 2 types must match.

define i32 addrspace(3)* @bitcast_vec_to_array_addrspace(<7 x i32>* %x, i64 %y, i64 %z) {
; CHECK-LABEL: @bitcast_vec_to_array_addrspace(
; CHECK-NEXT:    [[ARR_PTR:%.*]] = bitcast <7 x i32>* [[X:%.*]] to [7 x i32]*
; CHECK-NEXT:    [[ASC:%.*]] = addrspacecast [7 x i32]* [[ARR_PTR]] to [7 x i32] addrspace(3)*
; CHECK-NEXT:    [[GEP:%.*]] = getelementptr [7 x i32], [7 x i32] addrspace(3)* [[ASC]], i64 [[Y:%.*]], i64 [[Z:%.*]]
; CHECK-NEXT:    ret i32 addrspace(3)* [[GEP]]
;
  %arr_ptr = bitcast <7 x i32>* %x to [7 x i32]*
  %asc = addrspacecast [7 x i32]* %arr_ptr to [7 x i32] addrspace(3)*
  %gep = getelementptr [7 x i32], [7 x i32] addrspace(3)* %asc, i64 %y, i64 %z
  ret i32 addrspace(3)* %gep
}

; Negative test - datalayout's alloc size for the 2 types must match.

define i32 addrspace(3)* @inbounds_bitcast_vec_to_array_addrspace(<7 x i32>* %x, i64 %y, i64 %z) {
; CHECK-LABEL: @inbounds_bitcast_vec_to_array_addrspace(
; CHECK-NEXT:    [[ARR_PTR:%.*]] = bitcast <7 x i32>* [[X:%.*]] to [7 x i32]*
; CHECK-NEXT:    [[ASC:%.*]] = addrspacecast [7 x i32]* [[ARR_PTR]] to [7 x i32] addrspace(3)*
; CHECK-NEXT:    [[GEP:%.*]] = getelementptr inbounds [7 x i32], [7 x i32] addrspace(3)* [[ASC]], i64 [[Y:%.*]], i64 [[Z:%.*]]
; CHECK-NEXT:    ret i32 addrspace(3)* [[GEP]]
;
  %arr_ptr = bitcast <7 x i32>* %x to [7 x i32]*
  %asc = addrspacecast [7 x i32]* %arr_ptr to [7 x i32] addrspace(3)*
  %gep = getelementptr inbounds [7 x i32], [7 x i32] addrspace(3)* %asc, i64 %y, i64 %z
  ret i32 addrspace(3)* %gep
}

; Sizes and types match - safe to remove bitcast.

define i32 addrspace(3)* @bitcast_vec_to_array_addrspace_matching_alloc_size(<4 x i32>* %x, i64 %y, i64 %z) {
; CHECK-LABEL: @bitcast_vec_to_array_addrspace_matching_alloc_size(
; CHECK-NEXT:    [[GEP:%.*]] = getelementptr <4 x i32>, <4 x i32>* [[X:%.*]], i64 [[Y:%.*]], i64 [[Z:%.*]]
; CHECK-NEXT:    [[TMP1:%.*]] = addrspacecast i32* [[GEP]] to i32 addrspace(3)*
; CHECK-NEXT:    ret i32 addrspace(3)* [[TMP1]]
;
  %arr_ptr = bitcast <4 x i32>* %x to [4 x i32]*
  %asc = addrspacecast [4 x i32]* %arr_ptr to [4 x i32] addrspace(3)*
  %gep = getelementptr [4 x i32], [4 x i32] addrspace(3)* %asc, i64 %y, i64 %z
  ret i32 addrspace(3)* %gep
}

; Sizes and types match - safe to remove bitcast.

define i32 addrspace(3)* @inbounds_bitcast_vec_to_array_addrspace_matching_alloc_size(<4 x i32>* %x, i64 %y, i64 %z) {
; CHECK-LABEL: @inbounds_bitcast_vec_to_array_addrspace_matching_alloc_size(
; CHECK-NEXT:    [[GEP:%.*]] = getelementptr inbounds <4 x i32>, <4 x i32>* [[X:%.*]], i64 [[Y:%.*]], i64 [[Z:%.*]]
; CHECK-NEXT:    [[TMP1:%.*]] = addrspacecast i32* [[GEP]] to i32 addrspace(3)*
; CHECK-NEXT:    ret i32 addrspace(3)* [[TMP1]]
;
  %arr_ptr = bitcast <4 x i32>* %x to [4 x i32]*
  %asc = addrspacecast [4 x i32]* %arr_ptr to [4 x i32] addrspace(3)*
  %gep = getelementptr inbounds [4 x i32], [4 x i32] addrspace(3)* %asc, i64 %y, i64 %z
  ret i32 addrspace(3)* %gep
}

; Negative test - avoid doing bitcast on i8*, because '16' should be scaled by 'vscale'.
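; A nonzero outer index contributes vscale * 16 bytes, which is not a
; compile-time constant, so the bitcast and GEP must be left alone.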

define i8* @test_accumulate_constant_offset_vscale_nonzero(<vscale x 16 x i1> %pg, i8* %base) {
; CHECK-LABEL: @test_accumulate_constant_offset_vscale_nonzero
; CHECK-NEXT:   %bc = bitcast i8* %base to <vscale x 16 x i8>*
; CHECK-NEXT:   %gep = getelementptr <vscale x 16 x i8>, <vscale x 16 x i8>* %bc, i64 1, i64 4
; CHECK-NEXT:   ret i8* %gep
  %bc = bitcast i8* %base to <vscale x 16 x i8>*
  %gep = getelementptr <vscale x 16 x i8>, <vscale x 16 x i8>* %bc, i64 1, i64 4
  ret i8* %gep
}

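; With a zero outer index the vscale-sized term disappears and the offset is a
; constant 4 bytes, so this folds to a plain byte GEP on the original pointer.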
define i8* @test_accumulate_constant_offset_vscale_zero(<vscale x 16 x i1> %pg, i8* %base) {
; CHECK-LABEL: @test_accumulate_constant_offset_vscale_zero
; CHECK-NEXT:   %[[RES:.*]] = getelementptr i8, i8* %base, i64 4
; CHECK-NEXT:   ret i8* %[[RES]]
  %bc = bitcast i8* %base to <vscale x 16 x i8>*
  %gep = getelementptr <vscale x 16 x i8>, <vscale x 16 x i8>* %bc, i64 0, i64 4
  ret i8* %gep
}
