; RUN: opt %loadPolly -analyze -polly-ast -polly-process-unprofitable -polly-allow-nonaffine < %s | FileCheck %s
;
; @test1
; Make sure we generate the correct aliasing check for a fixed-size memset operation.
; CHECK: if (1 && (&MemRef_tmp0[15] <= &MemRef_tmp1[0] || &MemRef_tmp1[32] <= &MemRef_tmp0[14]))
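; (The load of needToFreeIdxStr reads the i32 at byte offset 56 of %tmp0, i.e.
; element 14 in units of the 4-byte access size, and the memset covers bytes
; [0, 32) of %tmp1, which is consistent with the [14]/[15] and [32] bounds above.)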
;
; @test2
; Make sure we generate the correct aliasing check for a variable-size memset operation.
; CHECK: if (1 && (&MemRef_tmp0[15] <= &MemRef_tmp1[0] || &MemRef_tmp1[n] <= &MemRef_tmp0[14]))
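; (Identical to @test1 except that the memset length is the parameter %n, so the
; upper bound of the MemRef_tmp1 range becomes [n] instead of the constant [32].)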
;
; @test3
; We can't do anything interesting with a non-affine memset; just make sure it doesn't crash.
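; (The memset length there is %i.09 * %i.09, which is not affine in the loop
; induction variable, so the access can only be modeled conservatively.)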
;
target datalayout = "e-m:e-i64:64-f80:128-n8:16:32:64-S128"

%struct.info = type { i32, %struct.ctr*, i32, %struct.ord*, %struct.ctr*, i32, i8*, i32, i32, double }
%struct.ctr = type { i32, i8, i8, i32 }
%struct.ord = type { i32, i8 }

; Function Attrs: argmemonly nounwind
declare void @llvm.memset.p0i8.i64(i8* nocapture, i8, i64, i32, i1) #0

define void @test1(%struct.info** %ppIdxInfo) {
entry:
  %tmp0 = load %struct.info*, %struct.info** %ppIdxInfo, align 8
  br label %if.end125

if.end125:                                        ; preds = %entry
  %tmp1 = load %struct.ctr*, %struct.ctr** undef, align 8
  br label %for.end143

for.end143:                                       ; preds = %if.end125
  %tmp2 = bitcast %struct.ctr* %tmp1 to i8*
  tail call void @llvm.memset.p0i8.i64(i8* %tmp2, i8 0, i64 32, i32 4, i1 false)
  %needToFreeIdxStr = getelementptr inbounds %struct.info, %struct.info* %tmp0, i64 0, i32 7
  %tmp3 = load i32, i32* %needToFreeIdxStr, align 8
  br i1 false, label %if.end149, label %if.then148

if.then148:                                       ; preds = %for.end143
  br label %if.end149

if.end149:                                        ; preds = %if.then148, %for.end143
  ret void
}

define void @test2(%struct.info** %ppIdxInfo, i64 %n) {
entry:
  %tmp0 = load %struct.info*, %struct.info** %ppIdxInfo, align 8
  br label %if.end125

if.end125:                                        ; preds = %entry
  %tmp1 = load %struct.ctr*, %struct.ctr** undef, align 8
  br label %for.end143

for.end143:                                       ; preds = %if.end125
  %tmp2 = bitcast %struct.ctr* %tmp1 to i8*
  tail call void @llvm.memset.p0i8.i64(i8* %tmp2, i8 0, i64 %n, i32 4, i1 false)
  %needToFreeIdxStr = getelementptr inbounds %struct.info, %struct.info* %tmp0, i64 0, i32 7
  %tmp3 = load i32, i32* %needToFreeIdxStr, align 8
  br i1 false, label %if.end149, label %if.then148

if.then148:                                       ; preds = %for.end143
  br label %if.end149

if.end149:                                        ; preds = %if.then148, %for.end143
  ret void
}

define i32 @test3(i32* %x, i32 %n) {
entry:
  br label %entry.split

entry.split:                                      ; preds = %entry
  %conv = sext i32 %n to i64
  %cmp8 = icmp sgt i32 %n, 0
  br i1 %cmp8, label %for.body.lr.ph, label %for.cond.cleanup

for.body.lr.ph:                                   ; preds = %entry.split
  %tmp0 = bitcast i32* %x to i8*
  br label %for.body

for.cond.cleanup:                                 ; preds = %for.body, %entry.split
  ret i32 0

for.body:                                         ; preds = %for.body, %for.body.lr.ph
  %i.09 = phi i64 [ 0, %for.body.lr.ph ], [ %inc, %for.body ]
  %mul = mul nsw i64 %i.09, %i.09
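  ; The memset length below is %i.09 * %i.09, a non-affine expression in the
  ; induction variable.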
  tail call void @llvm.memset.p0i8.i64(i8* %tmp0, i8 0, i64 %mul, i32 4, i1 false)
  %add = add nuw nsw i64 %i.09, 1000
  %arrayidx = getelementptr inbounds i32, i32* %x, i64 %add
  store i32 5, i32* %arrayidx, align 4
  %inc = add nuw nsw i64 %i.09, 1
  %exitcond = icmp eq i64 %inc, %conv
  br i1 %exitcond, label %for.cond.cleanup, label %for.body
}

attributes #0 = { argmemonly nounwind }