// Copyright 2016, VIXL authors
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright
//     notice, this list of conditions and the following disclaimer in the
//     documentation and/or other materials provided with the distribution.
//   * Neither the name of ARM Limited nor the names of its contributors may
//     be used to endorse or promote products derived from this software
//     without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.

#include <stdlib.h>

#include <algorithm>
#include <cstdint>
#include <cstdlib>

#include "test-runner.h"

#ifdef VIXL_INCLUDE_TARGET_AARCH32
#include "aarch32/macro-assembler-aarch32.h"
#endif

#ifdef VIXL_INCLUDE_TARGET_AARCH64
#include "aarch64/macro-assembler-aarch64.h"
#endif

#define STRINGIFY(x) #x

#define TEST_AARCH32(Name)                                                  \
  namespace aarch32 {                                                       \
  void Test_##Name##_AArch32_Impl();                                        \
  }                                                                         \
  void Test_##Name##_AArch32() { aarch32::Test_##Name##_AArch32_Impl(); }   \
  Test test_##Name##_AArch32(STRINGIFY(AARCH32_SCRATCH_##Name),             \
                             &Test_##Name##_AArch32);                       \
  void aarch32::Test_##Name##_AArch32_Impl()

#define TEST_AARCH64(Name)                                                  \
  namespace aarch64 {                                                       \
  void Test_##Name##_AArch64_Impl();                                        \
  }                                                                         \
  void Test_##Name##_AArch64() { aarch64::Test_##Name##_AArch64_Impl(); }   \
  Test test_##Name##_AArch64(STRINGIFY(AARCH64_SCRATCH_##Name),             \
                             &Test_##Name##_AArch64);                       \
  void aarch64::Test_##Name##_AArch64_Impl()
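
// Each TEST_* macro above declares the test body as a function in the
// appropriate target namespace and registers it with the test runner;
// TEST_AARCH32(Name), for example, is registered under the name
// "AARCH32_SCRATCH_Name".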

#define SETUP() MacroAssembler masm
#define TEARDOWN()

#define __ masm.

namespace vixl {

// UseScratchRegisterScopes must be able to nest perfectly. That is, they may
// nest, but nested scopes must not outlive less-nested scopes.
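//
// A hypothetical sketch of the invariant, assuming the usual stack-allocated
// usage: scopes must be destroyed in strict LIFO order, like automatic
// variables.
//
//   {
//     UseScratchRegisterScope outer(&masm);
//     {
//       UseScratchRegisterScope inner(&masm);
//     }  // OK: `inner` is destroyed before `outer`.
//   }
//
// By contrast, destroying `outer` while `inner` was still open would violate
// the nesting requirement.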
template <typename MacroAssembler, typename UseScratchRegisterScope>
class PerfectNestingTestHelper {
 public:
  explicit PerfectNestingTestHelper(MacroAssembler* masm) : masm_(masm) {
    uint16_t seed[3] = {4, 5, 6};
    seed48(seed);
  }
  void Run() {
    UseScratchRegisterScope* top_scope =
        masm_->GetCurrentScratchRegisterScope();
    int descendants = 0;
    while (descendants < kMinimumDescendantScopeCount) descendants += Run(0);
    VIXL_CHECK(masm_->GetCurrentScratchRegisterScope() == top_scope);
  }

 private:
  int Run(int depth) {
    // As the depth increases, the probability of recursion decreases.
    // At depth = kDepthLimit, we never recurse.
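    // For example, with kDepthLimit = 12, a scope at depth 0 may open up to
    // eleven nested child scopes, whilst a scope at depth 11 or greater opens
    // none, so the recursion always terminates.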
    int max_children = static_cast<int>(std::abs(mrand48()) % kDepthLimit);
    int children = std::max(0, max_children - depth);
    int descendants = children;
    while (children-- > 0) {
      UseScratchRegisterScope scope(masm_);
      VIXL_CHECK(masm_->GetCurrentScratchRegisterScope() == &scope);
      descendants += Run(depth + 1);
      VIXL_CHECK(masm_->GetCurrentScratchRegisterScope() == &scope);
    }
    return descendants;
  }

  MacroAssembler* masm_;
  static const int kDepthLimit = 12;
  static const int kMinimumDescendantScopeCount = 10000;
};

#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_AARCH32(perfect_nesting) {
  SETUP();
  PerfectNestingTestHelper<MacroAssembler, UseScratchRegisterScope>(&masm)
      .Run();
  TEARDOWN();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32

#ifdef VIXL_INCLUDE_TARGET_AARCH64
TEST_AARCH64(perfect_nesting) {
  SETUP();
  PerfectNestingTestHelper<MacroAssembler, UseScratchRegisterScope>(&masm)
      .Run();
  TEARDOWN();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH64


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_AARCH32(v_registers) {
  SETUP();
  {
    UseScratchRegisterScope temps(&masm);
    temps.Include(VRegisterList(q0, q1, q2, q3));

    // This test assumes that low-numbered registers are allocated first. The
    // implementation is allowed to use a different strategy; if it does, the
    // test will need to be updated.
    // TODO: Write more flexible (and thorough) tests.
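    //
    // Reminder: on AArch32, q<n> aliases {d<2n>, d<2n+1>}, and d<m> (for
    // m < 16) aliases {s<2m>, s<2m+1>}, so acquiring q0 also consumes d0, d1
    // and s0-s3. The checks below rely on this aliasing.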

    VIXL_CHECK(q0.Is(temps.AcquireQ()));
    VIXL_CHECK(!temps.IsAvailable(q0));
    VIXL_CHECK(!temps.IsAvailable(d0));
    VIXL_CHECK(!temps.IsAvailable(d1));
    VIXL_CHECK(!temps.IsAvailable(s0));
    VIXL_CHECK(!temps.IsAvailable(s1));
    VIXL_CHECK(!temps.IsAvailable(s2));
    VIXL_CHECK(!temps.IsAvailable(s3));

    VIXL_CHECK(d2.Is(temps.AcquireV(64)));
    VIXL_CHECK(!temps.IsAvailable(q1));
    VIXL_CHECK(!temps.IsAvailable(d2));
    VIXL_CHECK(temps.IsAvailable(d3));
    VIXL_CHECK(!temps.IsAvailable(s4));
    VIXL_CHECK(!temps.IsAvailable(s5));
    VIXL_CHECK(temps.IsAvailable(s6));
    VIXL_CHECK(temps.IsAvailable(s7));

    VIXL_CHECK(s6.Is(temps.AcquireS()));
    VIXL_CHECK(!temps.IsAvailable(d3));
    VIXL_CHECK(!temps.IsAvailable(s6));
    VIXL_CHECK(temps.IsAvailable(s7));

    VIXL_CHECK(q2.Is(temps.AcquireV(128)));
    VIXL_CHECK(!temps.IsAvailable(q2));
    VIXL_CHECK(!temps.IsAvailable(d4));
    VIXL_CHECK(!temps.IsAvailable(d5));
    VIXL_CHECK(!temps.IsAvailable(s8));
    VIXL_CHECK(!temps.IsAvailable(s9));
    VIXL_CHECK(!temps.IsAvailable(s10));
    VIXL_CHECK(!temps.IsAvailable(s11));
    VIXL_CHECK(temps.IsAvailable(s7));

    VIXL_CHECK(d6.Is(temps.AcquireD()));
    VIXL_CHECK(!temps.IsAvailable(q3));
    VIXL_CHECK(!temps.IsAvailable(d6));
    VIXL_CHECK(temps.IsAvailable(d7));
    VIXL_CHECK(!temps.IsAvailable(s12));
    VIXL_CHECK(!temps.IsAvailable(s13));
    VIXL_CHECK(temps.IsAvailable(s14));
    VIXL_CHECK(temps.IsAvailable(s15));
    VIXL_CHECK(temps.IsAvailable(s7));

    VIXL_CHECK(s7.Is(temps.AcquireS()));
  }
  TEARDOWN();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32


#ifdef VIXL_INCLUDE_TARGET_AARCH32
TEST_AARCH32(include_exclude) {
  SETUP();
  {
    UseScratchRegisterScope temps(&masm);
    temps.Include(r0, r1, r2, r3);
    temps.Include(s0, s1, d1, q1);
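
    // Including a register also makes its aliased views available; including
    // q1, for example, makes d2, d3 and s4-s7 available, as the checks below
    // verify.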

    VIXL_CHECK(temps.IsAvailable(r0));
    VIXL_CHECK(temps.IsAvailable(r1));
    VIXL_CHECK(temps.IsAvailable(r2));
    VIXL_CHECK(temps.IsAvailable(r3));

    VIXL_CHECK(temps.IsAvailable(s0));

    VIXL_CHECK(temps.IsAvailable(s1));

    VIXL_CHECK(temps.IsAvailable(d1));
    VIXL_CHECK(temps.IsAvailable(s2));
    VIXL_CHECK(temps.IsAvailable(s3));

    VIXL_CHECK(temps.IsAvailable(q1));
    VIXL_CHECK(temps.IsAvailable(d2));
    VIXL_CHECK(temps.IsAvailable(d3));
    VIXL_CHECK(temps.IsAvailable(s4));
    VIXL_CHECK(temps.IsAvailable(s5));
    VIXL_CHECK(temps.IsAvailable(s6));
    VIXL_CHECK(temps.IsAvailable(s7));

    // Test local exclusion.
    {
      UseScratchRegisterScope local_temps(&masm);
      local_temps.Exclude(r1, r2);
      local_temps.Exclude(s1, q1);

      VIXL_CHECK(temps.IsAvailable(r0));
      VIXL_CHECK(!temps.IsAvailable(r1));
      VIXL_CHECK(!temps.IsAvailable(r2));
      VIXL_CHECK(temps.IsAvailable(r3));

      VIXL_CHECK(temps.IsAvailable(s0));

      VIXL_CHECK(!temps.IsAvailable(s1));

      VIXL_CHECK(temps.IsAvailable(d1));
      VIXL_CHECK(temps.IsAvailable(s2));
      VIXL_CHECK(temps.IsAvailable(s3));

      VIXL_CHECK(!temps.IsAvailable(q1));
      VIXL_CHECK(!temps.IsAvailable(d2));
      VIXL_CHECK(!temps.IsAvailable(d3));
      VIXL_CHECK(!temps.IsAvailable(s4));
      VIXL_CHECK(!temps.IsAvailable(s5));
      VIXL_CHECK(!temps.IsAvailable(s6));
      VIXL_CHECK(!temps.IsAvailable(s7));
    }

    // This time, exclude only parts of the previously included registers, and
    // check that exclusion propagates through the aliasing: excluding a
    // register makes every containing register and every contained part
    // unavailable, whilst sibling parts remain available.
    {
      UseScratchRegisterScope local_temps(&masm);
      local_temps.Exclude(s2, d3);

      VIXL_CHECK(temps.IsAvailable(r0));
      VIXL_CHECK(temps.IsAvailable(r1));
      VIXL_CHECK(temps.IsAvailable(r2));
      VIXL_CHECK(temps.IsAvailable(r3));

      VIXL_CHECK(temps.IsAvailable(s0));

      VIXL_CHECK(temps.IsAvailable(s1));

      // Excluding s2 should exclude d1, but not s3.
      VIXL_CHECK(!temps.IsAvailable(d1));
      VIXL_CHECK(!temps.IsAvailable(s2));
      VIXL_CHECK(temps.IsAvailable(s3));

      // Excluding d3 should exclude q1, s6 and s7, but not d2, s4 or s5.
      VIXL_CHECK(!temps.IsAvailable(q1));
      VIXL_CHECK(temps.IsAvailable(d2));
      VIXL_CHECK(!temps.IsAvailable(d3));
      VIXL_CHECK(temps.IsAvailable(s4));
      VIXL_CHECK(temps.IsAvailable(s5));
      VIXL_CHECK(!temps.IsAvailable(s6));
      VIXL_CHECK(!temps.IsAvailable(s7));
    }

    // Make sure the initial state was restored.

    VIXL_CHECK(temps.IsAvailable(r0));
    VIXL_CHECK(temps.IsAvailable(r1));
    VIXL_CHECK(temps.IsAvailable(r2));
    VIXL_CHECK(temps.IsAvailable(r3));

    VIXL_CHECK(temps.IsAvailable(s0));

    VIXL_CHECK(temps.IsAvailable(s1));

    VIXL_CHECK(temps.IsAvailable(d1));
    VIXL_CHECK(temps.IsAvailable(s2));
    VIXL_CHECK(temps.IsAvailable(s3));

    VIXL_CHECK(temps.IsAvailable(q1));
    VIXL_CHECK(temps.IsAvailable(d2));
    VIXL_CHECK(temps.IsAvailable(d3));
    VIXL_CHECK(temps.IsAvailable(s4));
    VIXL_CHECK(temps.IsAvailable(s5));
    VIXL_CHECK(temps.IsAvailable(s6));
    VIXL_CHECK(temps.IsAvailable(s7));
  }
  TEARDOWN();
}
#endif  // VIXL_INCLUDE_TARGET_AARCH32

}  // namespace vixl