1 /*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include <dirent.h>
18 #include <errno.h>
19 #include <string.h>
20 #include <sys/types.h>
21
22 #include <fstream>
23 #include <map>
24 #include <regex>
25
26 #include "gtest/gtest.h"
27
28 #include "jni/quick/calling_convention.h"
29 #include "utils/arm/jni_macro_assembler_arm_vixl.h"
30 #include "utils/assembler_test_base.h"
31
32 #include "base/hex_dump.h"
33 #include "base/macros.h"
34 #include "base/malloc_arena_pool.h"
35 #include "common_runtime_test.h"
36
37 namespace art HIDDEN {
38 namespace arm {
39
40 // Include results file (generated manually)
41 #include "assembler_thumb_test_expected.cc.inc"
42
43 class ArmVIXLAssemblerTest : public AssemblerTestBase {
44 public:
ArmVIXLAssemblerTest()45 ArmVIXLAssemblerTest() : pool(), allocator(&pool), assembler(&allocator) { }
46
47 protected:
GetIsa()48 InstructionSet GetIsa() override { return InstructionSet::kThumb2; }
49
DumpAndCheck(std::vector<uint8_t> & code,const char * testname,const std::string & expected)50 void DumpAndCheck(std::vector<uint8_t>& code, const char* testname, const std::string& expected) {
51 #ifndef ART_TARGET_ANDROID
52 std::string obj_file = scratch_dir_->GetPath() + testname + ".o";
53 WriteElf</*IsElf64=*/false>(obj_file, InstructionSet::kThumb2, code);
54 std::string disassembly;
55 ASSERT_TRUE(Disassemble(obj_file, &disassembly));
56
57 // objdump on buildbot seems to sometimes add annotation like in "bne #226 <.text+0x1e8>".
58 // It is unclear why it does not reproduce locally. As work-around, remove the annotation.
59 std::regex annotation_re(" <\\.text\\+\\w+>");
60 disassembly = std::regex_replace(disassembly, annotation_re, "");
61
62 std::string expected2 = "\n" +
63 obj_file + ": file format elf32-littlearm\n\n"
64 "Disassembly of section .text:\n\n"
65 "00000000 <.text>:\n" +
66 expected;
67 EXPECT_EQ(expected2, disassembly);
68 if (expected2 != disassembly) {
69 std::string out = " \"" + Replace(disassembly, "\n", "\\n\"\n \"") + "\"";
70 printf("C++ formatted disassembler output for %s:\n%s\n", testname, out.c_str());
71 }
72 #endif // ART_TARGET_ANDROID
73 }
74
75 #define __ assembler.
76
EmitAndCheck(const char * testname,const char * expected)77 void EmitAndCheck(const char* testname, const char* expected) {
78 __ FinalizeCode();
79 size_t cs = __ CodeSize();
80 std::vector<uint8_t> managed_code(cs);
81 MemoryRegion code(&managed_code[0], managed_code.size());
82 __ CopyInstructions(code);
83
84 DumpAndCheck(managed_code, testname, expected);
85 }
86
87 #undef __
88
89 #define __ assembler.
90
91 MallocArenaPool pool;
92 ArenaAllocator allocator;
93 ArmVIXLJNIMacroAssembler assembler;
94 };
95
TEST_F(ArmVIXLAssemblerTest,VixlJniHelpers)96 TEST_F(ArmVIXLAssemblerTest, VixlJniHelpers) {
97 // Run the test only with Baker read barriers, as the expected
98 // generated code contains a Marking Register refresh instruction.
99 TEST_DISABLED_WITHOUT_BAKER_READ_BARRIERS();
100
101 const bool is_static = true;
102 const bool is_synchronized = false;
103 const bool is_fast_native = false;
104 const bool is_critical_native = false;
105 const char* shorty = "IIFII";
106
107 std::unique_ptr<JniCallingConvention> jni_conv(
108 JniCallingConvention::Create(&allocator,
109 is_static,
110 is_synchronized,
111 is_fast_native,
112 is_critical_native,
113 shorty,
114 InstructionSet::kThumb2));
115 std::unique_ptr<ManagedRuntimeCallingConvention> mr_conv(
116 ManagedRuntimeCallingConvention::Create(
117 &allocator, is_static, is_synchronized, shorty, InstructionSet::kThumb2));
118 const int frame_size(jni_conv->FrameSize());
119 ArrayRef<const ManagedRegister> callee_save_regs = jni_conv->CalleeSaveRegisters();
120
121 const ManagedRegister method_register = ArmManagedRegister::FromCoreRegister(R0);
122 const ManagedRegister hidden_arg_register = ArmManagedRegister::FromCoreRegister(R4);
123 const ManagedRegister scratch_register = ArmManagedRegister::FromCoreRegister(R12);
124
125 __ BuildFrame(frame_size, mr_conv->MethodRegister(), callee_save_regs);
126
127 // Spill arguments.
128 mr_conv->ResetIterator(FrameOffset(frame_size));
129 for (; mr_conv->HasNext(); mr_conv->Next()) {
130 if (mr_conv->IsCurrentParamInRegister()) {
131 size_t size = mr_conv->IsCurrentParamALongOrDouble() ? 8u : 4u;
132 __ Store(mr_conv->CurrentParamStackOffset(), mr_conv->CurrentParamRegister(), size);
133 }
134 }
135 __ IncreaseFrameSize(32);
136
137 // Loads
138 __ IncreaseFrameSize(4096);
139 __ Load(method_register, FrameOffset(32), 4);
140 __ Load(method_register, FrameOffset(124), 4);
141 __ Load(method_register, FrameOffset(132), 4);
142 __ Load(method_register, FrameOffset(1020), 4);
143 __ Load(method_register, FrameOffset(1024), 4);
144 __ Load(scratch_register, FrameOffset(4092), 4);
145 __ Load(scratch_register, FrameOffset(4096), 4);
146 __ LoadRawPtrFromThread(scratch_register, ThreadOffset32(512));
147
148 // Stores
149 __ Store(FrameOffset(32), method_register, 4);
150 __ Store(FrameOffset(124), method_register, 4);
151 __ Store(FrameOffset(132), method_register, 4);
152 __ Store(FrameOffset(1020), method_register, 4);
153 __ Store(FrameOffset(1024), method_register, 4);
154 __ Store(FrameOffset(4092), scratch_register, 4);
155 __ Store(FrameOffset(4096), scratch_register, 4);
156 __ StoreRawPtr(FrameOffset(48), scratch_register);
157 __ StoreStackPointerToThread(ThreadOffset32(512), false);
158 __ StoreStackPointerToThread(ThreadOffset32(512), true);
159
160 // MoveArguments
161 static constexpr FrameOffset kInvalidReferenceOffset =
162 JNIMacroAssembler<kArmPointerSize>::kInvalidReferenceOffset;
163 static constexpr size_t kNativePointerSize = static_cast<size_t>(kArmPointerSize);
164 // Normal or @FastNative with parameters (Object, long, long, int, Object).
165 // Note: This shall not spill the reference R1 to [sp, #36]. The JNI compiler spills
166 // references in an separate initial pass before moving arguments and creating `jobject`s.
167 ArgumentLocation move_dests1[] = {
168 ArgumentLocation(ArmManagedRegister::FromCoreRegister(R2), kNativePointerSize),
169 ArgumentLocation(FrameOffset(0), 2 * kVRegSize),
170 ArgumentLocation(FrameOffset(8), 2 * kVRegSize),
171 ArgumentLocation(FrameOffset(16), kVRegSize),
172 ArgumentLocation(FrameOffset(20), kNativePointerSize),
173 };
174 ArgumentLocation move_srcs1[] = {
175 ArgumentLocation(ArmManagedRegister::FromCoreRegister(R1), kVRegSize),
176 ArgumentLocation(ArmManagedRegister::FromRegisterPair(R2_R3), 2 * kVRegSize),
177 ArgumentLocation(FrameOffset(48), 2 * kVRegSize),
178 ArgumentLocation(FrameOffset(56), kVRegSize),
179 ArgumentLocation(FrameOffset(60), kVRegSize),
180 };
181 FrameOffset move_refs1[] {
182 FrameOffset(36),
183 FrameOffset(kInvalidReferenceOffset),
184 FrameOffset(kInvalidReferenceOffset),
185 FrameOffset(kInvalidReferenceOffset),
186 FrameOffset(60),
187 };
188 __ MoveArguments(ArrayRef<ArgumentLocation>(move_dests1),
189 ArrayRef<ArgumentLocation>(move_srcs1),
190 ArrayRef<FrameOffset>(move_refs1));
191 // @CriticalNative with parameters (long, long, long, int).
192 ArgumentLocation move_dests2[] = {
193 ArgumentLocation(ArmManagedRegister::FromRegisterPair(R0_R1), 2 * kVRegSize),
194 ArgumentLocation(ArmManagedRegister::FromRegisterPair(R2_R3), 2 * kVRegSize),
195 ArgumentLocation(FrameOffset(0), 2 * kVRegSize),
196 ArgumentLocation(FrameOffset(8), kVRegSize),
197 };
198 ArgumentLocation move_srcs2[] = {
199 ArgumentLocation(ArmManagedRegister::FromRegisterPair(R2_R3), 2 * kVRegSize),
200 ArgumentLocation(FrameOffset(28), kVRegSize),
201 ArgumentLocation(FrameOffset(32), 2 * kVRegSize),
202 ArgumentLocation(FrameOffset(40), kVRegSize),
203 };
204 FrameOffset move_refs2[] {
205 FrameOffset(kInvalidReferenceOffset),
206 FrameOffset(kInvalidReferenceOffset),
207 FrameOffset(kInvalidReferenceOffset),
208 FrameOffset(kInvalidReferenceOffset),
209 };
210 __ MoveArguments(ArrayRef<ArgumentLocation>(move_dests2),
211 ArrayRef<ArgumentLocation>(move_srcs2),
212 ArrayRef<FrameOffset>(move_refs2));
213
214 // Other
215 __ Call(method_register, FrameOffset(48));
216 __ Copy(FrameOffset(48), FrameOffset(44), 4);
217 __ GetCurrentThread(method_register);
218 __ GetCurrentThread(FrameOffset(48));
219 __ Move(hidden_arg_register, method_register, 4);
220 __ VerifyObject(scratch_register, false);
221
222 // Note: `CreateJObject()` may need the scratch register IP. Test with another high register.
223 const ManagedRegister high_register = ArmManagedRegister::FromCoreRegister(R11);
224 __ CreateJObject(high_register, FrameOffset(48), high_register, true);
225 __ CreateJObject(high_register, FrameOffset(48), high_register, false);
226 __ CreateJObject(method_register, FrameOffset(48), high_register, true);
227 __ CreateJObject(method_register, FrameOffset(0), high_register, true);
228 __ CreateJObject(method_register, FrameOffset(1028), high_register, true);
229 __ CreateJObject(high_register, FrameOffset(1028), high_register, true);
230
231 std::unique_ptr<JNIMacroLabel> exception_slow_path = __ CreateLabel();
232 __ ExceptionPoll(exception_slow_path.get());
233
234 // Push the target out of range of branch emitted by ExceptionPoll.
235 for (int i = 0; i < 64; i++) {
236 __ Store(FrameOffset(2047), scratch_register, 4);
237 }
238
239 __ DecreaseFrameSize(4096);
240 __ DecreaseFrameSize(32);
241 __ RemoveFrame(frame_size, callee_save_regs, /* may_suspend= */ true);
242
243 __ Bind(exception_slow_path.get());
244 __ DeliverPendingException();
245
246 EmitAndCheck("VixlJniHelpers", VixlJniHelpersResults);
247 }
248
249 #undef __
250
251 // TODO: Avoid these macros.
252 #define R0 vixl::aarch32::r0
253 #define R2 vixl::aarch32::r2
254 #define R4 vixl::aarch32::r4
255 #define R12 vixl::aarch32::r12
256
257 #define __ assembler.asm_.
258
TEST_F(ArmVIXLAssemblerTest,VixlLoadFromOffset)259 TEST_F(ArmVIXLAssemblerTest, VixlLoadFromOffset) {
260 __ LoadFromOffset(kLoadWord, R2, R4, 12);
261 __ LoadFromOffset(kLoadWord, R2, R4, 0xfff);
262 __ LoadFromOffset(kLoadWord, R2, R4, 0x1000);
263 __ LoadFromOffset(kLoadWord, R2, R4, 0x1000a4);
264 __ LoadFromOffset(kLoadWord, R2, R4, 0x101000);
265 __ LoadFromOffset(kLoadWord, R4, R4, 0x101000);
266 __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 12);
267 __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0xfff);
268 __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0x1000);
269 __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0x1000a4);
270 __ LoadFromOffset(kLoadUnsignedHalfword, R2, R4, 0x101000);
271 __ LoadFromOffset(kLoadUnsignedHalfword, R4, R4, 0x101000);
272 __ LoadFromOffset(kLoadWordPair, R2, R4, 12);
273 __ LoadFromOffset(kLoadWordPair, R2, R4, 0x3fc);
274 __ LoadFromOffset(kLoadWordPair, R2, R4, 0x400);
275 __ LoadFromOffset(kLoadWordPair, R2, R4, 0x400a4);
276 __ LoadFromOffset(kLoadWordPair, R2, R4, 0x40400);
277 __ LoadFromOffset(kLoadWordPair, R4, R4, 0x40400);
278
279 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler());
280 temps.Exclude(R12);
281 __ LoadFromOffset(kLoadWord, R0, R12, 12); // 32-bit because of R12.
282 temps.Include(R12);
283 __ LoadFromOffset(kLoadWord, R2, R4, 0xa4 - 0x100000);
284
285 __ LoadFromOffset(kLoadSignedByte, R2, R4, 12);
286 __ LoadFromOffset(kLoadUnsignedByte, R2, R4, 12);
287 __ LoadFromOffset(kLoadSignedHalfword, R2, R4, 12);
288
289 EmitAndCheck("VixlLoadFromOffset", VixlLoadFromOffsetResults);
290 }
291
TEST_F(ArmVIXLAssemblerTest,VixlStoreToOffset)292 TEST_F(ArmVIXLAssemblerTest, VixlStoreToOffset) {
293 __ StoreToOffset(kStoreWord, R2, R4, 12);
294 __ StoreToOffset(kStoreWord, R2, R4, 0xfff);
295 __ StoreToOffset(kStoreWord, R2, R4, 0x1000);
296 __ StoreToOffset(kStoreWord, R2, R4, 0x1000a4);
297 __ StoreToOffset(kStoreWord, R2, R4, 0x101000);
298 __ StoreToOffset(kStoreWord, R4, R4, 0x101000);
299 __ StoreToOffset(kStoreHalfword, R2, R4, 12);
300 __ StoreToOffset(kStoreHalfword, R2, R4, 0xfff);
301 __ StoreToOffset(kStoreHalfword, R2, R4, 0x1000);
302 __ StoreToOffset(kStoreHalfword, R2, R4, 0x1000a4);
303 __ StoreToOffset(kStoreHalfword, R2, R4, 0x101000);
304 __ StoreToOffset(kStoreHalfword, R4, R4, 0x101000);
305 __ StoreToOffset(kStoreWordPair, R2, R4, 12);
306 __ StoreToOffset(kStoreWordPair, R2, R4, 0x3fc);
307 __ StoreToOffset(kStoreWordPair, R2, R4, 0x400);
308 __ StoreToOffset(kStoreWordPair, R2, R4, 0x400a4);
309 __ StoreToOffset(kStoreWordPair, R2, R4, 0x40400);
310 __ StoreToOffset(kStoreWordPair, R4, R4, 0x40400);
311
312 vixl::aarch32::UseScratchRegisterScope temps(assembler.asm_.GetVIXLAssembler());
313 temps.Exclude(R12);
314 __ StoreToOffset(kStoreWord, R0, R12, 12); // 32-bit because of R12.
315 temps.Include(R12);
316 __ StoreToOffset(kStoreWord, R2, R4, 0xa4 - 0x100000);
317
318 __ StoreToOffset(kStoreByte, R2, R4, 12);
319
320 EmitAndCheck("VixlStoreToOffset", VixlStoreToOffsetResults);
321 }
322
323 #undef __
324 } // namespace arm
325 } // namespace art
326