1 /*
2 * Copyright (C) 2016 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include "jni_macro_assembler_arm.h"
18
19 #include <algorithm>
20
21 #include "assembler_thumb2.h"
22 #include "base/arena_allocator.h"
23 #include "base/bit_utils.h"
24 #include "base/logging.h"
25 #include "entrypoints/quick/quick_entrypoints.h"
26 #include "offsets.h"
27 #include "thread.h"
28
29 namespace art {
30 namespace arm {
31
// Size in bytes of a stack slot / pointer under the ARM 32-bit ABI.
constexpr size_t kFramePointerSize = static_cast<size_t>(kArmPointerSize);
33
34 // Slowpath entered when Thread::Current()->_exception is non-null
class ArmExceptionSlowPath FINAL : public SlowPath {
 public:
  // |scratch| is the register that will hold the pending exception object when
  // the slow path is entered; |stack_adjust| is the extra stack adjustment
  // (beyond the managed frame) that must be undone before delivery.
  ArmExceptionSlowPath(ArmManagedRegister scratch, size_t stack_adjust)
      : scratch_(scratch), stack_adjust_(stack_adjust) {
  }
  // Emits the out-of-line exception-delivery code; see definition below.
  void Emit(Assembler *sp_asm) OVERRIDE;
 private:
  const ArmManagedRegister scratch_;  // Holds the exception object on entry.
  const size_t stack_adjust_;         // Stack bytes to unwind in the slow path.
};
45
ArmJNIMacroAssembler(ArenaAllocator * arena,InstructionSet isa)46 ArmJNIMacroAssembler::ArmJNIMacroAssembler(ArenaAllocator* arena, InstructionSet isa) {
47 switch (isa) {
48 case kArm:
49 case kThumb2:
50 asm_.reset(new (arena) Thumb2Assembler(arena));
51 break;
52
53 default:
54 LOG(FATAL) << isa;
55 UNREACHABLE();
56 }
57 }
58
// Out-of-line (empty) destructor; asm_ is released by its unique_ptr.
ArmJNIMacroAssembler::~ArmJNIMacroAssembler() {
}
61
// Returns the number of bytes emitted into the underlying assembler so far.
size_t ArmJNIMacroAssembler::CodeSize() const {
  return asm_->CodeSize();
}
65
// Exposes the underlying assembler's CFI (call-frame information) writer.
DebugFrameOpCodeWriterForAssembler& ArmJNIMacroAssembler::cfi() {
  return asm_->cfi();
}
69
// Finalizes the underlying assembler (resolves pending fixups/slow paths).
void ArmJNIMacroAssembler::FinalizeCode() {
  asm_->FinalizeCode();
}
73
// Copies the finalized instruction stream into |region|.
void ArmJNIMacroAssembler::FinalizeInstructions(const MemoryRegion& region) {
  asm_->FinalizeInstructions(region);
}
77
// Maps an ARM core register to its DWARF register number.
static dwarf::Reg DWARFReg(Register reg) {
  return dwarf::Reg::ArmCore(static_cast<int>(reg));
}
81
// Maps an ARM single-precision FP register to its DWARF register number.
static dwarf::Reg DWARFReg(SRegister reg) {
  return dwarf::Reg::ArmFp(static_cast<int>(reg));
}
85
86 #define __ asm_->
87
// Emits the JNI stub prologue: pushes LR plus the callee-save core and FP
// registers, records the corresponding CFI, grows the stack to |frame_size|,
// stores the method pointer (expected in R0) at SP, and spills the managed
// entry arguments into the caller's out-args area above the frame.
void ArmJNIMacroAssembler::BuildFrame(size_t frame_size,
                                      ManagedRegister method_reg,
                                      ArrayRef<const ManagedRegister> callee_save_regs,
                                      const ManagedRegisterEntrySpills& entry_spills) {
  CHECK_EQ(CodeSize(), 0U);  // Nothing emitted yet
  CHECK_ALIGNED(frame_size, kStackAlignment);
  CHECK_EQ(R0, method_reg.AsArm().AsCoreRegister());

  // Push callee saves and link register.
  RegList core_spill_mask = 1 << LR;
  uint32_t fp_spill_mask = 0;
  // Partition callee saves into core and FP masks.
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }
  __ PushList(core_spill_mask);
  cfi().AdjustCFAOffset(POPCOUNT(core_spill_mask) * kFramePointerSize);
  cfi().RelOffsetForMany(DWARFReg(Register(0)), 0, core_spill_mask, kFramePointerSize);
  if (fp_spill_mask != 0) {
    // vpush requires a contiguous range: start at the lowest set bit.
    __ vpushs(SRegister(CTZ(fp_spill_mask)), POPCOUNT(fp_spill_mask));
    cfi().AdjustCFAOffset(POPCOUNT(fp_spill_mask) * kFramePointerSize);
    cfi().RelOffsetForMany(DWARFReg(SRegister(0)), 0, fp_spill_mask, kFramePointerSize);
  }

  // Increase frame to required size.
  int pushed_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pushed_values * kFramePointerSize);  // Must at least have space for Method*.
  IncreaseFrameSize(frame_size - pushed_values * kFramePointerSize);  // handles CFI as well.

  // Write out Method*.
  __ StoreToOffset(kStoreWord, R0, SP, 0);

  // Write out entry spills.
  // Spills land just above this frame: start past the frame and the return
  // address slot.
  int32_t offset = frame_size + kFramePointerSize;
  for (size_t i = 0; i < entry_spills.size(); ++i) {
    ArmManagedRegister reg = entry_spills.at(i).AsArm();
    if (reg.IsNoRegister()) {
      // only increment stack offset.
      ManagedRegisterSpill spill = entry_spills.at(i);
      offset += spill.getSize();
    } else if (reg.IsCoreRegister()) {
      __ StoreToOffset(kStoreWord, reg.AsCoreRegister(), SP, offset);
      offset += 4;
    } else if (reg.IsSRegister()) {
      __ StoreSToOffset(reg.AsSRegister(), SP, offset);
      offset += 4;
    } else if (reg.IsDRegister()) {
      __ StoreDToOffset(reg.AsDRegister(), SP, offset);
      offset += 8;
    }
  }
}
143
// Emits the JNI stub epilogue: shrinks the stack back to the callee-save
// area, pops FP callee saves, then pops the core callee saves with PC in the
// pop list (so the pop is also the return). CFI state is restored for any
// code emitted after this exit block.
void ArmJNIMacroAssembler::RemoveFrame(size_t frame_size,
                                       ArrayRef<const ManagedRegister> callee_save_regs) {
  CHECK_ALIGNED(frame_size, kStackAlignment);
  cfi().RememberState();

  // Compute callee saves to pop and PC.
  // LR was pushed in the prologue; popping into PC returns to the caller.
  RegList core_spill_mask = 1 << PC;
  uint32_t fp_spill_mask = 0;
  for (const ManagedRegister& reg : callee_save_regs) {
    if (reg.AsArm().IsCoreRegister()) {
      core_spill_mask |= 1 << reg.AsArm().AsCoreRegister();
    } else {
      fp_spill_mask |= 1 << reg.AsArm().AsSRegister();
    }
  }

  // Decrease frame to start of callee saves.
  int pop_values = POPCOUNT(core_spill_mask) + POPCOUNT(fp_spill_mask);
  CHECK_GT(frame_size, pop_values * kFramePointerSize);
  DecreaseFrameSize(frame_size - (pop_values * kFramePointerSize));  // handles CFI as well.

  if (fp_spill_mask != 0) {
    // vpop mirrors the contiguous vpush from the prologue.
    __ vpops(SRegister(CTZ(fp_spill_mask)), POPCOUNT(fp_spill_mask));
    cfi().AdjustCFAOffset(-kFramePointerSize * POPCOUNT(fp_spill_mask));
    cfi().RestoreMany(DWARFReg(SRegister(0)), fp_spill_mask);
  }

  // Pop callee saves and PC.
  __ PopList(core_spill_mask);

  // The CFI should be restored for any code that follows the exit block.
  cfi().RestoreState();
  cfi().DefCFAOffset(frame_size);
}
178
// Moves SP down by |adjust| bytes and records the CFA offset change.
void ArmJNIMacroAssembler::IncreaseFrameSize(size_t adjust) {
  __ AddConstant(SP, -adjust);
  cfi().AdjustCFAOffset(adjust);
}
183
// Shared helper so the slow path (which has no ArmJNIMacroAssembler) can also
// shrink the frame: moves SP up by |adjust| and records the CFA change.
static void DecreaseFrameSizeImpl(ArmAssembler* assembler, size_t adjust) {
  assembler->AddConstant(SP, adjust);
  assembler->cfi().AdjustCFAOffset(-adjust);
}
188
// Moves SP up by |adjust| bytes (frame teardown), including CFI bookkeeping.
void ArmJNIMacroAssembler::DecreaseFrameSize(size_t adjust) {
  DecreaseFrameSizeImpl(asm_.get(), adjust);
}
192
// Stores |msrc| to the stack slot at |dest|. The store width is selected by
// the register kind; |size| is cross-checked for core registers and pairs.
void ArmJNIMacroAssembler::Store(FrameOffset dest, ManagedRegister msrc, size_t size) {
  ArmManagedRegister src = msrc.AsArm();
  if (src.IsNoRegister()) {
    // Nothing to store; only legal for a zero-sized value.
    CHECK_EQ(0u, size);
  } else if (src.IsCoreRegister()) {
    CHECK_EQ(4u, size);
    __ StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
  } else if (src.IsRegisterPair()) {
    // 64-bit value held in two core registers: store low word then high word.
    CHECK_EQ(8u, size);
    __ StoreToOffset(kStoreWord, src.AsRegisterPairLow(), SP, dest.Int32Value());
    __ StoreToOffset(kStoreWord, src.AsRegisterPairHigh(), SP, dest.Int32Value() + 4);
  } else if (src.IsSRegister()) {
    __ StoreSToOffset(src.AsSRegister(), SP, dest.Int32Value());
  } else {
    CHECK(src.IsDRegister()) << src;
    __ StoreDToOffset(src.AsDRegister(), SP, dest.Int32Value());
  }
}
211
StoreRef(FrameOffset dest,ManagedRegister msrc)212 void ArmJNIMacroAssembler::StoreRef(FrameOffset dest, ManagedRegister msrc) {
213 ArmManagedRegister src = msrc.AsArm();
214 CHECK(src.IsCoreRegister()) << src;
215 __ StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
216 }
217
StoreRawPtr(FrameOffset dest,ManagedRegister msrc)218 void ArmJNIMacroAssembler::StoreRawPtr(FrameOffset dest, ManagedRegister msrc) {
219 ArmManagedRegister src = msrc.AsArm();
220 CHECK(src.IsCoreRegister()) << src;
221 __ StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
222 }
223
StoreSpanning(FrameOffset dest,ManagedRegister msrc,FrameOffset in_off,ManagedRegister mscratch)224 void ArmJNIMacroAssembler::StoreSpanning(FrameOffset dest,
225 ManagedRegister msrc,
226 FrameOffset in_off,
227 ManagedRegister mscratch) {
228 ArmManagedRegister src = msrc.AsArm();
229 ArmManagedRegister scratch = mscratch.AsArm();
230 __ StoreToOffset(kStoreWord, src.AsCoreRegister(), SP, dest.Int32Value());
231 __ LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, in_off.Int32Value());
232 __ StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value() + sizeof(uint32_t));
233 }
234
CopyRef(FrameOffset dest,FrameOffset src,ManagedRegister mscratch)235 void ArmJNIMacroAssembler::CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister mscratch) {
236 ArmManagedRegister scratch = mscratch.AsArm();
237 __ LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value());
238 __ StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
239 }
240
// Loads a heap reference from |mbase| + |offs| into |mdest|. When
// |unpoison_reference| is set, reverses heap-reference poisoning on the
// loaded value (a no-op build-configuration permitting — see
// MaybeUnpoisonHeapReference).
void ArmJNIMacroAssembler::LoadRef(ManagedRegister mdest,
                                   ManagedRegister mbase,
                                   MemberOffset offs,
                                   bool unpoison_reference) {
  ArmManagedRegister base = mbase.AsArm();
  ArmManagedRegister dst = mdest.AsArm();
  CHECK(base.IsCoreRegister()) << base;
  CHECK(dst.IsCoreRegister()) << dst;
  __ LoadFromOffset(kLoadWord,
                    dst.AsCoreRegister(),
                    base.AsCoreRegister(),
                    offs.Int32Value());
  if (unpoison_reference) {
    __ MaybeUnpoisonHeapReference(dst.AsCoreRegister());
  }
}
257
LoadRef(ManagedRegister mdest,FrameOffset src)258 void ArmJNIMacroAssembler::LoadRef(ManagedRegister mdest, FrameOffset src) {
259 ArmManagedRegister dst = mdest.AsArm();
260 CHECK(dst.IsCoreRegister()) << dst;
261 __ LoadFromOffset(kLoadWord, dst.AsCoreRegister(), SP, src.Int32Value());
262 }
263
LoadRawPtr(ManagedRegister mdest,ManagedRegister mbase,Offset offs)264 void ArmJNIMacroAssembler::LoadRawPtr(ManagedRegister mdest,
265 ManagedRegister mbase,
266 Offset offs) {
267 ArmManagedRegister base = mbase.AsArm();
268 ArmManagedRegister dst = mdest.AsArm();
269 CHECK(base.IsCoreRegister()) << base;
270 CHECK(dst.IsCoreRegister()) << dst;
271 __ LoadFromOffset(kLoadWord,
272 dst.AsCoreRegister(),
273 base.AsCoreRegister(),
274 offs.Int32Value());
275 }
276
StoreImmediateToFrame(FrameOffset dest,uint32_t imm,ManagedRegister mscratch)277 void ArmJNIMacroAssembler::StoreImmediateToFrame(FrameOffset dest,
278 uint32_t imm,
279 ManagedRegister mscratch) {
280 ArmManagedRegister scratch = mscratch.AsArm();
281 CHECK(scratch.IsCoreRegister()) << scratch;
282 __ LoadImmediate(scratch.AsCoreRegister(), imm);
283 __ StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
284 }
285
// Shared helper for Load/LoadFromThread: loads |size| bytes from
// |src_register| + |src_offset| into |m_dst|, choosing the load form by the
// destination register kind (core, pair, S- or D-register).
static void EmitLoad(ArmAssembler* assembler,
                     ManagedRegister m_dst,
                     Register src_register,
                     int32_t src_offset,
                     size_t size) {
  ArmManagedRegister dst = m_dst.AsArm();
  if (dst.IsNoRegister()) {
    // Nothing to load; only legal for a zero-sized value.
    CHECK_EQ(0u, size) << dst;
  } else if (dst.IsCoreRegister()) {
    CHECK_EQ(4u, size) << dst;
    assembler->LoadFromOffset(kLoadWord, dst.AsCoreRegister(), src_register, src_offset);
  } else if (dst.IsRegisterPair()) {
    // 64-bit value: low word first, then high word at +4.
    CHECK_EQ(8u, size) << dst;
    assembler->LoadFromOffset(kLoadWord, dst.AsRegisterPairLow(), src_register, src_offset);
    assembler->LoadFromOffset(kLoadWord, dst.AsRegisterPairHigh(), src_register, src_offset + 4);
  } else if (dst.IsSRegister()) {
    assembler->LoadSFromOffset(dst.AsSRegister(), src_register, src_offset);
  } else {
    CHECK(dst.IsDRegister()) << dst;
    assembler->LoadDFromOffset(dst.AsDRegister(), src_register, src_offset);
  }
}
308
// Loads |size| bytes from the stack slot at |src| into |m_dst|.
void ArmJNIMacroAssembler::Load(ManagedRegister m_dst, FrameOffset src, size_t size) {
  EmitLoad(asm_.get(), m_dst, SP, src.Int32Value(), size);
}
312
// Loads |size| bytes from the thread register (TR) at offset |src| into |m_dst|.
void ArmJNIMacroAssembler::LoadFromThread(ManagedRegister m_dst, ThreadOffset32 src, size_t size) {
  EmitLoad(asm_.get(), m_dst, TR, src.Int32Value(), size);
}
316
LoadRawPtrFromThread(ManagedRegister m_dst,ThreadOffset32 offs)317 void ArmJNIMacroAssembler::LoadRawPtrFromThread(ManagedRegister m_dst, ThreadOffset32 offs) {
318 ArmManagedRegister dst = m_dst.AsArm();
319 CHECK(dst.IsCoreRegister()) << dst;
320 __ LoadFromOffset(kLoadWord, dst.AsCoreRegister(), TR, offs.Int32Value());
321 }
322
CopyRawPtrFromThread(FrameOffset fr_offs,ThreadOffset32 thr_offs,ManagedRegister mscratch)323 void ArmJNIMacroAssembler::CopyRawPtrFromThread(FrameOffset fr_offs,
324 ThreadOffset32 thr_offs,
325 ManagedRegister mscratch) {
326 ArmManagedRegister scratch = mscratch.AsArm();
327 CHECK(scratch.IsCoreRegister()) << scratch;
328 __ LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), TR, thr_offs.Int32Value());
329 __ StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, fr_offs.Int32Value());
330 }
331
CopyRawPtrToThread(ThreadOffset32 thr_offs,FrameOffset fr_offs,ManagedRegister mscratch)332 void ArmJNIMacroAssembler::CopyRawPtrToThread(ThreadOffset32 thr_offs,
333 FrameOffset fr_offs,
334 ManagedRegister mscratch) {
335 ArmManagedRegister scratch = mscratch.AsArm();
336 CHECK(scratch.IsCoreRegister()) << scratch;
337 __ LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, fr_offs.Int32Value());
338 __ StoreToOffset(kStoreWord, scratch.AsCoreRegister(), TR, thr_offs.Int32Value());
339 }
340
StoreStackOffsetToThread(ThreadOffset32 thr_offs,FrameOffset fr_offs,ManagedRegister mscratch)341 void ArmJNIMacroAssembler::StoreStackOffsetToThread(ThreadOffset32 thr_offs,
342 FrameOffset fr_offs,
343 ManagedRegister mscratch) {
344 ArmManagedRegister scratch = mscratch.AsArm();
345 CHECK(scratch.IsCoreRegister()) << scratch;
346 __ AddConstant(scratch.AsCoreRegister(), SP, fr_offs.Int32Value(), AL);
347 __ StoreToOffset(kStoreWord, scratch.AsCoreRegister(), TR, thr_offs.Int32Value());
348 }
349
// Stores the current SP to the thread register (TR) at |thr_offs|.
void ArmJNIMacroAssembler::StoreStackPointerToThread(ThreadOffset32 thr_offs) {
  __ StoreToOffset(kStoreWord, SP, TR, thr_offs.Int32Value());
}
353
// Intentionally unimplemented: ARM's managed ABI never requires an explicit
// sign-extension step here, so reaching this is a fatal error.
void ArmJNIMacroAssembler::SignExtend(ManagedRegister /*mreg*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "no sign extension necessary for arm";
}
357
// Intentionally unimplemented: ARM's managed ABI never requires an explicit
// zero-extension step here, so reaching this is a fatal error.
void ArmJNIMacroAssembler::ZeroExtend(ManagedRegister /*mreg*/, size_t /*size*/) {
  UNIMPLEMENTED(FATAL) << "no zero extension necessary for arm";
}
361
// Register-to-register move covering every register-kind combination that
// can occur (core->core, D->D, pair->D, S->S, core->S, pair->pair). A no-op
// when source and destination are the same register.
void ArmJNIMacroAssembler::Move(ManagedRegister m_dst, ManagedRegister m_src, size_t /*size*/) {
  ArmManagedRegister dst = m_dst.AsArm();
  ArmManagedRegister src = m_src.AsArm();
  if (!dst.Equals(src)) {
    if (dst.IsCoreRegister()) {
      CHECK(src.IsCoreRegister()) << src;
      __ mov(dst.AsCoreRegister(), ShifterOperand(src.AsCoreRegister()));
    } else if (dst.IsDRegister()) {
      if (src.IsDRegister()) {
        __ vmovd(dst.AsDRegister(), src.AsDRegister());
      } else {
        // VMOV Dn, Rlo, Rhi (Dn = {Rlo, Rhi})
        CHECK(src.IsRegisterPair()) << src;
        __ vmovdrr(dst.AsDRegister(), src.AsRegisterPairLow(), src.AsRegisterPairHigh());
      }
    } else if (dst.IsSRegister()) {
      if (src.IsSRegister()) {
        __ vmovs(dst.AsSRegister(), src.AsSRegister());
      } else {
        // VMOV Sn, Rn (Sn = Rn)
        CHECK(src.IsCoreRegister()) << src;
        __ vmovsr(dst.AsSRegister(), src.AsCoreRegister());
      }
    } else {
      CHECK(dst.IsRegisterPair()) << dst;
      CHECK(src.IsRegisterPair()) << src;
      // Ensure that the first move doesn't clobber the input of the second.
      if (src.AsRegisterPairHigh() != dst.AsRegisterPairLow()) {
        __ mov(dst.AsRegisterPairLow(), ShifterOperand(src.AsRegisterPairLow()));
        __ mov(dst.AsRegisterPairHigh(), ShifterOperand(src.AsRegisterPairHigh()));
      } else {
        __ mov(dst.AsRegisterPairHigh(), ShifterOperand(src.AsRegisterPairHigh()));
        __ mov(dst.AsRegisterPairLow(), ShifterOperand(src.AsRegisterPairLow()));
      }
    }
  }
}
399
Copy(FrameOffset dest,FrameOffset src,ManagedRegister mscratch,size_t size)400 void ArmJNIMacroAssembler::Copy(FrameOffset dest,
401 FrameOffset src,
402 ManagedRegister mscratch,
403 size_t size) {
404 ArmManagedRegister scratch = mscratch.AsArm();
405 CHECK(scratch.IsCoreRegister()) << scratch;
406 CHECK(size == 4 || size == 8) << size;
407 if (size == 4) {
408 __ LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value());
409 __ StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
410 } else if (size == 8) {
411 __ LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value());
412 __ StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value());
413 __ LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, src.Int32Value() + 4);
414 __ StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, dest.Int32Value() + 4);
415 }
416 }
417
Copy(FrameOffset dest,ManagedRegister src_base,Offset src_offset,ManagedRegister mscratch,size_t size)418 void ArmJNIMacroAssembler::Copy(FrameOffset dest,
419 ManagedRegister src_base,
420 Offset src_offset,
421 ManagedRegister mscratch,
422 size_t size) {
423 Register scratch = mscratch.AsArm().AsCoreRegister();
424 CHECK_EQ(size, 4u);
425 __ LoadFromOffset(kLoadWord, scratch, src_base.AsArm().AsCoreRegister(), src_offset.Int32Value());
426 __ StoreToOffset(kStoreWord, scratch, SP, dest.Int32Value());
427 }
428
Copy(ManagedRegister dest_base,Offset dest_offset,FrameOffset src,ManagedRegister mscratch,size_t size)429 void ArmJNIMacroAssembler::Copy(ManagedRegister dest_base,
430 Offset dest_offset,
431 FrameOffset src,
432 ManagedRegister mscratch,
433 size_t size) {
434 Register scratch = mscratch.AsArm().AsCoreRegister();
435 CHECK_EQ(size, 4u);
436 __ LoadFromOffset(kLoadWord, scratch, SP, src.Int32Value());
437 __ StoreToOffset(kStoreWord,
438 scratch,
439 dest_base.AsArm().AsCoreRegister(),
440 dest_offset.Int32Value());
441 }
442
// Unimplemented overload (frame <- [frame-base + offset]); aborts if reached.
void ArmJNIMacroAssembler::Copy(FrameOffset /*dst*/,
                                FrameOffset /*src_base*/,
                                Offset /*src_offset*/,
                                ManagedRegister /*mscratch*/,
                                size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}
450
Copy(ManagedRegister dest,Offset dest_offset,ManagedRegister src,Offset src_offset,ManagedRegister mscratch,size_t size)451 void ArmJNIMacroAssembler::Copy(ManagedRegister dest,
452 Offset dest_offset,
453 ManagedRegister src,
454 Offset src_offset,
455 ManagedRegister mscratch,
456 size_t size) {
457 CHECK_EQ(size, 4u);
458 Register scratch = mscratch.AsArm().AsCoreRegister();
459 __ LoadFromOffset(kLoadWord, scratch, src.AsArm().AsCoreRegister(), src_offset.Int32Value());
460 __ StoreToOffset(kStoreWord, scratch, dest.AsArm().AsCoreRegister(), dest_offset.Int32Value());
461 }
462
// Unimplemented overload ([frame + offset] <- [frame + offset]); aborts if
// reached.
void ArmJNIMacroAssembler::Copy(FrameOffset /*dst*/,
                                Offset /*dest_offset*/,
                                FrameOffset /*src*/,
                                Offset /*src_offset*/,
                                ManagedRegister /*scratch*/,
                                size_t /*size*/) {
  UNIMPLEMENTED(FATAL);
}
471
// Produces in |mout_reg| the handle scope entry for the reference at
// |handle_scope_offset|: the address SP + handle_scope_offset, or null when
// the reference itself is null (only checked when |null_allowed|). Uses
// Thumb2 IT blocks for the conditional selection.
void ArmJNIMacroAssembler::CreateHandleScopeEntry(ManagedRegister mout_reg,
                                                  FrameOffset handle_scope_offset,
                                                  ManagedRegister min_reg,
                                                  bool null_allowed) {
  ArmManagedRegister out_reg = mout_reg.AsArm();
  ArmManagedRegister in_reg = min_reg.AsArm();
  CHECK(in_reg.IsNoRegister() || in_reg.IsCoreRegister()) << in_reg;
  CHECK(out_reg.IsCoreRegister()) << out_reg;
  if (null_allowed) {
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. out_reg = (handle == 0) ? 0 : (SP+handle_offset)
    if (in_reg.IsNoRegister()) {
      // Reference not in a register yet: reload it from the handle scope slot.
      __ LoadFromOffset(kLoadWord, out_reg.AsCoreRegister(), SP, handle_scope_offset.Int32Value());
      in_reg = out_reg;
    }
    __ cmp(in_reg.AsCoreRegister(), ShifterOperand(0));
    if (!out_reg.Equals(in_reg)) {
      // Distinct registers: IT-else selects 0 (EQ) or the address (NE).
      __ it(EQ, kItElse);
      __ LoadImmediate(out_reg.AsCoreRegister(), 0, EQ);
    } else {
      // Same register: only the NE case needs a write; EQ keeps the null.
      __ it(NE);
    }
    __ AddConstant(out_reg.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), NE);
  } else {
    __ AddConstant(out_reg.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), AL);
  }
}
500
// Stack-slot variant of CreateHandleScopeEntry: computes the handle scope
// entry (null, or SP + handle_scope_offset) in |mscratch| and stores it to
// the stack slot at |out_off|.
void ArmJNIMacroAssembler::CreateHandleScopeEntry(FrameOffset out_off,
                                                  FrameOffset handle_scope_offset,
                                                  ManagedRegister mscratch,
                                                  bool null_allowed) {
  ArmManagedRegister scratch = mscratch.AsArm();
  CHECK(scratch.IsCoreRegister()) << scratch;
  if (null_allowed) {
    __ LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value());
    // Null values get a handle scope entry value of 0. Otherwise, the handle scope entry is
    // the address in the handle scope holding the reference.
    // e.g. scratch = (scratch == 0) ? 0 : (SP+handle_scope_offset)
    __ cmp(scratch.AsCoreRegister(), ShifterOperand(0));
    // Only overwrite scratch with the address when the reference is non-null.
    __ it(NE);
    __ AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), NE);
  } else {
    __ AddConstant(scratch.AsCoreRegister(), SP, handle_scope_offset.Int32Value(), AL);
  }
  __ StoreToOffset(kStoreWord, scratch.AsCoreRegister(), SP, out_off.Int32Value());
}
520
LoadReferenceFromHandleScope(ManagedRegister mout_reg,ManagedRegister min_reg)521 void ArmJNIMacroAssembler::LoadReferenceFromHandleScope(ManagedRegister mout_reg,
522 ManagedRegister min_reg) {
523 ArmManagedRegister out_reg = mout_reg.AsArm();
524 ArmManagedRegister in_reg = min_reg.AsArm();
525 CHECK(out_reg.IsCoreRegister()) << out_reg;
526 CHECK(in_reg.IsCoreRegister()) << in_reg;
527 Label null_arg;
528 if (!out_reg.Equals(in_reg)) {
529 __ LoadImmediate(out_reg.AsCoreRegister(), 0, EQ); // TODO: why EQ?
530 }
531 __ cmp(in_reg.AsCoreRegister(), ShifterOperand(0));
532 __ it(NE);
533 __ LoadFromOffset(kLoadWord, out_reg.AsCoreRegister(), in_reg.AsCoreRegister(), 0, NE);
534 }
535
// Intentionally a no-op: reference verification is not implemented on ARM.
void ArmJNIMacroAssembler::VerifyObject(ManagedRegister /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}
539
// Intentionally a no-op: reference verification is not implemented on ARM.
void ArmJNIMacroAssembler::VerifyObject(FrameOffset /*src*/, bool /*could_be_null*/) {
  // TODO: not validating references.
}
543
Call(ManagedRegister mbase,Offset offset,ManagedRegister mscratch)544 void ArmJNIMacroAssembler::Call(ManagedRegister mbase,
545 Offset offset,
546 ManagedRegister mscratch) {
547 ArmManagedRegister base = mbase.AsArm();
548 ArmManagedRegister scratch = mscratch.AsArm();
549 CHECK(base.IsCoreRegister()) << base;
550 CHECK(scratch.IsCoreRegister()) << scratch;
551 __ LoadFromOffset(kLoadWord,
552 scratch.AsCoreRegister(),
553 base.AsCoreRegister(),
554 offset.Int32Value());
555 __ blx(scratch.AsCoreRegister());
556 // TODO: place reference map on call.
557 }
558
Call(FrameOffset base,Offset offset,ManagedRegister mscratch)559 void ArmJNIMacroAssembler::Call(FrameOffset base, Offset offset, ManagedRegister mscratch) {
560 ArmManagedRegister scratch = mscratch.AsArm();
561 CHECK(scratch.IsCoreRegister()) << scratch;
562 // Call *(*(SP + base) + offset)
563 __ LoadFromOffset(kLoadWord, scratch.AsCoreRegister(), SP, base.Int32Value());
564 __ LoadFromOffset(kLoadWord,
565 scratch.AsCoreRegister(),
566 scratch.AsCoreRegister(),
567 offset.Int32Value());
568 __ blx(scratch.AsCoreRegister());
569 // TODO: place reference map on call
570 }
571
// Unimplemented: calling through a thread-local entrypoint is not supported
// by this assembler; aborts if reached.
void ArmJNIMacroAssembler::CallFromThread(ThreadOffset32 offset ATTRIBUTE_UNUSED,
                                          ManagedRegister scratch ATTRIBUTE_UNUSED) {
  UNIMPLEMENTED(FATAL);
}
576
// Copies the thread register (TR) into the given core register.
void ArmJNIMacroAssembler::GetCurrentThread(ManagedRegister tr) {
  __ mov(tr.AsArm().AsCoreRegister(), ShifterOperand(TR));
}
580
// Stores the thread register (TR) to the stack slot at |offset|; the scratch
// register is unused on ARM.
void ArmJNIMacroAssembler::GetCurrentThread(FrameOffset offset, ManagedRegister /*scratch*/) {
  __ StoreToOffset(kStoreWord, TR, SP, offset.Int32Value(), AL);
}
584
// Emits a pending-exception check: loads Thread::Current()'s exception field
// into |mscratch| and, if non-null, branches to an arena-allocated
// ArmExceptionSlowPath (emitted later) that delivers the exception.
// |stack_adjust| is the extra stack the slow path must unwind first.
void ArmJNIMacroAssembler::ExceptionPoll(ManagedRegister mscratch, size_t stack_adjust) {
  ArmManagedRegister scratch = mscratch.AsArm();
  ArmExceptionSlowPath* slow = new (__ GetArena()) ArmExceptionSlowPath(scratch, stack_adjust);
  __ GetBuffer()->EnqueueSlowPath(slow);
  __ LoadFromOffset(kLoadWord,
                    scratch.AsCoreRegister(),
                    TR,
                    Thread::ExceptionOffset<kArmPointerSize>().Int32Value());
  __ cmp(scratch.AsCoreRegister(), ShifterOperand(0));
  __ b(slow->Entry(), NE);
}
596
CreateLabel()597 std::unique_ptr<JNIMacroLabel> ArmJNIMacroAssembler::CreateLabel() {
598 return std::unique_ptr<JNIMacroLabel>(new ArmJNIMacroLabel());
599 }
600
// Emits an unconditional branch to |label|.
void ArmJNIMacroAssembler::Jump(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  __ b(ArmJNIMacroLabel::Cast(label)->AsArm());
}
605
Jump(JNIMacroLabel * label,JNIMacroUnaryCondition condition,ManagedRegister test)606 void ArmJNIMacroAssembler::Jump(JNIMacroLabel* label,
607 JNIMacroUnaryCondition condition,
608 ManagedRegister test) {
609 CHECK(label != nullptr);
610
611 arm::Condition arm_cond;
612 switch (condition) {
613 case JNIMacroUnaryCondition::kZero:
614 arm_cond = EQ;
615 break;
616 case JNIMacroUnaryCondition::kNotZero:
617 arm_cond = NE;
618 break;
619 default:
620 LOG(FATAL) << "Not implemented condition: " << static_cast<int>(condition);
621 UNREACHABLE();
622 }
623 __ cmp(test.AsArm().AsCoreRegister(), ShifterOperand(0));
624 __ b(ArmJNIMacroLabel::Cast(label)->AsArm(), arm_cond);
625 }
626
// Binds |label| to the current code position.
void ArmJNIMacroAssembler::Bind(JNIMacroLabel* label) {
  CHECK(label != nullptr);
  __ Bind(ArmJNIMacroLabel::Cast(label)->AsArm());
}
631
632 #undef __
633
// Out-of-line exception delivery: undoes any extra stack adjustment, moves
// the pending exception (in scratch_) into R0 as the argument, then calls
// the runtime's pDeliverException entrypoint through R12. Per the comment
// below, the call does not return, so R0 need not be preserved.
void ArmExceptionSlowPath::Emit(Assembler* sasm) {
  ArmAssembler* sp_asm = down_cast<ArmAssembler*>(sasm);
#define __ sp_asm->
  __ Bind(&entry_);
  if (stack_adjust_ != 0) {  // Fix up the frame.
    DecreaseFrameSizeImpl(sp_asm, stack_adjust_);
  }
  // Pass exception object as argument.
  // Don't care about preserving R0 as this call won't return.
  __ mov(R0, ShifterOperand(scratch_.AsCoreRegister()));
  // Set up call to Thread::Current()->pDeliverException.
  __ LoadFromOffset(kLoadWord,
                    R12,
                    TR,
                    QUICK_ENTRYPOINT_OFFSET(kArmPointerSize, pDeliverException).Int32Value());
  __ blx(R12);
#undef __
}
652
// Emits a full data memory barrier (DMB SY). The caller must pass R12 as the
// scratch register; it is only checked here, not otherwise used.
void ArmJNIMacroAssembler::MemoryBarrier(ManagedRegister mscratch) {
  CHECK_EQ(mscratch.AsArm().AsCoreRegister(), R12);
  asm_->dmb(SY);
}
657
658 } // namespace arm
659 } // namespace art
660