/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_ARM64_JNI_MACRO_ASSEMBLER_ARM64_H_
#define ART_COMPILER_UTILS_ARM64_JNI_MACRO_ASSEMBLER_ARM64_H_

#include <stdint.h>
#include <memory>
#include <vector>

#include <android-base/logging.h>

#include "assembler_arm64.h"
#include "base/arena_containers.h"
#include "base/enums.h"
#include "base/macros.h"
#include "offsets.h"
#include "utils/assembler.h"
#include "utils/jni_macro_assembler.h"

// TODO(VIXL): Make VIXL compile with -Wshadow.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wshadow"
#include "aarch64/macro-assembler-aarch64.h"
#pragma GCC diagnostic pop

namespace art {
namespace arm64 {

class Arm64JNIMacroAssembler FINAL : public JNIMacroAssemblerFwd<Arm64Assembler, PointerSize::k64> {
 public:
  explicit Arm64JNIMacroAssembler(ArenaAllocator* allocator)
      : JNIMacroAssemblerFwd(allocator),
        exception_blocks_(allocator->Adapter(kArenaAllocAssembler)) {}

  ~Arm64JNIMacroAssembler();

  // Finalize the code.
  void FinalizeCode() OVERRIDE;

  // Emit code that will create an activation on the stack.
  void BuildFrame(size_t frame_size,
                  ManagedRegister method_reg,
                  ArrayRef<const ManagedRegister> callee_save_regs,
                  const ManagedRegisterEntrySpills& entry_spills) OVERRIDE;

  // Emit code that will remove an activation from the stack.
  void RemoveFrame(size_t frame_size,
                   ArrayRef<const ManagedRegister> callee_save_regs,
                   bool may_suspend) OVERRIDE;

  void IncreaseFrameSize(size_t adjust) OVERRIDE;
  void DecreaseFrameSize(size_t adjust) OVERRIDE;

  // Store routines.
  void Store(FrameOffset offs, ManagedRegister src, size_t size) OVERRIDE;
  void StoreRef(FrameOffset dest, ManagedRegister src) OVERRIDE;
  void StoreRawPtr(FrameOffset dest, ManagedRegister src) OVERRIDE;
  void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) OVERRIDE;
  void StoreStackOffsetToThread(ThreadOffset64 thr_offs,
                                FrameOffset fr_offs,
                                ManagedRegister scratch) OVERRIDE;
  void StoreStackPointerToThread(ThreadOffset64 thr_offs) OVERRIDE;
  void StoreSpanning(FrameOffset dest,
                     ManagedRegister src,
                     FrameOffset in_off,
                     ManagedRegister scratch) OVERRIDE;

  // Load routines.
  void Load(ManagedRegister dest, FrameOffset src, size_t size) OVERRIDE;
  void LoadFromThread(ManagedRegister dest, ThreadOffset64 src, size_t size) OVERRIDE;
  void LoadRef(ManagedRegister dest, FrameOffset src) OVERRIDE;
  void LoadRef(ManagedRegister dest,
               ManagedRegister base,
               MemberOffset offs,
               bool unpoison_reference) OVERRIDE;
  void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) OVERRIDE;
  void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset64 offs) OVERRIDE;
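
  // Illustrative sketch, not part of the interface: a caller might spill a 64-bit
  // value to the frame and reload it with the store/load routines above. The
  // register and offset choices here are hypothetical.
  //   Store(FrameOffset(16), Arm64ManagedRegister::FromXRegister(X1), /* size= */ 8u);
  //   Load(Arm64ManagedRegister::FromXRegister(X1), FrameOffset(16), /* size= */ 8u);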

  // Copying routines.
  void Move(ManagedRegister dest, ManagedRegister src, size_t size) OVERRIDE;
  void CopyRawPtrFromThread(FrameOffset fr_offs,
                            ThreadOffset64 thr_offs,
                            ManagedRegister scratch) OVERRIDE;
  void CopyRawPtrToThread(ThreadOffset64 thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
      OVERRIDE;
  void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) OVERRIDE;
  void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) OVERRIDE;
  void Copy(FrameOffset dest,
            ManagedRegister src_base,
            Offset src_offset,
            ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(ManagedRegister dest_base,
            Offset dest_offset,
            FrameOffset src,
            ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(FrameOffset dest,
            FrameOffset src_base,
            Offset src_offset,
            ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(ManagedRegister dest,
            Offset dest_offset,
            ManagedRegister src,
            Offset src_offset,
            ManagedRegister scratch,
            size_t size) OVERRIDE;
  void Copy(FrameOffset dest,
            Offset dest_offset,
            FrameOffset src,
            Offset src_offset,
            ManagedRegister scratch,
            size_t size) OVERRIDE;
  void MemoryBarrier(ManagedRegister scratch) OVERRIDE;

  // Sign extension.
  void SignExtend(ManagedRegister mreg, size_t size) OVERRIDE;

  // Zero extension.
  void ZeroExtend(ManagedRegister mreg, size_t size) OVERRIDE;

  // Exploit fast access in managed code to Thread::Current().
  void GetCurrentThread(ManagedRegister tr) OVERRIDE;
  void GetCurrentThread(FrameOffset dest_offset, ManagedRegister scratch) OVERRIDE;

  // Set up out_reg to hold an Object** into the handle scope, or to be null if the
  // value is null and null_allowed. in_reg holds a possibly stale reference
  // that can be used to avoid loading the handle scope entry to see if the value is
  // null.
  void CreateHandleScopeEntry(ManagedRegister out_reg,
                              FrameOffset handlescope_offset,
                              ManagedRegister in_reg,
                              bool null_allowed) OVERRIDE;

  // Set up out_off to hold an Object** into the handle scope, or to be null if the
  // value is null and null_allowed.
  void CreateHandleScopeEntry(FrameOffset out_off,
                              FrameOffset handlescope_offset,
                              ManagedRegister scratch,
                              bool null_allowed) OVERRIDE;

  // src holds a handle scope entry (Object**); load this into dst.
  void LoadReferenceFromHandleScope(ManagedRegister dst, ManagedRegister src) OVERRIDE;

  // Heap::VerifyObject on src. In some cases (such as a reference to this) we
  // know that src may not be null.
  void VerifyObject(ManagedRegister src, bool could_be_null) OVERRIDE;
  void VerifyObject(FrameOffset src, bool could_be_null) OVERRIDE;

  // Call to address held at [base+offset].
  void Call(ManagedRegister base, Offset offset, ManagedRegister scratch) OVERRIDE;
  void Call(FrameOffset base, Offset offset, ManagedRegister scratch) OVERRIDE;
  void CallFromThread(ThreadOffset64 offset, ManagedRegister scratch) OVERRIDE;

  // Generate code to check if Thread::Current()->exception_ is non-null
  // and branch to an ExceptionSlowPath if it is.
  void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust) OVERRIDE;
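
  // Illustrative sketch of the label protocol declared below (names are
  // hypothetical): create a label, jump to it on a condition, emit the fast
  // path, then bind the label where the slow path starts.
  //   std::unique_ptr<JNIMacroLabel> slow_path = CreateLabel();
  //   Jump(slow_path.get(), JNIMacroUnaryCondition::kNotZero, test_reg);
  //   ... fast path ...
  //   Bind(slow_path.get());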

  // Create a new label that can be used with Jump/Bind calls.
  std::unique_ptr<JNIMacroLabel> CreateLabel() OVERRIDE;
  // Emit an unconditional jump to the label.
  void Jump(JNIMacroLabel* label) OVERRIDE;
  // Emit a conditional jump to the label by applying a unary condition test to the register.
  void Jump(JNIMacroLabel* label, JNIMacroUnaryCondition cond, ManagedRegister test) OVERRIDE;
  // Code at this offset will serve as the target for the Jump call.
  void Bind(JNIMacroLabel* label) OVERRIDE;

 private:
  class Arm64Exception {
   public:
    Arm64Exception(Arm64ManagedRegister scratch, size_t stack_adjust)
        : scratch_(scratch), stack_adjust_(stack_adjust) {}

    vixl::aarch64::Label* Entry() { return &exception_entry_; }

    // Register used for passing Thread::Current()->exception_.
    const Arm64ManagedRegister scratch_;

    // Stack adjust for ExceptionPoll.
    const size_t stack_adjust_;

    vixl::aarch64::Label exception_entry_;

   private:
    DISALLOW_COPY_AND_ASSIGN(Arm64Exception);
  };

  // Emits Exception block.
  void EmitExceptionPoll(Arm64Exception* exception);

  void StoreWToOffset(StoreOperandType type,
                      WRegister source,
                      XRegister base,
                      int32_t offset);
  void StoreToOffset(XRegister source, XRegister base, int32_t offset);
  void StoreSToOffset(SRegister source, XRegister base, int32_t offset);
  void StoreDToOffset(DRegister source, XRegister base, int32_t offset);

  void LoadImmediate(XRegister dest,
                     int32_t value,
                     vixl::aarch64::Condition cond = vixl::aarch64::al);
  void Load(Arm64ManagedRegister dst, XRegister src, int32_t src_offset, size_t size);
  void LoadWFromOffset(LoadOperandType type,
                       WRegister dest,
                       XRegister base,
                       int32_t offset);
  void LoadFromOffset(XRegister dest, XRegister base, int32_t offset);
  void LoadSFromOffset(SRegister dest, XRegister base, int32_t offset);
  void LoadDFromOffset(DRegister dest, XRegister base, int32_t offset);
  void AddConstant(XRegister rd,
                   int32_t value,
                   vixl::aarch64::Condition cond = vixl::aarch64::al);
  void AddConstant(XRegister rd,
                   XRegister rn,
                   int32_t value,
                   vixl::aarch64::Condition cond = vixl::aarch64::al);

  // List of exception blocks to generate at the end of the code cache.
  ArenaVector<std::unique_ptr<Arm64Exception>> exception_blocks_;
};

class Arm64JNIMacroLabel FINAL
    : public JNIMacroLabelCommon<Arm64JNIMacroLabel,
                                 vixl::aarch64::Label,
                                 InstructionSet::kArm64> {
 public:
  vixl::aarch64::Label* AsArm64() {
    return AsPlatformLabel();
  }
};

}  // namespace arm64
}  // namespace art

#endif  // ART_COMPILER_UTILS_ARM64_JNI_MACRO_ASSEMBLER_ARM64_H_