/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_COMPILER_UTILS_JNI_MACRO_ASSEMBLER_H_
#define ART_COMPILER_UTILS_JNI_MACRO_ASSEMBLER_H_

#include <memory>
#include <vector>

#include <android-base/logging.h>

#include "arch/instruction_set.h"
#include "base/arena_allocator.h"
#include "base/arena_object.h"
#include "base/array_ref.h"
#include "base/enums.h"
#include "base/macros.h"
#include "managed_register.h"
#include "offsets.h"

namespace art {

class ArenaAllocator;
class DebugFrameOpCodeWriterForAssembler;
class InstructionSetFeatures;
class MemoryRegion;
class JNIMacroLabel;

enum class JNIMacroUnaryCondition {
  kZero,
  kNotZero
};

template <PointerSize kPointerSize>
class JNIMacroAssembler : public DeletableArenaObject<kArenaAllocAssembler> {
 public:
  static std::unique_ptr<JNIMacroAssembler<kPointerSize>> Create(
      ArenaAllocator* allocator,
      InstructionSet instruction_set,
      const InstructionSetFeatures* instruction_set_features = nullptr);

  // Finalize the code; emit slow paths, fix up branches, add the literal pool, etc.
  virtual void FinalizeCode() = 0;

  // Size of generated code
  virtual size_t CodeSize() const = 0;

  // Copy instructions out of the assembly buffer into the given region of memory
  virtual void FinalizeInstructions(const MemoryRegion& region) = 0;
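
  // A typical lifetime of one of these assemblers, as used by the JNI stub
  // compiler, looks roughly like the sketch below. This is illustrative only;
  // `allocator`, `features`, and the choice of instruction set are assumed to
  // be supplied by the caller.
  //
  //   std::unique_ptr<JNIMacroAssembler<PointerSize::k64>> jni_asm =
  //       JNIMacroAssembler<PointerSize::k64>::Create(
  //           allocator, InstructionSet::kArm64, features);
  //   // ... emit code via the routines declared below ...
  //   jni_asm->FinalizeCode();
  //   std::vector<uint8_t> code(jni_asm->CodeSize());
  //   MemoryRegion region(code.data(), code.size());
  //   jni_asm->FinalizeInstructions(region);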

  // Emit code that will create an activation on the stack
  virtual void BuildFrame(size_t frame_size,
                          ManagedRegister method_reg,
                          ArrayRef<const ManagedRegister> callee_save_regs,
                          const ManagedRegisterEntrySpills& entry_spills) = 0;

  // Emit code that will remove an activation from the stack
  //
  // Argument `may_suspend` must be `true` if the compiled method may be
  // suspended during its execution (otherwise `false`, if it is impossible
  // to suspend during its execution).
  virtual void RemoveFrame(size_t frame_size,
                           ArrayRef<const ManagedRegister> callee_save_regs,
                           bool may_suspend) = 0;
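
  // Illustrative prologue/epilogue pairing (a sketch, not an authoritative
  // sequence; `frame_size`, `method_reg`, `callee_saves`, and `entry_spills`
  // are assumed to come from the JNI calling convention):
  //
  //   jni_asm->BuildFrame(frame_size, method_reg, callee_saves, entry_spills);
  //   // ... body of the compiled stub ...
  //   jni_asm->RemoveFrame(frame_size, callee_saves, /* may_suspend= */ true);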

  virtual void IncreaseFrameSize(size_t adjust) = 0;
  virtual void DecreaseFrameSize(size_t adjust) = 0;

  // Store routines
  virtual void Store(FrameOffset offs, ManagedRegister src, size_t size) = 0;
  virtual void StoreRef(FrameOffset dest, ManagedRegister src) = 0;
  virtual void StoreRawPtr(FrameOffset dest, ManagedRegister src) = 0;

  virtual void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) = 0;

  virtual void StoreStackOffsetToThread(ThreadOffset<kPointerSize> thr_offs,
                                        FrameOffset fr_offs,
                                        ManagedRegister scratch) = 0;

  virtual void StoreStackPointerToThread(ThreadOffset<kPointerSize> thr_offs) = 0;

  virtual void StoreSpanning(FrameOffset dest,
                             ManagedRegister src,
                             FrameOffset in_off,
                             ManagedRegister scratch) = 0;

  // Load routines
  virtual void Load(ManagedRegister dest, FrameOffset src, size_t size) = 0;

  virtual void LoadFromThread(ManagedRegister dest,
                              ThreadOffset<kPointerSize> src,
                              size_t size) = 0;

  virtual void LoadRef(ManagedRegister dest, FrameOffset src) = 0;
  // If unpoison_reference is true and kPoisonReference is true, then we negate the read reference.
  virtual void LoadRef(ManagedRegister dest,
                       ManagedRegister base,
                       MemberOffset offs,
                       bool unpoison_reference) = 0;

  virtual void LoadRawPtr(ManagedRegister dest, ManagedRegister base, Offset offs) = 0;

  virtual void LoadRawPtrFromThread(ManagedRegister dest, ThreadOffset<kPointerSize> offs) = 0;

  // Copying routines
  virtual void Move(ManagedRegister dest, ManagedRegister src, size_t size) = 0;

  virtual void CopyRawPtrFromThread(FrameOffset fr_offs,
                                    ThreadOffset<kPointerSize> thr_offs,
                                    ManagedRegister scratch) = 0;

  virtual void CopyRawPtrToThread(ThreadOffset<kPointerSize> thr_offs,
                                  FrameOffset fr_offs,
                                  ManagedRegister scratch) = 0;

  virtual void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) = 0;

  virtual void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) = 0;

  virtual void Copy(FrameOffset dest,
                    ManagedRegister src_base,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(ManagedRegister dest_base,
                    Offset dest_offset,
                    FrameOffset src,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(FrameOffset dest,
                    FrameOffset src_base,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(ManagedRegister dest,
                    Offset dest_offset,
                    ManagedRegister src,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void Copy(FrameOffset dest,
                    Offset dest_offset,
                    FrameOffset src,
                    Offset src_offset,
                    ManagedRegister scratch,
                    size_t size) = 0;

  virtual void MemoryBarrier(ManagedRegister scratch) = 0;

  // Sign extension
  virtual void SignExtend(ManagedRegister mreg, size_t size) = 0;

  // Zero extension
  virtual void ZeroExtend(ManagedRegister mreg, size_t size) = 0;

  // Exploit fast access in managed code to Thread::Current()
  virtual void GetCurrentThread(ManagedRegister tr) = 0;
  virtual void GetCurrentThread(FrameOffset dest_offset, ManagedRegister scratch) = 0;

  // Set up out_reg to hold an Object** into the handle scope, or to be null if the
  // value is null and null_allowed. in_reg holds a possibly stale reference
  // that can be used to avoid loading the handle scope entry to see if the value is
  // null.
  virtual void CreateHandleScopeEntry(ManagedRegister out_reg,
                                      FrameOffset handlescope_offset,
                                      ManagedRegister in_reg,
                                      bool null_allowed) = 0;

  // Set up out_off to hold an Object** into the handle scope, or to be null if the
  // value is null and null_allowed.
  virtual void CreateHandleScopeEntry(FrameOffset out_off,
                                      FrameOffset handlescope_offset,
                                      ManagedRegister scratch,
                                      bool null_allowed) = 0;
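
  // Illustrative use when marshalling a reference argument (a sketch; the
  // register and offset names are placeholders from the calling convention):
  //
  //   jni_asm->CreateHandleScopeEntry(out_reg,
  //                                   handle_scope_offset,
  //                                   in_reg,
  //                                   /* null_allowed= */ true);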

  // src holds a handle scope entry (Object**); load this into dst.
  virtual void LoadReferenceFromHandleScope(ManagedRegister dst, ManagedRegister src) = 0;

  // Heap::VerifyObject on src. In some cases (such as a reference to this) we
  // know that src cannot be null.
  virtual void VerifyObject(ManagedRegister src, bool could_be_null) = 0;
  virtual void VerifyObject(FrameOffset src, bool could_be_null) = 0;

  // Jump to address held at [base+offset] (used for tail calls).
  virtual void Jump(ManagedRegister base, Offset offset, ManagedRegister scratch) = 0;

  // Call to address held at [base+offset]
  virtual void Call(ManagedRegister base, Offset offset, ManagedRegister scratch) = 0;
  virtual void Call(FrameOffset base, Offset offset, ManagedRegister scratch) = 0;
  virtual void CallFromThread(ThreadOffset<kPointerSize> offset, ManagedRegister scratch) = 0;

  // Generate code to check if Thread::Current()->exception_ is non-null
  // and branch to an ExceptionSlowPath if it is.
  virtual void ExceptionPoll(ManagedRegister scratch, size_t stack_adjust) = 0;

  // Create a new label that can be used with Jump/Bind calls.
  virtual std::unique_ptr<JNIMacroLabel> CreateLabel() = 0;
  // Emit an unconditional jump to the label.
  virtual void Jump(JNIMacroLabel* label) = 0;
  // Emit a conditional jump to the label by applying a unary condition test to the register.
  virtual void Jump(JNIMacroLabel* label, JNIMacroUnaryCondition cond, ManagedRegister test) = 0;
  // Code at this offset will serve as the target for the Jump call.
  virtual void Bind(JNIMacroLabel* label) = 0;
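
  // Illustrative control flow using the label API (a sketch; `test_reg` is a
  // placeholder register): skip a block of code when `test_reg` is zero.
  //
  //   std::unique_ptr<JNIMacroLabel> done = jni_asm->CreateLabel();
  //   jni_asm->Jump(done.get(), JNIMacroUnaryCondition::kZero, test_reg);
  //   // ... code executed only when `test_reg` is non-zero ...
  //   jni_asm->Bind(done.get());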

  virtual ~JNIMacroAssembler() {}

  /**
   * @brief Buffer of DWARF's Call Frame Information opcodes.
   * @details It is used by debuggers and other tools to unwind the call stack.
   */
  virtual DebugFrameOpCodeWriterForAssembler& cfi() = 0;

  void SetEmitRunTimeChecksInDebugMode(bool value) {
    emit_run_time_checks_in_debug_mode_ = value;
  }

 protected:
  JNIMacroAssembler() {}

  // Should run-time checks be emitted in debug mode?
  bool emit_run_time_checks_in_debug_mode_ = false;
};

// A "Label" class used with the JNIMacroAssembler,
// allowing code to branch (jump) from one place to another.
//
// This is just an interface, so every platform must provide
// its own implementation of it.
//
// A label is only safe to use with the same macro assembler
// that created it via JNIMacroAssembler::CreateLabel.
class JNIMacroLabel {
 public:
  virtual ~JNIMacroLabel() = 0;

  const InstructionSet isa_;
 protected:
  explicit JNIMacroLabel(InstructionSet isa) : isa_(isa) {}
};

inline JNIMacroLabel::~JNIMacroLabel() {
  // Compulsory definition for a pure virtual destructor
  // to avoid linking errors.
}

template <typename T, PointerSize kPointerSize>
class JNIMacroAssemblerFwd : public JNIMacroAssembler<kPointerSize> {
 public:
  void FinalizeCode() override {
    asm_.FinalizeCode();
  }

  size_t CodeSize() const override {
    return asm_.CodeSize();
  }

  void FinalizeInstructions(const MemoryRegion& region) override {
    asm_.FinalizeInstructions(region);
  }

  DebugFrameOpCodeWriterForAssembler& cfi() override {
    return asm_.cfi();
  }

 protected:
  explicit JNIMacroAssemblerFwd(ArenaAllocator* allocator) : asm_(allocator) {}

  T asm_;
};
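
// A concrete back end is expected to derive from this forwarder, wrapping its
// platform assembler. A simplified sketch, modeled on the x86 implementation:
//
//   class X86JNIMacroAssembler final
//       : public JNIMacroAssemblerFwd<X86Assembler, PointerSize::k32> {
//     // ... overrides of JNIMacroAssembler's pure virtual routines ...
//   };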

template <typename Self, typename PlatformLabel, InstructionSet kIsa>
class JNIMacroLabelCommon : public JNIMacroLabel {
 public:
  static Self* Cast(JNIMacroLabel* label) {
    CHECK(label != nullptr);
    CHECK_EQ(kIsa, label->isa_);

    return reinterpret_cast<Self*>(label);
  }

 protected:
  PlatformLabel* AsPlatformLabel() {
    return &label_;
  }

  JNIMacroLabelCommon() : JNIMacroLabel(kIsa) {
  }

  ~JNIMacroLabelCommon() override {}

 private:
  PlatformLabel label_;
};
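
// A platform label wraps its architecture's native label type through this
// CRTP helper. A simplified sketch, modeled on the x86 implementation:
//
//   class X86JNIMacroLabel final
//       : public JNIMacroLabelCommon<X86JNIMacroLabel,
//                                    art::Label,
//                                    InstructionSet::kX86> {
//    public:
//     art::Label* GetX86Label() { return AsPlatformLabel(); }
//   };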

}  // namespace art

#endif  // ART_COMPILER_UTILS_JNI_MACRO_ASSEMBLER_H_