/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_OBJECT_H_
#define ART_RUNTIME_MIRROR_OBJECT_H_

#include "atomic.h"
#include "base/casts.h"
#include "base/enums.h"
#include "globals.h"
#include "obj_ptr.h"
#include "object_reference.h"
#include "offsets.h"
#include "verify_object.h"

namespace art {

class ArtField;
class ArtMethod;
class ImageWriter;
class LockWord;
class Monitor;
struct ObjectOffsets;
class Thread;
class VoidFunctor;

namespace mirror {

class Array;
class Class;
class ClassLoader;
class DexCache;
class FinalizerReference;
template<class T> class ObjectArray;
template<class T> class PrimitiveArray;
typedef PrimitiveArray<uint8_t> BooleanArray;
typedef PrimitiveArray<int8_t> ByteArray;
typedef PrimitiveArray<uint16_t> CharArray;
typedef PrimitiveArray<double> DoubleArray;
typedef PrimitiveArray<float> FloatArray;
typedef PrimitiveArray<int32_t> IntArray;
typedef PrimitiveArray<int64_t> LongArray;
typedef PrimitiveArray<int16_t> ShortArray;
class Reference;
class String;
class Throwable;

// Fields within mirror objects aren't accessed directly so that the appropriate amount of
// handshaking is done with GC (for example, read and write barriers). This macro is used to
// compute an offset for the Set/Get methods defined in Object that can safely access fields.
#define OFFSET_OF_OBJECT_MEMBER(type, field) \
    MemberOffset(OFFSETOF_MEMBER(type, field))

// Checks that we don't do field assignments which violate the typing system.
static constexpr bool kCheckFieldAssignments = false;

// Size of Object.
static constexpr uint32_t kObjectHeaderSize = kUseBrooksReadBarrier ? 16 : 8;
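// The 8 bytes are the klass_ and monitor_ header words declared at the bottom of this class;
// with the Brooks read barrier, x_rb_ptr_ and its padding word bring the header to 16 bytes.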

// C++ mirror of java.lang.Object
class MANAGED LOCKABLE Object {
 public:
  // The number of vtable entries in java.lang.Object.
  static constexpr size_t kVTableLength = 11;

  // The size of the java.lang.Class representing a java.lang.Object.
  static uint32_t ClassSize(PointerSize pointer_size);

  // Size of an instance of java.lang.Object.
  static constexpr uint32_t InstanceSize() {
    return sizeof(Object);
  }

  static MemberOffset ClassOffset() {
    return OFFSET_OF_OBJECT_MEMBER(Object, klass_);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE Class* GetClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetClass(ObjPtr<Class> new_klass) REQUIRES_SHARED(Locks::mutator_lock_);

  // Get the read barrier state with a fake address dependency.
  // '*fake_address_dependency' will be set to 0.
  ALWAYS_INLINE uint32_t GetReadBarrierState(uintptr_t* fake_address_dependency)
      REQUIRES_SHARED(Locks::mutator_lock_);
  // This version does not offer any special mechanism to prevent load-load reordering.
  ALWAYS_INLINE uint32_t GetReadBarrierState() REQUIRES_SHARED(Locks::mutator_lock_);
  // Get the read barrier state with a load-acquire.
  ALWAYS_INLINE uint32_t GetReadBarrierStateAcquire() REQUIRES_SHARED(Locks::mutator_lock_);

#ifndef USE_BAKER_OR_BROOKS_READ_BARRIER
  NO_RETURN
#endif
  ALWAYS_INLINE void SetReadBarrierState(uint32_t rb_state) REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kCasRelease = false>
  ALWAYS_INLINE bool AtomicSetReadBarrierState(uint32_t expected_rb_state, uint32_t rb_state)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE uint32_t GetMarkBit() REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE bool AtomicSetMarkBit(uint32_t expected_mark_bit, uint32_t mark_bit)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Assert that the read barrier state is in the default (white) state.
  ALWAYS_INLINE void AssertReadBarrierState() const REQUIRES_SHARED(Locks::mutator_lock_);

  // The verifier treats all interfaces as java.lang.Object and relies on runtime checks in
  // invoke-interface to detect incompatible interface types.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool VerifierInstanceOf(ObjPtr<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE bool InstanceOf(ObjPtr<Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  size_t SizeOf() REQUIRES_SHARED(Locks::mutator_lock_);

  Object* Clone(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Roles::uninterruptible_);

  int32_t IdentityHashCode()
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::thread_suspend_count_lock_);

  static MemberOffset MonitorOffset() {
    return OFFSET_OF_OBJECT_MEMBER(Object, monitor_);
  }

  // 'as_volatile' can be false if the mutators are suspended. This is an optimization since it
  // avoids the barriers.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  LockWord GetLockWord(bool as_volatile) REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetLockWord(LockWord new_val, bool as_volatile) REQUIRES_SHARED(Locks::mutator_lock_);
  bool CasLockWordWeakSequentiallyConsistent(LockWord old_val, LockWord new_val)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool CasLockWordWeakRelaxed(LockWord old_val, LockWord new_val)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool CasLockWordWeakAcquire(LockWord old_val, LockWord new_val)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool CasLockWordWeakRelease(LockWord old_val, LockWord new_val)
      REQUIRES_SHARED(Locks::mutator_lock_);
  uint32_t GetLockOwnerThreadId();
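
  // Usage sketch (illustrative, not part of the original header): the weak CAS variants above
  // are intended for read-modify-write loops on the lock word; how the new LockWord is built
  // depends on LockWord's API and is elided here.
  //
  //   LockWord old_word = obj->GetLockWord(/* as_volatile= */ false);
  //   LockWord new_word = /* derived from old_word */;
  //   while (!obj->CasLockWordWeakSequentiallyConsistent(old_word, new_word)) {
  //     // A weak CAS may fail spuriously; re-read the lock word and retry.
  //     old_word = obj->GetLockWord(false);
  //     new_word = /* derived from old_word */;
  //   }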

  // Try to enter the monitor; returns non-null if we succeeded.
  mirror::Object* MonitorTryEnter(Thread* self)
      EXCLUSIVE_LOCK_FUNCTION()
      REQUIRES(!Roles::uninterruptible_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  mirror::Object* MonitorEnter(Thread* self)
      EXCLUSIVE_LOCK_FUNCTION()
      REQUIRES(!Roles::uninterruptible_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool MonitorExit(Thread* self)
      REQUIRES(!Roles::uninterruptible_)
      REQUIRES_SHARED(Locks::mutator_lock_)
      UNLOCK_FUNCTION();
  void Notify(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
  void NotifyAll(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
  void Wait(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
  void Wait(Thread* self, int64_t timeout, int32_t nanos) REQUIRES_SHARED(Locks::mutator_lock_);
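
  // Usage sketch (illustrative, not part of the original header): a synchronized-block shape
  // built from the monitor methods above, assuming `self` is the current Thread and `obj` is a
  // managed object; `timeout_ms` is a placeholder and pending-exception checks are elided.
  //
  //   obj->MonitorEnter(self);           // Blocks until the monitor is acquired.
  //   obj->Wait(self, timeout_ms, 0);    // Releases and later re-acquires the monitor.
  //   obj->NotifyAll(self);
  //   obj->MonitorExit(self);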

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsClass() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  Class* AsClass() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsObjectArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<class T,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ObjectArray<T>* AsObjectArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ClassLoader* AsClassLoader() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsDexCache() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  DexCache* AsDexCache() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsArrayInstance() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  Array* AsArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  BooleanArray* AsBooleanArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ByteArray* AsByteArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ByteArray* AsByteSizedArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  CharArray* AsCharArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ShortArray* AsShortArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ShortArray* AsShortSizedArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsIntArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  IntArray* AsIntArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsLongArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  LongArray* AsLongArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsFloatArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  FloatArray* AsFloatArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsDoubleArray() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  DoubleArray* AsDoubleArray() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsString() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  String* AsString() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  Throwable* AsThrowable() REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  bool IsReferenceInstance() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  Reference* AsReference() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsWeakReferenceInstance() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsSoftReferenceInstance() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsFinalizerReferenceInstance() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  FinalizerReference* AsFinalizerReference() REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool IsPhantomReferenceInstance() REQUIRES_SHARED(Locks::mutator_lock_);
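
  // Usage sketch (illustrative, not part of the original header): the Is*/As* pairs above form
  // checked downcasts; callers are expected to test before casting, e.g.
  //
  //   if (obj->IsString()) {
  //     mirror::String* s = obj->AsString();
  //     // ... use s ...
  //   }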

  // Accessors for Java type fields.
  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
      ReadBarrierOption kReadBarrierOption = kWithReadBarrier, bool kIsVolatile = false>
  ALWAYS_INLINE T* GetFieldObject(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
      ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE T* GetFieldObjectVolatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                       ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldObject(MemberOffset field_offset, ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldObjectVolatile(MemberOffset field_offset,
                                            ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
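
  // Usage sketch (illustrative; `Example` and `value_` are hypothetical names): mirror classes
  // typically pair OFFSET_OF_OBJECT_MEMBER with the accessors above so every reference-field
  // access goes through the GC barriers.
  //
  //   static MemberOffset ValueOffset() { return OFFSET_OF_OBJECT_MEMBER(Example, value_); }
  //   Object* GetValue() REQUIRES_SHARED(Locks::mutator_lock_) {
  //     return GetFieldObject<Object>(ValueOffset());
  //   }
  //   void SetValue(ObjPtr<Object> v) REQUIRES_SHARED(Locks::mutator_lock_) {
  //     SetFieldObject</*kTransactionActive=*/ false>(ValueOffset(), v);
  //   }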

  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakSequentiallyConsistentObject(MemberOffset field_offset,
                                                ObjPtr<Object> old_value,
                                                ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakSequentiallyConsistentObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                                   ObjPtr<Object> old_value,
                                                                   ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistentObject(MemberOffset field_offset,
                                                  ObjPtr<Object> old_value,
                                                  ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistentObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                                     ObjPtr<Object> old_value,
                                                                     ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakRelaxedObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                    ObjPtr<Object> old_value,
                                                    ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakReleaseObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                    ObjPtr<Object> old_value,
                                                    ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongRelaxedObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                      ObjPtr<Object> old_value,
                                                      ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<bool kTransactionActive,
           bool kCheckTransaction = true,
           VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongReleaseObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                      ObjPtr<Object> old_value,
                                                      ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
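
  // Note (an assumption based on the corresponding -inl implementations, not stated in this
  // header): the "...WithoutWriteBarrier" variants above appear to leave the card-marking write
  // barrier to the caller, whereas SetFieldObject and the plain CAS variants handle it.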

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  HeapReference<Object>* GetFieldObjectReferenceAddr(MemberOffset field_offset);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE uint8_t GetFieldBoolean(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int8_t GetFieldByte(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE uint8_t GetFieldBooleanVolatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int8_t GetFieldByteVolatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldBoolean(MemberOffset field_offset, uint8_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldByte(MemberOffset field_offset, int8_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldBooleanVolatile(MemberOffset field_offset, uint8_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldByteVolatile(MemberOffset field_offset, int8_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE uint16_t GetFieldChar(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int16_t GetFieldShort(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE uint16_t GetFieldCharVolatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int16_t GetFieldShortVolatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldChar(MemberOffset field_offset, uint16_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetFieldShort(MemberOffset field_offset, int16_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldCharVolatile(MemberOffset field_offset, uint16_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetFieldShortVolatile(MemberOffset field_offset, int16_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int32_t GetField32(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (kVerifyFlags & kVerifyThis) {
      VerifyObject(this);
    }
    return GetField<int32_t, kIsVolatile>(field_offset);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int32_t GetField32Volatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField32<kVerifyFlags, true>(field_offset);
  }

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetField32(MemberOffset field_offset, int32_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetField32Volatile(MemberOffset field_offset, int32_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE bool CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset,
                                                          int32_t old_value, int32_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakRelaxed32(MemberOffset field_offset, int32_t old_value,
                             int32_t new_value) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakAcquire32(MemberOffset field_offset, int32_t old_value,
                             int32_t new_value) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakRelease32(MemberOffset field_offset, int32_t old_value,
                             int32_t new_value) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistent32(MemberOffset field_offset, int32_t old_value,
                                              int32_t new_value) ALWAYS_INLINE
      REQUIRES_SHARED(Locks::mutator_lock_);
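
  // Usage sketch (illustrative, not part of the original header): a typical retry loop around
  // the weak 32-bit CAS, here bumping a counter field at `offset` outside a transaction.
  //
  //   int32_t old_value;
  //   do {
  //     old_value = obj->GetField32(offset);
  //   } while (!obj->CasFieldWeakSequentiallyConsistent32</*kTransactionActive=*/ false>(
  //       offset, old_value, old_value + 1));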

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE int64_t GetField64(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (kVerifyFlags & kVerifyThis) {
      VerifyObject(this);
    }
    return GetField<int64_t, kIsVolatile>(field_offset);
  }

  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE int64_t GetField64Volatile(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetField64<kVerifyFlags, true>(field_offset);
  }

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE void SetField64(MemberOffset field_offset, int64_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  ALWAYS_INLINE void SetField64Volatile(MemberOffset field_offset, int64_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldWeakSequentiallyConsistent64(MemberOffset field_offset, int64_t old_value,
                                            int64_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  bool CasFieldStrongSequentiallyConsistent64(MemberOffset field_offset, int64_t old_value,
                                              int64_t new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
  void SetFieldPtr(MemberOffset field_offset, T new_value)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>(
        field_offset, new_value, kRuntimePointerSize);
  }
  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
  void SetFieldPtr64(MemberOffset field_offset, T new_value)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    SetFieldPtrWithSize<kTransactionActive, kCheckTransaction, kVerifyFlags>(
        field_offset, new_value, PointerSize::k64);
  }

  template<bool kTransactionActive, bool kCheckTransaction = true,
      VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, typename T>
  ALWAYS_INLINE void SetFieldPtrWithSize(MemberOffset field_offset,
                                         T new_value,
                                         PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (pointer_size == PointerSize::k32) {
      uintptr_t ptr = reinterpret_cast<uintptr_t>(new_value);
      DCHECK_EQ(static_cast<uint32_t>(ptr), ptr);  // Check that we don't lose any non-zero bits.
      SetField32<kTransactionActive, kCheckTransaction, kVerifyFlags>(
          field_offset, static_cast<int32_t>(static_cast<uint32_t>(ptr)));
    } else {
      SetField64<kTransactionActive, kCheckTransaction, kVerifyFlags>(
          field_offset, reinterpret_cast64<int64_t>(new_value));
    }
  }
  // TODO: Fix the thread safety analysis broken by the use of templates. This should be
  // REQUIRES_SHARED(Locks::mutator_lock_).
  template <bool kVisitNativeRoots = true,
            VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
            ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
            typename Visitor,
            typename JavaLangRefVisitor = VoidFunctor>
  void VisitReferences(const Visitor& visitor, const JavaLangRefVisitor& ref_visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  ArtField* FindFieldByOffset(MemberOffset offset) REQUIRES_SHARED(Locks::mutator_lock_);

  // Used by object_test.
  static void SetHashCodeSeed(uint32_t new_seed);
  // Generate an identity hash code. Public for object test.
  static uint32_t GenerateIdentityHashCode();

  // Returns a human-readable form of the name of the *class* of the given object.
  // So given an instance of java.lang.String, the output would
  // be "java.lang.String". Given an array of int, the output would be "int[]".
  // Given String.class, the output would be "java.lang.Class<java.lang.String>".
  static std::string PrettyTypeOf(ObjPtr<mirror::Object> obj)
      REQUIRES_SHARED(Locks::mutator_lock_);
  std::string PrettyTypeOf()
      REQUIRES_SHARED(Locks::mutator_lock_);
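
  // Usage sketch (illustrative): PrettyTypeOf is convenient for diagnostics, e.g.
  //   LOG(ERROR) << "Unexpected receiver type: " << obj->PrettyTypeOf();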

 protected:
  // Accessors for non-Java type fields
  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  T GetFieldPtr(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtrWithSize<T, kVerifyFlags, kIsVolatile>(field_offset, kRuntimePointerSize);
  }
  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  T GetFieldPtr64(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFieldPtrWithSize<T, kVerifyFlags, kIsVolatile>(field_offset,
                                                             PointerSize::k64);
  }

  template<class T, VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags, bool kIsVolatile = false>
  ALWAYS_INLINE T GetFieldPtrWithSize(MemberOffset field_offset, PointerSize pointer_size)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (pointer_size == PointerSize::k32) {
      uint64_t address = static_cast<uint32_t>(GetField32<kVerifyFlags, kIsVolatile>(field_offset));
      return reinterpret_cast<T>(static_cast<uintptr_t>(address));
    } else {
      int64_t v = GetField64<kVerifyFlags, kIsVolatile>(field_offset);
      return reinterpret_cast64<T>(v);
    }
  }

  // TODO: Fix this when annotalysis works with visitors.
  template<bool kIsStatic,
          VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
          ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
          typename Visitor>
  void VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) HOT_ATTR
      NO_THREAD_SAFETY_ANALYSIS;
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
           typename Visitor>
  void VisitInstanceFieldsReferences(ObjPtr<mirror::Class> klass, const Visitor& visitor) HOT_ATTR
      REQUIRES_SHARED(Locks::mutator_lock_);
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags,
           ReadBarrierOption kReadBarrierOption = kWithReadBarrier,
           typename Visitor>
  void VisitStaticFieldsReferences(ObjPtr<mirror::Class> klass, const Visitor& visitor) HOT_ATTR
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  template<typename kSize, bool kIsVolatile>
  ALWAYS_INLINE void SetField(MemberOffset field_offset, kSize new_value)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    uint8_t* raw_addr = reinterpret_cast<uint8_t*>(this) + field_offset.Int32Value();
    kSize* addr = reinterpret_cast<kSize*>(raw_addr);
    if (kIsVolatile) {
      reinterpret_cast<Atomic<kSize>*>(addr)->StoreSequentiallyConsistent(new_value);
    } else {
      reinterpret_cast<Atomic<kSize>*>(addr)->StoreJavaData(new_value);
    }
  }

  template<typename kSize, bool kIsVolatile>
  ALWAYS_INLINE kSize GetField(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const uint8_t* raw_addr = reinterpret_cast<const uint8_t*>(this) + field_offset.Int32Value();
    const kSize* addr = reinterpret_cast<const kSize*>(raw_addr);
    if (kIsVolatile) {
      return reinterpret_cast<const Atomic<kSize>*>(addr)->LoadSequentiallyConsistent();
    } else {
      return reinterpret_cast<const Atomic<kSize>*>(addr)->LoadJavaData();
    }
  }

  // Get a field with acquire semantics.
  template<typename kSize>
  ALWAYS_INLINE kSize GetFieldAcquire(MemberOffset field_offset)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Verify the type correctness of stores to fields.
  // TODO: This can cause thread suspension and isn't moving GC safe.
  void CheckFieldAssignmentImpl(MemberOffset field_offset, ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void CheckFieldAssignment(MemberOffset field_offset, ObjPtr<Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (kCheckFieldAssignments) {
      CheckFieldAssignmentImpl(field_offset, new_value);
    }
  }

  // A utility function that copies an object in a read-barrier- and write-barrier-aware way.
  // This is internally used by Clone() and Class::CopyOf(). If the object is finalizable,
  // it is the caller's job to call Heap::AddFinalizerReference.
  static Object* CopyObject(ObjPtr<mirror::Object> dest,
                            ObjPtr<mirror::Object> src,
                            size_t num_bytes)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static Atomic<uint32_t> hash_code_seed;

  // The Class representing the type of the object.
  HeapReference<Class> klass_;
  // Monitor and hash code information.
  uint32_t monitor_;

#ifdef USE_BROOKS_READ_BARRIER
  // Note: names use an 'x' prefix and x_rb_ptr_ is of type uint32_t
  // instead of Object to go with the alphabetical/by-type field order
  // on the Java side.
  uint32_t x_rb_ptr_;      // For the Brooks pointer.
  uint32_t x_xpadding_;    // For 8-byte alignment. TODO: get rid of this.
#endif

  friend class art::ImageWriter;
  friend class art::Monitor;
  friend struct art::ObjectOffsets;  // for verifying offset information
  friend class CopyObjectVisitor;  // for CopyObject().
  friend class CopyClassVisitor;   // for CopyObject().
  DISALLOW_ALLOCATION();
  DISALLOW_IMPLICIT_CONSTRUCTORS(Object);
};

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_OBJECT_H_