/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_MIRROR_OBJECT_INL_H_
#define ART_RUNTIME_MIRROR_OBJECT_INL_H_

#include "object.h"

#include "art_field.h"
#include "art_method.h"
#include "atomic.h"
#include "array-inl.h"
#include "class.h"
#include "lock_word-inl.h"
#include "monitor.h"
#include "object_array-inl.h"
#include "read_barrier-inl.h"
#include "runtime.h"
#include "reference.h"
#include "throwable.h"

namespace art {
namespace mirror {

inline uint32_t Object::ClassSize() {
  uint32_t vtable_entries = kVTableLength;
  return Class::ComputeClassSize(true, vtable_entries, 0, 0, 0);
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline Class* Object::GetClass() {
  return GetFieldObject<Class, kVerifyFlags, kReadBarrierOption>(
      OFFSET_OF_OBJECT_MEMBER(Object, klass_));
}

template<VerifyObjectFlags kVerifyFlags>
inline void Object::SetClass(Class* new_klass) {
  // new_klass may be NULL prior to class linker initialization.
  // We don't mark the card as this occurs as part of object allocation. Not all objects have
  // backing cards, such as large objects.
  // We use non transactional version since we can't undo this write. We also disable checking as
  // we may run in transaction mode here.
  SetFieldObjectWithoutWriteBarrier<false, false,
      static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis)>(
          OFFSET_OF_OBJECT_MEMBER(Object, klass_), new_klass);
}

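// The lock word lives in the monitor_ field and encodes the object's lock state (thin lock,
// inflated/fat lock, or hash code). The volatile accessors are used when another thread may be
// mutating the word concurrently.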
inline LockWord Object::GetLockWord(bool as_volatile) {
  if (as_volatile) {
    return LockWord(GetField32Volatile(OFFSET_OF_OBJECT_MEMBER(Object, monitor_)));
  }
  return LockWord(GetField32(OFFSET_OF_OBJECT_MEMBER(Object, monitor_)));
}

inline void Object::SetLockWord(LockWord new_val, bool as_volatile) {
  // Force use of non-transactional mode and do not check.
  if (as_volatile) {
    SetField32Volatile<false, false>(OFFSET_OF_OBJECT_MEMBER(Object, monitor_), new_val.GetValue());
  } else {
    SetField32<false, false>(OFFSET_OF_OBJECT_MEMBER(Object, monitor_), new_val.GetValue());
  }
}

inline bool Object::CasLockWordWeakSequentiallyConsistent(LockWord old_val, LockWord new_val) {
  // Force use of non-transactional mode and do not check.
  return CasFieldWeakSequentiallyConsistent32<false, false>(
      OFFSET_OF_OBJECT_MEMBER(Object, monitor_), old_val.GetValue(), new_val.GetValue());
}

inline bool Object::CasLockWordWeakRelaxed(LockWord old_val, LockWord new_val) {
  // Force use of non-transactional mode and do not check.
  return CasFieldWeakRelaxed32<false, false>(
      OFFSET_OF_OBJECT_MEMBER(Object, monitor_), old_val.GetValue(), new_val.GetValue());
}

inline uint32_t Object::GetLockOwnerThreadId() {
  return Monitor::GetLockOwnerThreadId(this);
}

inline mirror::Object* Object::MonitorEnter(Thread* self) {
  return Monitor::MonitorEnter(self, this);
}

inline bool Object::MonitorExit(Thread* self) {
  return Monitor::MonitorExit(self, this);
}

inline void Object::Notify(Thread* self) {
  Monitor::Notify(self, this);
}

inline void Object::NotifyAll(Thread* self) {
  Monitor::NotifyAll(self, this);
}

inline void Object::Wait(Thread* self) {
  Monitor::Wait(self, this, 0, 0, true, kWaiting);
}

inline void Object::Wait(Thread* self, int64_t ms, int32_t ns) {
  Monitor::Wait(self, this, ms, ns, true, kTimedWaiting);
}

inline Object* Object::GetReadBarrierPointer() {
#ifdef USE_BAKER_OR_BROOKS_READ_BARRIER
  DCHECK(kUseBakerOrBrooksReadBarrier);
  return GetFieldObject<Object, kVerifyNone, kWithoutReadBarrier>(
      OFFSET_OF_OBJECT_MEMBER(Object, x_rb_ptr_));
#else
  LOG(FATAL) << "Unreachable";
  return nullptr;
#endif
}

inline void Object::SetReadBarrierPointer(Object* rb_ptr) {
#ifdef USE_BAKER_OR_BROOKS_READ_BARRIER
  DCHECK(kUseBakerOrBrooksReadBarrier);
  // We don't mark the card as this occurs as part of object allocation. Not all objects have
  // backing cards, such as large objects.
  SetFieldObjectWithoutWriteBarrier<false, false, kVerifyNone>(
      OFFSET_OF_OBJECT_MEMBER(Object, x_rb_ptr_), rb_ptr);
#else
  LOG(FATAL) << "Unreachable";
#endif
}

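// Atomically installs a new read barrier pointer. The weak CAS may fail spuriously, so the loop
// retries; it only reports failure when another thread has already changed the field away from
// expected_rb_ptr.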
inline bool Object::AtomicSetReadBarrierPointer(Object* expected_rb_ptr, Object* rb_ptr) {
#ifdef USE_BAKER_OR_BROOKS_READ_BARRIER
  DCHECK(kUseBakerOrBrooksReadBarrier);
  MemberOffset offset = OFFSET_OF_OBJECT_MEMBER(Object, x_rb_ptr_);
  byte* raw_addr = reinterpret_cast<byte*>(this) + offset.SizeValue();
  Atomic<uint32_t>* atomic_rb_ptr = reinterpret_cast<Atomic<uint32_t>*>(raw_addr);
  HeapReference<Object> expected_ref(HeapReference<Object>::FromMirrorPtr(expected_rb_ptr));
  HeapReference<Object> new_ref(HeapReference<Object>::FromMirrorPtr(rb_ptr));
  do {
    if (UNLIKELY(atomic_rb_ptr->LoadRelaxed() != expected_ref.reference_)) {
      // Lost the race.
      return false;
    }
  } while (!atomic_rb_ptr->CompareExchangeWeakSequentiallyConsistent(expected_ref.reference_,
                                                                     new_ref.reference_));
  DCHECK_EQ(new_ref.reference_, atomic_rb_ptr->LoadRelaxed());
  return true;
#else
  LOG(FATAL) << "Unreachable";
  return false;
#endif
}

inline void Object::AssertReadBarrierPointer() const {
  if (kUseBakerReadBarrier) {
    Object* obj = const_cast<Object*>(this);
    DCHECK(obj->GetReadBarrierPointer() == nullptr)
        << "Bad Baker pointer: obj=" << reinterpret_cast<void*>(obj)
        << " ptr=" << reinterpret_cast<void*>(obj->GetReadBarrierPointer());
  } else if (kUseBrooksReadBarrier) {
    Object* obj = const_cast<Object*>(this);
    DCHECK_EQ(obj, obj->GetReadBarrierPointer())
        << "Bad Brooks pointer: obj=" << reinterpret_cast<void*>(obj)
        << " ptr=" << reinterpret_cast<void*>(obj->GetReadBarrierPointer());
  } else {
    LOG(FATAL) << "Unreachable";
  }
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::VerifierInstanceOf(Class* klass) {
  DCHECK(klass != NULL);
  DCHECK(GetClass<kVerifyFlags>() != NULL);
  return klass->IsInterface() || InstanceOf(klass);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::InstanceOf(Class* klass) {
  DCHECK(klass != NULL);
  DCHECK(GetClass<kVerifyNone>() != NULL);
  return klass->IsAssignableFrom(GetClass<kVerifyFlags>());
}

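// An object is a Class if its class is java.lang.Class, which is obtained by taking the class of
// this object's class (java.lang.Class is its own class).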
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsClass() {
  Class* java_lang_Class = GetClass<kVerifyFlags, kReadBarrierOption>()->
      template GetClass<kVerifyFlags, kReadBarrierOption>();
  return GetClass<static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis),
                  kReadBarrierOption>() == java_lang_Class;
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline Class* Object::AsClass() {
  DCHECK((IsClass<kVerifyFlags, kReadBarrierOption>()));
  return down_cast<Class*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsObjectArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  return IsArrayInstance<kVerifyFlags>() &&
      !GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitive();
}

template<class T, VerifyObjectFlags kVerifyFlags>
inline ObjectArray<T>* Object::AsObjectArray() {
  DCHECK(IsObjectArray<kVerifyFlags>());
  return down_cast<ObjectArray<T>*>(this);
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsArrayInstance() {
  return GetClass<kVerifyFlags, kReadBarrierOption>()->
      template IsArrayClass<kVerifyFlags, kReadBarrierOption>();
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsArtField() {
  return GetClass<kVerifyFlags, kReadBarrierOption>()->
      template IsArtFieldClass<kReadBarrierOption>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ArtField* Object::AsArtField() {
  DCHECK(IsArtField<kVerifyFlags>());
  return down_cast<ArtField*>(this);
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline bool Object::IsArtMethod() {
  return GetClass<kVerifyFlags, kReadBarrierOption>()->
      template IsArtMethodClass<kReadBarrierOption>();
}

template<VerifyObjectFlags kVerifyFlags>
inline ArtMethod* Object::AsArtMethod() {
  DCHECK(IsArtMethod<kVerifyFlags>());
  return down_cast<ArtMethod*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsTypeOfReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline Reference* Object::AsReference() {
  DCHECK(IsReferenceInstance<kVerifyFlags>());
  return down_cast<Reference*>(this);
}

template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline Array* Object::AsArray() {
  DCHECK((IsArrayInstance<kVerifyFlags, kReadBarrierOption>()));
  return down_cast<Array*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline BooleanArray* Object::AsBooleanArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->GetComponentType()->IsPrimitiveBoolean());
  return down_cast<BooleanArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline ByteArray* Object::AsByteArray() {
  static const VerifyObjectFlags kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveByte());
  return down_cast<ByteArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline ByteArray* Object::AsByteSizedArray() {
  constexpr VerifyObjectFlags kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveByte() ||
         GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveBoolean());
  return down_cast<ByteArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline CharArray* Object::AsCharArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveChar());
  return down_cast<CharArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline ShortArray* Object::AsShortArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveShort());
  return down_cast<ShortArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline ShortArray* Object::AsShortSizedArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveShort() ||
         GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveChar());
  return down_cast<ShortArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline IntArray* Object::AsIntArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveInt() ||
         GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveFloat());
  return down_cast<IntArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline LongArray* Object::AsLongArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveLong() ||
         GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveDouble());
  return down_cast<LongArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline FloatArray* Object::AsFloatArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveFloat());
  return down_cast<FloatArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline DoubleArray* Object::AsDoubleArray() {
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  DCHECK(GetClass<kVerifyFlags>()->IsArrayClass());
  DCHECK(GetClass<kNewFlags>()->template GetComponentType<kNewFlags>()->IsPrimitiveDouble());
  return down_cast<DoubleArray*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline String* Object::AsString() {
  DCHECK(GetClass<kVerifyFlags>()->IsStringClass());
  return down_cast<String*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline Throwable* Object::AsThrowable() {
  DCHECK(GetClass<kVerifyFlags>()->IsThrowableClass());
  return down_cast<Throwable*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsWeakReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsWeakReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsSoftReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsSoftReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsFinalizerReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsFinalizerReferenceClass();
}

template<VerifyObjectFlags kVerifyFlags>
inline FinalizerReference* Object::AsFinalizerReference() {
  DCHECK(IsFinalizerReferenceInstance<kVerifyFlags>());
  return down_cast<FinalizerReference*>(this);
}

template<VerifyObjectFlags kVerifyFlags>
inline bool Object::IsPhantomReferenceInstance() {
  return GetClass<kVerifyFlags>()->IsPhantomReferenceClass();
}

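// Object size is computed from the most specific source available: arrays derive it from their
// length and component size, classes carry their own size, and all other objects read the
// instance size stored in their class.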
template<VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline size_t Object::SizeOf() {
  size_t result;
  constexpr auto kNewFlags = static_cast<VerifyObjectFlags>(kVerifyFlags & ~kVerifyThis);
  if (IsArrayInstance<kVerifyFlags, kReadBarrierOption>()) {
    result = AsArray<kNewFlags, kReadBarrierOption>()->
        template SizeOf<kNewFlags, kReadBarrierOption>();
  } else if (IsClass<kNewFlags, kReadBarrierOption>()) {
    result = AsClass<kNewFlags, kReadBarrierOption>()->
        template SizeOf<kNewFlags, kReadBarrierOption>();
  } else {
    result = GetClass<kNewFlags, kReadBarrierOption>()->
        template GetObjectSize<kNewFlags, kReadBarrierOption>();
  }
  DCHECK_GE(result, sizeof(Object))
      << " class=" << PrettyTypeOf(GetClass<kNewFlags, kReadBarrierOption>());
  DCHECK(!(IsArtField<kNewFlags, kReadBarrierOption>()) || result == sizeof(ArtField));
  return result;
}

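// Raw 32-bit field accessors. Non-volatile accesses use LoadJavaData/StoreJavaData (relaxed
// accesses suitable for non-volatile Java fields); volatile accesses are sequentially consistent.
// When a transaction is active, the old value is recorded first so the write can be undone.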
template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
inline int32_t Object::GetField32(MemberOffset field_offset) {
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  const byte* raw_addr = reinterpret_cast<const byte*>(this) + field_offset.Int32Value();
  const int32_t* word_addr = reinterpret_cast<const int32_t*>(raw_addr);
  if (UNLIKELY(kIsVolatile)) {
    return reinterpret_cast<const Atomic<int32_t>*>(word_addr)->LoadSequentiallyConsistent();
  } else {
    return reinterpret_cast<const Atomic<int32_t>*>(word_addr)->LoadJavaData();
  }
}

template<VerifyObjectFlags kVerifyFlags>
inline int32_t Object::GetField32Volatile(MemberOffset field_offset) {
  return GetField32<kVerifyFlags, true>(field_offset);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
         bool kIsVolatile>
inline void Object::SetField32(MemberOffset field_offset, int32_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField32(this, field_offset,
                                           GetField32<kVerifyFlags, kIsVolatile>(field_offset),
                                           kIsVolatile);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  int32_t* word_addr = reinterpret_cast<int32_t*>(raw_addr);
  if (kIsVolatile) {
    reinterpret_cast<Atomic<int32_t>*>(word_addr)->StoreSequentiallyConsistent(new_value);
  } else {
    reinterpret_cast<Atomic<int32_t>*>(word_addr)->StoreJavaData(new_value);
  }
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetField32Volatile(MemberOffset field_offset, int32_t new_value) {
  SetField32<kTransactionActive, kCheckTransaction, kVerifyFlags, true>(field_offset, new_value);
}

// TODO: Pass memory_order_ and strong/weak as arguments to avoid code duplication?

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldWeakSequentiallyConsistent32(MemberOffset field_offset,
                                                         int32_t old_value, int32_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField32(this, field_offset, old_value, true);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  AtomicInteger* atomic_addr = reinterpret_cast<AtomicInteger*>(raw_addr);

  return atomic_addr->CompareExchangeWeakSequentiallyConsistent(old_value, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldWeakRelaxed32(MemberOffset field_offset,
                                          int32_t old_value, int32_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField32(this, field_offset, old_value, true);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  AtomicInteger* atomic_addr = reinterpret_cast<AtomicInteger*>(raw_addr);

  return atomic_addr->CompareExchangeWeakRelaxed(old_value, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldStrongSequentiallyConsistent32(MemberOffset field_offset,
                                                           int32_t old_value, int32_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField32(this, field_offset, old_value, true);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  AtomicInteger* atomic_addr = reinterpret_cast<AtomicInteger*>(raw_addr);

  return atomic_addr->CompareExchangeStrongSequentiallyConsistent(old_value, new_value);
}

template<VerifyObjectFlags kVerifyFlags, bool kIsVolatile>
inline int64_t Object::GetField64(MemberOffset field_offset) {
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  const byte* raw_addr = reinterpret_cast<const byte*>(this) + field_offset.Int32Value();
  const int64_t* addr = reinterpret_cast<const int64_t*>(raw_addr);
  if (kIsVolatile) {
    return reinterpret_cast<const Atomic<int64_t>*>(addr)->LoadSequentiallyConsistent();
  } else {
    return reinterpret_cast<const Atomic<int64_t>*>(addr)->LoadJavaData();
  }
}

template<VerifyObjectFlags kVerifyFlags>
inline int64_t Object::GetField64Volatile(MemberOffset field_offset) {
  return GetField64<kVerifyFlags, true>(field_offset);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
         bool kIsVolatile>
inline void Object::SetField64(MemberOffset field_offset, int64_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField64(this, field_offset,
                                           GetField64<kVerifyFlags, kIsVolatile>(field_offset),
                                           kIsVolatile);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  int64_t* addr = reinterpret_cast<int64_t*>(raw_addr);
  if (kIsVolatile) {
    reinterpret_cast<Atomic<int64_t>*>(addr)->StoreSequentiallyConsistent(new_value);
  } else {
    reinterpret_cast<Atomic<int64_t>*>(addr)->StoreJavaData(new_value);
  }
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetField64Volatile(MemberOffset field_offset, int64_t new_value) {
  return SetField64<kTransactionActive, kCheckTransaction, kVerifyFlags, true>(field_offset,
                                                                               new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldWeakSequentiallyConsistent64(MemberOffset field_offset,
                                                         int64_t old_value, int64_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField64(this, field_offset, old_value, true);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  Atomic<int64_t>* atomic_addr = reinterpret_cast<Atomic<int64_t>*>(raw_addr);
  return atomic_addr->CompareExchangeWeakSequentiallyConsistent(old_value, new_value);
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldStrongSequentiallyConsistent64(MemberOffset field_offset,
                                                           int64_t old_value, int64_t new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteField64(this, field_offset, old_value, true);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  Atomic<int64_t>* atomic_addr = reinterpret_cast<Atomic<int64_t>*>(raw_addr);
  return atomic_addr->CompareExchangeStrongSequentiallyConsistent(old_value, new_value);
}

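// Reference field read. The raw HeapReference is decoded through ReadBarrier::Barrier so that
// collectors which need a read barrier (Baker/Brooks) can intercept the access; volatile reads
// additionally issue an acquire fence.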
template<class T, VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption,
         bool kIsVolatile>
inline T* Object::GetFieldObject(MemberOffset field_offset) {
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  HeapReference<T>* objref_addr = reinterpret_cast<HeapReference<T>*>(raw_addr);
  T* result = ReadBarrier::Barrier<T, kReadBarrierOption>(this, field_offset, objref_addr);
  if (kIsVolatile) {
    // TODO: Refactor to use a SequentiallyConsistent load instead.
    QuasiAtomic::ThreadFenceAcquire();  // Ensure visibility of operations preceding store.
  }
  if (kVerifyFlags & kVerifyReads) {
    VerifyObject(result);
  }
  return result;
}

template<class T, VerifyObjectFlags kVerifyFlags, ReadBarrierOption kReadBarrierOption>
inline T* Object::GetFieldObjectVolatile(MemberOffset field_offset) {
  return GetFieldObject<T, kVerifyFlags, kReadBarrierOption, true>(field_offset);
}

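// Reference field write that deliberately skips the card-table write barrier. When a transaction
// is active, the previous value is read back first and recorded so the write can be rolled back.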
template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
         bool kIsVolatile>
inline void Object::SetFieldObjectWithoutWriteBarrier(MemberOffset field_offset,
                                                      Object* new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kTransactionActive) {
    mirror::Object* obj;
    if (kIsVolatile) {
      obj = GetFieldObjectVolatile<Object>(field_offset);
    } else {
      obj = GetFieldObject<Object>(field_offset);
    }
    Runtime::Current()->RecordWriteFieldReference(this, field_offset, obj, true);
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  if (kVerifyFlags & kVerifyWrites) {
    VerifyObject(new_value);
  }
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  HeapReference<Object>* objref_addr = reinterpret_cast<HeapReference<Object>*>(raw_addr);
  if (kIsVolatile) {
    // TODO: Refactor to use a SequentiallyConsistent store instead.
    QuasiAtomic::ThreadFenceRelease();  // Ensure that prior accesses are visible before store.
    objref_addr->Assign(new_value);
    QuasiAtomic::ThreadFenceSequentiallyConsistent();
                                // Ensure this store occurs before any volatile loads.
  } else {
    objref_addr->Assign(new_value);
  }
}

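// Reference field write with the card-table write barrier: after the store, the card for this
// object is dirtied so the GC rescans it, but only when the new value is non-null.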
template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags,
         bool kIsVolatile>
inline void Object::SetFieldObject(MemberOffset field_offset, Object* new_value) {
  SetFieldObjectWithoutWriteBarrier<kTransactionActive, kCheckTransaction, kVerifyFlags,
                                    kIsVolatile>(field_offset, new_value);
  if (new_value != nullptr) {
    Runtime::Current()->GetHeap()->WriteBarrierField(this, field_offset, new_value);
    // TODO: Check field assignment could theoretically cause thread suspension, TODO: fix this.
    CheckFieldAssignment(field_offset, new_value);
  }
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline void Object::SetFieldObjectVolatile(MemberOffset field_offset, Object* new_value) {
  SetFieldObject<kTransactionActive, kCheckTransaction, kVerifyFlags, true>(field_offset,
                                                                            new_value);
}

template <VerifyObjectFlags kVerifyFlags>
inline HeapReference<Object>* Object::GetFieldObjectReferenceAddr(MemberOffset field_offset) {
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  return reinterpret_cast<HeapReference<Object>*>(reinterpret_cast<byte*>(this) +
      field_offset.Int32Value());
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldWeakSequentiallyConsistentObject(MemberOffset field_offset,
                                                             Object* old_value, Object* new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  if (kVerifyFlags & kVerifyWrites) {
    VerifyObject(new_value);
  }
  if (kVerifyFlags & kVerifyReads) {
    VerifyObject(old_value);
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteFieldReference(this, field_offset, old_value, true);
  }
  HeapReference<Object> old_ref(HeapReference<Object>::FromMirrorPtr(old_value));
  HeapReference<Object> new_ref(HeapReference<Object>::FromMirrorPtr(new_value));
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  Atomic<uint32_t>* atomic_addr = reinterpret_cast<Atomic<uint32_t>*>(raw_addr);

  bool success = atomic_addr->CompareExchangeWeakSequentiallyConsistent(old_ref.reference_,
                                                                        new_ref.reference_);

  if (success) {
    Runtime::Current()->GetHeap()->WriteBarrierField(this, field_offset, new_value);
  }
  return success;
}

template<bool kTransactionActive, bool kCheckTransaction, VerifyObjectFlags kVerifyFlags>
inline bool Object::CasFieldStrongSequentiallyConsistentObject(MemberOffset field_offset,
                                                               Object* old_value, Object* new_value) {
  if (kCheckTransaction) {
    DCHECK_EQ(kTransactionActive, Runtime::Current()->IsActiveTransaction());
  }
  if (kVerifyFlags & kVerifyThis) {
    VerifyObject(this);
  }
  if (kVerifyFlags & kVerifyWrites) {
    VerifyObject(new_value);
  }
  if (kVerifyFlags & kVerifyReads) {
    VerifyObject(old_value);
  }
  if (kTransactionActive) {
    Runtime::Current()->RecordWriteFieldReference(this, field_offset, old_value, true);
  }
  HeapReference<Object> old_ref(HeapReference<Object>::FromMirrorPtr(old_value));
  HeapReference<Object> new_ref(HeapReference<Object>::FromMirrorPtr(new_value));
  byte* raw_addr = reinterpret_cast<byte*>(this) + field_offset.Int32Value();
  Atomic<uint32_t>* atomic_addr = reinterpret_cast<Atomic<uint32_t>*>(raw_addr);

  bool success = atomic_addr->CompareExchangeStrongSequentiallyConsistent(old_ref.reference_,
                                                                          new_ref.reference_);

  if (success) {
    Runtime::Current()->GetHeap()->WriteBarrierField(this, field_offset, new_value);
  }
  return success;
}

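// Visits every reference field described by ref_offsets. In the common case this is a bitmap in
// which each set bit encodes a reference field offset; when the bitmap could not encode all
// offsets (CLASS_WALK_SUPER), the class hierarchy is walked instead and each class's reference
// fields are visited directly.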
template<bool kVisitClass, bool kIsStatic, typename Visitor>
inline void Object::VisitFieldsReferences(uint32_t ref_offsets, const Visitor& visitor) {
  if (LIKELY(ref_offsets != CLASS_WALK_SUPER)) {
    if (!kVisitClass) {
      // Mask out the class from the reference offsets.
      ref_offsets ^= kWordHighBitMask;
    }
    DCHECK_EQ(ClassOffset().Uint32Value(), 0U);
    // Found a reference offset bitmap. Visit the specified offsets.
    while (ref_offsets != 0) {
      size_t right_shift = CLZ(ref_offsets);
      MemberOffset field_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
      visitor(this, field_offset, kIsStatic);
      ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
    }
  } else {
    // There is no reference offset bitmap. In the non-static case, walk up the class
    // inheritance hierarchy and find reference offsets the hard way. In the static case, just
    // consider this class.
    for (mirror::Class* klass = kIsStatic ? AsClass() : GetClass(); klass != nullptr;
         klass = kIsStatic ? nullptr : klass->GetSuperClass()) {
      size_t num_reference_fields =
          kIsStatic ? klass->NumReferenceStaticFields() : klass->NumReferenceInstanceFields();
      if (num_reference_fields == 0u) {
        continue;
      }
      MemberOffset field_offset = kIsStatic
          ? klass->GetFirstReferenceStaticFieldOffset()
          : klass->GetFirstReferenceInstanceFieldOffset();
      for (size_t i = 0; i < num_reference_fields; ++i) {
        // TODO: Do a simpler check?
        if (kVisitClass || field_offset.Uint32Value() != ClassOffset().Uint32Value()) {
          visitor(this, field_offset, kIsStatic);
        }
        field_offset = MemberOffset(field_offset.Uint32Value() +
                                    sizeof(mirror::HeapReference<mirror::Object>));
      }
    }
  }
}

template<bool kVisitClass, typename Visitor>
inline void Object::VisitInstanceFieldsReferences(mirror::Class* klass, const Visitor& visitor) {
  VisitFieldsReferences<kVisitClass, false>(
      klass->GetReferenceInstanceOffsets<kVerifyNone>(), visitor);
}

template<bool kVisitClass, typename Visitor>
inline void Object::VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor) {
  DCHECK(!klass->IsTemp());
  klass->VisitFieldsReferences<kVisitClass, true>(
      klass->GetReferenceStaticOffsets<kVerifyNone>(), visitor);
}

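// Top-level reference visitor: dispatches on the object's kind. Classes visit their own fields
// via Class::VisitReferences, object arrays visit their elements, and ordinary instances visit
// their instance fields; java.lang.ref.Reference instances additionally invoke ref_visitor so
// the caller can handle the referent.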
template <const bool kVisitClass, VerifyObjectFlags kVerifyFlags, typename Visitor,
          typename JavaLangRefVisitor>
inline void Object::VisitReferences(const Visitor& visitor,
                                    const JavaLangRefVisitor& ref_visitor) {
  mirror::Class* klass = GetClass<kVerifyFlags>();
  if (klass == Class::GetJavaLangClass()) {
    AsClass<kVerifyNone>()->VisitReferences<kVisitClass>(klass, visitor);
  } else if (klass->IsArrayClass()) {
    if (klass->IsObjectArrayClass<kVerifyNone>()) {
      AsObjectArray<mirror::Object, kVerifyNone>()->VisitReferences<kVisitClass>(visitor);
    } else if (kVisitClass) {
      visitor(this, ClassOffset(), false);
    }
  } else {
    DCHECK(!klass->IsVariableSize());
    VisitInstanceFieldsReferences<kVisitClass>(klass, visitor);
    if (UNLIKELY(klass->IsTypeOfReferenceClass<kVerifyNone>())) {
      ref_visitor(klass, AsReference());
    }
  }
}

}  // namespace mirror
}  // namespace art

#endif  // ART_RUNTIME_MIRROR_OBJECT_INL_H_