/art/runtime/
D | subtype_check.h
    476  SubtypeCheckBitsAndStatus old_value = ReadField(klass);  in WriteSubtypeCheckBits() local
    479  SubtypeCheckBits old_bits = old_value.subtype_check_info_;  in WriteSubtypeCheckBits()
    486  full_old = old_value;  in WriteSubtypeCheckBits()
    487  old_status = old_value.status_;  in WriteSubtypeCheckBits()
    489  new_value = old_value;  in WriteSubtypeCheckBits()
    504  old_value.int32_alias_,  in WriteSubtypeCheckBits()
    520  SubtypeCheckBitsAndStatus old_value = ReadField(klass);  in WriteStatusImpl() local
    521  old_status = old_value.status_;  in WriteStatusImpl()
    528  new_value = old_value;  in WriteStatusImpl()
    533  old_value.int32_alias_,  in WriteStatusImpl()
    [all …]

D | art_method.cc
    928  bool CompareExchange(uintptr_t ptr, uintptr_t old_value, uintptr_t new_value) {  in CompareExchange() argument
    930  T cast_old_value = dchecked_integral_cast<T>(old_value);  in CompareExchange()
    946  uintptr_t old_value = reinterpret_cast<uintptr_t>(current_entry_point);  in SetEntryPointFromQuickCompiledCodePtrSize() local
    950  ? CompareExchange<uint32_t>(ptr, old_value, new_value)  in SetEntryPointFromQuickCompiledCodePtrSize()
    951  : CompareExchange<uint64_t>(ptr, old_value, new_value);  in SetEntryPointFromQuickCompiledCodePtrSize()

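The art_method.cc hits show a CAS dispatched on pointer width: the quick-code entry point is updated as a 32-bit or a 64-bit integer depending on the runtime's pointer size. A minimal sketch of that dispatch using plain std::atomic (hypothetical names; ART uses its own Atomic wrapper and dchecked casts):

    #include <atomic>
    #include <cstdint>

    // Sketch: CAS a pointer-sized field at address `ptr` as either a
    // 32-bit or a 64-bit integer.
    template <typename T>
    bool CompareExchange(uintptr_t ptr, uintptr_t old_value, uintptr_t new_value) {
      T cast_old_value = static_cast<T>(old_value);
      T cast_new_value = static_cast<T>(new_value);
      auto* atomic_addr = reinterpret_cast<std::atomic<T>*>(ptr);
      return atomic_addr->compare_exchange_strong(cast_old_value, cast_new_value,
                                                  std::memory_order_relaxed);
    }

    bool CasPointerSized(uintptr_t ptr, uintptr_t old_value, uintptr_t new_value,
                         bool pointer_size_is_32bit) {
      return pointer_size_is_32bit
          ? CompareExchange<uint32_t>(ptr, old_value, new_value)
          : CompareExchange<uint64_t>(ptr, old_value, new_value);
    }
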
D | subtype_check_test.cc
    90   int32_t old_value,  in CasField32()
    96   if (old_value == GetField32Volatile(offset)) {  in CasField32()

/art/runtime/mirror/
D | object-inl.h
    557  int64_t old_value,  in CasFieldWeakSequentiallyConsistent64() argument
    563  bool success = atomic_addr->CompareAndSetWeakSequentiallyConsistent(old_value, new_value);  in CasFieldWeakSequentiallyConsistent64()
    566  this, field_offset, old_value, /*is_volatile=*/ true);  in CasFieldWeakSequentiallyConsistent64()
    573  int64_t old_value,  in CasFieldStrongSequentiallyConsistent64() argument
    579  bool success = atomic_addr->CompareAndSetStrongSequentiallyConsistent(old_value, new_value);  in CasFieldStrongSequentiallyConsistent64()
    582  this, field_offset, old_value, /*is_volatile=*/ true);  in CasFieldStrongSequentiallyConsistent64()
    620  ObjPtr<Object> old_value =  in SetFieldObjectWithoutWriteBarrier() local
    623  this, field_offset, old_value, kIsVolatile);  in SetFieldObjectWithoutWriteBarrier()
    670  ObjPtr<Object> old_value,  in CasFieldObjectWithoutWriteBarrier() argument
    675  VerifyCAS<kVerifyFlags>(new_value, old_value);  in CasFieldObjectWithoutWriteBarrier()
    [all …]

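object-inl.h pairs a weak and a strong sequentially consistent 64-bit field CAS. The distinction, sketched with plain std::atomic (an illustration, not ART's field accessors): a weak CAS may fail spuriously and belongs in a retry loop, while a strong CAS fails only on a real value mismatch.

    #include <atomic>
    #include <cstdint>

    // Weak CAS: may fail spuriously; intended for retry loops.
    bool CasWeakSeqCst64(std::atomic<int64_t>* addr, int64_t old_value,
                         int64_t new_value) {
      return addr->compare_exchange_weak(old_value, new_value,
                                         std::memory_order_seq_cst);
    }

    // Strong CAS: fails only if *addr actually differed from old_value.
    bool CasStrongSeqCst64(std::atomic<int64_t>* addr, int64_t old_value,
                           int64_t new_value) {
      return addr->compare_exchange_strong(old_value, new_value,
                                           std::memory_order_seq_cst);
    }
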
D | object-readbarrier-inl.h
    43   int32_t old_value,  in CasField32() argument
    56   bool success = atomic_addr->CompareAndSet(old_value, new_value, mode, memory_order);  in CasField32()
    59   this, field_offset, old_value, /*is_volatile=*/ true);  in CasField32()

D | var_handle.cc
    416  T old_value = atom->exchange(new_value_, MO);  in Access() local
    417  StoreResult(old_value, result_);  in Access()
    444  T old_value = atom->load(std::memory_order_relaxed);  in Apply() local
    447  new_value = old_value + addend;  in Apply()
    448  } while (!atom->compare_exchange_weak(old_value, new_value, MO, std::memory_order_relaxed));  in Apply()
    449  return old_value;  in Apply()
    460  T old_value = AtomicGetAndAddOperator<T, kIsFloatingPoint, MO>::Apply(addr, addend_);  in Access() local
    461  StoreResult(old_value, result_);  in Access()
    511  T old_value = atom->fetch_or(value_, MO);  in Access() local
    512  StoreResult(old_value, result_);  in Access()
    [all …]

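The Apply() loop at var_handle.cc:444-449 is the classic get-and-add fallback for types without a native fetch_add (notably floating point before C++20). A self-contained sketch of the same loop:

    #include <atomic>

    // Sketch: get-and-add built from a weak CAS retry loop. On failure,
    // compare_exchange_weak reloads old_value with the current contents,
    // so each retry recomputes new_value from fresh data.
    template <typename T>
    T GetAndAdd(std::atomic<T>* atom, T addend, std::memory_order mo) {
      T old_value = atom->load(std::memory_order_relaxed);
      T new_value;
      do {
        new_value = old_value + addend;
      } while (!atom->compare_exchange_weak(old_value, new_value, mo,
                                            std::memory_order_relaxed));
      return old_value;  // value observed before the add
    }
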
D | object.h
    327  ObjPtr<Object> old_value,
    336  ObjPtr<Object> old_value,
    346  ObjPtr<Object> old_value,
    507  int32_t old_value,
    549  int64_t old_value,
    557  int64_t old_value,
    780  ALWAYS_INLINE void VerifyCAS(ObjPtr<mirror::Object> new_value, ObjPtr<mirror::Object> old_value)  in VerifyCAS() argument
    783  VerifyRead<kVerifyFlags>(old_value);  in VerifyCAS()

D | class-inl.h
    1118  T old_value = GetFieldPtrWithSize<T, kVerifyFlags>(member_offset, pointer_size);  in FixupNativePointer() local
    1119  T new_value = visitor(old_value, address);  in FixupNativePointer()
    1120  if (old_value != new_value) {  in FixupNativePointer()

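FixupNativePointer() writes back only when the visitor actually changed the value, which keeps pages that need no relocation clean (useful for memory-mapped images). Sketched generically (hypothetical names):

    // Sketch: read-modify-write that skips the store when nothing changed,
    // avoiding dirtying otherwise untouched memory pages.
    template <typename T, typename Visitor>
    void FixupPointer(T* slot, const Visitor& visitor) {
      T old_value = *slot;
      T new_value = visitor(old_value);
      if (old_value != new_value) {
        *slot = new_value;
      }
    }
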
/art/runtime/base/
D | quasi_atomic.h
    142  static bool Cas64(int64_t old_value, int64_t new_value, volatile int64_t* addr) {  in Cas64() argument
    144  return __sync_bool_compare_and_swap(addr, old_value, new_value);  in Cas64()
    146  return SwapMutexCas64(old_value, new_value, addr);  in Cas64()
    167  static bool SwapMutexCas64(int64_t old_value, int64_t new_value, volatile int64_t* addr);

D | quasi_atomic.cc
    58   bool QuasiAtomic::SwapMutexCas64(int64_t old_value, int64_t new_value, volatile int64_t* addr) {  in SwapMutexCas64() argument
    60   if (*addr == old_value) {  in SwapMutexCas64()

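Cas64() at quasi_atomic.h:142-146 uses a native 64-bit CAS where the target supports one and otherwise falls back to SwapMutexCas64(). A sketch of such a mutex-guarded fallback, assuming a single global lock (a hypothetical simplification; the real fallback's locking scheme may differ):

    #include <cstdint>
    #include <mutex>

    std::mutex g_cas64_mutex;  // hypothetical single lock for all 64-bit CASes

    // Sketch: CAS emulated with a mutex. Correct only if every concurrent
    // access to *addr goes through the same lock.
    bool SwapMutexCas64(int64_t old_value, int64_t new_value, volatile int64_t* addr) {
      std::lock_guard<std::mutex> lock(g_cas64_mutex);
      if (*addr == old_value) {
        *addr = new_value;
        return true;
      }
      return false;
    }
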
/art/compiler/optimizing/
D | intrinsics_riscv64.cc
    1075  XRegister old_value,  in EmitLoadReserved() argument
    1079  __ LrW(old_value, ptr, aqrl);  in EmitLoadReserved()
    1082  __ LrW(old_value, ptr, aqrl);  in EmitLoadReserved()
    1088  __ ZextW(old_value, old_value);  in EmitLoadReserved()
    1091  __ LrD(old_value, ptr, aqrl);  in EmitLoadReserved()
    1242  XRegister old_value,  in GenerateCompareAndSet() argument
    1279  EmitLoadReserved(assembler, type, ptr, old_value, load_aqrl);  in GenerateCompareAndSet()
    1286  __ And(masked, old_value, mask);  in GenerateCompareAndSet()
    1290  to_store = old_value;  in GenerateCompareAndSet()
    1293  __ Xor(to_store, old_value, masked);  in GenerateCompareAndSet()
    [all …]

D | intrinsics_arm_vixl.cc
    3263  Location old_value) {  in EmitLoadExclusive() argument
    3268  __ Ldrexb(RegisterFrom(old_value), MemOperand(ptr));  in EmitLoadExclusive()
    3272  __ Ldrexh(RegisterFrom(old_value), MemOperand(ptr));  in EmitLoadExclusive()
    3276  __ Ldrex(RegisterFrom(old_value), MemOperand(ptr));  in EmitLoadExclusive()
    3279  __ Ldrexd(LowRegisterFrom(old_value), HighRegisterFrom(old_value), MemOperand(ptr));  in EmitLoadExclusive()
    3287  __ Sxtb(RegisterFrom(old_value), RegisterFrom(old_value));  in EmitLoadExclusive()
    3290  __ Sxth(RegisterFrom(old_value), RegisterFrom(old_value));  in EmitLoadExclusive()
    3293  assembler->MaybeUnpoisonHeapReference(RegisterFrom(old_value));  in EmitLoadExclusive()
    3343  Location old_value,  in GenerateCompareAndSet() argument
    3353  DCHECK(old_value.IsRegisterPair());  in GenerateCompareAndSet()
    [all …]

D | intrinsics_arm64.cc
    1142  Register old_value,  in EmitLoadExclusive() argument
    1151  __ Ldaxrb(old_value, MemOperand(ptr));  in EmitLoadExclusive()
    1153  __ Ldxrb(old_value, MemOperand(ptr));  in EmitLoadExclusive()
    1159  __ Ldaxrh(old_value, MemOperand(ptr));  in EmitLoadExclusive()
    1161  __ Ldxrh(old_value, MemOperand(ptr));  in EmitLoadExclusive()
    1168  __ Ldaxr(old_value, MemOperand(ptr));  in EmitLoadExclusive()
    1170  __ Ldxr(old_value, MemOperand(ptr));  in EmitLoadExclusive()
    1179  __ Sxtb(old_value, old_value);  in EmitLoadExclusive()
    1182  __ Sxth(old_value, old_value);  in EmitLoadExclusive()
    1185  assembler->MaybeUnpoisonHeapReference(old_value);  in EmitLoadExclusive()
    [all …]

D | load_store_elimination.cc
    365   static constexpr Value PartialUnknown(Value old_value) {  in PartialUnknown() argument
    366   if (old_value.IsInvalid() || old_value.IsPureUnknown()) {  in PartialUnknown()
    369   return Value(PriorValueHolder(old_value));  in PartialUnknown()
    1723  Value old_value = record.value;  in VisitGetLocation() local
    1726  KeepStores(old_value);  in VisitGetLocation()

/art/openjdkjvmti/
D | ti_search.cc
    149  ScopedLocalRef<jobject> old_value(self->GetJniEnv(),  in Update() local
    154  DCHECK(old_value.get() != nullptr);  in Update()
    156  str_value = self->DecodeJObject(old_value.get())->AsString()->ToModifiedUtf8();  in Update()
    157  self->GetJniEnv()->DeleteLocalRef(old_value.release());  in Update()

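ti_search.cc holds the old value in a ScopedLocalRef and release()s it before an explicit DeleteLocalRef. A simplified sketch of that RAII idiom (an assumption-laden illustration, not the actual libnativehelper/ART class):

    #include <jni.h>

    // Sketch: RAII holder for a JNI local reference. The destructor deletes
    // the reference unless release() transferred ownership to the caller.
    class ScopedLocalRefSketch {
     public:
      ScopedLocalRefSketch(JNIEnv* env, jobject ref) : env_(env), ref_(ref) {}
      ~ScopedLocalRefSketch() {
        if (ref_ != nullptr) {
          env_->DeleteLocalRef(ref_);
        }
      }
      jobject get() const { return ref_; }
      jobject release() {  // caller takes over deleting the reference
        jobject released = ref_;
        ref_ = nullptr;
        return released;
      }
     private:
      JNIEnv* env_;
      jobject ref_;
    };
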
/art/runtime/gc/accounting/
D | card_table-inl.h
    33   static inline bool byte_cas(uint8_t old_value, uint8_t new_value, uint8_t* address) {  in byte_cas() argument
    36   return byte_atomic->CompareAndSetWeakRelaxed(old_value, new_value);  in byte_cas()
    48   const uintptr_t old_word = cur_word | (static_cast<uintptr_t>(old_value) << shift_in_bits);  in byte_cas()

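byte_cas() has two paths: a direct one-byte atomic CAS, and (line 48) an emulation that splices the byte into a word-wide CAS for targets without byte-sized atomics. A sketch of the emulation, assuming little-endian byte order; a concurrent change to a neighboring byte makes the CAS fail spuriously, which is acceptable under weak-CAS semantics:

    #include <atomic>
    #include <cstdint>

    // Sketch: emulate CAS on one byte with a CAS on the containing word.
    bool ByteCasViaWord(uint8_t old_value, uint8_t new_value, uint8_t* address) {
      const uintptr_t addr_int = reinterpret_cast<uintptr_t>(address);
      const uintptr_t word_mask = sizeof(uintptr_t) - 1;
      const size_t shift_in_bits = (addr_int & word_mask) * 8;  // little-endian
      auto* word_atomic =
          reinterpret_cast<std::atomic<uintptr_t>*>(addr_int & ~word_mask);
      // Snapshot the word with the target byte cleared, then build the
      // expected and desired words around that snapshot.
      const uintptr_t cur_word = word_atomic->load(std::memory_order_relaxed) &
                                 ~(uintptr_t{0xff} << shift_in_bits);
      uintptr_t old_word = cur_word | (uintptr_t{old_value} << shift_in_bits);
      const uintptr_t new_word = cur_word | (uintptr_t{new_value} << shift_in_bits);
      return word_atomic->compare_exchange_weak(old_word, new_word,
                                                std::memory_order_relaxed);
    }
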
/art/libartbase/base/
D | leb128.h
    183  uint32_t old_value = DecodeUnsignedLeb128(&old_end);  in UpdateUnsignedLeb128() local
    184  DCHECK_LE(UnsignedLeb128Size(value), UnsignedLeb128Size(old_value));  in UpdateUnsignedLeb128()

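UpdateUnsignedLeb128() rewrites an encoded value in place, and the DCHECK at line 184 enforces that the new encoding fits within the old one's byte count. A sketch of the idea with the old length passed explicitly (the real helper derives it by decoding the old value first); padding with continuation bytes keeps the total length unchanged:

    #include <cstddef>
    #include <cstdint>

    // Sketch: encode `value` into exactly `old_size` ULEB128 bytes.
    // Precondition: value fits in old_size * 7 bits.
    void UpdateUnsignedLeb128Sketch(uint8_t* data, size_t old_size, uint32_t value) {
      for (size_t i = 0; i < old_size; ++i) {
        uint8_t byte = value & 0x7f;
        value >>= 7;
        if (i + 1 < old_size) {
          byte |= 0x80;  // continuation bit, possibly over zero padding
        }
        data[i] = byte;
      }
    }
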
D | leb128_test.cc
    403  uint32_t old_value = uleb128_tests[i].decoded;  in TEST() local
    406  if (new_value <= old_value) {  in TEST()
    408  uint8_t* old_end = EncodeUnsignedLeb128(encoded_data, old_value);  in TEST()

D | stl_util.h
    112  void ReplaceElement(Container& container, const T& old_value, const T& new_value) {  in ReplaceElement() argument
    113  auto it = std::find(container.begin(), container.end(), old_value);  in ReplaceElement()

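ReplaceElement() is a thin wrapper over std::find. Completing the two lines shown (a sketch; the ART helper additionally DCHECKs that the element was found):

    #include <algorithm>

    // Sketch: overwrite the first occurrence of old_value with new_value.
    template <typename Container, typename T>
    void ReplaceElement(Container& container, const T& old_value, const T& new_value) {
      auto it = std::find(container.begin(), container.end(), old_value);
      if (it != container.end()) {
        *it = new_value;
      }
    }
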
/art/test/
D | default_run.py
    207  old_value = getattr(args, name)
    208  …assert isinstance(new_value, old_value.__class__), name + " should have type " + str(old_value.__c…
    209  if isinstance(old_value, list):
    210  setattr(args, name, old_value + new_value)  # Lists get merged.

/art/runtime/gc/space/
D | image_space.cc
    442  T* old_value = root->template Read<kWithoutReadBarrier>();  in PatchGcRoot() local
    443  DCHECK(kMayBeNull || old_value != nullptr);  in PatchGcRoot()
    444  if (!kMayBeNull || old_value != nullptr) {  in PatchGcRoot()
    445  *root = GcRoot<T>(heap_visitor_(old_value));  in PatchGcRoot()
    453  T* old_value = reinterpret_cast64<T*>(*raw_entry);  in PatchNativePointer() local
    454  DCHECK(kMayBeNull || old_value != nullptr);  in PatchNativePointer()
    455  if (!kMayBeNull || old_value != nullptr) {  in PatchNativePointer()
    456  T* new_value = native_visitor_(old_value);  in PatchNativePointer()
    461  T* old_value = reinterpret_cast32<T*>(*raw_entry);  in PatchNativePointer() local
    462  DCHECK(kMayBeNull || old_value != nullptr);  in PatchNativePointer()
    [all …]

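PatchGcRoot() and PatchNativePointer() share one shape: read the old pointer, run it through a relocation visitor, and write it back, with a kMayBeNull template flag selecting whether null is tolerated or DCHECK-rejected. A generic sketch of that shape (hypothetical names):

    // Sketch: relocate a pointer slot through a visitor. With
    // kMayBeNull = true a null slot is left untouched; with false the
    // caller guarantees the slot is non-null.
    template <bool kMayBeNull, typename T, typename Visitor>
    void PatchPointer(T** slot, const Visitor& visitor) {
      T* old_value = *slot;
      if (!kMayBeNull || old_value != nullptr) {
        *slot = visitor(old_value);  // visitor maps old address to new address
      }
    }
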
/art/runtime/interpreter/mterp/arm64ng/
D | main.S
    725  .macro UPDATE_REGISTERS_FOR_STRING_INIT old_value, new_value
    729  cmp wip2, \old_value

/art/runtime/interpreter/mterp/armng/
D | main.S
    716  .macro UPDATE_REGISTERS_FOR_STRING_INIT old_value, new_value
    720  cmp lr, \old_value

/art/runtime/interpreter/mterp/x86_64ng/
D | main.S
    1086  .macro UPDATE_REGISTERS_FOR_STRING_INIT old_value, new_value
    1090  cmpl (%r9), \old_value

/art/runtime/interpreter/mterp/x86ng/
D | main.S
    1085  .macro UPDATE_REGISTERS_FOR_STRING_INIT old_value, new_value
    1089  cmpl (%edx), \old_value