Searched refs:TR (Results 1 – 20 of 20) sorted by relevance

/art/disassembler/
disassembler_arm64.cc:39 TR = 19, enumerator
50 if (reg.GetCode() == TR) { in AppendRegisterNameToOutput()
129 if (instr->GetRn() == TR) { in VisitLoadStoreUnsignedOffsetInstr()
140 target->GetRn() == TR && in VisitUnconditionalBranchInstr()
disassembler_arm.cc:40 static const vixl::aarch32::Register tr(TR);
disassembler_riscv64.cc:47 TR = 9, enumerator
365 if (rs1 == TR && offset >= 0) { in PrintLoadStoreAddress()
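The disassemblers special-case the thread register so that TR-relative operands print under the ART-specific name rather than the raw GPR. A minimal self-contained sketch of that check (the TR = 19 value is from the arm64 hit above; the helper and its name are illustrative):

```cpp
#include <cstdint>
#include <string>

enum Arm64Register : uint32_t { TR = 19 };  // value from disassembler_arm64.cc:39

std::string RegisterName(uint32_t code) {
  if (code == TR) {
    return "tr";  // mirror AppendRegisterNameToOutput(): use the ART name
  }
  return "x" + std::to_string(code);
}
```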
/art/compiler/utils/riscv64/
jni_macro_assembler_riscv64.cc:199 __ Stored(src, TR, offs.Int32Value()); in StoreStackPointerToThread()
238 Riscv64ManagedRegister tr = Riscv64ManagedRegister::FromXRegister(TR); in LoadRawPtrFromThread()
403 __ Mv(dest.AsRiscv64().AsXRegister(), TR); in GetCurrentThread()
407 __ Stored(TR, SP, offset.Int32Value()); in GetCurrentThread()
454 Call(Riscv64ManagedRegister::FromXRegister(TR), offset); in CallFromThread()
474 __ LrW(scratch, TR, AqRl::kNone); in TryToTransitionFromRunnableToNative()
482 __ ScW(scratch, scratch2, TR, AqRl::kRelease); in TryToTransitionFromRunnableToNative()
486 __ Stored(Zero, TR, thread_held_mutex_mutator_lock_offset.Int32Value()); in TryToTransitionFromRunnableToNative()
511 __ LrW(scratch, TR, AqRl::kAcquire); in TryToTransitionFromNativeToRunnable()
521 __ ScW(scratch, Zero, TR, AqRl::kNone); in TryToTransitionFromNativeToRunnable()
[all …]
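The runnable/native transitions above are LR/SC retry loops on Thread's state-and-flags word, addressed directly through TR. A hedged sketch of one attempt, modeled as a weak CAS with the same release ordering (the state encodings are assumed stand-ins):

```cpp
#include <atomic>
#include <cstdint>

constexpr uint32_t kRunnable = 0;  // assumed encoding
constexpr uint32_t kNative = 1;   // assumed encoding

struct Thread {
  std::atomic<uint32_t> state_and_flags;  // the word LrW/ScW address via TR
};

// One LR/SC attempt as a weak CAS: AqRl::kNone on the load-reserved and
// AqRl::kRelease on the store-conditional map to release ordering here.
bool TryToTransitionFromRunnableToNative(Thread* self) {
  uint32_t expected = kRunnable;
  return self->state_and_flags.compare_exchange_weak(
      expected, kNative, std::memory_order_release, std::memory_order_relaxed);
}
```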
/art/compiler/utils/arm64/
jni_macro_assembler_arm64.cc:68 ___ Mov(reg_x(dest.AsArm64().AsXRegister()), reg_x(TR)); in GetCurrentThread()
72 StoreToOffset(TR, SP, offset.Int32Value()); in GetCurrentThread()
210 ___ Str(scratch, MEM_OP(reg_x(TR), tr_offs.Int32Value())); in StoreStackPointerToThread()
314 LoadFromOffset(dst.AsXRegister(), TR, offs.Int32Value()); in LoadRawPtrFromThread()
645 ___ Ldr(lr, MEM_OP(reg_x(TR), offset.Int32Value())); in CallFromThread()
725 ___ Ldxr(scratch, MEM_OP(reg_x(TR))); in TryToTransitionFromRunnableToNative()
730 ___ Stlxr(scratch, scratch2, MEM_OP(reg_x(TR))); in TryToTransitionFromRunnableToNative()
734 ___ Str(xzr, MEM_OP(reg_x(TR), thread_held_mutex_mutator_lock_offset.Int32Value())); in TryToTransitionFromRunnableToNative()
757 ___ Ldaxr(scratch, MEM_OP(reg_x(TR))); in TryToTransitionFromNativeToRunnable()
765 ___ Stxr(scratch, wzr, MEM_OP(reg_x(TR))); in TryToTransitionFromNativeToRunnable()
[all …]
assembler_arm64.cc:192 vixl::aarch64::Register tr = reg_x(TR); // Thread Register. in GenerateMarkingRegisterCheck()
managed_register_arm64_test.cc:628 EXPECT_TRUE(vixl::aarch64::x19.Is(Arm64Assembler::reg_x(TR))); in TEST()
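The test hit confirms TR is x19 on arm64, so GetCurrentThread() is a plain register move; every other Thread field access in these macro assemblers is a base+offset load or store against TR. A stand-in for the load side (only the helper's name comes from the hits; the body is illustrative):

```cpp
#include <cstdint>
#include <cstring>

// Stand-in for LoadRawPtrFromThread(): a thread-relative access is just a
// base+offset load where the base is the Thread* held in TR.
uintptr_t LoadRawPtrFromThread(const void* tr, int32_t offs) {
  uintptr_t value;
  std::memcpy(&value, static_cast<const char*>(tr) + offs, sizeof(value));
  return value;
}
```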
/art/cmdline/detail/
cmdline_parser_detail.h:54 template <typename TL, typename TR>
55 static std::true_type EqualityOperatorTest(const TL& left, const TR& right,
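This hit is unrelated to the thread register: TR here is just the template parameter naming the right-hand type in an SFINAE probe for operator==. A self-contained version of the idiom (only the truncated signature above is from the source; the reconstruction is an assumption):

```cpp
#include <type_traits>
#include <utility>

// Preferred overload when "left == right" is well-formed; the comma
// expression makes the deduced return type std::true_type.
template <typename TL, typename TR>
static auto EqualityOperatorTest(const TL& left, const TR& right, int)
    -> decltype(left == right, std::true_type{});

// Fallback overload chosen when the expression above does not compile.
template <typename TL, typename TR>
static std::false_type EqualityOperatorTest(const TL&, const TR&, long);

template <typename TL, typename TR>
struct SupportsEqualityOperator
    : decltype(EqualityOperatorTest(std::declval<TL>(), std::declval<TR>(), 0)) {};

static_assert(SupportsEqualityOperator<int, int>::value, "int == int exists");
```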
/art/runtime/arch/arm/
registers_arm.h:46 TR = 9, // ART Thread Register enumerator
context_arm.cc:112 DCHECK_EQ(reinterpret_cast<uintptr_t>(Thread::Current()), gprs[TR]); in DoLongJump()
/art/runtime/arch/riscv64/
registers_riscv64.h:70 TR = S1, // ART Thread Register - managed runtime enumerator
context_riscv64.cc:140 gprs[TR] = reinterpret_cast<uintptr_t>(Thread::Current()); in DoLongJump()
/art/compiler/trampolines/
trampoline_compiler.cc:109 __ JumpTo(Arm64ManagedRegister::FromXRegister(TR), Offset(offset.Int32Value()), in CreateTrampoline()
142 __ Loadd(tmp, TR, offset.Int32Value()); in CreateTrampoline()
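Trampolines do not embed target addresses; they load the entrypoint from a fixed offset off TR and jump, as both hits show. A plain-C++ stand-in for the generated sequence (names illustrative):

```cpp
#include <cstdint>

using Entrypoint = void (*)();

// Stand-in for CreateTrampoline()'s output: fetch a function pointer at a
// fixed offset from the Thread* in TR, then tail-call it (JumpTo / Jr).
void Trampoline(const char* tr, int32_t offset) {
  Entrypoint target = *reinterpret_cast<const Entrypoint*>(tr + offset);
  target();
}
```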
/art/runtime/arch/arm64/
registers_arm64.h:65 TR = X19, // ART Thread Register - Managed Runtime (Callee Saved Reg) enumerator
context_arm64.cc:162 DCHECK_EQ(reinterpret_cast<uintptr_t>(Thread::Current()), gprs[TR]); in DoLongJump()
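Across the three architectures, the definitions above pin TR to a callee-saved GPR, and DoLongJump() keeps it coherent: riscv64 writes Thread::Current() into gprs[TR], while arm and arm64 DCHECK it is already there. A consolidated view (values from the registers_*.h hits; the framing is illustrative):

```cpp
namespace arm     { enum Register  : int { TR = 9 };             }  // r9
namespace riscv64 { enum XRegister : int { S1 = 9, TR = S1 };    }  // x9 (s1), callee-saved
namespace arm64   { enum XRegister : int { X19 = 19, TR = X19 }; }  // x19, callee-saved
```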
/art/compiler/optimizing/
code_generator_riscv64.cc:161 DCHECK(T0 <= reg && reg <= T6 && reg != TR) << reg; in ReadBarrierMarkEntrypointOffset()
286 __ Loadd(RA, TR, entrypoint_offset); in EmitNativeCode()
722 DCHECK(ref_reg >= T0 && ref_reg != TR); in EmitNativeCode()
1181 __ Loadw(tmp, TR, Thread::ThreadFlagsOffset<kRiscv64PointerSize>().Int32Value()); in GenerateSuspendCheck()
1279 __ Loadd(temp.AsRegister<XRegister>(), TR, entry_point_offset); in EmitBakerReadBarierMarkingCheck()
1936 __ Loadd(tmp, TR, entry_point_offset); in GenerateReferenceLoadWithBakerReadBarrier()
2448 __ Loadd(card, TR, Thread::CardTableOffset<kRiscv64PointerSize>().Int32Value()); in MarkGCCard()
2475 __ Loadd(card, TR, Thread::CardTableOffset<kRiscv64PointerSize>().Int32Value()); in CheckGCCardIsValid()
2484 __ Loadw(temp, TR, Thread::IsGcMarkingOffset<kRiscv64PointerSize>().Int32Value()); in CheckGCCardIsValid()
2646 __ Loadd(tmp, TR, trace_buffer_index_offset); in GenerateMethodEntryExitHook()
[all …]
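The code generator reaches all thread-local state through TR: suspend-check flags, per-register read-barrier mark entrypoints, and the card-table base used by write barriers. A sketch of the MarkGCCard() pattern, assuming ART's usual biased card table (constants and signature are illustrative):

```cpp
#include <cstdint>

constexpr uintptr_t kCardShift = 10;  // 1 KiB cards, as in ART's CardTable

void MarkGCCard(uint8_t* card_table_base,  // Loadd(card, TR, CardTableOffset)
                uintptr_t object_address) {
  uint8_t* card = card_table_base + (object_address >> kCardShift);
  // ART stores the low byte of the (biased) card-table base itself, which
  // is arranged to equal the dirty value; reproduced as the same trick.
  *card = static_cast<uint8_t>(reinterpret_cast<uintptr_t>(card_table_base));
}
```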
intrinsics_riscv64.cc:766 __ Loadwu(temp, TR, Thread::WeakRefAccessEnabledOffset<kRiscv64PointerSize>().Int32Value()); in VisitReferenceGetReferent()
838 __ Loadwu(tmp, TR, Thread::IsGcMarkingOffset<kRiscv64PointerSize>().Int32Value()); in VisitReferenceRefersTo()
4723 __ Loadwu(out, TR, Thread::PeerOffset<kRiscv64PointerSize>().Int32Value()); in VisitThreadCurrentThread()
4739 __ Loadw(out, TR, Thread::InterruptedOffset<kRiscv64PointerSize>().Int32Value()); in VisitThreadInterrupted()
4741 __ Storew(Zero, TR, Thread::InterruptedOffset<kRiscv64PointerSize>().Int32Value()); in VisitThreadInterrupted()
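The java.lang.Thread intrinsics above compile down to TR-relative accesses: currentThread() reads the peer reference, and interrupted() reads the flag and clears it when set. A stand-in with an assumed field layout (real offsets come from Thread::PeerOffset / Thread::InterruptedOffset, and the real code uses ordered accesses):

```cpp
#include <cstdint>

struct ThreadFields {     // assumed layout; real offsets are Thread::*Offset
  uint32_t interrupted;   // Thread::InterruptedOffset
  uint32_t peer;          // Thread::PeerOffset (compressed reference)
};

// Thread.currentThread(): one TR-relative load of the peer reference.
uint32_t CurrentThread(const ThreadFields* tr) { return tr->peer; }

// Thread.interrupted(): load the flag, clear it when set (Loadw + Storew).
bool Interrupted(ThreadFields* tr) {
  uint32_t out = tr->interrupted;
  if (out != 0) {
    tr->interrupted = 0;
  }
  return out != 0;
}
```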
code_generator_arm64.cc:5457 assembler.JumpTo(ManagedRegister(arm64::TR), offset, ManagedRegister(arm64::IP0)); in EmitThunkCode()
code_generator_arm_vixl.cc:2136 blocked_core_registers_[TR] = true; in SetupBlockedRegisters()
/art/compiler/utils/arm/
assembler_arm_vixl.cc:39 extern const vixl32::Register tr(TR);
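Finally, the arm hits show why TR stays live across managed code: the vixl alias makes r9 addressable as tr, and the register allocator marks it blocked so it is never handed out. A sketch of that reservation (array size and framing are illustrative):

```cpp
#include <cstddef>

constexpr size_t kNumberOfCoreRegisters = 16;  // r0..r15 on arm
constexpr size_t TR = 9;                       // from registers_arm.h above

bool blocked_core_registers[kNumberOfCoreRegisters] = {};

// Mirrors SetupBlockedRegisters(): TR is never allocatable, so r9 stays
// pinned to Thread::Current() throughout managed code.
void SetupBlockedRegisters() {
  blocked_core_registers[TR] = true;
}
```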