/art/dex2oat/linker/arm/
relative_patcher_thumb2_test.cc
     329  void TestBakerFieldWide(uint32_t offset, uint32_t ref_reg);
     330  void TestBakerFieldNarrow(uint32_t offset, uint32_t ref_reg);
     704  void Thumb2RelativePatcherTest::TestBakerFieldWide(uint32_t offset, uint32_t ref_reg) {    [in TestBakerFieldWide() argument]
     712  uint32_t ldr = kLdrWInsn | offset | (base_reg << 16) | (ref_reg << 12);    [in TestBakerFieldWide()]
     734  uint32_t ldr = kLdrWInsn | offset | (base_reg << 16) | (ref_reg << 12);    [in TestBakerFieldWide()]
     799  void Thumb2RelativePatcherTest::TestBakerFieldNarrow(uint32_t offset, uint32_t ref_reg) {    [in TestBakerFieldNarrow() argument]
     810  uint32_t ldr = kLdrInsn | (offset << (6 - 2)) | (base_reg << 3) | ref_reg;    [in TestBakerFieldNarrow()]
     835  uint32_t ldr = kLdrInsn | (offset << (6 - 2)) | (base_reg << 3) | ref_reg;    [in TestBakerFieldNarrow()]
     903  uint32_t ref_reg;    [in TEST_F() member]
     913  TestBakerFieldWide(test_case.offset, test_case.ref_reg);    [in TEST_F()]
    [all …]
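The expected-opcode lines above (712/734 and 810/835) pack the field offset, base register, and destination (reference) register into a Thumb2 LDR instruction word. A minimal sketch of that packing, with the opcode constants assumed here rather than copied from the test file:

    #include <cstdint>

    // Assumed opcode templates; the real kLdrWInsn/kLdrInsn values are defined
    // in relative_patcher_thumb2_test.cc and are not shown in this listing.
    constexpr uint32_t kLdrWInsn = 0xf8d00000u;  // LDR.W Rt, [Rn, #imm12]
    constexpr uint32_t kLdrInsn  = 0x6800u;      // 16-bit LDR Rt, [Rn, #imm5*4]

    // Wide form: 12-bit byte offset in the low bits, Rn at bit 16, Rt at bit 12.
    constexpr uint32_t EncodeLdrWide(uint32_t offset, uint32_t base_reg, uint32_t ref_reg) {
      return kLdrWInsn | offset | (base_reg << 16) | (ref_reg << 12);
    }

    // Narrow form: word-scaled 5-bit offset at bit 6, Rn at bit 3, Rt in bits 0..2,
    // which is where the (offset << (6 - 2)) scaling at lines 810/835 comes from.
    constexpr uint32_t EncodeLdrNarrow(uint32_t offset, uint32_t base_reg, uint32_t ref_reg) {
      return kLdrInsn | (offset << (6 - 2)) | (base_reg << 3) | ref_reg;
    }

The test presumably compares words built this way against the code the patcher actually emits for Baker read barrier field loads.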
/art/dex2oat/linker/arm64/
relative_patcher_arm64_test.cc
     559  void TestBakerField(uint32_t offset, uint32_t ref_reg);
    1039  void Arm64RelativePatcherTest::TestBakerField(uint32_t offset, uint32_t ref_reg) {    [in TestBakerField() argument]
    1053  uint32_t ldr = kLdrWInsn | (offset << (10 - 2)) | (base_reg << 5) | ref_reg;    [in TestBakerField()]
    1075  uint32_t ldr = kLdrWInsn | (offset << (10 - 2)) | (base_reg << 5) | ref_reg;    [in TestBakerField()]
    1129  uint32_t ref_reg;    [in TEST_F() member]
    1138  TestBakerField(test_case.offset, test_case.ref_reg);    [in TEST_F()]
    1298  uint32_t ref_reg = (base_reg == 2) ? 3u : 2u;    [in TEST_F() local]
    1299  return kLdrWLsl2Insn | (index_reg << 16) | (base_reg << 5) | ref_reg;    [in TEST_F()]
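The arm64 test does the same packing for A64 load encodings: an unsigned-offset LDR for field loads (byte offset scaled to a word index and placed in the imm12 field at bit 10) and a register-offset LDR with LSL #2 for the array case at line 1299. A sketch under assumed opcode values:

    #include <cstdint>

    // Assumed opcode templates; the real constants live in relative_patcher_arm64_test.cc.
    constexpr uint32_t kLdrWInsn     = 0xb9400000u;  // LDR Wt, [Xn, #pimm]
    constexpr uint32_t kLdrWLsl2Insn = 0xb8607800u;  // LDR Wt, [Xn, Xm, LSL #2]

    // Field load: byte offset / 4 goes into imm12 at bit 10, Rn at bit 5, Rt in bits 0..4.
    constexpr uint32_t EncodeLdrUnsignedOffset(uint32_t offset, uint32_t base_reg, uint32_t ref_reg) {
      return kLdrWInsn | (offset << (10 - 2)) | (base_reg << 5) | ref_reg;
    }

    // Array load: the index register goes into the Rm field at bit 16.
    constexpr uint32_t EncodeLdrRegisterOffset(uint32_t index_reg, uint32_t base_reg, uint32_t ref_reg) {
      return kLdrWLsl2Insn | (index_reg << 16) | (base_reg << 5) | ref_reg;
    }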
/art/compiler/optimizing/
code_generator_x86_64.cc
     500  Register ref_reg = ref_cpu_reg.AsRegister();    [in EmitNativeCode() local]
     502  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;    [in EmitNativeCode()]
     526  DCHECK_NE(ref_reg, RSP);    [in EmitNativeCode()]
     527  DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;    [in EmitNativeCode()]
     543  Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);    [in EmitNativeCode()]
     594  Register ref_reg = ref_cpu_reg.AsRegister();    [in EmitNativeCode() local]
     596  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;    [in EmitNativeCode()]
     617  DCHECK_NE(ref_reg, RSP);    [in EmitNativeCode()]
     618  DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;    [in EmitNativeCode()]
     634  Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg);    [in EmitNativeCode()]
    [all …]
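In the x86-64 read barrier slow path, the marked reference must come back in the same register the fast path loaded it into, so the call target is chosen per register (hence the DCHECK that ref_reg is a valid CPU register and not RSP). A rough model of the arithmetic behind Thread::ReadBarrierMarkEntryPointsOffset<kX86_64PointerSize>(ref_reg), using a placeholder base offset since the real Thread layout is not shown in this listing:

    #include <cstddef>
    #include <cstdint>

    constexpr size_t kX86_64PointerSize = 8u;
    // Placeholder: offset of the first per-register marking entry point
    // inside the Thread's quick entrypoint table.
    constexpr size_t kReadBarrierMarkReg00Offset = 0x600u;

    // Assuming the per-register marking entry points are laid out contiguously
    // by register number, one pointer-sized slot each, register N selects
    // base + N * pointer size.
    constexpr int32_t ReadBarrierMarkEntryPointsOffset(size_t ref_reg) {
      return static_cast<int32_t>(kReadBarrierMarkReg00Offset +
                                  ref_reg * kX86_64PointerSize);
    }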
code_generator_x86.cc
     488  Register ref_reg = ref_.AsRegister<Register>();    [in EmitNativeCode() local]
     490  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;    [in EmitNativeCode()]
     507  __ MaybeUnpoisonHeapReference(ref_reg);    [in EmitNativeCode()]
     513  DCHECK_NE(ref_reg, ESP);    [in EmitNativeCode()]
     514  DCHECK(0 <= ref_reg && ref_reg < kNumberOfCpuRegisters) << ref_reg;    [in EmitNativeCode()]
     529  int32_t entry_point_offset = Thread::ReadBarrierMarkEntryPointsOffset<kX86PointerSize>(ref_reg);    [in EmitNativeCode()]
     575  Register ref_reg = ref_.AsRegister<Register>();    [in EmitNativeCode() local]
     577  DCHECK(!locations->GetLiveRegisters()->ContainsCoreRegister(ref_reg)) << ref_reg;    [in EmitNativeCode()]
     597  __ MaybeUnpoisonHeapReference(ref_reg);    [in EmitNativeCode()]
     601  __ movl(temp_, ref_reg);    [in EmitNativeCode()]
    [all …]
code_generator_arm64.cc
    6606  Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);    [in GenerateFieldLoadWithBakerReadBarrier() local]
    6609  __ ldar(ref_reg, src);    [in GenerateFieldLoadWithBakerReadBarrier()]
    6611  __ ldr(ref_reg, src);    [in GenerateFieldLoadWithBakerReadBarrier()]
    6619  __ neg(ref_reg, Operand(ref_reg));    [in GenerateFieldLoadWithBakerReadBarrier()]
    6689  Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);    [in GenerateArrayLoadWithBakerReadBarrier() local]
    6722  __ ldr(ref_reg, MemOperand(temp.X(), index_reg.X(), LSL, scale_factor));    [in GenerateArrayLoadWithBakerReadBarrier()]
    6727  __ neg(ref_reg, Operand(ref_reg));    [in GenerateArrayLoadWithBakerReadBarrier()]
code_generator_arm_vixl.cc
    8922  vixl32::Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);    [in GenerateFieldLoadWithBakerReadBarrier() local]
    8923  bool narrow = CanEmitNarrowLdr(ref_reg, src.GetBaseRegister(), src.GetOffsetImmediate());    [in GenerateFieldLoadWithBakerReadBarrier()]
    8944  __ ldr(EncodingSize(narrow ? Narrow : Wide), ref_reg, src);    [in GenerateFieldLoadWithBakerReadBarrier()]
    8952  __ rsbs(EncodingSize(Narrow), ref_reg, ref_reg, Operand(0));    [in GenerateFieldLoadWithBakerReadBarrier()]
    8954  __ rsb(EncodingSize(Wide), ref_reg, ref_reg, Operand(0));    [in GenerateFieldLoadWithBakerReadBarrier()]
    9020  vixl32::Register ref_reg = RegisterFrom(ref, DataType::Type::kReference);    [in GenerateArrayLoadWithBakerReadBarrier() local]
    9040  __ ldr(ref_reg, MemOperand(data_reg, index_reg, vixl32::LSL, scale_factor));    [in GenerateArrayLoadWithBakerReadBarrier()]
    9044  __ rsb(EncodingSize(Wide), ref_reg, ref_reg, Operand(0));    [in GenerateArrayLoadWithBakerReadBarrier()]
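The neg (arm64) and rsb ..., #0 (Thumb2) instructions that follow these loads are the heap-reference unpoisoning step: with heap poisoning enabled, references are stored negated, and negating again restores the real pointer (the x86 code generator and the JNI macro assembler below do the same through MaybeUnpoisonHeapReference). A minimal model of that convention, with kPoisonHeapReferences standing in for the real build flag:

    #include <cstdint>

    constexpr bool kPoisonHeapReferences = true;  // assumed build configuration

    constexpr uint32_t PoisonHeapReference(uint32_t ref) {
      return kPoisonHeapReferences ? 0u - ref : ref;
    }

    constexpr uint32_t UnpoisonHeapReference(uint32_t poisoned) {
      // Negation is its own inverse, which is why a single neg/rsb after the
      // load recovers the original reference.
      return kPoisonHeapReferences ? 0u - poisoned : poisoned;
    }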
/art/compiler/utils/arm64/
jni_macro_assembler_arm64.cc
     305  WRegister ref_reg = dst.AsOverlappingWRegister();    [in LoadRef() local]
     306  asm_.MaybeUnpoisonHeapReference(reg_w(ref_reg));    [in LoadRef()]