Searched refs:scratch (Results 1 – 25 of 26) sorted by relevance

/art/compiler/utils/arm64/
jni_macro_assembler_arm64.cc
168 Arm64ManagedRegister scratch = m_scratch.AsArm64(); in StoreImmediateToFrame() local
169 CHECK(scratch.IsXRegister()) << scratch; in StoreImmediateToFrame()
170 LoadImmediate(scratch.AsXRegister(), imm); in StoreImmediateToFrame()
171 StoreWToOffset(kStoreWord, scratch.AsOverlappingWRegister(), SP, in StoreImmediateToFrame()
178 Arm64ManagedRegister scratch = m_scratch.AsArm64(); in StoreStackOffsetToThread() local
179 CHECK(scratch.IsXRegister()) << scratch; in StoreStackOffsetToThread()
180 AddConstant(scratch.AsXRegister(), SP, fr_offs.Int32Value()); in StoreStackOffsetToThread()
181 StoreToOffset(scratch.AsXRegister(), TR, tr_offs.Int32Value()); in StoreStackOffsetToThread()
196 Arm64ManagedRegister scratch = m_scratch.AsArm64(); in StoreSpanning() local
198 LoadFromOffset(scratch.AsXRegister(), SP, in_off.Int32Value()); in StoreSpanning()
[all …]
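
Note: the jni_macro_assembler_arm64.cc hits share one pattern: verify that the caller-supplied scratch is an X register, materialize a value in it, then store it (or its overlapping W register) to the destination frame slot. Below is a minimal, self-contained C++ sketch of that flow; Frame, Registers, and the helper signature are invented stand-ins, not ART types.

  #include <cassert>
  #include <cstddef>
  #include <cstdint>
  #include <cstring>
  #include <iostream>
  #include <vector>

  struct Frame {                     // stand-in for the stack frame at SP
    std::vector<uint8_t> bytes = std::vector<uint8_t>(64, 0);
  };

  struct Registers {                 // stand-in register file; x[i] ~ X register i
    uint64_t x[31] = {};
  };

  // Route the immediate through the caller-provided scratch register, then
  // store its low 32 bits to the frame slot, mirroring the CHECK +
  // LoadImmediate + StoreWToOffset sequence in the hits above.
  void StoreImmediateToFrame(Frame& frame, Registers& regs,
                             int scratch, uint32_t imm, size_t dest_offset) {
    assert(scratch >= 0 && scratch < 31);   // like CHECK(scratch.IsXRegister())
    regs.x[scratch] = imm;                  // like LoadImmediate(scratch, imm)
    uint32_t w = static_cast<uint32_t>(regs.x[scratch]);
    std::memcpy(frame.bytes.data() + dest_offset, &w, sizeof(w));  // store the W half
  }

  int main() {
    Frame frame;
    Registers regs;
    StoreImmediateToFrame(frame, regs, /*scratch=*/16, 0xDEADBEEFu, /*dest_offset=*/8);
    uint32_t stored = 0;
    std::memcpy(&stored, frame.bytes.data() + 8, sizeof(stored));
    std::cout << std::hex << stored << "\n";  // prints deadbeef
    return 0;
  }
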
jni_macro_assembler_arm64.h
72 void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) override;
75 ManagedRegister scratch) override;
80 ManagedRegister scratch) override;
97 ManagedRegister scratch) override;
98 void CopyRawPtrToThread(ThreadOffset64 thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
100 void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) override;
101 void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) override;
105 ManagedRegister scratch,
110 ManagedRegister scratch,
115 ManagedRegister scratch,
[all …]
assembler_arm64.cc
97 Arm64ManagedRegister scratch = m_scratch.AsArm64(); in JumpTo() local
99 CHECK(scratch.IsXRegister()) << scratch; in JumpTo()
102 temps.Exclude(reg_x(base.AsXRegister()), reg_x(scratch.AsXRegister())); in JumpTo()
103 ___ Ldr(reg_x(scratch.AsXRegister()), MEM_OP(reg_x(base.AsXRegister()), offs.Int32Value())); in JumpTo()
104 ___ Br(reg_x(scratch.AsXRegister())); in JumpTo()
/art/compiler/utils/arm/
jni_macro_assembler_arm_vixl.h
70 void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) override;
74 ManagedRegister scratch) override;
81 ManagedRegister scratch) override;
106 ManagedRegister scratch) override;
110 ManagedRegister scratch) override;
112 void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) override;
114 void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) override;
119 ManagedRegister scratch,
125 ManagedRegister scratch,
131 ManagedRegister scratch,
[all …]
jni_macro_assembler_arm_vixl.cc
277 vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm()); in StoreSpanning() local
280 temps.Exclude(scratch); in StoreSpanning()
281 asm_.LoadFromOffset(kLoadWord, scratch, sp, in_off.Int32Value()); in StoreSpanning()
282 asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value() + 4); in StoreSpanning()
288 vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm()); in CopyRef() local
290 temps.Exclude(scratch); in CopyRef()
291 asm_.LoadFromOffset(kLoadWord, scratch, sp, src.Int32Value()); in CopyRef()
292 asm_.StoreToOffset(kStoreWord, scratch, sp, dest.Int32Value()); in CopyRef()
324 vixl::aarch32::Register scratch = AsVIXLRegister(mscratch.AsArm()); in StoreImmediateToFrame() local
326 temps.Exclude(scratch); in StoreImmediateToFrame()
[all …]
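
Note: in the arm (VIXL) hits, the scratch register is first excluded from the assembler's temp pool so the macro assembler cannot hand it out again mid-sequence, and the copy then goes load-to-scratch, store-from-scratch. A simplified sketch of that discipline follows; TempPool and Machine are invented here, and the real code restores the pool through a scoped helper rather than a manual Include call.

  #include <cstdint>
  #include <iostream>
  #include <set>

  struct TempPool {                  // toy stand-in for the assembler temp pool
    std::set<int> available{12};     // pretend r12 is the only assembler temp
    void Exclude(int reg) { available.erase(reg); }
    void Include(int reg) { available.insert(reg); }
  };

  struct Machine {                   // toy register file and stack
    uint32_t reg[16] = {};
    uint32_t stack[32] = {};
  };

  // Copy one 32-bit stack slot to another via the scratch register,
  // mirroring temps.Exclude + LoadFromOffset + StoreToOffset in the hits above.
  void CopyRef(Machine& m, TempPool& temps, int scratch, int dest, int src) {
    temps.Exclude(scratch);          // keep the pool away from our scratch
    m.reg[scratch] = m.stack[src];   // load word from [sp + src]
    m.stack[dest] = m.reg[scratch];  // store word to [sp + dest]
    temps.Include(scratch);          // real code does this via scope destruction
  }

  int main() {
    Machine m;
    TempPool temps;
    m.stack[3] = 42;
    CopyRef(m, temps, /*scratch=*/12, /*dest=*/7, /*src=*/3);
    std::cout << m.stack[7] << "\n";  // 42
    return 0;
  }
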
/art/compiler/utils/x86/
jni_macro_assembler_x86.h
63 void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) override;
67 ManagedRegister scratch) override;
72 ManagedRegister scratch) override;
93 ManagedRegister scratch) override;
95 void CopyRawPtrToThread(ThreadOffset32 thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
98 void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) override;
100 void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) override;
102 void Copy(FrameOffset dest, ManagedRegister src_base, Offset src_offset, ManagedRegister scratch,
105 void Copy(ManagedRegister dest_base, Offset dest_offset, FrameOffset src, ManagedRegister scratch,
108 void Copy(FrameOffset dest, FrameOffset src_base, Offset src_offset, ManagedRegister scratch,
[all …]
jni_macro_assembler_x86.cc
190 X86ManagedRegister scratch = mscratch.AsX86(); in StoreStackOffsetToThread() local
191 CHECK(scratch.IsCpuRegister()); in StoreStackOffsetToThread()
192 __ leal(scratch.AsCpuRegister(), Address(ESP, fr_offs)); in StoreStackOffsetToThread()
193 __ fs()->movl(Address::Absolute(thr_offs), scratch.AsCpuRegister()); in StoreStackOffsetToThread()
344 X86ManagedRegister scratch = mscratch.AsX86(); in CopyRef() local
345 CHECK(scratch.IsCpuRegister()); in CopyRef()
346 __ movl(scratch.AsCpuRegister(), Address(ESP, src)); in CopyRef()
347 __ movl(Address(ESP, dest), scratch.AsCpuRegister()); in CopyRef()
353 X86ManagedRegister scratch = mscratch.AsX86(); in CopyRawPtrFromThread() local
354 CHECK(scratch.IsCpuRegister()); in CopyRawPtrFromThread()
[all …]
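
Note: the x86 StoreStackOffsetToThread hit computes an address into the scratch register with leal and then stores that pointer through the fs segment into a Thread field. A plain C++ analogue of the two steps is sketched below; the Thread type and its field name are invented for illustration.

  #include <cstddef>
  #include <cstdint>
  #include <iostream>

  struct Thread {                   // stand-in for the fs:-addressed Thread block
    void* top_of_stack = nullptr;   // stand-in for the field at thr_offs
  };

  void StoreStackOffsetToThread(Thread& self, uint8_t* sp, std::ptrdiff_t fr_offs) {
    void* scratch = sp + fr_offs;   // leal scratch, [ESP + fr_offs]
    self.top_of_stack = scratch;    // fs: movl [thr_offs], scratch
  }

  int main() {
    uint8_t frame[128] = {};
    Thread self;
    StoreStackOffsetToThread(self, frame, 16);
    std::cout << (self.top_of_stack == frame + 16) << "\n";  // 1
    return 0;
  }
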
/art/compiler/utils/x86_64/
jni_macro_assembler_x86_64.h
64 void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) override;
68 ManagedRegister scratch) override;
75 ManagedRegister scratch) override;
98 ManagedRegister scratch) override;
100 void CopyRawPtrToThread(ThreadOffset64 thr_offs, FrameOffset fr_offs, ManagedRegister scratch)
103 void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) override;
105 void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) override;
110 ManagedRegister scratch,
116 ManagedRegister scratch,
122 ManagedRegister scratch,
[all …]
jni_macro_assembler_x86_64.cc
227 X86_64ManagedRegister scratch = mscratch.AsX86_64(); in StoreStackOffsetToThread() local
228 CHECK(scratch.IsCpuRegister()); in StoreStackOffsetToThread()
229 __ leaq(scratch.AsCpuRegister(), Address(CpuRegister(RSP), fr_offs)); in StoreStackOffsetToThread()
230 __ gs()->movq(Address::Absolute(thr_offs, true), scratch.AsCpuRegister()); in StoreStackOffsetToThread()
392 X86_64ManagedRegister scratch = mscratch.AsX86_64(); in CopyRef() local
393 CHECK(scratch.IsCpuRegister()); in CopyRef()
394 __ movl(scratch.AsCpuRegister(), Address(CpuRegister(RSP), src)); in CopyRef()
395 __ movl(Address(CpuRegister(RSP), dest), scratch.AsCpuRegister()); in CopyRef()
401 X86_64ManagedRegister scratch = mscratch.AsX86_64(); in CopyRawPtrFromThread() local
402 CHECK(scratch.IsCpuRegister()); in CopyRawPtrFromThread()
[all …]
/art/compiler/utils/
jni_macro_assembler.h
86 virtual void StoreImmediateToFrame(FrameOffset dest, uint32_t imm, ManagedRegister scratch) = 0;
90 ManagedRegister scratch) = 0;
97 ManagedRegister scratch) = 0;
122 ManagedRegister scratch) = 0;
126 ManagedRegister scratch) = 0;
128 virtual void CopyRef(FrameOffset dest, FrameOffset src, ManagedRegister scratch) = 0;
130 virtual void Copy(FrameOffset dest, FrameOffset src, ManagedRegister scratch, size_t size) = 0;
135 ManagedRegister scratch,
141 ManagedRegister scratch,
147 ManagedRegister scratch,
[all …]
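
Note: jni_macro_assembler.h is the architecture-neutral interface that the backends above override; every operation that needs a temporary takes the scratch register as an explicit parameter instead of allocating one itself. A toy sketch of that shape follows, with the types collapsed to int and LoggingAsm invented purely for illustration.

  #include <cstdint>
  #include <iostream>

  using FrameOffset = int;        // simplified stand-ins for the real types
  using ManagedRegister = int;

  class JniAsm {                  // analogue of the abstract assembler interface
   public:
    virtual ~JniAsm() = default;
    virtual void StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                                       ManagedRegister scratch) = 0;
    virtual void CopyRef(FrameOffset dest, FrameOffset src,
                         ManagedRegister scratch) = 0;
  };

  class LoggingAsm : public JniAsm {  // toy backend that just reports the calls
   public:
    void StoreImmediateToFrame(FrameOffset dest, uint32_t imm,
                               ManagedRegister scratch) override {
      std::cout << "store imm " << imm << " -> [SP+" << dest
                << "] via r" << scratch << "\n";
    }
    void CopyRef(FrameOffset dest, FrameOffset src,
                 ManagedRegister scratch) override {
      std::cout << "copy [SP+" << src << "] -> [SP+" << dest
                << "] via r" << scratch << "\n";
    }
  };

  int main() {
    LoggingAsm backend;
    JniAsm& jni = backend;                     // callers see only the interface
    jni.StoreImmediateToFrame(8, 0x1234, /*scratch=*/5);
    jni.CopyRef(16, 8, /*scratch=*/5);
    return 0;
  }
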
swap_space_test.cc
37 ScratchFile scratch; in SwapTest() local
38 int fd = scratch.GetFd(); in SwapTest()
39 unlink(scratch.GetFilename().c_str()); in SwapTest()
72 scratch.Close(); in SwapTest()
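
Note: the SwapTest hit keeps the descriptor from GetFd() but unlinks the file name right away, so the swap space is backed by an anonymous file that vanishes when the fd is closed. A POSIX sketch of the same idiom follows, with mkstemp standing in for ScratchFile.

  #include <stdlib.h>   // mkstemp (POSIX)
  #include <unistd.h>   // unlink, write, close (POSIX)
  #include <iostream>

  int main() {
    char name[] = "/tmp/swap-XXXXXX";
    int fd = mkstemp(name);             // like ScratchFile: create and open
    if (fd < 0) return 1;
    unlink(name);                       // drop the name; data lives until close(fd)
    const char msg[] = "scratch";
    if (write(fd, msg, sizeof(msg)) < 0) return 1;  // fd is still fully usable
    std::cout << "wrote to anonymous scratch file via fd " << fd << "\n";
    close(fd);                          // backing storage is released here
    return 0;
  }
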
/art/compiler/optimizing/
parallel_move_resolver.cc
264 int scratch = -1; in AllocateScratchRegister() local
267 scratch = reg; in AllocateScratchRegister()
272 if (scratch == -1) { in AllocateScratchRegister()
274 scratch = if_scratch; in AllocateScratchRegister()
279 return scratch; in AllocateScratchRegister()
381 for (Location scratch : scratches_) { in AddScratchLocation() local
382 CHECK(!loc.Equals(scratch)); in AddScratchLocation()
458 Location scratch = AllocateScratchLocationFor(kind); in PerformMove() local
462 move->SetDestination(scratch); in PerformMove()
465 UpdateMoveSource(source, scratch); in PerformMove()
[all …]
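
Note: the parallel_move_resolver.cc hits outline an algorithm: scan for a register not blocked by pending moves (the -1 sentinel means "none found", in which case a designated fallback is used), then detour one move of a cycle through that scratch location. A simplified, self-contained version of the idea is sketched below; the containers and the swap in main are illustrative, not ART's data structures.

  #include <iostream>
  #include <vector>

  // Pick a free register in [0, reg_count) not used by pending moves; fall back
  // to if_scratch when everything is blocked (mirrors the -1 sentinel scan).
  int AllocateScratchRegister(const std::vector<int>& blocked, int reg_count,
                              int if_scratch) {
    int scratch = -1;
    for (int reg = 0; reg < reg_count; ++reg) {
      bool is_blocked = false;
      for (int b : blocked) {
        if (b == reg) is_blocked = true;
      }
      if (!is_blocked) {
        scratch = reg;
        break;
      }
    }
    if (scratch == -1) {
      scratch = if_scratch;
    }
    return scratch;
  }

  int main() {
    std::vector<int> regs = {7, 11, 0, 0};            // r0 and r1 must swap
    std::vector<int> blocked = {0, 1};                // both are in the cycle
    int scratch = AllocateScratchRegister(blocked, 4, /*if_scratch=*/3);
    regs[scratch] = regs[0];                          // break the cycle via scratch
    regs[0] = regs[1];
    regs[1] = regs[scratch];
    std::cout << "r0=" << regs[0] << " r1=" << regs[1] << "\n";  // r0=11 r1=7
    return 0;
  }
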
code_generator_vector_arm_vixl.cc
903 /*out*/ vixl32::Register* scratch) { in VecAddress() argument
919 *scratch = temps_scope->Acquire(); in VecAddress()
920 __ Add(*scratch, base, Operand(RegisterFrom(index), ShiftType::LSL, shift)); in VecAddress()
922 return MemOperand(*scratch, offset); in VecAddress()
929 /*out*/ vixl32::Register* scratch) { in VecAddressUnaligned() argument
943 __ Add(*scratch, base, offset); in VecAddressUnaligned()
945 *scratch = temps_scope->Acquire(); in VecAddressUnaligned()
946 __ Add(*scratch, base, offset); in VecAddressUnaligned()
947 __ Add(*scratch, *scratch, Operand(RegisterFrom(index), ShiftType::LSL, shift)); in VecAddressUnaligned()
949 return AlignedMemOperand(*scratch, kNoAlignment); in VecAddressUnaligned()
[all …]
parallel_move_test.cc
116 Location scratch = GetScratchLocation(kind); in AllocateScratchLocationFor() local
117 if (scratch.Equals(Location::NoLocation())) { in AllocateScratchLocationFor()
121 scratch = (kind == Location::kRegister) ? Location::RegisterLocation(scratch_index_) in AllocateScratchLocationFor()
125 return scratch; in AllocateScratchLocationFor()
code_generator_vector_arm64.cc
1403 /*out*/ Register* scratch) { in VecAddress() argument
1425 *scratch = temps_scope->AcquireSameSizeAs(base); in VecAddress()
1426 __ Add(*scratch, base, Operand(WRegisterFrom(index), LSL, shift)); in VecAddress()
1427 return HeapOperand(*scratch, offset); in VecAddress()
1440 Register scratch; in VisitVecLoad() local
1459 VecAddress(instruction, &temps, 1, /*is_string_char_at*/ true, &scratch)); in VisitVecLoad()
1462 if (scratch.IsValid()) { in VisitVecLoad()
1463 temps.Release(scratch); // if used, no longer needed in VisitVecLoad()
1467 __ Ldr(reg, VecAddress(instruction, &temps, size, /*is_string_char_at*/ true, &scratch)); in VisitVecLoad()
1481 __ Ldr(reg, VecAddress(instruction, &temps, size, instruction->IsStringCharAt(), &scratch)); in VisitVecLoad()
[all …]
code_generator_arm_vixl.h
413 /*out*/ vixl32::Register* scratch);
418 /*out*/ vixl32::Register* scratch);
code_generator_arm64.h
349 /*out*/ vixl::aarch64::Register* scratch);
code_generator_arm64.cc
1032 Location scratch = GetScratchLocation(kind); in AllocateScratchLocationFor() local
1033 if (!scratch.Equals(Location::NoLocation())) { in AllocateScratchLocationFor()
1034 return scratch; in AllocateScratchLocationFor()
1038 scratch = LocationFrom(vixl_temps_.AcquireX()); in AllocateScratchLocationFor()
1041 scratch = LocationFrom(codegen_->GetGraph()->HasSIMD() in AllocateScratchLocationFor()
1045 AddScratchLocation(scratch); in AllocateScratchLocationFor()
1046 return scratch; in AllocateScratchLocationFor()
/art/test/626-checker-arm64-scratch-register/
info.txt
1 Regression test checking that the ARM64 scratch register pool is not
/art/test/635-checker-arm64-volatile-load-cc/
info.txt
1 Regression test checking that the VIXL ARM64 scratch register pool is
/art/test/646-checker-arraycopy-large-cst-pos/
info.txt
1 Regression test for an issue with a depleted VIXL scratch register
/art/test/572-checker-array-get-regression/
info.txt
3 used to require too many scratch (temporary) registers.
/art/runtime/
dexopt_test.cc
117 std::optional<ScratchDir> scratch; in GenerateOatForTest() local
119 scratch.emplace(); // Create the scratch directory for the generated boot image. in GenerateOatForTest()
120 std::string alternate_image_location = GenerateAlternateImage(scratch->GetPath()); in GenerateOatForTest()
/art/dex2oat/
dex2oat_image_test.cc
89 ScratchDir scratch; in CompileImageAndGetSizes() local
90 std::string filename_prefix = scratch.GetPath() + "boot"; in CompileImageAndGetSizes()
259 ScratchDir scratch; in TEST_F() local
260 const std::string& scratch_dir = scratch.GetPath(); in TEST_F()
/art/runtime/gc/space/
image_space_test.cc
57 ScratchDir scratch; in TEST_F() local
58 const std::string& scratch_dir = scratch.GetPath(); in TEST_F()
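
Note: the dexopt_test.cc, dex2oat_image_test.cc, and image_space_test.cc hits all lean on the same fixture idiom: create a throwaway directory, hand out its path, and remove it when the object goes out of scope. A rough RAII sketch of that idiom follows (POSIX-only; ScratchDirSketch is invented and is not ART's ScratchDir).

  #include <stdlib.h>     // mkdtemp (POSIX)
  #include <filesystem>
  #include <iostream>
  #include <stdexcept>
  #include <string>

  class ScratchDirSketch {
   public:
    ScratchDirSketch() {
      std::string tmpl =
          (std::filesystem::temp_directory_path() / "scratch-XXXXXX").string();
      if (mkdtemp(tmpl.data()) == nullptr) {   // create a unique directory
        throw std::runtime_error("mkdtemp failed");
      }
      path_ = tmpl;                            // tmpl now names the created dir
    }
    ~ScratchDirSketch() { std::filesystem::remove_all(path_); }  // clean up on scope exit
    std::string GetPath() const { return path_ + "/"; }

   private:
    std::string path_;
  };

  int main() {
    ScratchDirSketch scratch;
    std::string filename_prefix = scratch.GetPath() + "boot";  // as in the dex2oat test hit
    std::cout << filename_prefix << "\n";
    return 0;
  }
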
