/art/compiler/optimizing/

register_allocator.cc
    676  Location source,                                                     [in AddInputMoveFor() argument]
    678  if (source.Equals(destination)) return;                              [in AddInputMoveFor()]
    694  move->AddMove(new (allocator_) MoveOperands(source, destination));   [in AddInputMoveFor()]
    698  Location source,                                                     [in InsertParallelMoveAt() argument]
    700  if (source.Equals(destination)) return;                              [in InsertParallelMoveAt()]
    737  move->AddMove(new (allocator_) MoveOperands(source, destination));   [in InsertParallelMoveAt()]
    741  Location source,                                                     [in InsertParallelMoveAtExitOf() argument]
    743  if (source.Equals(destination)) return;                              [in InsertParallelMoveAtExitOf()]
    759  move->AddMove(new (allocator_) MoveOperands(source, destination));   [in InsertParallelMoveAtExitOf()]
    763  Location source,                                                     [in InsertParallelMoveAtEntryOf() argument]
    [all …]

register_allocator.h
    110  void InsertParallelMoveAtExitOf(HBasicBlock* block, Location source, Location destination) const;
    111  void InsertParallelMoveAtEntryOf(HBasicBlock* block, Location source, Location destination) const;
    112  void InsertMoveAfter(HInstruction* instruction, Location source, Location destination) const;
    113  void AddInputMoveFor(HInstruction* instruction, Location source, Location destination) const;
    114  void InsertParallelMoveAt(size_t position, Location source, Location destination) const;

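The register_allocator.cc and register_allocator.h matches above all follow the same shape: each helper returns early when the source and destination locations are equal, and otherwise records a MoveOperands(source, destination) on a parallel-move node at the chosen position (instruction input, lifetime position, block entry, or block exit). The sketch below shows that shared pattern in isolation; Location, MoveOperands, and ParallelMove here are simplified stand-ins, not the real ART classes.

```cpp
#include <vector>

// Simplified stand-ins for the ART types in the matches above (hypothetical, not the real classes).
struct Location {
  int kind;   // e.g. 0 = register, 1 = stack slot
  int index;  // register number or stack index
  bool Equals(const Location& other) const {
    return kind == other.kind && index == other.index;
  }
};

struct MoveOperands {
  Location source;
  Location destination;
};

// A parallel move groups moves that must appear to happen simultaneously.
struct ParallelMove {
  std::vector<MoveOperands> moves;
  void AddMove(const MoveOperands& move) { moves.push_back(move); }
};

// Shared shape of AddInputMoveFor / InsertParallelMoveAt / InsertParallelMoveAtExitOf / ...:
// skip no-op moves, otherwise append the move to the parallel move collected at that position.
void AddMoveIfNeeded(ParallelMove* parallel_move, Location source, Location destination) {
  if (source.Equals(destination)) return;  // Nothing to emit for a no-op move.
  parallel_move->AddMove(MoveOperands{source, destination});
}
```

The real helpers differ mainly in where they find or create the parallel move: next to an HInstruction, at a lifetime position, or at the entry or exit of an HBasicBlock.
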
code_generator_arm.cc
    368  void CodeGeneratorARM::Move32(Location destination, Location source) {               [in Move32() argument]
    369  if (source.Equals(destination)) {                                                    [in Move32()]
    373  if (source.IsRegister()) {                                                           [in Move32()]
    374  __ Mov(destination.AsArm().AsCoreRegister(), source.AsArm().AsCoreRegister());       [in Move32()]
    376  __ ldr(destination.AsArm().AsCoreRegister(), Address(SP, source.GetStackIndex()));   [in Move32()]
    380  if (source.IsRegister()) {                                                           [in Move32()]
    381  __ str(source.AsArm().AsCoreRegister(), Address(SP, destination.GetStackIndex()));   [in Move32()]
    383  __ ldr(IP, Address(SP, source.GetStackIndex()));                                     [in Move32()]
    389  void CodeGeneratorARM::Move64(Location destination, Location source) {               [in Move64() argument]
    390  if (source.Equals(destination)) {                                                    [in Move64()]
    [all …]

code_generator_x86.cc
    343  void CodeGeneratorX86::Move32(Location destination, Location source) {               [in Move32() argument]
    344  if (source.Equals(destination)) {                                                    [in Move32()]
    348  if (source.IsRegister()) {                                                           [in Move32()]
    349  __ movl(destination.AsX86().AsCpuRegister(), source.AsX86().AsCpuRegister());        [in Move32()]
    351  DCHECK(source.IsStackSlot());                                                        [in Move32()]
    352  __ movl(destination.AsX86().AsCpuRegister(), Address(ESP, source.GetStackIndex()));  [in Move32()]
    355  if (source.IsRegister()) {                                                           [in Move32()]
    356  __ movl(Address(ESP, destination.GetStackIndex()), source.AsX86().AsCpuRegister());  [in Move32()]
    358  DCHECK(source.IsStackSlot());                                                        [in Move32()]
    359  __ pushl(Address(ESP, source.GetStackIndex()));                                      [in Move32()]
    [all …]

code_generator_x86_64.cc
    274  void CodeGeneratorX86_64::Move(Location destination, Location source) {                               [in Move() argument]
    275  if (source.Equals(destination)) {                                                                      [in Move()]
    279  if (source.IsRegister()) {                                                                             [in Move()]
    280  __ movq(destination.AsX86_64().AsCpuRegister(), source.AsX86_64().AsCpuRegister());                    [in Move()]
    281  } else if (source.IsStackSlot()) {                                                                     [in Move()]
    282  …__ movl(destination.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), source.GetStackIndex()));   [in Move()]
    284  DCHECK(source.IsDoubleStackSlot());                                                                    [in Move()]
    285  …__ movq(destination.AsX86_64().AsCpuRegister(), Address(CpuRegister(RSP), source.GetStackIndex()));   [in Move()]
    288  if (source.IsRegister()) {                                                                             [in Move()]
    289  …__ movl(Address(CpuRegister(RSP), destination.GetStackIndex()), source.AsX86_64().AsCpuRegister());   [in Move()]
    [all …]

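The Move32/Move64/Move bodies matched above in the ARM, x86, and x86_64 code generators all perform the same case analysis: register-to-register, stack-to-register, register-to-stack, and stack-to-stack, with the last case going through a scratch register (IP on ARM) or a push/pop pair (x86). Below is a minimal, self-contained sketch of that dispatch over a simplified location type; the Emit* functions are placeholders for the backend-specific assembler calls (__ Mov, __ ldr/__ str, __ movl, ...), not real APIs.

```cpp
#include <cstdio>

// Hypothetical simplified location model; the real art::Location is richer.
enum class LocKind { kRegister, kStackSlot };
struct Loc {
  LocKind kind;
  int index;  // register number or stack offset
  bool Equals(const Loc& other) const { return kind == other.kind && index == other.index; }
  bool IsRegister() const { return kind == LocKind::kRegister; }
};

// Placeholder "assembler" hooks standing in for __ movl / __ ldr / __ str and friends.
void EmitRegToReg(int dst, int src)      { std::printf("mov   r%d, r%d\n", dst, src); }
void EmitLoadFromStack(int dst, int off) { std::printf("load  r%d, [sp + %d]\n", dst, off); }
void EmitStoreToStack(int off, int src)  { std::printf("store [sp + %d], r%d\n", off, src); }

// Shape of CodeGenerator*::Move32: pick the instruction form from the two location kinds.
void Move32(Loc destination, Loc source) {
  if (source.Equals(destination)) return;  // No code needed for a no-op move.
  if (destination.IsRegister()) {
    if (source.IsRegister()) {
      EmitRegToReg(destination.index, source.index);        // reg <- reg
    } else {
      EmitLoadFromStack(destination.index, source.index);   // reg <- stack slot
    }
  } else {
    if (source.IsRegister()) {
      EmitStoreToStack(destination.index, source.index);    // stack slot <- reg
    } else {
      // Stack-to-stack needs a temporary: the real backends use IP (ARM) or push/pop (x86).
      const int kScratch = 12;
      EmitLoadFromStack(kScratch, source.index);
      EmitStoreToStack(destination.index, kScratch);
    }
  }
}

int main() {
  Move32({LocKind::kRegister, 0}, {LocKind::kStackSlot, 8});    // reg <- stack
  Move32({LocKind::kStackSlot, 16}, {LocKind::kStackSlot, 8});  // stack <- stack via scratch
  return 0;
}
```
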
parallel_move_resolver.cc
    132  Location source = move->GetSource();   [in PerformMove() local]
    137  if (other_move.Blocks(source)) {       [in PerformMove()]
    140  moves_.Get(i)->SetSource(source);      [in PerformMove()]

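parallel_move_resolver.cc is where a ParallelMove gets serialized into ordinary moves; the matched lines are part of the blocking/cycle handling in PerformMove(). As a rough illustration only (not ART's actual resolver), the sketch below orders a set of simultaneous moves over integer "locations": it emits any move whose destination is no longer needed as a source, and breaks cycles by parking one blocked source value in a scratch location and redirecting its readers, which is loosely analogous to the SetSource() call matched above.

```cpp
#include <cstddef>
#include <cstdio>
#include <vector>

// Each pending move copies the value currently in `source` into `destination`.
// Locations are plain ints here, a hypothetical simplification of art::Location.
struct Move {
  int source;
  int destination;
};

// Emit the moves of a parallel move one at a time without clobbering any value
// that is still needed. `scratch` must be a location no pending move uses.
void ResolveParallelMoves(std::vector<Move> pending, int scratch) {
  while (!pending.empty()) {
    bool progressed = false;
    for (size_t i = 0; i < pending.size(); ++i) {
      int dest = pending[i].destination;
      // A move is safe to emit once no other pending move still reads its destination.
      bool blocked = false;
      for (size_t j = 0; j < pending.size(); ++j) {
        if (j != i && pending[j].source == dest) { blocked = true; break; }
      }
      if (!blocked) {
        std::printf("move loc%d -> loc%d\n", pending[i].source, dest);
        pending.erase(pending.begin() + i);
        progressed = true;
        break;
      }
    }
    if (!progressed) {
      // Every remaining move is blocked: a cycle. Park one source value in the
      // scratch location and redirect the moves that were reading it.
      int parked = pending.front().source;
      std::printf("move loc%d -> loc%d (scratch)\n", parked, scratch);
      for (Move& m : pending) {
        if (m.source == parked) m.source = scratch;
      }
    }
  }
}

int main() {
  // A swap cycle (loc0 <-> loc1) plus one independent move.
  ResolveParallelMoves({{0, 1}, {1, 0}, {2, 3}}, /*scratch=*/99);
  return 0;
}
```

The real resolver works on Location objects and emits the actual instructions through the code generator rather than printing them, but the blocked-source bookkeeping has the same structure.
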
code_generator_x86.h
    181  void Move32(Location destination, Location source);
    183  void Move64(Location destination, Location source);

code_generator_arm.h
    179  void Move32(Location destination, Location source);
    181  void Move64(Location destination, Location source);

code_generator_x86_64.h
    183  void Move(Location destination, Location source);

nodes.h
    1471  MoveOperands(Location source, Location destination)   [in MoveOperands() argument]
    1472  : source_(source), destination_(destination) {}       [in MoveOperands()]

/art/runtime/base/

casts.h
    82  inline Dest bit_cast(const Source& source) {   [in bit_cast() argument]
    87  memcpy(&dest, &source, sizeof(dest));          [in bit_cast()]

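casts.h defines the classic memcpy-based bit_cast, which copies the object representation of one type into another of the same size without running into strict-aliasing problems. A self-contained sketch of the same idea is below; the is_trivially_copyable checks are an addition here, not necessarily part of ART's version.

```cpp
#include <cstdint>
#include <cstring>
#include <type_traits>

// memcpy-based bit_cast: reinterpret the bytes of `source` as a value of type Dest.
// Requires the two types to have the same size and be trivially copyable.
template <class Dest, class Source>
inline Dest bit_cast(const Source& source) {
  static_assert(sizeof(Dest) == sizeof(Source), "Dest and Source must have the same size");
  static_assert(std::is_trivially_copyable<Dest>::value, "Dest must be trivially copyable");
  static_assert(std::is_trivially_copyable<Source>::value, "Source must be trivially copyable");
  Dest dest;
  std::memcpy(&dest, &source, sizeof(dest));
  return dest;
}

// Example: inspect the bit pattern of a float.
// uint32_t bits = bit_cast<uint32_t>(1.0f);  // 0x3f800000
```
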
/art/test/098-ddmc/

expected.txt
    2   …mber of class name strings: 0 number of method name strings: 0 number of source file name strings:…
    14  …mber of class name strings: 0 number of method name strings: 0 number of source file name strings:…
    23  …mber of class name strings: 0 number of method name strings: 0 number of source file name strings:…

/art/compiler/utils/arm64/

assembler_arm64.cc
    92   void Arm64Assembler::StoreWToOffset(StoreOperandType type, WRegister source,           [in StoreWToOffset() argument]
    96   ___ Strb(reg_w(source), MEM_OP(reg_x(base), offset));                                   [in StoreWToOffset()]
    99   ___ Strh(reg_w(source), MEM_OP(reg_x(base), offset));                                   [in StoreWToOffset()]
    102  ___ Str(reg_w(source), MEM_OP(reg_x(base), offset));                                    [in StoreWToOffset()]
    109  void Arm64Assembler::StoreToOffset(Register source, Register base, int32_t offset) {    [in StoreToOffset() argument]
    110  CHECK_NE(source, SP);                                                                   [in StoreToOffset()]
    111  ___ Str(reg_x(source), MEM_OP(reg_x(base), offset));                                    [in StoreToOffset()]
    114  void Arm64Assembler::StoreSToOffset(SRegister source, Register base, int32_t offset) {  [in StoreSToOffset() argument]
    115  ___ Str(reg_s(source), MEM_OP(reg_x(base), offset));                                    [in StoreSToOffset()]
    118  void Arm64Assembler::StoreDToOffset(DRegister source, Register base, int32_t offset) {  [in StoreDToOffset() argument]
    [all …]

assembler_arm64.h
    222  void StoreWToOffset(StoreOperandType type, WRegister source,
    224  void StoreToOffset(Register source, Register base, int32_t offset);
    225  void StoreSToOffset(SRegister source, Register base, int32_t offset);
    226  void StoreDToOffset(DRegister source, Register base, int32_t offset);

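The Arm64Assembler store helpers above pick the store instruction from the operand type and register class: Strb/Strh/Str for byte, halfword, and word stores of a W register, and Str of X, S, and D registers for 64-bit, single-precision, and double-precision values. Here is a simplified sketch of that dispatch; the enum values and the Emit placeholder stand in for ART's StoreOperandType and the VIXL macro-assembler calls, so treat those names as assumptions.

```cpp
#include <cstdint>
#include <cstdio>

// Hypothetical stand-in for the StoreOperandType enum declared above.
enum class StoreOperandType { kStoreByte, kStoreHalfword, kStoreWord };

// Placeholder for the real macro-assembler; prints the instruction it would emit.
void Emit(const char* mnemonic, int reg, int base, int32_t offset) {
  std::printf("%s w%d, [x%d, #%d]\n", mnemonic, reg, base, offset);
}

// Shape of Arm64Assembler::StoreWToOffset: pick strb/strh/str from the operand type.
void StoreWToOffset(StoreOperandType type, int source_w, int base_x, int32_t offset) {
  switch (type) {
    case StoreOperandType::kStoreByte:     Emit("strb", source_w, base_x, offset); break;
    case StoreOperandType::kStoreHalfword: Emit("strh", source_w, base_x, offset); break;
    case StoreOperandType::kStoreWord:     Emit("str",  source_w, base_x, offset); break;
  }
}

int main() {
  StoreWToOffset(StoreOperandType::kStoreHalfword, /*source_w=*/1, /*base_x=*/0, /*offset=*/16);
  return 0;
}
```
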
/art/runtime/

thread_pool.h
    142  virtual void StealFrom(Thread* self, WorkStealingTask* source) = 0;

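StealFrom is the work-stealing hook: when a worker runs dry, its task takes over part of another task's remaining work. The sketch below shows one way such an interface could be implemented; the Run method, the range-splitting policy, and the missing synchronization around the bounds are all illustrative assumptions, not ART's WorkStealingTask.

```cpp
#include <cstddef>

class Thread;  // Opaque stand-in for the runtime's calling-thread handle.

// Interface in the spirit of the declaration above: Run does the work,
// StealFrom transfers part of `source`'s remaining work into this task.
class WorkStealingTask {
 public:
  virtual ~WorkStealingTask() {}
  virtual void Run(Thread* self) = 0;
  virtual void StealFrom(Thread* self, WorkStealingTask* source) = 0;
};

// Illustrative task over an index range that hands off its upper half when stolen from.
// NOTE: real work stealing needs locking around begin_/end_; omitted here for brevity.
class RangeTask : public WorkStealingTask {
 public:
  RangeTask(size_t begin, size_t end) : begin_(begin), end_(end) {}

  void Run(Thread* /*self*/) override {
    for (size_t i = begin_; i < end_; ++i) {
      // ... process element i ...
    }
  }

  void StealFrom(Thread* /*self*/, WorkStealingTask* source) override {
    RangeTask* victim = static_cast<RangeTask*>(source);  // Assumes the victim is a RangeTask.
    size_t mid = victim->begin_ + (victim->end_ - victim->begin_) / 2;
    begin_ = mid;         // This task takes the upper half...
    end_ = victim->end_;
    victim->end_ = mid;   // ...and the victim keeps the lower half.
  }

 private:
  size_t begin_;
  size_t end_;
};
```
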
elf_file.h
    146  bool CheckSectionsLinked(const byte* source, const byte* target) const;

elf_file.cc
    338  bool ElfFile::CheckSectionsLinked(const byte* source, const byte* target) const {   [in CheckSectionsLinked() argument]
    351  if (Begin() + section_header->sh_offset == source) {                                [in CheckSectionsLinked()]

class_linker.cc
    3267  std::string source;                                     [in InsertClass() local]
    3269  source += " from ";                                     [in InsertClass()]
    3270  source += dex_cache->GetLocation()->ToModifiedUtf8();   [in InsertClass()]
    3272  LOG(INFO) << "Loaded class " << descriptor << source;   [in InsertClass()]

/art/

NOTICE
    40   including but not limited to software source code, documentation
    41   source, and configuration files.
    69   communication on electronic mailing lists, source code control systems,
    219  * Redistributions of source code must retain the above copyright notice,

/art/compiler/dex/

mir_graph.cc
    1790  bool MIRGraph::HasSuspendTestBetween(BasicBlock* source, BasicBlockId target_id) {   [in HasSuspendTestBetween() argument]
    1793  if (source == nullptr || target == nullptr)                                          [in HasSuspendTestBetween()]
    1799  if (bb == source)                                                                    [in HasSuspendTestBetween()]
    1801  if (source->dominators->IsBitSet(bb->id) && bb->dominators->IsBitSet(target_id))     [in HasSuspendTestBetween()]

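The HasSuspendTestBetween matches above rely on per-block dominator bit sets: each basic block carries a bit vector with one bit per block id, and a dominance query is a single bit test. A small stand-alone illustration of that representation (std::vector<bool> in place of MIRGraph's arena bit vectors, and without the suspend-test bookkeeping itself):

```cpp
#include <cstddef>
#include <cstdio>
#include <vector>

// Minimal block model: each block stores the ids of the blocks that dominate it
// as a bit vector, a stand-in for the arena bit vectors used by MIRGraph.
struct Block {
  int id;
  std::vector<bool> dominators;  // dominators[d] == true  =>  block d dominates this block
};

// a dominates b iff a's id is set in b's dominator bit set.
bool Dominates(const Block& a, const Block& b) {
  return a.id >= 0 && static_cast<size_t>(a.id) < b.dominators.size() && b.dominators[a.id];
}

int main() {
  // Straight-line CFG: entry (0) -> middle (1) -> exit (2). Every block dominates itself.
  Block entry{0, {true, false, false}};
  Block middle{1, {true, true, false}};
  Block exit_block{2, {true, true, true}};

  std::printf("entry dominates exit: %d\n", Dominates(entry, exit_block));  // 1
  std::printf("middle dominates entry: %d\n", Dominates(middle, entry));    // 0
  return 0;
}
```
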
mir_graph.h
    1078  bool HasSuspendTestBetween(BasicBlock* source, BasicBlockId target_id);