/art/runtime/
linear_alloc.cc
    23  LinearAlloc::LinearAlloc(ArenaPool* pool) : lock_("linear alloc"), allocator_(pool) {    in LinearAlloc()
    28  return allocator_.Realloc(ptr, old_size, new_size);    in Realloc()
    33  return allocator_.Alloc(size);    in Alloc()
    38  return allocator_.AllocAlign16(size);    in AllocAlign16()
    43  return allocator_.BytesUsed();    in GetUsedMemory()
    48  return allocator_.GetArenaPool();    in GetArenaPool()
    53  return allocator_.Contains(ptr);    in Contains()
    57  return allocator_.Contains(ptr);    in ContainsUnsafe()

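The matches above show LinearAlloc as a thin facade: the constructor wires a mutex and an ArenaAllocator together, and every method simply forwards to allocator_ (the real methods presumably take lock_ as well; the excerpts only show the forwarding calls). A minimal self-contained sketch of that delegation pattern, with std::pmr::monotonic_buffer_resource standing in for ART's ArenaAllocator and Realloc()/Contains() omitted:

    #include <cstddef>
    #include <memory_resource>
    #include <mutex>

    // Hedged sketch only: the arena type, capacity handling, and the dropped
    // methods differ from ART's real LinearAlloc.
    class LinearAllocSketch {
     public:
      void* Alloc(size_t size) {
        std::lock_guard<std::mutex> guard(lock_);   // serialize callers, like lock_ above
        used_ += size;
        return arena_.allocate(size, alignof(std::max_align_t));
      }
      size_t GetUsedMemory() {
        std::lock_guard<std::mutex> guard(lock_);
        return used_;                               // like allocator_.BytesUsed()
      }
     private:
      std::mutex lock_;
      std::pmr::monotonic_buffer_resource arena_;   // stand-in for allocator_
      size_t used_ = 0;
    };
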
monitor_pool.cc
    60  void* chunk = allocator_.allocate(kChunkSize);    in AllocateChunk()
    99  allocator_.deallocate(reinterpret_cast<uint8_t*>(monitor_chunks_[i][j]), kChunkSize);    in FreeInternal()

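monitor_pool.cc hands out raw kChunkSize-byte chunks from an STL-style byte allocator and later returns each one through deallocate() with the same size. A sketch of that chunk lifecycle; std::allocator<uint8_t> replaces ART's allocator and the kChunkSize value is illustrative only:

    #include <cstddef>
    #include <cstdint>
    #include <memory>
    #include <vector>

    class ChunkPoolSketch {
     public:
      static constexpr size_t kChunkSize = 4096;  // illustrative value, not ART's

      void* AllocateChunk() {
        uint8_t* chunk = allocator_.allocate(kChunkSize);
        chunks_.push_back(chunk);
        return chunk;
      }

      ~ChunkPoolSketch() {
        // Mirrors FreeInternal(): every chunk goes back through deallocate()
        // with the size it was allocated with.
        for (uint8_t* chunk : chunks_) {
          allocator_.deallocate(chunk, kChunkSize);
        }
      }

     private:
      std::allocator<uint8_t> allocator_;
      std::vector<uint8_t*> chunks_;
    };
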
/art/libartbase/base/
arena_containers.h
   143  allocator_(allocator) {    in ArenaAllocatorAdapterKind()
   148  allocator_(other.allocator_) {    in ArenaAllocatorAdapter()
   155  ArenaAllocator* allocator_;
   179  allocator_(allocator) {    in ArenaAllocatorAdapter()
   184  allocator_(other.allocator_) {    in ArenaAllocatorAdapter()
   200  return allocator_->AllocArray<T>(n, ArenaAllocatorAdapterKind::Kind());
   203  allocator_->MakeInaccessible(p, sizeof(T) * n);    in deallocate()
   216  ArenaAllocator* allocator_;
   229  return lhs.allocator_ == rhs.allocator_;

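ArenaAllocatorAdapter is a standard C++ allocator whose allocate() forwards to the arena (AllocArray<T> tagged with an allocation kind) and whose deallocate() only calls MakeInaccessible(), since arena memory is released in bulk. A reduced sketch of the same adapter idea over a std::pmr resource; the allocation-kind tag and the memory-tool poisoning are left out:

    #include <cstddef>
    #include <memory_resource>
    #include <vector>

    // Minimal Allocator adapter: containers built with it place their storage
    // in the shared arena, and individual deallocation is a no-op.
    template <typename T>
    class ArenaAdapterSketch {
     public:
      using value_type = T;

      explicit ArenaAdapterSketch(std::pmr::monotonic_buffer_resource* arena) : arena_(arena) {}

      template <typename U>
      ArenaAdapterSketch(const ArenaAdapterSketch<U>& other) : arena_(other.arena_) {}

      T* allocate(size_t n) {
        return static_cast<T*>(arena_->allocate(n * sizeof(T), alignof(T)));
      }
      void deallocate(T*, size_t) {}  // bulk-freed with the arena

      std::pmr::monotonic_buffer_resource* arena_;
    };

    template <typename T, typename U>
    bool operator==(const ArenaAdapterSketch<T>& lhs, const ArenaAdapterSketch<U>& rhs) {
      return lhs.arena_ == rhs.arena_;   // same comparison as line 229 above
    }
    template <typename T, typename U>
    bool operator!=(const ArenaAdapterSketch<T>& lhs, const ArenaAdapterSketch<U>& rhs) {
      return !(lhs == rhs);
    }

    // Usage: a vector whose buffer lives in the arena.
    //   std::pmr::monotonic_buffer_resource arena;
    //   std::vector<int, ArenaAdapterSketch<int>> v{ArenaAdapterSketch<int>(&arena)};
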
arena_bit_vector.cc
    66  return allocator_->Alloc(size, this->Kind());    in Alloc()
    73  : ArenaBitVectorAllocatorKind(kind), allocator_(allocator) { }    in ArenaBitVectorAllocator()
    75  ArenaAlloc* const allocator_;    member in art::ArenaBitVectorAllocator

bit_vector.cc
    33  allocator_(allocator),    in BitVector()
    66  allocator_->Free(storage_);    in ~BitVector()
   360  static_cast<uint32_t*>(allocator_->Alloc(new_size * kWordBytes));    in EnsureSize()
   367  allocator_->Free(storage_);    in EnsureSize()
   376  return allocator_;    in GetAllocator()

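bit_vector.cc grows its word storage on demand: EnsureSize() allocates a larger array from allocator_ and frees the old one, and the destructor releases the storage through the same allocator. A sketch of that grow-and-swap step, with a malloc-backed stand-in for BitVector's Allocator interface; the copy of the old words is an assumption, since the excerpt does not show it:

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>
    #include <cstring>

    // Stand-in for the Alloc()/Free() allocator interface used above.
    struct MallocAllocatorSketch {
      void* Alloc(size_t size) { return std::calloc(1, size); }  // zero-filled
      void Free(void* p) { std::free(p); }
    };

    class GrowableWordArraySketch {
     public:
      explicit GrowableWordArraySketch(MallocAllocatorSketch* allocator)
          : allocator_(allocator), storage_(nullptr), size_(0) {}

      ~GrowableWordArraySketch() { allocator_->Free(storage_); }

      void EnsureSize(uint32_t new_size) {
        if (new_size <= size_) return;
        auto* new_storage =
            static_cast<uint32_t*>(allocator_->Alloc(new_size * sizeof(uint32_t)));
        if (storage_ != nullptr) {
          std::memcpy(new_storage, storage_, size_ * sizeof(uint32_t));  // keep old bits
          allocator_->Free(storage_);                                    // release old words
        }
        storage_ = new_storage;
        size_ = new_size;
      }

     private:
      MallocAllocatorSketch* allocator_;
      uint32_t* storage_;
      uint32_t size_;
    };
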
/art/compiler/utils/
assembler.cc
    29  : allocator_(allocator) {    in AssemblerBuffer()
    31  contents_ = allocator_->AllocArray<uint8_t>(kInitialBufferCapacity, kArenaAllocAssembler);    in AssemblerBuffer()
    48  if (allocator_->IsRunningOnMemoryTool()) {    in ~AssemblerBuffer()
    49  allocator_->MakeInaccessible(contents_, Capacity());    in ~AssemblerBuffer()
    84  allocator_->Realloc(contents_, old_capacity, new_capacity, kArenaAllocAssembler));    in ExtendCapacity()

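AssemblerBuffer allocates its code bytes from the arena at a fixed initial capacity, extends them with Realloc() when an emit would overflow, and re-poisons the buffer in the destructor when running under a memory tool. A sketch of the growth half only, using std::realloc in place of allocator_->Realloc() and an illustrative initial capacity:

    #include <cstddef>
    #include <cstdint>
    #include <cstdlib>

    class CodeBufferSketch {
     public:
      static constexpr size_t kInitialBufferCapacity = 4 * 1024;  // illustrative value

      CodeBufferSketch()
          : contents_(static_cast<uint8_t*>(std::malloc(kInitialBufferCapacity))),
            capacity_(kInitialBufferCapacity),
            size_(0) {}

      ~CodeBufferSketch() { std::free(contents_); }

      void Emit8(uint8_t byte) {
        if (size_ == capacity_) ExtendCapacity(size_ + 1);
        contents_[size_++] = byte;
      }

     private:
      // Mirrors ExtendCapacity(): grow geometrically, let realloc move the buffer.
      void ExtendCapacity(size_t min_capacity) {
        size_t new_capacity = capacity_ * 2;
        if (new_capacity < min_capacity) new_capacity = min_capacity;
        contents_ = static_cast<uint8_t*>(std::realloc(contents_, new_capacity));
        capacity_ = new_capacity;
      }

      uint8_t* contents_;
      size_t capacity_;
      size_t size_;
    };
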
jni_macro_assembler_test.h
    58  allocator_.reset(new ArenaAllocator(&pool_));    in SetUp()
    59  assembler_.reset(CreateAssembler(allocator_.get()));    in SetUp()
    66  allocator_.reset();    in TearDown()
    93  std::unique_ptr<ArenaAllocator> allocator_;    variable

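The test fixture owns the ArenaAllocator through a std::unique_ptr, creating it in SetUp() and dropping it in TearDown() so each test runs against a fresh arena. A sketch of that fixture shape with hypothetical Arena/Assembler stand-ins; resetting the assembler before its arena is this sketch's choice, as the real TearDown() is only partially shown above:

    #include <memory>
    #include <gtest/gtest.h>

    struct Arena {};                                     // stand-in for ArenaAllocator
    struct Assembler { explicit Assembler(Arena*) {} };  // stand-in for the assembler under test

    class AssemblerTestSketch : public ::testing::Test {
     protected:
      void SetUp() override {
        allocator_.reset(new Arena());                  // fresh arena per test
        assembler_.reset(new Assembler(allocator_.get()));
      }
      void TearDown() override {
        assembler_.reset();   // release arena-backed objects before their arena
        allocator_.reset();
      }

      std::unique_ptr<Arena> allocator_;
      std::unique_ptr<Assembler> assembler_;
    };

    TEST_F(AssemblerTestSketch, AllocatorIsFreshPerTest) {
      EXPECT_NE(allocator_.get(), nullptr);
    }
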
/art/compiler/optimizing/
instruction_builder.cc
    95  : allocator_(graph->GetAllocator()),    in HInstructionBuilder()
   146  HPhi* phi = new (allocator_) HPhi(    in GetLocalsForWithAllocation()
   147  allocator_,    in GetLocalsForWithAllocation()
   196  HPhi* phi = new (allocator_) HPhi(    in InitializeBlockLocals()
   197  allocator_,    in InitializeBlockLocals()
   235  HPhi* phi = new (allocator_) HPhi(    in InitializeBlockLocals()
   236  allocator_,    in InitializeBlockLocals()
   297  HEnvironment* environment = new (allocator_) HEnvironment(    in InitializeInstruction()
   298  allocator_,    in InitializeInstruction()
   314  HNullCheck* null_check = new (allocator_) HNullCheck(ref, dex_pc);    in LoadNullCheckedLocal()
    [all …]

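The instruction builder caches graph->GetAllocator() and placement-news every IR node from it (`new (allocator_) HPhi(allocator_, ...)`), often passing the same allocator into the node so it can place its own internal storage in the graph's arena. `new (allocator_)` only compiles because the node type (or a base class) provides a matching placement operator new; a self-contained sketch of that overload with stand-in types:

    #include <cstddef>
    #include <cstdint>
    #include <memory_resource>

    using ArenaSketch = std::pmr::monotonic_buffer_resource;  // stand-in for ArenaAllocator

    class ArenaObjectSketch {
     public:
      // Routes `new (arena) T(...)` through the arena instead of the global heap.
      void* operator new(size_t size, ArenaSketch* arena) {
        return arena->allocate(size, alignof(std::max_align_t));
      }
      // Matching placement delete, only used if a constructor throws.
      void operator delete(void*, ArenaSketch*) {}
     protected:
      ~ArenaObjectSketch() = default;  // nodes are never deleted individually
    };

    class PhiSketch : public ArenaObjectSketch {
     public:
      PhiSketch(ArenaSketch* arena, uint32_t reg) : arena_(arena), reg_(reg) {}
     private:
      ArenaSketch* arena_;  // kept so the node can allocate its own lists later
      uint32_t reg_;
    };

    // Usage, mirroring the matches above:
    //   ArenaSketch arena;
    //   PhiSketch* phi = new (&arena) PhiSketch(&arena, /*reg=*/ 0);
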
bounds_check_elimination.cc
   292  : allocator_(allocator), lower_(lower), upper_(upper) {}    in ValueRange()
   301  ScopedArenaAllocator* GetAllocator() const { return allocator_; }    in GetAllocator()
   329  return new (allocator_) ValueRange(    in Narrow()
   330  allocator_,    in Narrow()
   350  return new (allocator_) ValueRange(allocator_, lower, upper);    in Add()
   354  ScopedArenaAllocator* const allocator_;    member in art::ValueRange
   514  allocator_(graph->GetArenaStack()),    in BCEVisitor()
   518  allocator_.Adapter(kArenaAllocBoundsCheckElimination)),    in BCEVisitor()
   519  allocator_.Adapter(kArenaAllocBoundsCheckElimination)),    in BCEVisitor()
   521  allocator_.Adapter(kArenaAllocBoundsCheckElimination)),    in BCEVisitor()
    [all …]

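BCEVisitor owns a ScopedArenaAllocator built from the graph's arena stack and constructs each of its internal containers in the initializer list with allocator_.Adapter(kArenaAllocBoundsCheckElimination), so all of the pass's bookkeeping disappears with the allocator. A sketch of that ownership shape using std::pmr stand-ins for the scoped arena and the adapter:

    #include <memory_resource>
    #include <vector>

    class BceVisitorSketch {
     public:
      BceVisitorSketch()
          : allocator_(),
            maps_(&allocator_),               // like maps_(..., allocator_.Adapter(...))
            dynamic_bce_blocks_(&allocator_)  // every container shares the pass arena
      {}

     private:
      // Declared first so it outlives the containers that draw from it.
      std::pmr::monotonic_buffer_resource allocator_;
      std::pmr::vector<int> maps_;
      std::pmr::vector<int> dynamic_bce_blocks_;
    };
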
gvn.cc
    41  : allocator_(allocator),    in ValueSet()
    54  : allocator_(allocator),    in ValueSet()
    92  buckets_[index] = new (allocator_) Node(instruction, hash_code, buckets_[index]);    in Add()
   177  buckets_[new_index] = node->Dup(allocator_, buckets_[new_index]);    in PopulateFromInternal()
   233  clone_current = node->Dup(allocator_, nullptr);    in CloneBucket()
   332  ScopedArenaAllocator* const allocator_;    member in art::ValueSet
   359  allocator_(graph->GetArenaStack()),    in GlobalValueNumberer()
   361  sets_(graph->GetBlocks().size(), nullptr, allocator_.Adapter(kArenaAllocGvn)),    in GlobalValueNumberer()
   363  &allocator_, graph->GetBlocks().size(), /* expandable= */ false, kArenaAllocGvn) {    in GlobalValueNumberer()
   375  ScopedArenaAllocator allocator_;    member in art::GlobalValueNumberer
    [all …]

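ValueSet is a chained hash set: a bucket array of singly linked nodes, each node placement-new'd from the pass's arena, and Dup() re-creates nodes in a destination arena when a set is copied. A sketch of the bucket/chain layout with arena-allocated nodes; Dup() and the instruction hashing are omitted, and the node payload is a plain integer here:

    #include <cstddef>
    #include <cstdint>
    #include <memory_resource>
    #include <new>
    #include <vector>

    class ValueSetSketch {
      struct Node {
        int32_t value;
        size_t hash;
        Node* next;   // intrusive chain within one bucket
      };

     public:
      explicit ValueSetSketch(std::pmr::memory_resource* arena, size_t num_buckets = 16)
          : arena_(arena), buckets_(num_buckets, nullptr) {}

      void Add(int32_t value, size_t hash) {
        size_t index = hash % buckets_.size();
        // Like `buckets_[index] = new (allocator_) Node(instruction, hash_code, buckets_[index])`.
        void* raw = arena_->allocate(sizeof(Node), alignof(Node));
        buckets_[index] = new (raw) Node{value, hash, buckets_[index]};
      }

      bool Contains(int32_t value, size_t hash) const {
        for (Node* n = buckets_[hash % buckets_.size()]; n != nullptr; n = n->next) {
          if (n->hash == hash && n->value == value) return true;
        }
        return false;
      }

     private:
      std::pmr::memory_resource* arena_;
      std::vector<Node*> buckets_;
    };
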
block_builder.cc
    31  : allocator_(graph->GetAllocator()),    in HBasicBlockBuilder()
    55  block = new (allocator_) HBasicBlock(graph_, semantic_dex_pc);    in MaybeCreateBlockAt()
   339  HBasicBlock* new_catch_block = new (allocator_) HBasicBlock(graph_, address);    in InsertTryBoundaryBlocks()
   340  new_catch_block->AddInstruction(new (allocator_) HGoto(address));    in InsertTryBoundaryBlocks()
   348  new (allocator_) TryCatchInformation(iterator.GetHandlerTypeIndex(), *dex_file_));    in InsertTryBoundaryBlocks()
   365  HTryBoundary* try_entry = new (allocator_) HTryBoundary(    in InsertTryBoundaryBlocks()
   394  new (allocator_) HTryBoundary(HTryBoundary::BoundaryKind::kExit, successor->GetDexPc());    in InsertTryBoundaryBlocks()
   402  ArenaSet<uint32_t> targets(allocator_->Adapter(kArenaAllocGraphBuilder));    in InsertSynthesizedLoopsForOsr()
   430  HBasicBlock* loop_block = new (allocator_) HBasicBlock(graph_, block->GetDexPc());    in InsertSynthesizedLoopsForOsr()
   439  loop_block->AddInstruction(new (allocator_) HIf(graph_->GetIntConstant(0), kNoDexPc));    in InsertSynthesizedLoopsForOsr()
    [all …]

ssa_liveness_analysis.h
   305  UsePosition* new_use = new (allocator_) UsePosition(instruction, temp_index, position);    in AddTempUse()
   355  UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
   366  new (allocator_) EnvUsePosition(environment, input_index, position);
   370  UsePosition* new_use = new (allocator_) UsePosition(instruction, input_index, position);
   378  new (allocator_) LiveRange(start_block_position, position, nullptr);
   394  new (allocator_) LiveRange(start_block_position, position, first_range_);
   404  new (allocator_) UsePosition(instruction, input_index, block->GetLifetimeEnd());    in AddPhiUse()
   411  new (allocator_) LiveRange(start, end, first_range_);    in AddRange()
   420  first_range_ = range_search_start_ = new (allocator_) LiveRange(start, end, first_range_);    in AddRange()
   438  new (allocator_) LiveRange(start, end, nullptr);    in AddLoopRange()
    [all …]

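ssa_liveness_analysis.h builds intrusive singly linked lists: each AddRange()/AddUse() call allocates a LiveRange or UsePosition node from the arena and links it in front of the existing head. A sketch of that head-insertion pattern with simplified types; any coalescing or ordering logic in the real code is skipped here:

    #include <cstddef>
    #include <memory_resource>
    #include <new>

    // Simplified stand-in for LiveRange: the `next` pointer makes the list intrusive.
    struct LiveRangeSketch {
      size_t start;
      size_t end;
      LiveRangeSketch* next;
    };

    class LiveIntervalSketch {
     public:
      explicit LiveIntervalSketch(std::pmr::memory_resource* allocator)
          : allocator_(allocator), first_range_(nullptr) {}

      void AddRange(size_t start, size_t end) {
        // Like `first_range_ = new (allocator_) LiveRange(start, end, first_range_);`
        void* raw = allocator_->allocate(sizeof(LiveRangeSketch), alignof(LiveRangeSketch));
        first_range_ = new (raw) LiveRangeSketch{start, end, first_range_};
      }

     private:
      std::pmr::memory_resource* allocator_;
      LiveRangeSketch* first_range_;
    };
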
intrinsics_x86_64.cc
    44  : allocator_(codegen->GetGraph()->GetAllocator()), codegen_(codegen) {    in IntrinsicLocationsBuilderX86_64()
   153  CreateFPToIntLocations(allocator_, invoke);    in VisitDoubleDoubleToRawLongBits()
   156  CreateIntToFPLocations(allocator_, invoke);    in VisitDoubleLongBitsToDouble()
   167  CreateFPToIntLocations(allocator_, invoke);    in VisitFloatFloatToRawIntBits()
   170  CreateIntToFPLocations(allocator_, invoke);    in VisitFloatIntBitsToFloat()
   211  CreateIntToIntLocations(allocator_, invoke);    in VisitIntegerReverseBytes()
   219  CreateIntToIntLocations(allocator_, invoke);    in VisitLongReverseBytes()
   227  CreateIntToIntLocations(allocator_, invoke);    in VisitShortReverseBytes()
   242  CreateFPToFPLocations(allocator_, invoke);    in VisitMathSqrt()
   273  CreateSSE41FPToFPLocations(allocator_, invoke, codegen_);    in VisitMathCeil()
    [all …]

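Every intrinsic visitor above (and the matching ones in intrinsics_arm64.cc and intrinsics_x86.cc further down) is a one-liner that hands allocator_ and the invoke node to a small CreateXToYLocations() helper, which builds the node's location summary in arena memory. A sketch of that helper shape with stand-in types for LocationSummary and HInvoke; the register kinds here are invented for illustration:

    #include <memory_resource>
    #include <new>

    enum class LocSketch { kFpuRegister, kCoreRegister };

    struct LocationSummarySketch {
      LocSketch input;
      LocSketch output;
    };

    struct InvokeSketch {
      LocationSummarySketch* locations = nullptr;
    };

    // "FP in, core register out" — the shape suggested by CreateFPToIntLocations().
    inline void CreateFpToIntLocationsSketch(std::pmr::memory_resource* allocator,
                                             InvokeSketch* invoke) {
      void* raw = allocator->allocate(sizeof(LocationSummarySketch),
                                      alignof(LocationSummarySketch));
      invoke->locations = new (raw) LocationSummarySketch{LocSketch::kFpuRegister,
                                                          LocSketch::kCoreRegister};
    }

    // A visitor method then reduces to a single call, as in the matches above:
    //   void VisitDoubleDoubleToRawLongBits(InvokeSketch* invoke) {
    //     CreateFpToIntLocationsSketch(allocator_, invoke);
    //   }
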
register_allocation_resolver.cc
    28  : allocator_(codegen->GetGraph()->GetAllocator()),    in RegisterAllocationResolver()
   537  move = new (allocator_) HParallelMove(allocator_);    in AddInputMoveFor()
   585  move = new (allocator_) HParallelMove(allocator_);    in InsertParallelMoveAt()
   600  move = new (allocator_) HParallelMove(allocator_);    in InsertParallelMoveAt()
   616  move = new (allocator_) HParallelMove(allocator_);    in InsertParallelMoveAt()
   648  move = new (allocator_) HParallelMove(allocator_);    in InsertParallelMoveAtExitOf()
   670  move = new (allocator_) HParallelMove(allocator_);    in InsertParallelMoveAtEntryOf()
   694  move = new (allocator_) HParallelMove(allocator_);    in InsertMoveAfter()

intrinsics_arm64.h
    43  : allocator_(allocator), codegen_(codegen) {}    in IntrinsicLocationsBuilderARM64()
    60  ArenaAllocator* const allocator_;

graph_checker.h
    40  allocator_(graph->GetArenaStack()),    in HGraphDelegateVisitor()
    41  seen_ids_(&allocator_, graph->GetCurrentInstructionId(), false, kArenaAllocGraphChecker),    in HGraphDelegateVisitor()
   128  ScopedArenaAllocator allocator_;    variable

intrinsics_arm64.cc
   201  CreateFPToIntLocations(allocator_, invoke);    in VisitDoubleDoubleToRawLongBits()
   204  CreateIntToFPLocations(allocator_, invoke);    in VisitDoubleLongBitsToDouble()
   215  CreateFPToIntLocations(allocator_, invoke);    in VisitFloatFloatToRawIntBits()
   218  CreateIntToFPLocations(allocator_, invoke);    in VisitFloatIntBitsToFloat()
   293  CreateIntToIntLocations(allocator_, invoke);    in VisitIntegerReverseBytes()
   301  CreateIntToIntLocations(allocator_, invoke);    in VisitLongReverseBytes()
   309  CreateIntToIntLocations(allocator_, invoke);    in VisitShortReverseBytes()
   328  CreateIntToIntLocations(allocator_, invoke);    in VisitIntegerNumberOfLeadingZeros()
   336  CreateIntToIntLocations(allocator_, invoke);    in VisitLongNumberOfLeadingZeros()
   356  CreateIntToIntLocations(allocator_, invoke);    in VisitIntegerNumberOfTrailingZeros()
    [all …]

execution_subgraph.cc
    33  allocator_(allocator),    in ExecutionSubgraph()
    36  allocator_->Adapter(kArenaAllocLSA)),
    38  allocator_, graph_->GetBlocks().size(), /*expandable=*/ false, kArenaAllocLSA),
   264  excluded_list_.emplace(allocator_->Adapter(kArenaAllocLSA));    in RecalculateExcludedCohort()
   267  ArenaBitVector unreachable(allocator_, graph_->GetBlocks().size(), false, kArenaAllocLSA);    in RecalculateExcludedCohort()
   271  res.emplace_back(allocator_, graph_);    in RecalculateExcludedCohort()

intrinsics_x86.cc
    46  : allocator_(codegen->GetGraph()->GetAllocator()),    in IntrinsicLocationsBuilderX86()
   223  CreateFPToIntLocations(allocator_, invoke, /* is64bit= */ true);    in VisitDoubleDoubleToRawLongBits()
   226  CreateIntToFPLocations(allocator_, invoke, /* is64bit= */ true);    in VisitDoubleLongBitsToDouble()
   237  CreateFPToIntLocations(allocator_, invoke, /* is64bit= */ false);    in VisitFloatFloatToRawIntBits()
   240  CreateIntToFPLocations(allocator_, invoke, /* is64bit= */ false);    in VisitFloatIntBitsToFloat()
   292  CreateIntToIntLocations(allocator_, invoke);    in VisitIntegerReverseBytes()
   300  CreateLongToLongLocations(allocator_, invoke);    in VisitLongReverseBytes()
   321  CreateIntToIntLocations(allocator_, invoke);    in VisitShortReverseBytes()
   336  CreateFPToFPLocations(allocator_, invoke);    in VisitMathSqrt()
   367  CreateSSE41FPToFPLocations(allocator_, invoke, codegen_);    in VisitMathCeil()
    [all …]

scheduler.h
   325  : allocator_(allocator),    in SchedulingGraph()
   327  nodes_map_(allocator_->Adapter(kArenaAllocScheduler)),    in SchedulingGraph()
   332  new (allocator_) SchedulingNode(instr, allocator_, is_scheduling_barrier));
   389  ScopedArenaAllocator* const allocator_;    variable

parallel_move_resolver.h
   129  allocator_(allocator) {    in ParallelMoveResolverNoSwap()
   197  ArenaAllocator* const allocator_;    variable

load_store_analysis.h
    52  allocator_(allocator),    in ReferenceInfo()
   151  ScopedArenaAllocator* allocator_;    variable
   251  allocator_(allocator),    in HeapLocationCollector()
   511  ref_info = new (allocator_) ReferenceInfo(instruction, allocator_, pos, lse_type_);    in GetOrCreateReferenceInfo()
   536  HeapLocation* heap_loc = new (allocator_)    in MaybeCreateHeapLocation()
   644  ScopedArenaAllocator* allocator_;    variable

stack_map_stream.h
    40  : allocator_(allocator),    in StackMapStream()
   117  ScopedArenaAllocator* allocator_;    variable

/art/runtime/verifier/
reg_type_cache.cc
   179  char* ptr = allocator_.AllocArray<char>(str.length());    in AddString()
   213  new (&allocator_) PreciseReferenceType(klass, AddString(sv_descriptor), entries_.size());    in From()
   215  entry = new (&allocator_) ReferenceType(klass, AddString(sv_descriptor), entries_.size());    in From()
   229  new (&allocator_) UnresolvedReferenceType(AddString(sv_descriptor), entries_.size()));    in From()
   240  return AddEntry(new (&allocator_) UnresolvedReferenceType(AddString("a"), entries_.size()));    in MakeUnresolvedReference()
   269  new (&allocator_) PreciseReferenceType(klass, descriptor, entries_.size()))    in InsertClass()
   270  : new (&allocator_) ReferenceType(klass, descriptor, entries_.size());    in InsertClass()
   291  allocator_(allocator),    in RegTypeCache()
   397  ArenaBitVector types(&allocator_,    in FromUnresolvedMerge()
   477  return AddEntry(new (&allocator_) UnresolvedMergedType(resolved_parts_merged,    in FromUnresolvedMerge()
    [all …]

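RegTypeCache::AddString() copies descriptor text into a char array obtained from the arena, so the cached RegType entries (themselves placement-new'd into the same arena) can reference it for the cache's lifetime. A sketch under the assumption that the function takes and returns a std::string_view; the excerpt only shows the allocation line, so the exact signature is not confirmed:

    #include <cstring>
    #include <memory_resource>
    #include <string_view>

    // Copy `str` into arena-owned storage and return a view of the copy.
    inline std::string_view AddStringSketch(std::pmr::memory_resource* allocator,
                                            std::string_view str) {
      if (str.empty()) return std::string_view();
      char* ptr = static_cast<char*>(allocator->allocate(str.length(), alignof(char)));
      std::memcpy(ptr, str.data(), str.length());
      return std::string_view(ptr, str.length());
    }
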
/art/runtime/mirror/
class_loader.h
    60  GetField64(OFFSET_OF_OBJECT_MEMBER(ClassLoader, allocator_)));    in GetAllocator()
    64  SetField64<false>(OFFSET_OF_OBJECT_MEMBER(ClassLoader, allocator_),    in SetAllocator()
    85  uint64_t allocator_;    variable

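mirror::ClassLoader stores the native allocator as a raw 64-bit field read and written through GetField64/SetField64, wide enough to hold a pointer on both 32- and 64-bit targets. A plain-C++ sketch of the pointer/uint64_t round trip those accessors have to perform; the managed-field-offset machinery is left out and the stored type is a stand-in:

    #include <cstdint>

    struct LinearAllocStub {};  // stand-in for the native allocator object

    class ClassLoaderSketch {
     public:
      LinearAllocStub* GetAllocator() const {
        // Narrow the stored 64-bit value back to a native pointer.
        return reinterpret_cast<LinearAllocStub*>(static_cast<uintptr_t>(allocator_));
      }
      void SetAllocator(LinearAllocStub* allocator) {
        // Widen the pointer so the field layout is identical on 32- and 64-bit targets.
        allocator_ = static_cast<uint64_t>(reinterpret_cast<uintptr_t>(allocator));
      }

     private:
      uint64_t allocator_ = 0;
    };
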