/art/compiler/optimizing/
nodes.cc
    24  block->SetBlockId(blocks_.Size());  in AddBlock()
    29  ArenaBitVector visiting(arena_, blocks_.Size(), false);  in FindBackEdges()
    34  for (size_t i = 0; i < blocks_.Size(); ++i) {  in RemoveDeadBlocks()
    37  for (size_t j = 0; j < block->GetSuccessors().Size(); ++j) {  in RemoveDeadBlocks()
    58  for (size_t i = 0; i < block->GetSuccessors().Size(); i++) {  in VisitBlockForBackEdges()
    70  ArenaBitVector visited(arena_, blocks_.Size(), false);  in BuildDominatorTree()
    87  GrowableArray<size_t> visits(arena_, blocks_.Size());  in BuildDominatorTree()
    88  visits.SetSize(blocks_.Size());  in BuildDominatorTree()
    90  for (size_t i = 0; i < entry_block_->GetSuccessors().Size(); i++) {  in BuildDominatorTree()
    96  ArenaBitVector visited(arena_, blocks_.Size(), false);  in FindCommonDominator()
   [all …]

ssa_builder.cc
    33  for (size_t i = 0; i < loop_headers_.Size(); i++) {  in BuildSsa()
    37  for (size_t pred = 0; pred < block->GetPredecessors().Size(); pred++) {  in BuildSsa()
    72  for (size_t local = 0; local < current_locals_->Size(); local++) {  in VisitBasicBlock()
    84  } else if (block->GetPredecessors().Size() > 0) {  in VisitBasicBlock()
    87  for (size_t local = 0; local < current_locals_->Size(); local++) {  in VisitBasicBlock()
    92  for (size_t i = 0, e = block->GetPredecessors().Size(); i < e; ++i) {  in VisitBasicBlock()
   110  GetGraph()->GetArena(), local, block->GetPredecessors().Size(), Primitive::kPrimVoid);  in VisitBasicBlock()
   111  for (size_t i = 0; i < block->GetPredecessors().Size(); i++) {  in VisitBasicBlock()
   147  GetGraph()->GetArena(), current_locals_->Size());  in VisitInstruction()

parallel_move_resolver.cc
    28  for (size_t i = 0; i < moves_.Size(); ++i) {  in EmitNativeCode()
    39  for (size_t i = 0; i < moves_.Size(); ++i) {  in EmitNativeCode()
    86  for (size_t i = 0; i < moves_.Size(); ++i) {  in PerformMove()
   118  for (size_t i = 0; i < moves_.Size(); ++i) {  in PerformMove()
   135  for (size_t i = 0; i < moves_.Size(); ++i) {  in PerformMove()
   151  for (size_t i = 0; i < moves_.Size(); ++i) {  in IsScratchLocation()
   157  for (size_t i = 0; i < moves_.Size(); ++i) {  in IsScratchLocation()

stack_map_stream.h
    81  entry.dex_register_maps_start_index = dex_register_maps_.Size();  in AddStackMapEntry()
    82  entry.inline_infos_start_index = inline_infos_.Size();  in AddStackMapEntry()
   112  return stack_maps_.Size() * (StackMap<T>::kFixedSize + StackMaskEncodingSize(stack_mask_max_));  in ComputeStackMapSize()
   117  return stack_maps_.Size() * DexRegisterMap::kFixedSize  in ComputeDexRegisterMapSize()
   119  + (dex_register_maps_.Size() * DexRegisterMap::SingleEntrySize());  in ComputeDexRegisterMapSize()
   123  return inline_infos_.Size() * InlineInfo::SingleEntrySize()  in ComputeInlineInfoSize()
   150  code_info.SetNumberOfStackMaps(stack_maps_.Size());  in FillIn()
   155  for (size_t i = 0, e = stack_maps_.Size(); i < e; ++i) {  in FillIn()

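A pattern worth noting in the stack_map_stream.h hits: Size() feeds a measure-then-fill scheme, where the byte size of the encoded table is computed from entry counts before a single pass writes the entries out. The sketch below is a minimal illustration of that two-phase idea, not ART's actual StackMap encoding; Entry, Stream, and kFixedSize are hypothetical stand-ins.

#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

// Hypothetical entry type standing in for ART's stack map entries.
struct Entry {
  uint32_t dex_pc;
  uint32_t native_pc;
};

class Stream {
 public:
  void Add(Entry e) { entries_.push_back(e); }

  // Phase 1: derive the output size from the entry count, as
  // ComputeStackMapSize() does with stack_maps_.Size().
  size_t ComputeSize() const { return entries_.size() * kFixedSize; }

  // Phase 2: fill a buffer allocated with ComputeSize() bytes,
  // mirroring the FillIn() loop over stack_maps_.Size().
  void FillIn(uint8_t* region) const {
    for (size_t i = 0, e = entries_.size(); i < e; ++i) {
      std::memcpy(region + i * kFixedSize, &entries_[i], kFixedSize);
    }
  }

 private:
  static constexpr size_t kFixedSize = 2 * sizeof(uint32_t);
  std::vector<Entry> entries_;
};
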
register_allocator.cc
    52  for (size_t i = 0, e = graph.GetBlocks().Size(); i < e; ++i) {  in CanAllocateRegistersFor()
   215  for (size_t i = 0, e = physical_register_intervals_.Size(); i < e; ++i) {  in ValidateInternal()
   222  return ValidateIntervals(intervals, spill_slots_.Size(), *codegen_, allocator_,  in ValidateInternal()
   244  for (size_t i = 0, e = intervals.Size(); i < e; ++i) {  in ValidateIntervals()
   320  for (size_t i = 0; i < active_.Size(); ++i) {  in LinearScan()
   335  for (size_t i = 0; i < inactive_.Size(); ++i) {  in LinearScan()
   378  for (size_t i = 0, e = inactive_.Size(); i < e; ++i) {  in TryAllocateFreeReg()
   388  for (size_t i = 0, e = active_.Size(); i < e; ++i) {  in TryAllocateFreeReg()
   446  for (size_t i = 0, e = active_.Size(); i < e; ++i) {  in AllocateBlockedReg()
   463  for (size_t i = 0, e = inactive_.Size(); i < e; ++i) {  in AllocateBlockedReg()
   [all …]

code_generator.cc
    37  block_labels_.SetSize(blocks.Size());  in CompileBaseline()
    49  for (size_t i = 0, e = blocks.Size(); i < e; ++i) {  in CompileBaseline()
    75  block_labels_.SetSize(blocks.Size());  in CompileOptimized()
    78  for (size_t i = 0, e = blocks.Size(); i < e; ++i) {  in CompileOptimized()
    96  for (size_t i = 0, e = slow_paths_.Size(); i < e; ++i) {  in GenerateSlowPaths()
   282  for (size_t i = 0; i < pc_infos_.Size(); i++) {  in BuildNativeGCMap()
   289  GcMapBuilder builder(data, pc_infos_.Size(), max_native_offset, dex_gc_map.RegWidth());  in BuildNativeGCMap()
   290  for (size_t i = 0; i < pc_infos_.Size(); i++) {  in BuildNativeGCMap()
   302  uint32_t pc2dex_entries = pc_infos_.Size();  in BuildMappingTable()

ssa_builder.h
    32  locals_for_(graph->GetArena(), graph->GetBlocks().Size()) {  in SsaBuilder()
    33  locals_for_.SetSize(graph->GetBlocks().Size());  in SsaBuilder()

ssa_liveness_analysis.cc
    57  size_t number_of_successors = block->GetSuccessors().Size();  in VisitBlockForLinearization()
    88  ArenaBitVector visited(graph_.GetArena(), graph_.GetBlocks().Size(), false);  in LinearizeGraph()
   175  for (size_t i = 0, e = block->GetSuccessors().Size(); i < e; ++i) {  in ComputeLiveRanges()
   218  for (size_t i = 0, e = environment->Size(); i < e; ++i) {  in ComputeLiveRanges()
   277  for (size_t i = 0, e = block.GetSuccessors().Size(); i < e; ++i) {  in UpdateLiveOut()

graph_test.cc
   162  ASSERT_EQ(if_block->GetPredecessors().Size(), 2u);  in TEST()
   198  ASSERT_EQ(if_block->GetPredecessors().Size(), 2u);  in TEST()
   239  ASSERT_EQ(loop_block->GetPredecessors().Size(), 2u);  in TEST()
   242  ASSERT_EQ(if_instr->IfTrueSuccessor()->GetSuccessors().Size(), 1u);  in TEST()
   278  ASSERT_EQ(loop_block->GetPredecessors().Size(), 2u);  in TEST()
   281  ASSERT_EQ(if_instr->IfFalseSuccessor()->GetSuccessors().Size(), 1u);  in TEST()

find_loops_test.cc
    48  for (size_t i = 0, e = graph->GetBlocks().Size(); i < e; ++i) {  in TEST()
    60  for (size_t i = 0, e = graph->GetBlocks().Size(); i < e; ++i) {  in TEST()
    75  for (size_t i = 0, e = graph->GetBlocks().Size(); i < e; ++i) {  in TEST()
    91  for (size_t i = 0, e = graph->GetBlocks().Size(); i < e; ++i) {  in TEST()
   105  for (size_t i = 0, e = graph->GetBlocks().Size(); i < e; ++i) {  in TEST()

/art/runtime/
reference_table_test.cc
    40  EXPECT_EQ(0U, rt.Size());  in TEST_F()
    45  EXPECT_EQ(0U, rt.Size());  in TEST_F()
    49  EXPECT_EQ(0U, rt.Size());  in TEST_F()
    54  EXPECT_EQ(1U, rt.Size());  in TEST_F()
    65  EXPECT_EQ(i + 2, rt.Size());  in TEST_F()
    84  EXPECT_EQ(10U, rt.Size());  in TEST_F()
    93  EXPECT_EQ(9 - i, rt.Size());  in TEST_F()

intern_table_test.cc
    49  TEST_F(InternTableTest, Size) {  in TEST_F() argument
    52  EXPECT_EQ(0U, t.Size());  in TEST_F()
    58  EXPECT_EQ(1U, t.Size());  in TEST_F()
    60  EXPECT_EQ(2U, t.Size());  in TEST_F()
   110  EXPECT_EQ(4U, t.Size());  in TEST_F()
   121  EXPECT_EQ(2U, t.Size());  in TEST_F()
   127  EXPECT_EQ(3U, t.Size());  in TEST_F()

intern_table.cc
    37  size_t InternTable::Size() const {  in Size() function in art::InternTable
    39  return strong_interns_.Size() + weak_interns_.Size();  in Size()
    44  return strong_interns_.Size();  in StrongSize()
    49  return weak_interns_.Size();  in WeakSize()
   321  VLOG(heap) << "Swapping " << pre_zygote_table_.Size() << " interns to the pre zygote table";  in SwapPostZygoteWithPreZygote()
   358  size_t InternTable::Table::Size() const {  in Size() function in art::InternTable::Table
   359  return pre_zygote_table_.Size() + post_zygote_table_.Size();  in Size()

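The intern_table.cc definitions show Size() computed by composition: the table-wide count is the sum of the strong and weak interns, and each Table in turn sums its pre- and post-zygote halves. A minimal sketch of that layering, with std::unordered_set standing in for ART's actual backing tables:

#include <cstddef>
#include <string>
#include <unordered_set>

// Illustrative stand-in for InternTable::Table: two backing sets
// whose combined size is reported, as at intern_table.cc:359.
class Table {
 public:
  size_t Size() const { return pre_zygote_.size() + post_zygote_.size(); }

 private:
  std::unordered_set<std::string> pre_zygote_;
  std::unordered_set<std::string> post_zygote_;
};

class InternTable {
 public:
  // Mirrors intern_table.cc:39: strong + weak, each itself a sum.
  size_t Size() const { return strong_interns_.Size() + weak_interns_.Size(); }

 private:
  Table strong_interns_;
  Table weak_interns_;
};
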
method_helper.cc
    46  return (other_types == nullptr) || (other_types->Size() == 0);  in HasSameSignatureWithDifferentClassLoaders()
    48  return types->Size() == 0;  in HasSameSignatureWithDifferentClassLoaders()
    50  uint32_t num_types = types->Size();  in HasSameSignatureWithDifferentClassLoaders()
    51  if (UNLIKELY(num_types != other_types->Size())) {  in HasSameSignatureWithDifferentClassLoaders()

/art/compiler/utils/x86/
assembler_x86_test.cc
    27  ASSERT_EQ(static_cast<size_t>(1), buffer.Size());  in TEST()
    29  ASSERT_EQ(static_cast<size_t>(5), buffer.Size());  in TEST()

/art/compiler/dex/
dataflow_iterator.h
   340  : DataflowIterator(mir_graph, 0, mir_graph->GetTopologicalSortOrder()->Size()) {  in TopologicalSortIterator()
   372  : DataflowIterator(mir_graph, 0, mir_graph->GetTopologicalSortOrder()->Size()) {  in RepeatingTopologicalSortIterator()
   411  : DataflowIterator(mir_graph, 0, mir_graph->GetTopologicalSortOrder()->Size()),  in LoopRepeatingTopologicalSortIterator()
   419  DCHECK_EQ(loop_head_stack_->Size(), 0u);  in LoopRepeatingTopologicalSortIterator()
   423  DCHECK_EQ(loop_head_stack_->Size(), 0u);  in ~LoopRepeatingTopologicalSortIterator()

/art/compiler/utils/
growable_array.h
    59  if (idx_ >= g_list_->Size()) {  in Next()
   124  DCHECK(index <= Size());  in InsertAt()
   126  for (size_t i = Size() - 1; i > index; --i) {  in InsertAt()
   181  size_t Size() const { return num_used_; }  in Size() function

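growable_array.h is where most of the compiler-side hits bottom out: Size() simply returns the used-element counter, independent of the backing capacity, and InsertAt() uses Size() - 1 as the start of its shift-down loop. A trimmed, self-contained sketch of that container (std::vector replaces the arena-backed storage, and the assert stands in for the DCHECK shown above):

#include <cassert>
#include <cstddef>
#include <vector>

template <typename T>
class GrowableArray {
 public:
  // Size() is just the used-element count, as at growable_array.h:181.
  size_t Size() const { return num_used_; }

  void Add(T elem) {
    list_.push_back(elem);
    ++num_used_;
  }

  // Append a slot, shift the tail up by one, then drop elem into the
  // gap; the same i > index loop shape as the InsertAt() hit above.
  void InsertAt(size_t index, T elem) {
    assert(index <= Size());
    Add(elem);  // grow by one; the new last slot is overwritten below
    for (size_t i = Size() - 1; i > index; --i) {
      list_[i] = list_[i - 1];
    }
    list_[index] = elem;
  }

 private:
  std::vector<T> list_;
  size_t num_used_ = 0;
};
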
assembler.cc
    52  CHECK_EQ(Size(), 0U);  in AssemblerBuffer()
    72  MemoryRegion from(reinterpret_cast<void*>(contents()), Size());  in FinalizeInstructions()
    83  size_t old_size = Size();  in ExtendCapacity()
   103  CHECK_EQ(Size(), old_size);  in ExtendCapacity()

assembler.h
   187  CHECK_LE(position, Size() - static_cast<int>(sizeof(T)));  in Load()
   192  CHECK_LE(position, Size() - static_cast<int>(sizeof(T)));  in Store()
   200  size_t nbytes = Size() - oldposition;  in Move()
   208  fixup->set_position(Size());  in EmitFixup()
   234  size_t Size() const {  in Size() function
   288  int ComputeGap() { return buffer_->Capacity() - buffer_->Size(); }  in ComputeGap()
   360  virtual size_t CodeSize() const { return buffer_.Size(); }  in CodeSize()

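In assembler.h, Size() is the number of bytes emitted so far: Load() and Store() check patch positions against Size() - sizeof(T), EmitFixup() records Size() as the fixup position, and ComputeGap() reports the free room as Capacity() - Size(). Below is a minimal byte-buffer sketch of those relations; Buffer, Emit(), and the doubling growth policy are illustrative, not ART's AssemblerBuffer.

#include <cassert>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

class Buffer {
 public:
  // Bytes emitted so far (cursor position), as in assembler.h:234.
  size_t Size() const { return cursor_; }
  size_t Capacity() const { return bytes_.size(); }

  // Free room left before the buffer must grow (assembler.h:288).
  size_t ComputeGap() const { return Capacity() - Size(); }

  template <typename T>
  void Emit(T value) {
    if (ComputeGap() < sizeof(T)) {
      bytes_.resize(Capacity() * 2 + sizeof(T));  // illustrative growth
    }
    std::memcpy(bytes_.data() + cursor_, &value, sizeof(T));
    cursor_ += sizeof(T);
  }

  // Patch already-emitted bytes; the assert mirrors the
  // CHECK_LE(position, Size() - sizeof(T)) guard in the hits above.
  template <typename T>
  void Store(size_t position, T value) {
    assert(position <= Size() - sizeof(T));
    std::memcpy(bytes_.data() + position, &value, sizeof(T));
  }

 private:
  std::vector<uint8_t> bytes_ = std::vector<uint8_t>(16);
  size_t cursor_ = 0;
};
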
/art/runtime/base/
hash_set_test.cc
    68  ASSERT_EQ(hash_set.Size(), 0U);  in TEST_F()
    75  ASSERT_EQ(hash_set.Size(), 0U);  in TEST_F()
    92  ASSERT_EQ(strings.size(), hash_set.Size());  in TEST_F()
   173  ASSERT_EQ(hash_set.Size(), std_set.size());  in TEST_F()
   175  static_cast<ssize_t>(hash_set.Size()));  in TEST_F()

hash_set.h
   264  size_t Size() const {  in Size() function
   268  Resize(Size() / max_load_factor_);  in ShrinkToMaximumLoad()
   289  return static_cast<double>(Size()) / static_cast<double>(NumBuckets());  in CalculateLoadFactor()
   359  size_t min_index = static_cast<size_t>(Size() / min_load_factor_);  in Expand()
   370  DCHECK_GE(new_size, Size());  in Resize()

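In hash_set.h, Size() drives the load-factor arithmetic: the current factor is Size() / NumBuckets(), ShrinkToMaximumLoad() resizes to Size() / max_load_factor_, and Expand() keeps at least Size() / min_load_factor_ buckets. The struct below restates those relations in isolation; the field names and constants are illustrative defaults, not ART's.

#include <cstddef>

struct HashSetSizing {
  size_t size = 0;            // elements stored, i.e. Size()
  size_t num_buckets = 16;    // slots in the table
  double min_load_factor = 0.4;
  double max_load_factor = 0.7;

  // hash_set.h:289: occupancy as a fraction of the bucket count.
  double CalculateLoadFactor() const {
    return static_cast<double>(size) / static_cast<double>(num_buckets);
  }

  // Smallest table keeping the load factor at or under the maximum,
  // the target ShrinkToMaximumLoad() passes to Resize() (hash_set.h:268).
  size_t ShrinkTarget() const {
    return static_cast<size_t>(size / max_load_factor);
  }

  // Expand() refuses to drop below this bucket count (hash_set.h:359).
  size_t MinBucketsOnExpand() const {
    return static_cast<size_t>(size / min_load_factor);
  }
};
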
/art/runtime/gc/space/
space_test.h
   393  EXPECT_LE(space->Size(), growth_limit);  in SizeFootPrintGrowthLimitAndTrimBody()
   397  EXPECT_GE(space->Size(), footprint);  in SizeFootPrintGrowthLimitAndTrimBody()
   431  EXPECT_GE(space->Size(), footprint);  // invariant  in SizeFootPrintGrowthLimitAndTrimBody()
   456  EXPECT_LE(space->Size(), growth_limit);  in SizeFootPrintGrowthLimitAndTrimBody()
   460  EXPECT_GE(space->Size(), amount_allocated);  in SizeFootPrintGrowthLimitAndTrimBody()
   476  EXPECT_GE(space->Size(), amount_allocated);  in SizeFootPrintGrowthLimitAndTrimBody()
   477  EXPECT_LE(space->Size(), growth_limit);  in SizeFootPrintGrowthLimitAndTrimBody()
   499  EXPECT_GE(space->Size(), footprint);  // invariant  in SizeFootPrintGrowthLimitAndTrimBody()
   529  EXPECT_GE(space->Size(), footprint);  in SizeFootPrintGrowthLimitAndTrimBody()
   530  EXPECT_LE(space->Size(), growth_limit);  in SizeFootPrintGrowthLimitAndTrimBody()
   [all …]

malloc_space.cc
   125  if (Size() > growth_limit_) {  in SetGrowthLimit()
   171  size_t size = RoundUp(Size(), kPageSize);  in CreateZygoteSpace()
   186  VLOG(heap) << "Size " << GetMemMap()->Size();  in CreateZygoteSpace()
   225  << ",size=" << PrettySize(Size()) << ",capacity=" << PrettySize(Capacity())  in Dump()

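The CreateZygoteSpace() hit rounds the space's current Size() up to a page boundary before splitting off the zygote portion. RoundUp for a power-of-two alignment is the standard bit trick; the kPageSize constant below is the usual 4 KiB assumption, not read from the source.

#include <cstddef>

constexpr size_t kPageSize = 4096;

// Round x up to the next multiple of n, where n is a power of two.
// Used on Size() in CreateZygoteSpace() (malloc_space.cc:171).
constexpr size_t RoundUp(size_t x, size_t n) {
  return (x + n - 1) & ~(n - 1);
}

static_assert(RoundUp(1, kPageSize) == kPageSize, "rounds up");
static_assert(RoundUp(kPageSize, kPageSize) == kPageSize, "keeps aligned values");
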
/art/runtime/gc/accounting/
space_bitmap.cc
    99  DCHECK_EQ(Size(), source_bitmap->Size());  in CopyFrom()
   100  …std::copy(source_bitmap->Begin(), source_bitmap->Begin() + source_bitmap->Size() / kWordSize, Begi…  in CopyFrom()
   155  CHECK_LT(end, live_bitmap.Size() / kWordSize);  in SweepWalk()
   252  uintptr_t end = Size() / kWordSize;  in InOrderWalk()

/art/oatdump/
oatdump.cc
   245  os << oat_file_.Size() << "\n\n";  in Dump()
   343  offsets_.insert(oat_file_.Size());  in AddAllOffsets()
   481  if (oat_method_offsets_offset > oat_file_.Size()) {  in DumpOatMethod()
   484  oat_method_offsets_offset, oat_file_.Size());  in DumpOatMethod()
   493  if (aligned_code_begin > oat_file_.Size()) {  in DumpOatMethod()
   496  aligned_code_begin, oat_file_.Size());  in DumpOatMethod()
   507  if (gc_map_offset > oat_file_.Size()) {  in DumpOatMethod()
   510  gc_map_offset, oat_file_.Size());  in DumpOatMethod()
   527  if (method_header_offset > oat_file_.Size()) {  in DumpOatMethod()
   530  method_header_offset, oat_file_.Size());  in DumpOatMethod()
   [all …]