Lines matching refs:idx — cross-reference hits for the symbol idx in ART's RosAlloc allocator. Each entry shows the source line number, the matching code, and the enclosing function; "local"/"argument" marks how idx is declared there. Simplified sketches of the two recurring idx patterns follow the listing.
333 size_t idx = pm_idx + 1; in FreePages() local
335 while (idx < end && page_map_[idx] == pm_part_type) { in FreePages()
336 page_map_[idx] = kPageMapEmpty; in FreePages()
338 idx++; in FreePages()
548 RosAlloc::Run* RosAlloc::AllocRun(Thread* self, size_t idx) { in AllocRun() argument
552 new_run = reinterpret_cast<Run*>(AllocPages(self, numOfPages[idx], kPageMapRun)); in AllocRun()
558 new_run->size_bracket_idx_ = idx; in AllocRun()
561 if (kUsePrefetchDuringAllocRun && idx < kNumThreadLocalSizeBrackets) { in AllocRun()
568 const size_t num_of_slots = numOfSlots[idx]; in AllocRun()
569 const size_t bracket_size = bracketSizes[idx]; in AllocRun()
571 uint8_t* begin = reinterpret_cast<uint8_t*>(new_run) + headerSizes[idx]; in AllocRun()
582 RosAlloc::Run* RosAlloc::RefillRun(Thread* self, size_t idx) { in RefillRun() argument
584 auto* const bt = &non_full_runs_[idx]; in RefillRun()
595 return AllocRun(self, idx); in RefillRun()
598 inline void* RosAlloc::AllocFromCurrentRunUnlocked(Thread* self, size_t idx) { in AllocFromCurrentRunUnlocked() argument
599 Run* current_run = current_runs_[idx]; in AllocFromCurrentRunUnlocked()
606 full_runs_[idx].insert(current_run); in AllocFromCurrentRunUnlocked()
610 << " into full_runs_[" << std::dec << idx << "]"; in AllocFromCurrentRunUnlocked()
612 DCHECK(non_full_runs_[idx].find(current_run) == non_full_runs_[idx].end()); in AllocFromCurrentRunUnlocked()
613 DCHECK(full_runs_[idx].find(current_run) != full_runs_[idx].end()); in AllocFromCurrentRunUnlocked()
615 current_run = RefillRun(self, idx); in AllocFromCurrentRunUnlocked()
618 current_runs_[idx] = dedicated_full_run_; in AllocFromCurrentRunUnlocked()
622 DCHECK(non_full_runs_[idx].find(current_run) == non_full_runs_[idx].end()); in AllocFromCurrentRunUnlocked()
623 DCHECK(full_runs_[idx].find(current_run) == full_runs_[idx].end()); in AllocFromCurrentRunUnlocked()
625 current_runs_[idx] = current_run; in AllocFromCurrentRunUnlocked()
642 size_t idx = SizeToIndexAndBracketSize(size, &bracket_size); in AllocFromRunThreadUnsafe() local
644 void* slot_addr = AllocFromCurrentRunUnlocked(self, idx); in AllocFromRunThreadUnsafe()
661 size_t idx = SizeToIndexAndBracketSize(size, &bracket_size); in AllocFromRun() local
663 if (LIKELY(idx < kNumThreadLocalSizeBrackets)) { in AllocFromRun()
665 Run* thread_local_run = reinterpret_cast<Run*>(self->GetRosAllocRun(idx)); in AllocFromRun()
669 MutexLock mu(self, *size_bracket_locks_[idx]); in AllocFromRun()
670 CHECK(non_full_runs_[idx].find(thread_local_run) == non_full_runs_[idx].end()); in AllocFromRun()
671 CHECK(full_runs_[idx].find(thread_local_run) == full_runs_[idx].end()); in AllocFromRun()
682 MutexLock mu(self, *size_bracket_locks_[idx]); in AllocFromRun()
696 full_runs_[idx].insert(thread_local_run); in AllocFromRun()
700 << " into full_runs_[" << std::dec << idx << "]"; in AllocFromRun()
703 DCHECK(non_full_runs_[idx].find(thread_local_run) == non_full_runs_[idx].end()); in AllocFromRun()
704 DCHECK(full_runs_[idx].find(thread_local_run) != full_runs_[idx].end()); in AllocFromRun()
707 thread_local_run = RefillRun(self, idx); in AllocFromRun()
709 self->SetRosAllocRun(idx, dedicated_full_run_); in AllocFromRun()
712 DCHECK(non_full_runs_[idx].find(thread_local_run) == non_full_runs_[idx].end()); in AllocFromRun()
713 DCHECK(full_runs_[idx].find(thread_local_run) == full_runs_[idx].end()); in AllocFromRun()
715 self->SetRosAllocRun(idx, thread_local_run); in AllocFromRun()
741 MutexLock mu(self, *size_bracket_locks_[idx]); in AllocFromRun()
742 slot_addr = AllocFromCurrentRunUnlocked(self, idx); in AllocFromRun()
763 const size_t idx = run->size_bracket_idx_; in FreeFromRun() local
764 const size_t bracket_size = bracketSizes[idx]; in FreeFromRun()
766 MutexLock brackets_mu(self, *size_bracket_locks_[idx]); in FreeFromRun()
776 DCHECK(non_full_runs_[idx].find(run) == non_full_runs_[idx].end()); in FreeFromRun()
777 DCHECK(full_runs_[idx].find(run) == full_runs_[idx].end()); in FreeFromRun()
788 auto* non_full_runs = &non_full_runs_[idx]; in FreeFromRun()
799 if (run == current_runs_[idx]) { in FreeFromRun()
800 current_runs_[idx] = dedicated_full_run_; in FreeFromRun()
802 DCHECK(non_full_runs_[idx].find(run) == non_full_runs_[idx].end()); in FreeFromRun()
803 DCHECK(full_runs_[idx].find(run) == full_runs_[idx].end()); in FreeFromRun()
813 if (run != current_runs_[idx]) { in FreeFromRun()
814 auto* full_runs = kIsDebugBuild ? &full_runs_[idx] : nullptr; in FreeFromRun()
831 << " into non_full_runs_[" << std::dec << idx << "]"; in FreeFromRun()
842 const uint8_t idx = size_bracket_idx_; in FreeListToStr() local
843 const size_t bracket_size = bracketSizes[idx]; in FreeListToStr()
860 size_t idx = size_bracket_idx_; in Dump() local
864 << " size_bracket_idx=" << idx in Dump()
876 const uint8_t idx = size_bracket_idx_; in FreeSlot() local
877 const size_t bracket_size = bracketSizes[idx]; in FreeSlot()
892 const uint8_t idx = size_bracket_idx_; in MergeThreadLocalFreeListToFreeList() local
899 *is_all_free_after_out = free_list_.Size() == numOfSlots[idx]; in MergeThreadLocalFreeListToFreeList()
928 const uint8_t idx = size_bracket_idx_; in AddToFreeListShared() local
929 const size_t bracket_size = bracketSizes[idx]; in AddToFreeListShared()
942 const uint8_t idx = size_bracket_idx_; in ZeroHeaderAndSlotHeaders() local
950 memset(this, 0, headerSizes[idx]); in ZeroHeaderAndSlotHeaders()
953 const size_t size = numOfPages[idx] * kPageSize; in ZeroHeaderAndSlotHeaders()
962 const uint8_t idx = size_bracket_idx_; in ZeroData() local
964 memset(slot_begin, 0, numOfSlots[idx] * bracketSizes[idx]); in ZeroData()
969 size_t idx = size_bracket_idx_; in InspectAllSlots() local
970 uint8_t* slot_base = reinterpret_cast<uint8_t*>(this) + headerSizes[idx]; in InspectAllSlots()
971 size_t num_slots = numOfSlots[idx]; in InspectAllSlots()
972 size_t bracket_size = IndexToBracketSize(idx); in InspectAllSlots()
974 reinterpret_cast<uint8_t*>(this) + numOfPages[idx] * kPageSize); in InspectAllSlots()
1106 size_t idx = run->size_bracket_idx_; in BulkFree() local
1107 MutexLock brackets_mu(self, *size_bracket_locks_[idx]); in BulkFree()
1110 DCHECK(non_full_runs_[idx].find(run) == non_full_runs_[idx].end()); in BulkFree()
1111 DCHECK(full_runs_[idx].find(run) == full_runs_[idx].end()); in BulkFree()
1129 auto* non_full_runs = &non_full_runs_[idx]; in BulkFree()
1130 auto* full_runs = kIsDebugBuild ? &full_runs_[idx] : nullptr; in BulkFree()
1134 bool run_was_current = run == current_runs_[idx]; in BulkFree()
1174 if (run == current_runs_[idx]) { in BulkFree()
1195 << " into non_full_runs_[" << std::dec << idx; in BulkFree()
1277 size_t idx = run->size_bracket_idx_; in DumpPageMap() local
1279 << " idx=" << idx in DumpPageMap()
1280 << " numOfPages=" << numOfPages[idx] in DumpPageMap()
1313 size_t idx = pm_idx + 1; in UsableSize() local
1315 while (idx < end && page_map_[idx] == kPageMapLargeObjectPart) { in UsableSize()
1317 idx++; in UsableSize()
1335 size_t idx = run->size_bracket_idx_; in UsableSize() local
1337 - (reinterpret_cast<uint8_t*>(run) + headerSizes[idx]); in UsableSize()
1338 DCHECK_EQ(offset_from_slot_base % bracketSizes[idx], static_cast<size_t>(0)); in UsableSize()
1339 return IndexToBracketSize(idx); in UsableSize()
1438 size_t idx = i + 1; in InspectAll() local
1439 while (idx < pm_end && page_map_[idx] == kPageMapLargeObjectPart) { in InspectAll()
1441 idx++; in InspectAll()
1511 for (size_t idx = 0; idx < kNumThreadLocalSizeBrackets; idx++) { in RevokeThreadLocalRuns() local
1512 MutexLock mu(self, *size_bracket_locks_[idx]); in RevokeThreadLocalRuns()
1513 Run* thread_local_run = reinterpret_cast<Run*>(thread->GetRosAllocRun(idx)); in RevokeThreadLocalRuns()
1519 thread->SetRosAllocRun(idx, dedicated_full_run_); in RevokeThreadLocalRuns()
1523 free_bytes += num_free_slots * bracketSizes[idx]; in RevokeThreadLocalRuns()
1535 DCHECK(non_full_runs_[idx].find(thread_local_run) == non_full_runs_[idx].end()); in RevokeThreadLocalRuns()
1536 DCHECK(full_runs_[idx].find(thread_local_run) == full_runs_[idx].end()); in RevokeThreadLocalRuns()
1537 RevokeRun(self, idx, thread_local_run); in RevokeThreadLocalRuns()
1543 void RosAlloc::RevokeRun(Thread* self, size_t idx, Run* run) { in RevokeRun() argument
1544 size_bracket_locks_[idx]->AssertHeld(self); in RevokeRun()
1548 full_runs_[idx].insert(run); in RevokeRun()
1549 DCHECK(full_runs_[idx].find(run) != full_runs_[idx].end()); in RevokeRun()
1553 << " into full_runs_[" << std::dec << idx << "]"; in RevokeRun()
1561 non_full_runs_[idx].insert(run); in RevokeRun()
1562 DCHECK(non_full_runs_[idx].find(run) != non_full_runs_[idx].end()); in RevokeRun()
1566 << " into non_full_runs_[" << std::dec << idx << "]"; in RevokeRun()
1574 for (size_t idx = 0; idx < kNumThreadLocalSizeBrackets; ++idx) { in RevokeThreadUnsafeCurrentRuns() local
1575 MutexLock mu(self, *size_bracket_locks_[idx]); in RevokeThreadUnsafeCurrentRuns()
1576 if (current_runs_[idx] != dedicated_full_run_) { in RevokeThreadUnsafeCurrentRuns()
1577 RevokeRun(self, idx, current_runs_[idx]); in RevokeThreadUnsafeCurrentRuns()
1578 current_runs_[idx] = dedicated_full_run_; in RevokeThreadUnsafeCurrentRuns()
1602 for (size_t idx = 0; idx < kNumThreadLocalSizeBrackets; idx++) { in AssertThreadLocalRunsAreRevoked() local
1603 MutexLock mu(self, *size_bracket_locks_[idx]); in AssertThreadLocalRunsAreRevoked()
1604 Run* thread_local_run = reinterpret_cast<Run*>(thread->GetRosAllocRun(idx)); in AssertThreadLocalRunsAreRevoked()
1619 for (size_t idx = 0; idx < kNumThreadLocalSizeBrackets; ++idx) { in AssertAllThreadLocalRunsAreRevoked() local
1620 MutexLock brackets_mu(self, *size_bracket_locks_[idx]); in AssertAllThreadLocalRunsAreRevoked()
1621 CHECK_EQ(current_runs_[idx], dedicated_full_run_); in AssertAllThreadLocalRunsAreRevoked()
1787 size_t idx = i + 1; in Verify() local
1788 while (idx < pm_end && page_map_[idx] == kPageMapLargeObjectPart) { in Verify()
1790 idx++; in Verify()
1816 size_t idx = run->size_bracket_idx_; in Verify() local
1817 CHECK_LT(idx, kNumOfSizeBrackets) << "Out of range size bracket index : " << idx; in Verify()
1818 size_t num_pages = numOfPages[idx]; in Verify()
1872 const size_t idx = size_bracket_idx_; in Verify() local
1873 CHECK_LT(idx, kNumOfSizeBrackets) << "Out of range size bracket index : " << Dump(); in Verify()
1874 uint8_t* slot_base = reinterpret_cast<uint8_t*>(this) + headerSizes[idx]; in Verify()
1875 const size_t num_slots = numOfSlots[idx]; in Verify()
1876 size_t bracket_size = IndexToBracketSize(idx); in Verify()
1878 reinterpret_cast<uint8_t*>(this) + numOfPages[idx] * kPageSize) in Verify()
1895 CHECK_EQ(i, idx) in Verify()
1912 if (idx == i) { in Verify()
1927 auto& non_full_runs = rosalloc->non_full_runs_[idx]; in Verify()
1937 auto& full_runs = rosalloc->full_runs_[idx]; in Verify()
1974 CHECK_EQ(SizeToIndex(obj_size + memory_tool_modifier), idx) in Verify()
1976 << "obj_size=" << obj_size << "(" << obj_size + memory_tool_modifier << "), idx=" << idx in Verify()
2127 size_t idx = i + 1; in DumpStats() local
2128 while (idx < page_map_size_ && page_map_[idx] == kPageMapLargeObjectPart) { in DumpStats()
2130 idx++; in DumpStats()
2143 size_t idx = run->size_bracket_idx_; in DumpStats() local
2144 size_t num_pages = numOfPages[idx]; in DumpStats()
2145 num_runs[idx]++; in DumpStats()
2146 num_pages_runs[idx] += num_pages; in DumpStats()
2147 num_slots[idx] += numOfSlots[idx]; in DumpStats()
2149 num_used_slots[idx] += numOfSlots[idx] - num_free_slots; in DumpStats()
2150 num_metadata_bytes[idx] += headerSizes[idx]; in DumpStats()
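Most of the hits above use idx as a size-bracket index: SizeToIndexAndBracketSize() maps an allocation size to a bracket, and the same idx then keys every per-bracket table (bracketSizes, numOfSlots, numOfPages, headerSizes) and structure (current_runs_, non_full_runs_, full_runs_, size_bracket_locks_). The following is only an illustrative sketch of that lookup, with a hypothetical bracket table rather than the tables RosAlloc actually computes:

```cpp
// Illustrative sketch only: a simplified size-bracket lookup in the spirit of
// the idx usage above. The tables below are hypothetical, not ART's real
// bracketSizes[] / numOfPages[] data.
#include <array>
#include <cstddef>
#include <cstdio>

constexpr size_t kNumBrackets = 8;
constexpr std::array<size_t, kNumBrackets> kBracketSizes  = {16, 32, 48, 64, 128, 256, 512, 1024};
constexpr std::array<size_t, kNumBrackets> kNumPagesPerRun = {1, 1, 1, 2, 2, 4, 4, 8};

// Map an allocation size to the smallest bracket that fits it, mirroring the
// role of SizeToIndexAndBracketSize(size, &bracket_size) in the listing.
size_t SizeToIndexAndBracketSize(size_t size, size_t* bracket_size_out) {
  for (size_t idx = 0; idx < kNumBrackets; ++idx) {
    if (size <= kBracketSizes[idx]) {
      *bracket_size_out = kBracketSizes[idx];
      return idx;
    }
  }
  return kNumBrackets;  // Too large for a run; a real allocator falls back to whole pages.
}

int main() {
  size_t bracket_size = 0;
  size_t idx = SizeToIndexAndBracketSize(100, &bracket_size);
  // In the listing, every per-bracket structure (current_runs_[idx],
  // non_full_runs_[idx], full_runs_[idx], size_bracket_locks_[idx]) is keyed
  // by this same idx.
  std::printf("size 100 -> idx %zu, bracket %zu bytes, run spans %zu page(s)\n",
              idx, bracket_size, kNumPagesPerRun[idx]);
  return 0;
}
```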
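The remaining hits use idx to walk page_map_, counting the continuation pages that follow an allocation's head page (FreePages(), UsableSize(), InspectAll(), Verify(), DumpStats()). A minimal sketch of that scan, with hypothetical names standing in for ART's page-map enum:

```cpp
// Illustrative sketch only: the "scan forward over continuation pages" loop
// seen in the listing. Map values and names are stand-ins, not ART's enum.
#include <cstddef>
#include <cstdio>
#include <vector>

enum PageMapKind : unsigned char {
  kPageMapEmpty,
  kPageMapLargeObject,      // first page of a multi-page allocation
  kPageMapLargeObjectPart,  // any following page of that allocation
};

// Count how many pages the allocation starting at pm_idx spans: one head page
// plus every consecutive continuation-page entry after it.
size_t CountLargeObjectPages(const std::vector<unsigned char>& page_map, size_t pm_idx) {
  size_t num_pages = 1;
  size_t idx = pm_idx + 1;
  const size_t end = page_map.size();
  while (idx < end && page_map[idx] == kPageMapLargeObjectPart) {
    num_pages++;
    idx++;
  }
  return num_pages;
}

int main() {
  std::vector<unsigned char> page_map = {
      kPageMapLargeObject, kPageMapLargeObjectPart, kPageMapLargeObjectPart, kPageMapEmpty};
  std::printf("large object at page 0 spans %zu pages\n", CountLargeObjectPages(page_map, 0));
  return 0;
}
```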