/art/runtime/gc/ |
D | reference_processor.cc |
    156  collector::GarbageCollector* collector) {  in ProcessReferences() argument
    161  collector_ = collector;  in ProcessReferences()
    169  if (kIsDebugBuild && collector->IsTransactionActive()) {  in ProcessReferences()
    186  soft_reference_queue_.ForwardSoftReferences(collector);  in ProcessReferences()
    187  collector->ProcessMarkStack();  in ProcessReferences()
    193  soft_reference_queue_.ClearWhiteReferences(&cleared_references_, collector);  in ProcessReferences()
    194  weak_reference_queue_.ClearWhiteReferences(&cleared_references_, collector);  in ProcessReferences()
    202  finalizer_reference_queue_.EnqueueFinalizerReferences(&cleared_references_, collector);  in ProcessReferences()
    203  collector->ProcessMarkStack();  in ProcessReferences()
    209  soft_reference_queue_.ClearWhiteReferences(&cleared_references_, collector);  in ProcessReferences()
    [all …]
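The ProcessReferences() hits above trace ART's reference-processing order: drain the mark stack after soft-reference forwarding, clear soft and weak references whose referents are still unmarked ("white"), revive white finalizable referents through the finalizer queue, re-drain the mark stack, then clear whatever remains white. A minimal standalone sketch of that phase ordering, assuming hypothetical MiniCollector, RefQueue, Reference, and Object types in place of ART's GarbageCollector and ReferenceQueue:

```cpp
#include <vector>

// Hypothetical stand-ins for ART's GarbageCollector and ReferenceQueue.
struct Object {
  bool marked = false;
};

struct MiniCollector {
  std::vector<Object*> mark_stack;
  // Mark everything on the mark stack. (A real collector would also push
  // each marked object's fields, making this transitive.)
  void ProcessMarkStack() {
    while (!mark_stack.empty()) {
      Object* obj = mark_stack.back();
      mark_stack.pop_back();
      obj->marked = true;
    }
  }
  bool IsMarked(const Object* obj) const { return obj->marked; }
};

struct Reference {
  Object* referent = nullptr;
};

struct RefQueue {
  std::vector<Reference*> refs;

  // (193/194/209) Clear references whose referents stayed white (unmarked).
  void ClearWhiteReferences(std::vector<Reference*>* cleared, MiniCollector* c) {
    for (Reference* ref : refs) {
      if (ref->referent != nullptr && !c->IsMarked(ref->referent)) {
        ref->referent = nullptr;
        cleared->push_back(ref);
      }
    }
  }

  // (202) Revive white referents of finalizable objects: finalize() must
  // still run, so the referent goes back on the mark stack.
  void EnqueueFinalizerReferences(std::vector<Reference*>* cleared, MiniCollector* c) {
    for (Reference* ref : refs) {
      if (ref->referent != nullptr && !c->IsMarked(ref->referent)) {
        c->mark_stack.push_back(ref->referent);
        ref->referent = nullptr;  // ART moves the referent to a zombie field here.
        cleared->push_back(ref);
      }
    }
  }
};

// Mirrors the phase ordering visible in the ProcessReferences() hits above.
void ProcessReferences(RefQueue& soft, RefQueue& weak, RefQueue& finalizer,
                       std::vector<Reference*>* cleared, MiniCollector* collector) {
  collector->ProcessMarkStack();                             // (186/187) after soft-ref forwarding
  soft.ClearWhiteReferences(cleared, collector);             // (193)
  weak.ClearWhiteReferences(cleared, collector);             // (194)
  finalizer.EnqueueFinalizerReferences(cleared, collector);  // (202)
  collector->ProcessMarkStack();                             // (203) re-mark revived objects
  soft.ClearWhiteReferences(cleared, collector);             // (209) clear what is still white
}
```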
|
D | reference_queue.h |
    46  namespace collector {
    83  collector::GarbageCollector* collector)
    95  collector::GarbageCollector* collector)
|
D | reference_processor.h |
    39  namespace collector {
    52  gc::collector::GarbageCollector* collector)
    69  collector::GarbageCollector* collector)
    98  collector::GarbageCollector* collector_ GUARDED_BY(Locks::reference_processor_lock_);
|
D | reference_queue.cc |
    84   collector::ConcurrentCopying* concurrent_copying = heap->ConcurrentCopyingCollector();  in DisableReadBarrierForReference()
    134  collector::GarbageCollector* collector) {  in ClearWhiteReferences() argument
    140  if (!collector->IsNullOrMarkedHeapReference(referent_addr, /*do_atomic_update=*/false)) {  in ClearWhiteReferences()
    156  collector::GarbageCollector* collector) {  in EnqueueFinalizerReferences() argument
    162  if (!collector->IsNullOrMarkedHeapReference(referent_addr, /*do_atomic_update=*/false)) {  in EnqueueFinalizerReferences()
    163  ObjPtr<mirror::Object> forward_address = collector->MarkObject(referent_addr->AsMirrorPtr());  in EnqueueFinalizerReferences()
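The contrast between the two functions is the point of these hits: ClearWhiteReferences() only tests IsNullOrMarkedHeapReference(), while EnqueueFinalizerReferences() additionally calls MarkObject() and keeps the returned forward_address, because a moving collector may relocate the referent while marking it. A sketch of that forwarding contract, with a hypothetical MovingCollector and a plain Object type:

```cpp
#include <unordered_map>

struct Object {
  int payload = 0;
};

// Hypothetical moving-collector fragment. Marking may relocate an object,
// so MarkObject() returns the address the caller must use from then on,
// matching the forward_address assignment at line 163 above.
struct MovingCollector {
  std::unordered_map<Object*, Object*> forwarding_;  // from-space -> to-space

  Object* MarkObject(Object* obj) {
    auto it = forwarding_.find(obj);
    if (it != forwarding_.end()) {
      return it->second;              // Already copied: return forwarded address.
    }
    Object* copy = new Object(*obj);  // "Copy into to-space" (leaked in this sketch).
    forwarding_[obj] = copy;
    return copy;
  }
};

// Usage, as in EnqueueFinalizerReferences(): always keep the returned pointer.
//   Object* forward_address = collector.MarkObject(referent);
```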
|
D | heap.h |
    87    namespace collector {
    466   collector::GcType WaitForGcToComplete(GcCause cause, Thread* self) REQUIRES(!*gc_complete_lock_);
    486   const collector::Iteration* GetCurrentGcIteration() const {  in GetCurrentGcIteration()
    489   collector::Iteration* GetCurrentGcIteration() {  in GetCurrentGcIteration()
    792   collector::ConcurrentCopying* ConcurrentCopyingCollector() {  in ConcurrentCopyingCollector()
    793   collector::ConcurrentCopying* active_collector =  in ConcurrentCopyingCollector()
    970   collector::GarbageCollector* Compact(space::ContinuousMemMapAllocSpace* target_space,
    975   void LogGC(GcCause gc_cause, collector::GarbageCollector* collector);
    978   void FinishGC(Thread* self, collector::GcType gc_type) REQUIRES(!*gc_complete_lock_);
    1112  collector::GcType WaitForGcToCompleteLocked(GcCause cause, Thread* self)
    [all …]
|
D | heap.cc |
    315  last_gc_type_(collector::kGcTypeNone),  in Heap()
    316  next_gc_type_(collector::kGcTypePartial),  in Heap()
    686  if (collector::SemiSpace::kUseRememberedSet && non_moving_space_ != main_space_) {  in Heap()
    724  garbage_collectors_.push_back(new collector::MarkSweep(this, concurrent));  in Heap()
    725  garbage_collectors_.push_back(new collector::PartialMarkSweep(this, concurrent));  in Heap()
    726  garbage_collectors_.push_back(new collector::StickyMarkSweep(this, concurrent));  in Heap()
    733  semi_space_collector_ = new collector::SemiSpace(this);  in Heap()
    737  concurrent_copying_collector_ = new collector::ConcurrentCopying(this,  in Heap()
    743  young_concurrent_copying_collector_ = new collector::ConcurrentCopying(  in Heap()
    857  if (collector::SemiSpace::kUseRememberedSet) {  in CreateMallocSpaceFromMemMap()
    [all …]
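Lines 724-726 show the registration pattern: the Heap constructor builds one long-lived collector object per strategy and keeps them all in garbage_collectors_, to be selected later by GC type. A simplified sketch of register-then-lookup (the class and method names below are illustrative, not ART's):

```cpp
#include <memory>
#include <vector>

enum class GcType { kSticky, kPartial, kFull };

struct GarbageCollector {
  virtual ~GarbageCollector() = default;
  virtual GcType Type() const = 0;
  virtual void Run() = 0;
};

struct StickyMarkSweep : GarbageCollector {
  GcType Type() const override { return GcType::kSticky; }
  void Run() override { /* trace only objects allocated since the last GC */ }
};

struct MarkSweep : GarbageCollector {
  GcType Type() const override { return GcType::kFull; }
  void Run() override { /* trace the whole heap */ }
};

class Heap {
 public:
  Heap() {
    // As at lines 724-726: every strategy is constructed once, up front.
    garbage_collectors_.push_back(std::make_unique<StickyMarkSweep>());
    garbage_collectors_.push_back(std::make_unique<MarkSweep>());
  }

  // Later, pick the registered collector matching the requested GC type.
  GarbageCollector* FindCollector(GcType type) {
    for (const auto& c : garbage_collectors_) {
      if (c->Type() == type) return c.get();
    }
    return nullptr;
  }

 private:
  std::vector<std::unique_ptr<GarbageCollector>> garbage_collectors_;
};
```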
|
D | scoped_gc_critical_section.cc |
    47  Runtime::Current()->GetHeap()->FinishGC(self_, collector::kGcTypeNone);  in Exit()
    70  Runtime::Current()->GetHeap()->FinishGC(self_, collector::kGcTypeNone);  in ~ScopedInterruptibleGCCriticalSection()
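Both hits call FinishGC(self_, collector::kGcTypeNone), the counterpart of heap.h's WaitForGcToComplete() above. The usual shape of such a pair is a flag guarded by a mutex plus a condition variable; a generic sketch, not ART's actual implementation:

```cpp
#include <condition_variable>
#include <mutex>

// Waiters block on a condition variable until the collector clears the flag.
class GcCompleteGate {
 public:
  void StartGC() {
    std::lock_guard<std::mutex> lock(mu_);
    gc_running_ = true;
  }

  // Collector side: a collection (possibly a no-op, cf. kGcTypeNone) ended.
  void FinishGC() {
    {
      std::lock_guard<std::mutex> lock(mu_);
      gc_running_ = false;
    }
    cv_.notify_all();
  }

  // Mutator side: block until no GC is in progress.
  void WaitForGcToComplete() {
    std::unique_lock<std::mutex> lock(mu_);
    cv_.wait(lock, [this] { return !gc_running_; });
  }

 private:
  std::mutex mu_;
  std::condition_variable cv_;
  bool gc_running_ = false;
};
```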
|
/art/runtime/gc/accounting/ |
D | remembered_set.cc |
    66   collector::GarbageCollector* collector)  in RememberedSetReferenceVisitor() argument
    67   : collector_(collector), target_space_(target_space),  in RememberedSetReferenceVisitor()
    108  collector::GarbageCollector* const collector_;
    117  collector::GarbageCollector* collector)  in RememberedSetObjectVisitor() argument
    118  : collector_(collector), target_space_(target_space),  in RememberedSetObjectVisitor()
    129  collector::GarbageCollector* const collector_;
    135  collector::GarbageCollector* collector) {  in UpdateAndMarkReferences() argument
    139  collector);  in UpdateAndMarkReferences()
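The visitors above wrap the collector so UpdateAndMarkReferences() can update only references that point into target_space_, which is exactly what a remembered set records. A stripped-down sketch of the same visitor shape, with hypothetical Space and Collector types:

```cpp
struct Object {};

// Hypothetical space: containment is a simple address-range test here.
struct Space {
  const Object* begin = nullptr;
  const Object* end = nullptr;
  bool Contains(const Object* obj) const { return obj >= begin && obj < end; }
};

// Hypothetical collector hook; a moving collector may return a new address.
struct Collector {
  Object* MarkHeapReference(Object* ref) { return ref; }
};

class RememberedSetReferenceVisitor {
 public:
  RememberedSetReferenceVisitor(Space* target_space, Collector* collector)
      : collector_(collector), target_space_(target_space) {}

  // Invoked for each reference slot of an object recorded in the set:
  // only slots pointing into the target space are marked and updated.
  void operator()(Object** slot) const {
    if (*slot != nullptr && target_space_->Contains(*slot)) {
      *slot = collector_->MarkHeapReference(*slot);
    }
  }

 private:
  Collector* const collector_;
  Space* const target_space_;
};
```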
|
D | remembered_set.h |
    31  namespace collector {
    58  collector::GarbageCollector* collector)
|
D | heap_bitmap.h |
    32  namespace collector {
    79  friend class art::gc::collector::ConcurrentCopying;
|
/art/test/712-varhandle-invocations/src/ |
D | VarHandleUnitTest.java |
    25   private final VarHandleUnitTestCollector collector;  field in VarHandleUnitTest
    27   public VarHandleUnitTest(VarHandleUnitTestCollector collector) {  in VarHandleUnitTest() argument
    28   this.collector = collector;  in VarHandleUnitTest()
    32   this.collector = DEFAULT_COLLECTOR;  in VarHandleUnitTest()
    101  collector.start(getClass().getSimpleName());  in run()
    103  collector.skip();  in run()
    113  collector.success();  in run()
    115  collector.fail(lazyErrorLog.toString());  in run()
|
/art/runtime/ |
D | backtrace_helper.h |
    58  BacktraceCollector collector(frames_, kMaxFrames, skip_count);  in Collect()
    59  collector.Collect();  in Collect()
    60  num_frames_ = collector.NumFrames();  in Collect()
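These three hits show the stack-allocated collector idiom: construct with a caller-owned buffer, call Collect(), then read the frame count back. A generic sketch of the idiom (only the API shape follows backtrace_helper.h; the walking logic is elided):

```cpp
#include <cstddef>
#include <cstdint>

// Caller owns the buffer; the collector fills it and reports the count.
class FrameCollector {
 public:
  FrameCollector(uintptr_t* frames, size_t max_frames, size_t skip_count)
      : frames_(frames), max_frames_(max_frames), skip_count_(skip_count) {}

  void Collect() {
    // A real implementation would walk the call stack (e.g. via libunwind),
    // skipping skip_count_ frames and writing at most max_frames_ entries
    // into frames_. This sketch captures nothing.
    num_frames_ = 0;
    (void)frames_;
    (void)max_frames_;
    (void)skip_count_;
  }

  size_t NumFrames() const { return num_frames_; }

 private:
  uintptr_t* const frames_;
  const size_t max_frames_;
  const size_t skip_count_;
  size_t num_frames_ = 0;
};

// Usage, mirroring Collect() in backtrace_helper.h:
//   uintptr_t frames[16];
//   FrameCollector collector(frames, 16, /*skip_count=*/1);
//   collector.Collect();
//   size_t num_frames = collector.NumFrames();
```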
|
D | mutator_gc_coord.md |
    6   thread stacks, the garbage collector needs to ensure that Java data processed
    7   by the collector is consistent and complete. At these points, the mutators
    9   collector. And they should not be modifying the data that is visible to the
    10  collector.
    12  Logically, the collector and mutator share a reader-writer lock on the Java
    14  while running Java code or touching heap-related data structures. The collector
    33  logically releases the mutator lock. When the garbage collector needs mutator
    35  not touching Java data, and hence the collector can safely perform the required
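The mutator_gc_coord.md lines describe the mutator lock as a logical reader-writer lock: mutators hold it shared while touching heap data, and the collector takes it exclusively when it needs a consistent view. A minimal illustration using std::shared_mutex (ART's real lock also supports the "logically released while suspended" state mentioned at line 33, which this sketch omits):

```cpp
#include <mutex>
#include <shared_mutex>

std::shared_mutex mutator_lock;

void MutatorTouchesHeap() {
  // Shared: many mutator threads may run Java code concurrently.
  std::shared_lock<std::shared_mutex> lock(mutator_lock);
  // ... read and write Java objects ...
}

void CollectorNeedsConsistentView() {
  // Exclusive: all mutators are paused at suspend points.
  std::unique_lock<std::shared_mutex> lock(mutator_lock);
  // ... scan thread stacks and heap data structures ...
}
```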
|
D | thread_list.h |
    34   namespace collector {
    133  gc::collector::GarbageCollector* collector,
|
/art/runtime/gc/collector/ |
D | immune_spaces.cc |
    29   namespace collector {  namespace
    75   VLOG(collector) << "Interval " << reinterpret_cast<const void*>(begin) << "-"  in CreateLargestImmuneRegion()
    103  VLOG(collector) << "Immune region " << largest_immune_region_.Begin() << "-"  in CreateLargestImmuneRegion()
|
D | immune_region.cc | 24 namespace collector { namespace
|
D | gc_type.h | 24 namespace collector {
|
D | object_byte_pair.h | 24 namespace collector {
|
D | partial_mark_sweep.h | 24 namespace collector {
|
D | partial_mark_sweep.cc | 26 namespace collector { namespace
|
D | mark_sweep-inl.h | 30 namespace collector {
|
D | sticky_mark_sweep.h | 25 namespace collector {
|
/art/tools/veridex/ |
D | precise_hidden_api_finder.cc |
    68  FlowAnalysisCollector collector(resolver, method);  in Run() local
    69  collector.Run();  in Run()
    70  AddUsesAt(collector.GetUses(), method.GetReference());  in Run()
|
/art/test/074-gc-thrash/ |
D | info.txt | 1 This thrashes the memory allocator and garbage collector for a brief period.
|
/art/runtime/gc/space/ |
D | space.cc |
    90  collector::ObjectBytePair ContinuousMemMapAllocSpace::Sweep(bool swap_bitmaps) {  in Sweep()
    95  return collector::ObjectBytePair(0, 0);  in Sweep()
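Sweep() returns a collector::ObjectBytePair, and line 95 returns the zero pair when there is nothing to sweep. The type is, roughly, an (objects freed, bytes freed) tally with an accumulate operation; a sketch under that assumption:

```cpp
#include <cstdint>

// Assumed shape of collector::ObjectBytePair: a (objects, bytes) tally
// that Sweep() returns and callers accumulate across spaces.
struct ObjectBytePair {
  ObjectBytePair(uint64_t num_objects = 0, int64_t num_bytes = 0)
      : objects(num_objects), bytes(num_bytes) {}

  void Add(const ObjectBytePair& other) {
    objects += other.objects;
    bytes += other.bytes;
  }

  uint64_t objects;
  int64_t bytes;
};

// E.g. the early-return at line 95: ObjectBytePair(0, 0) means nothing swept.
```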
|