/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include <memory>

#include "atomic.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "gc_root.h"
#include "gc/accounting/heap_bitmap.h"
#include "immune_spaces.h"
#include "object_callbacks.h"
#include "offsets.h"

namespace art {

namespace mirror {
class Class;
class Object;
class Reference;
}  // namespace mirror

class Thread;
enum VisitRootFlags : uint8_t;

namespace gc {

class Heap;

namespace accounting {
template<typename T> class AtomicStack;
typedef AtomicStack<mirror::Object> ObjectStack;
}  // namespace accounting

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");
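
  // Note: callers normally do not construct MarkSweep directly; the Heap owns the collector
  // instances. A rough, hedged sketch of the expected ownership (the exact code lives in
  // heap.cc and may differ):
  //
  //   garbage_collectors_.push_back(new MarkSweep(this, /* is_concurrent= */ true));
  //   garbage_collectors_.push_back(new MarkSweep(this, /* is_concurrent= */ false));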

  ~MarkSweep() {}

  virtual void RunPhases() OVERRIDE REQUIRES(!mark_stack_lock_);
  void InitializePhase();
  void MarkingPhase() REQUIRES(!mark_stack_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
  void PausePhase() REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void ReclaimPhase() REQUIRES(!mark_stack_lock_) REQUIRES_SHARED(Locks::mutator_lock_);
  void FinishPhase();
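
  // RunPhases() drives the phases declared above. A rough sketch of the intended flow
  // (hedged; the exact sequencing, verification hooks, and locking live in mark_sweep.cc):
  //
  //   InitializePhase();
  //   if (IsConcurrent()) {
  //     MarkingPhase();  // Runs while mutators are running (shared mutator lock).
  //     PausePhase();    // Short stop-the-world re-mark pause.
  //   } else {
  //     MarkingPhase();  // Marking and the re-mark both happen inside one pause.
  //     PausePhase();
  //   }
  //   ReclaimPhase();    // Sweeping happens concurrently in both modes.
  //   FinishPhase();
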
  virtual void MarkReachableObjects()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsConcurrent() const {
    return is_concurrent_;
  }

  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypeFull;
  }

  virtual CollectorType GetCollectorType() const OVERRIDE {
    return is_concurrent_ ? kCollectorTypeCMS : kCollectorTypeMS;
  }

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultSpaceBitmap() REQUIRES_SHARED(Locks::mutator_lock_);

  // Marks all objects in the root set at the start of a garbage collection.
  void MarkRoots(Thread* self)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  virtual void MarkConcurrentRoots(VisitRootFlags flags)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MarkRootsCheckpoint(Thread* self, bool revoke_ros_alloc_thread_local_buffers_at_checkpoint)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() REQUIRES_SHARED(Locks::mutator_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, uint8_t minimum_age)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update and mark references from immune spaces.
  void UpdateAndMarkModUnion()
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Pre clean cards to reduce how much work is needed in the pause.
  void PreCleanCards()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection. Virtual as by default it sweeps
  // all allocation spaces. Partial and sticky GCs want to just sweep a subset of the heap.
  virtual void Sweep(bool swap_bitmaps)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // No thread safety analysis due to lambdas.
  template<typename MarkVisitor, typename ReferenceVisitor>
  void ScanObjectVisit(mirror::Object* obj,
                       const MarkVisitor& visitor,
                       const ReferenceVisitor& ref_visitor)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SweepSystemWeaks(Thread* self)
      REQUIRES(!Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void VerifySystemWeaks()
      REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  virtual bool IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* ref,
                                           bool do_atomic_update) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
                          size_t count,
                          const RootInfo& info) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Marks an object.
  virtual mirror::Object* MarkObject(mirror::Object* obj) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MarkObject(mirror::Object* obj, mirror::Object* holder, MemberOffset offset)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  virtual void MarkHeapReference(mirror::HeapReference<mirror::Object>* ref,
                                 bool do_atomic_update) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> reference)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns the object if it is marked in the heap bitmap, otherwise returns null.
  virtual mirror::Object* IsMarked(mirror::Object* object) OVERRIDE
      REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void MarkObjectNonNull(mirror::Object* obj,
                         mirror::Object* holder = nullptr,
                         MemberOffset offset = MemberOffset(0))
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Marks an object atomically, safe to use from multiple threads.
  void MarkObjectNonNullParallel(mirror::Object* obj)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock exclusively.
  void VerifySuspendedThreadRoots(std::ostream& os)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Expand mark stack to 2x its current size.
  void ExpandMarkStack()
      REQUIRES(mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ResizeMarkStack(size_t new_size)
      REQUIRES(mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns how many threads we should use for the current GC phase based on whether we are
  // paused and whether or not we care about pauses.
  size_t GetThreadCount(bool paused) const;

  // Push a single reference on a mark stack.
  void PushOnMarkStack(mirror::Object* obj)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, uint8_t minimum_age)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  virtual void ProcessMarkStack()
      OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ProcessMarkStack(false);
  }

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ProcessMarkStackParallel(size_t thread_count)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Used to get around thread safety annotations. The call is from MarkingPhase and is guarded by
  // IsExclusiveHeld.
  void RevokeAllThreadLocalAllocationStacks(Thread* self) NO_THREAD_SAFETY_ANALYSIS;

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

  // Whether or not we count how many of each type of object were scanned.
  static constexpr bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for an
  // object.
  accounting::ContinuousSpaceBitmap* current_space_bitmap_;
  // Cache the heap's mark bitmap to prevent having to do 2 loads during slow path marking.
  accounting::HeapBitmap* mark_bitmap_;

  accounting::ObjectStack* mark_stack_;

  // Every object inside the immune spaces is assumed to be marked. Immune spaces that aren't in
  // the immune region are handled by the normal marking logic.
  ImmuneSpaces immune_spaces_;

  // Parallel finger.
  AtomicInteger atomic_finger_;

  // Additional per-type counters, used when kCountScannedTypes is enabled.
  AtomicInteger no_reference_class_count_;
  AtomicInteger normal_count_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of object arrays scanned, if kCountScannedTypes.
  AtomicInteger object_array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
  // Number of java.lang.ref.Reference instances.
  AtomicInteger reference_count_;

  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger mark_null_count_;
  AtomicInteger mark_immune_count_;
  AtomicInteger mark_fastpath_count_;
  AtomicInteger mark_slowpath_count_;

  std::unique_ptr<Barrier> gc_barrier_;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

  // Verification.
  size_t live_stack_freeze_size_;

  std::unique_ptr<MemMap> sweep_array_free_buffer_mem_map_;

 private:
  class CardScanTask;
  class CheckpointMarkThreadRoots;
  class DelayReferenceReferentVisitor;
  template<bool kUseFinger> class MarkStackTask;
  class MarkObjectSlowPath;
  class RecursiveMarkTask;
  class ScanObjectParallelVisitor;
  class ScanObjectVisitor;
  class VerifyRootMarkedVisitor;
  class VerifyRootVisitor;
  class VerifySystemWeakVisitor;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_