/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include <memory>

#include "atomic.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "gc_root.h"
#include "gc/accounting/heap_bitmap.h"
#include "immune_spaces.h"
#include "object_callbacks.h"
#include "offsets.h"

namespace art {

namespace mirror {
class Class;
class Object;
class Reference;
}  // namespace mirror

class Thread;
enum VisitRootFlags : uint8_t;

namespace gc {

class Heap;

namespace accounting {
template<typename T> class AtomicStack;
typedef AtomicStack<mirror::Object> ObjectStack;
}  // namespace accounting

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

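  // RunPhases() below drives a full collection. A rough sketch of the phase
  // ordering it follows (illustrative only, not a verbatim copy of the
  // implementation in mark_sweep.cc; the concurrent collector also acquires
  // and releases the mutator lock between phases):
  //
  //   InitializePhase();
  //   MarkingPhase();   // concurrent with mutators when is_concurrent_
  //   PausePhase();     // runs with mutators suspended
  //   ReclaimPhase();
  //   FinishPhase();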
  virtual void RunPhases() OVERRIDE REQUIRES(!mark_stack_lock_);
  void InitializePhase();
  void MarkingPhase() REQUIRES(!mark_stack_lock_) SHARED_REQUIRES(Locks::mutator_lock_);
  void PausePhase() REQUIRES(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void ReclaimPhase() REQUIRES(!mark_stack_lock_) SHARED_REQUIRES(Locks::mutator_lock_);
  void FinishPhase();
  virtual void MarkReachableObjects()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  bool IsConcurrent() const {
    return is_concurrent_;
  }

  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypeFull;
  }

  virtual CollectorType GetCollectorType() const OVERRIDE {
    return is_concurrent_ ? kCollectorTypeCMS : kCollectorTypeMS;
  }

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultSpaceBitmap() SHARED_REQUIRES(Locks::mutator_lock_);

  // Marks all objects in the root set at the start of a garbage collection.
  void MarkRoots(Thread* self)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void MarkConcurrentRoots(VisitRootFlags flags)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void MarkRootsCheckpoint(Thread* self, bool revoke_ros_alloc_thread_local_buffers_at_checkpoint)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_REQUIRES(Locks::mutator_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, uint8_t minimum_age)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Update and mark references from immune spaces.
  void UpdateAndMarkModUnion()
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Pre-clean cards to reduce how much work is needed in the pause.
  void PreCleanCards()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection. Virtual because by default it
  // sweeps all allocation spaces; partial and sticky GCs only want to sweep a subset of the heap.
  virtual void Sweep(bool swap_bitmaps)
      REQUIRES(Locks::heap_bitmap_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      REQUIRES(Locks::heap_bitmap_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

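  // A minimal sketch of how ScanObjectVisit below is meant to be called,
  // assuming lambda visitors (illustrative only; the visitor signatures are an
  // assumption, and the collector's own ScanObject uses dedicated visitor
  // classes defined in mark_sweep.cc):
  //
  //   ScanObjectVisit(obj,
  //                   [](mirror::Object* o, MemberOffset offset, bool /*is_static*/) {
  //                     // Mark the reference field stored at (o, offset).
  //                   },
  //                   [this](mirror::Class* klass, mirror::Reference* ref) {
  //                     DelayReferenceReferent(klass, ref);
  //                   });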
  // No thread safety analysis due to lambdas.
  template<typename MarkVisitor, typename ReferenceVisitor>
  void ScanObjectVisit(mirror::Object* obj,
                       const MarkVisitor& visitor,
                       const ReferenceVisitor& ref_visitor)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void SweepSystemWeaks(Thread* self)
      REQUIRES(!Locks::heap_bitmap_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void VerifySystemWeaks()
      SHARED_REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  virtual bool IsMarkedHeapReference(mirror::HeapReference<mirror::Object>* ref) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots,
                          size_t count,
                          const RootInfo& info) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Marks an object.
  virtual mirror::Object* MarkObject(mirror::Object* obj) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void MarkObject(mirror::Object* obj, mirror::Object* holder, MemberOffset offset)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  virtual void MarkHeapReference(mirror::HeapReference<mirror::Object>* ref) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns the object if it is marked in the heap bitmap, otherwise returns null.
  virtual mirror::Object* IsMarked(mirror::Object* object) OVERRIDE
      SHARED_REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void MarkObjectNonNull(mirror::Object* obj,
                         mirror::Object* holder = nullptr,
                         MemberOffset offset = MemberOffset(0))
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Marks an object atomically, safe to use from multiple threads.
  void MarkObjectNonNullParallel(mirror::Object* obj)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

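  // The two helpers below grow the mark stack when it fills up. A rough sketch
  // of how they are expected to relate (an assumption based on the names;
  // Capacity() is assumed to be an accessor on accounting::AtomicStack, and the
  // authoritative implementation lives in mark_sweep.cc):
  //
  //   void MarkSweep::ExpandMarkStack() {
  //     ResizeMarkStack(mark_stack_->Capacity() * 2);
  //   }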
  // Expand the mark stack to 2x its current size.
  void ExpandMarkStack()
      REQUIRES(mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void ResizeMarkStack(size_t new_size)
      REQUIRES(mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Returns how many threads we should use for the current GC phase, based on whether we are
  // paused and whether or not we care about pauses.
  size_t GetThreadCount(bool paused) const;

  // Push a single reference onto the mark stack.
  void PushOnMarkStack(mirror::Object* obj)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, uint8_t minimum_age)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  virtual void ProcessMarkStack()
      OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    ProcessMarkStack(false);
  }

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  void ProcessMarkStackParallel(size_t thread_count)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES(!mark_stack_lock_)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Used to get around thread safety annotations. The call is from MarkingPhase and is guarded by
  // IsExclusiveHeld.
  void RevokeAllThreadLocalAllocationStacks(Thread* self) NO_THREAD_SAFETY_ANALYSIS;

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

  // Whether or not we count how many of each type of object were scanned.
  static constexpr bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for an
  // object.
  accounting::ContinuousSpaceBitmap* current_space_bitmap_;
  // Cache the heap's mark bitmap to prevent having to do 2 loads during slow path marking.
  accounting::HeapBitmap* mark_bitmap_;

  accounting::ObjectStack* mark_stack_;

  // Every object inside the immune spaces is assumed to be marked. Immune spaces that aren't in the
  // immune region are handled by the normal marking logic.
  ImmuneSpaces immune_spaces_;

  // Parallel finger.
  AtomicInteger atomic_finger_;

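  // Presumably the number of classes with no reference fields and of other
  // ("normal") instances scanned, if kCountScannedTypes (inferred from the
  // names and the surrounding counters).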
  AtomicInteger no_reference_class_count_;
  AtomicInteger normal_count_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of object arrays scanned, if kCountScannedTypes.
  AtomicInteger object_array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
  // Number of java.lang.ref.Reference instances.
  AtomicInteger reference_count_;

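  // Additional diagnostic counters, inferred from the names: large-object mark
  // profiling, parallel work-chunk bookkeeping, overhead timing, and
  // null/immune/fast-path/slow-path mark counts. They appear to be gated by
  // separate debug constants in mark_sweep.cc rather than by kCountScannedTypes.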
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger mark_null_count_;
  AtomicInteger mark_immune_count_;
  AtomicInteger mark_fastpath_count_;
  AtomicInteger mark_slowpath_count_;

  std::unique_ptr<Barrier> gc_barrier_;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

  // Verification.
  size_t live_stack_freeze_size_;

  std::unique_ptr<MemMap> sweep_array_free_buffer_mem_map_;

 private:
  friend class CardScanTask;
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class CheckpointMarkThreadRoots;
  friend class Heap;
  friend class FifoMarkStackChunk;
  friend class MarkObjectVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class MarkSweepMarkObjectSlowPath;
  friend class VerifyRootMarkedVisitor;
  friend class VerifyRootVisitor;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_