/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_

#include <deque>
#include <memory>  // For unique_ptr.

#include "atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "gc_root.h"
#include "gc/accounting/heap_bitmap.h"
#include "immune_region.h"
#include "lock_word.h"
#include "object_callbacks.h"
#include "offsets.h"

namespace art {

class Thread;

namespace mirror {
  class Class;
  class Object;
}  // namespace mirror

namespace gc {

class Heap;

namespace accounting {
  template <typename T> class AtomicStack;
  typedef AtomicStack<mirror::Object*> ObjectStack;
}  // namespace accounting

namespace space {
  class BumpPointerSpace;
  class ContinuousMemMapAllocSpace;
  class ContinuousSpace;
}  // namespace space

namespace collector {

class MarkCompact : public GarbageCollector {
 public:
  explicit MarkCompact(Heap* heap, const std::string& name_prefix = "");
  ~MarkCompact() {}

  virtual void RunPhases() OVERRIDE NO_THREAD_SAFETY_ANALYSIS;
  void InitializePhase();
  void MarkingPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
  void ReclaimPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
  void FinishPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  void MarkReachableObjects()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return kCollectorTypeMC;
  }

  // Sets which space we will be copying objects in.
  void SetSpace(space::BumpPointerSpace* space);
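
  // Typical driver sequence, as a sketch (the heap's collector-transition code is the real
  // caller, and the GC cause shown here is illustrative):
  //   mark_compact->SetSpace(bump_pointer_space);
  //   mark_compact->Run(kGcCauseCollectorTransition, /*clear_soft_references*/ false);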

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void ProcessReferences(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked objects in the large object space to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkRootCallback(mirror::Object** root, void* arg, const RootInfo& root_info)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static bool HeapReferenceMarkedCallback(mirror::HeapReference<mirror::Object>* ref_ptr,
                                          void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void ProcessMarkStackCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static void DelayReferenceReferentCallback(mirror::Class* klass, mirror::Reference* ref,
                                             void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (which is
  // the object itself for non-movable objects).
  mirror::Object* GetMarkedForwardAddress(mirror::Object* object) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
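  // (During compaction the forwarding address is stashed in the object's lock word; see
  //  objects_with_lockword_ and lock_words_to_restore_ below.)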

  static mirror::Object* MarkedForwardingAddressCallback(mirror::Object* object, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks a large object. Returns true if the object was not already marked.
  bool MarkLargeObject(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Resize the mark stack to new_size, typically called to double its capacity when the
  // stack is full.
  void ResizeMarkStack(size_t new_size);

  // Returns true if we should sweep the space.
  bool ShouldSweepSpace(space::ContinuousSpace* space) const;

  // Push an object onto the mark stack.
  void MarkStackPush(mirror::Object* obj);

  void UpdateAndMarkModUnion()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Three-pass mark-compact approach: calculate forwarding addresses, update references,
  // then move objects and restore lock words.
  void Compact() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Calculate the forwarding address of objects marked as "live" in the objects_before_forwarding
  // bitmap.
  void CalculateObjectForwardingAddresses()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
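  // A sketch of this pass, assuming the usual sliding-compaction scheme (the lambda and the
  // begin/end bounds are illustrative; the real visitor is a friend class below):
  //   bump_pointer_ = space_->Begin();
  //   objects_before_forwarding_->VisitMarkedRange(begin, end, [&](mirror::Object* obj) {
  //     ForwardObject(obj);  // Records bump_pointer_ as obj's forwarding address and
  //                          // advances it by obj's aligned size.
  //   });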
  // Update the references of objects by using the forwarding addresses.
  void UpdateReferences() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  static void UpdateRootCallback(mirror::Object** root, void* arg, const RootInfo& /*root_info*/)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  // Move objects and restore lock words.
  void MoveObjects() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Move a single object to its forwarding address.
  void MoveObject(mirror::Object* obj, size_t len) EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
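  // Note: objects slide toward the beginning of the space, so the source and destination
  // ranges of a copy may overlap and the copy needs memmove semantics.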
  // Mark a single object.
  void MarkObject(mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                                                                Locks::mutator_lock_);
  bool IsMarked(const mirror::Object* obj) const
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
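  // Install the forwarding address for obj by overwriting its lock word; a lock word that
  // carries real state is saved first so MoveObjects() can restore it.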
  void ForwardObject(mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                                                                   Locks::mutator_lock_);
  // Update a single heap reference.
  void UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
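  // Roughly (a sketch of the update):
  //   mirror::Object* obj = reference->AsMirrorPtr();
  //   if (obj != nullptr) {
  //     reference->Assign(GetMarkedForwardAddress(obj));
  //   }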
  static void UpdateHeapReferenceCallback(mirror::HeapReference<mirror::Object>* reference,
                                          void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Update all of the references of a single object.
  void UpdateObjectReferences(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

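  // Stack of grey objects: marked objects whose references have not yet been scanned.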
  accounting::ObjectStack* mark_stack_;

  // Immune region; every object inside the immune region is assumed to be marked.
  ImmuneRegion immune_region_;

  // Bump pointer space which we are collecting.
  space::BumpPointerSpace* space_;
  // Cached mark bitmap as an optimization.
  accounting::HeapBitmap* mark_bitmap_;

  // The name of the collector.
  std::string collector_name_;

  // The bump pointer in the space where the next forwarding address will be.
  byte* bump_pointer_;
  // How many live objects we have in the space.
  size_t live_objects_in_space_;

  // Bitmap which describes which objects we have to move. It needs one bit per 8 bytes so
  // that it can address objects which are only 8 bytes long.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_before_forwarding_;
  // Bitmap which describes which lock words we need to restore.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_with_lockword_;
  // Which lock words we need to restore as we are moving objects.
  std::deque<LockWord> lock_words_to_restore_;
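  // Sketch of the lock word round trip: ForwardObject() saves a meaningful lock word here,
  // overwrites it with LockWord::FromForwardingAddress(new_address), and MoveObjects()
  // restores the saved words once objects are in place.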

 private:
  friend class BitmapSetSlowPathVisitor;
  friend class CalculateObjectForwardingAddressVisitor;
  friend class MarkCompactMarkObjectVisitor;
  friend class MoveObjectVisitor;
  friend class UpdateObjectReferencesVisitor;
  friend class UpdateReferenceVisitor;
  DISALLOW_COPY_AND_ASSIGN(MarkCompact);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_