/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_

#include <deque>
#include <memory>  // For unique_ptr.

#include "atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "gc_root.h"
#include "gc/accounting/heap_bitmap.h"
#include "immune_region.h"
#include "lock_word.h"
#include "object_callbacks.h"
#include "offsets.h"

namespace art {

class Thread;

namespace mirror {
  class Class;
  class Object;
}  // namespace mirror

namespace gc {

class Heap;

namespace accounting {
  template <typename T> class AtomicStack;
  typedef AtomicStack<mirror::Object> ObjectStack;
}  // namespace accounting

namespace space {
  class BumpPointerSpace;
  class ContinuousMemMapAllocSpace;
  class ContinuousSpace;
}  // namespace space

namespace collector {

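// A stop-the-world mark-compact collector that compacts a single bump pointer space in place
// (see SetSpace). RunPhases() drives a collection through InitializePhase, MarkingPhase,
// ReclaimPhase and FinishPhase; as the lock annotations below indicate, the marking and
// compaction work requires the mutator lock to be held exclusively, i.e. all mutator threads
// suspended. Compact() slides the live objects towards the beginning of the space so that bump
// pointer allocation can continue from the freed tail.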
class MarkCompact : public GarbageCollector {
 public:
  explicit MarkCompact(Heap* heap, const std::string& name_prefix = "");
  ~MarkCompact() {}

  virtual void RunPhases() OVERRIDE NO_THREAD_SAFETY_ANALYSIS;
  void InitializePhase();
  void MarkingPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
  void ReclaimPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
  void FinishPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  void MarkReachableObjects()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return kCollectorTypeMC;
  }


  // Sets the bump pointer space in which we will be compacting objects.
  void SetSpace(space::BumpPointerSpace* space);

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void ProcessReferences(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
      OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                          const RootInfo& info)
      OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static bool HeapReferenceMarkedCallback(mirror::HeapReference<mirror::Object>* ref_ptr,
                                          void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void ProcessMarkStackCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static void DelayReferenceReferentCallback(mirror::Class* klass, mirror::Reference* ref,
                                             void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (which is
  // the object itself for non-movable objects).
  mirror::Object* GetMarkedForwardAddress(mirror::Object* object) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkedForwardingAddressCallback(mirror::Object* object, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks a large object, returning whether it was newly marked.
  bool MarkLargeObject(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Resize the mark stack to new_size. Used to expand the stack (typically to twice its current
  // size) when it fills up.
  void ResizeMarkStack(size_t new_size) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Returns true if we should sweep the space.
  bool ShouldSweepSpace(space::ContinuousSpace* space) const;

  // Push an object onto the mark stack.
  void MarkStackPush(mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void UpdateAndMarkModUnion()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Three-pass mark-compact approach: first compute the forwarding address of every live object,
  // then update all references to use the forwarding addresses, and finally move the objects.
  void Compact() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Calculate the forwarding address of objects marked as "live" in the objects_before_forwarding
  // bitmap.
  void CalculateObjectForwardingAddresses()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Update the references of objects by using the forwarding addresses.
  void UpdateReferences() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  static void UpdateRootCallback(mirror::Object** root, void* arg, const RootInfo& /*root_info*/)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  // Move objects and restore lock words.
  void MoveObjects() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Move a single object to its forwarding address.
  void MoveObject(mirror::Object* obj, size_t len) EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Mark a single object.
  void MarkObject(mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                                                                Locks::mutator_lock_);
  bool IsMarked(const mirror::Object* obj) const
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  void ForwardObject(mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                                                                   Locks::mutator_lock_);
  // Update a single heap reference.
  void UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  static void UpdateHeapReferenceCallback(mirror::HeapReference<mirror::Object>* reference,
                                          void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Update all of the references of a single object.
  void UpdateObjectReferences(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

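  // Stack of objects which have been marked but whose references have not yet been scanned.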
  accounting::ObjectStack* mark_stack_;

  // Immune region, every object inside the immune region is assumed to be marked.
  ImmuneRegion immune_region_;

  // Bump pointer space which we are collecting.
  space::BumpPointerSpace* space_;
  // Cached mark bitmap as an optimization.
  accounting::HeapBitmap* mark_bitmap_;

  // The name of the collector.
  std::string collector_name_;

  // The bump pointer into the space, where the next forwarding address will be assigned.
  uint8_t* bump_pointer_;
  // How many live objects we have in the space.
  size_t live_objects_in_space_;

  // Bitmap which describes which objects we have to move; the granularity is divided by 2 so
  // that we can handle objects which are only 8 bytes.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_before_forwarding_;
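  // Note: during compaction the forwarding address of each live object is stored into its lock
  // word (see ForwardObject). Objects whose original lock word held real state (e.g. a thin
  // lock or a hash code) have it saved via the two members below and restored once the objects
  // have been moved.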
  // Bitmap which describes which lock words we need to restore.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_with_lockword_;
  // Which lock words we need to restore as we are moving objects.
  std::deque<LockWord> lock_words_to_restore_;

 private:
  friend class BitmapSetSlowPathVisitor;
  friend class CalculateObjectForwardingAddressVisitor;
  friend class MarkCompactMarkObjectVisitor;
  friend class MoveObjectVisitor;
  friend class UpdateObjectReferencesVisitor;
  friend class UpdateReferenceVisitor;
  friend class UpdateRootVisitor;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MarkCompact);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_