/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_

#include <deque>
#include <memory>  // For unique_ptr.

#include "atomic.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "gc_root.h"
#include "gc/accounting/heap_bitmap.h"
#include "immune_spaces.h"
#include "lock_word.h"
#include "object_callbacks.h"
#include "offsets.h"

namespace art {

class Thread;

namespace mirror {
  class Class;
  class Object;
}  // namespace mirror

namespace gc {

class Heap;

namespace accounting {
  template <typename T> class AtomicStack;
  typedef AtomicStack<mirror::Object> ObjectStack;
}  // namespace accounting

namespace space {
  class BumpPointerSpace;
  class ContinuousMemMapAllocSpace;
  class ContinuousSpace;
}  // namespace space

namespace collector {

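// Stop-the-world mark-compact collector for a bump pointer space (note the exclusive
// mutator_lock_ requirements below): live objects are assigned forwarding addresses in address
// order and slid toward the start of the space. See Compact() for the three passes this entails.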
class MarkCompact : public GarbageCollector {
 public:
  explicit MarkCompact(Heap* heap, const std::string& name_prefix = "");
  ~MarkCompact() {}

  virtual void RunPhases() OVERRIDE NO_THREAD_SAFETY_ANALYSIS;
  void InitializePhase();
  void MarkingPhase() REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);
  void ReclaimPhase() REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);
  void FinishPhase() REQUIRES(Locks::mutator_lock_);
  void MarkReachableObjects()
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return kCollectorTypeMC;
  }

  // Sets which space we will be copying objects in.
  void SetSpace(space::BumpPointerSpace* space);
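
  // Illustrative driver sequence (a sketch, not the actual call sites; the Heap owns those, and
  // bump_pointer_space here is a placeholder name):
  //   MarkCompact collector(heap);
  //   collector.SetSpace(bump_pointer_space);  // The space to compact.
  //   collector.Run(kGcCauseCollectorTransition, /*clear_soft_references=*/ false);
  // Run() is inherited from GarbageCollector and drives RunPhases().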

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  void ScanObject(mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  void BindBitmaps() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      REQUIRES(Locks::heap_bitmap_lock_);

  void ProcessReferences(Thread* self) REQUIRES(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  void Sweep(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_);

  void SweepSystemWeaks()
      REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
      OVERRIDE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                          const RootInfo& info)
      OVERRIDE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> reference)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns null if the object is not marked, otherwise returns the forwarding address (same as
  // the object itself for non-movable things).
  mirror::Object* GetMarkedForwardAddress(mirror::Object* object)
      REQUIRES(Locks::mutator_lock_)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_);

  // Marks a large object; returns true if the object was not already marked.
  bool MarkLargeObject(const mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Resize the mark stack to new_size; used to grow the stack (typically to 2x its current size)
  // when it fills up.
  void ResizeMarkStack(size_t new_size) REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if we should sweep the space.
  bool ShouldSweepSpace(space::ContinuousSpace* space) const;

  // Push an object onto the mark stack.
  void MarkStackPush(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_);

  void UpdateAndMarkModUnion()
      REQUIRES(Locks::heap_bitmap_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Three-pass mark-compact approach: calculate forwarding addresses, update references, then
  // move objects (the three methods below, in that order).
  void Compact() REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Calculate the forwarding address of objects marked as "live" in the objects_before_forwarding
  // bitmap.
  void CalculateObjectForwardingAddresses()
      REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Update the references of objects by using the forwarding addresses.
  void UpdateReferences() REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
  // Move objects and restore lock words.
  void MoveObjects() REQUIRES(Locks::mutator_lock_);
  // Move a single object to its forwarding address.
  void MoveObject(mirror::Object* obj, size_t len) REQUIRES(Locks::mutator_lock_);
  // Mark a single object.
  virtual mirror::Object* MarkObject(mirror::Object* obj) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
  virtual void MarkHeapReference(mirror::HeapReference<mirror::Object>* obj_ptr,
                                 bool do_atomic_update) OVERRIDE
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
  virtual mirror::Object* IsMarked(mirror::Object* obj) OVERRIDE
      REQUIRES_SHARED(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_);
  virtual bool IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* obj,
                                           bool do_atomic_update) OVERRIDE
      REQUIRES_SHARED(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_);
  void ForwardObject(mirror::Object* obj)
      REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
  // Update a single heap reference.
  void UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_);
  // Update all of the references of a single object.
  void UpdateObjectReferences(mirror::Object* obj)
      REQUIRES_SHARED(Locks::heap_bitmap_lock_)
      REQUIRES(Locks::mutator_lock_);

  // Revoke all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

  accounting::ObjectStack* mark_stack_;

  // Every object inside the immune spaces is assumed to be marked.
  ImmuneSpaces immune_spaces_;

  // Bump pointer space which we are collecting.
  space::BumpPointerSpace* space_;
  // Cached mark bitmap as an optimization.
  accounting::HeapBitmap* mark_bitmap_;

  // The name of the collector.
  std::string collector_name_;

  // The bump pointer in the space where the next forwarding address will be.
  uint8_t* bump_pointer_;
  // How many live objects we have in the space.
  size_t live_objects_in_space_;

  // Bitmap which describes which objects we have to move; its granularity needs to be divided by
  // 2 so that we can handle objects which are only 8 bytes.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_before_forwarding_;
  // Bitmap which describes which lock words we need to restore.
  std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_with_lockword_;
  // Which lock words we need to restore as we are moving objects.
  std::deque<LockWord> lock_words_to_restore_;
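  // Note on the two members above: while compacting, each moving object's forwarding address is
  // stashed in its lock word. Objects whose lock word held real state (e.g. a hash code or a
  // lock) are flagged in objects_with_lockword_ and their original lock words queued in
  // lock_words_to_restore_, so MoveObjects() can reinstate them after the move.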

  // Whether or not we are currently updating references.
  bool updating_references_;

 private:
  class MarkObjectVisitor;
  class UpdateObjectReferencesVisitor;
  class UpdateReferenceVisitor;
  class UpdateRootVisitor;

  DISALLOW_IMPLICIT_CONSTRUCTORS(MarkCompact);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_