/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_
#define ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_

#include <limits.h>
#include <stdint.h>
#include <memory>
#include <set>
#include <vector>

#include "base/mutex.h"
#include "globals.h"
#include "object_callbacks.h"

namespace art {

namespace mirror {
  class Object;
}  // namespace mirror
class MemMap;

namespace gc {
namespace accounting {

template<size_t kAlignment>
class SpaceBitmap {
 public:
  typedef void ScanCallback(mirror::Object* obj, void* finger, void* arg);
  typedef void SweepCallback(size_t ptr_count, mirror::Object** ptrs, void* arg);

  // Initialize a space bitmap so that it points to a bitmap large enough to cover a heap at
  // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
  static SpaceBitmap* Create(const std::string& name, uint8_t* heap_begin, size_t heap_capacity);

  // Initialize a space bitmap using the provided mem_map as the live bits. Takes ownership of the
  // mem map. The address range covered starts at heap_begin and is of size equal to heap_capacity.
  // Objects are kAlignment-aligned.
  static SpaceBitmap* CreateFromMemMap(const std::string& name, MemMap* mem_map,
                                       uint8_t* heap_begin, size_t heap_capacity);
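
  // Illustrative usage sketch (not part of this API): creating a bitmap for a
  // hypothetical 256 MiB continuous space via the ContinuousSpaceBitmap typedef
  // declared at the end of this file, assuming the MB constant from globals.h:
  //
  //   std::unique_ptr<ContinuousSpaceBitmap> live_bitmap(
  //       ContinuousSpaceBitmap::Create("live bitmap", space_begin, 256 * MB));
  //   CHECK(live_bitmap != nullptr);
  //
  // space_begin here is a hypothetical uint8_t* to the base of the space.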

  ~SpaceBitmap();

  // <offset> is the difference from heap_begin_ to a pointer address.
  // <index> is the index of the word within bitmap_begin_ that contains the bit representing
  //         <offset>.
  static constexpr size_t OffsetToIndex(size_t offset) {
    return offset / kAlignment / kBitsPerIntPtrT;
  }

  template<typename T>
  static constexpr T IndexToOffset(T index) {
    return static_cast<T>(index * kAlignment * kBitsPerIntPtrT);
  }

  // Bits are packed in the obvious way.
  static constexpr uintptr_t OffsetToMask(uintptr_t offset) {
    return (static_cast<size_t>(1)) << ((offset / kAlignment) % kBitsPerIntPtrT);
  }
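
  // Worked example (illustrative, assuming kAlignment == 8 and 64-bit words,
  // i.e. kBitsPerIntPtrT == 64): an object at heap_begin_ + 0x1238 has
  // offset == 0x1238, so its bit number is 0x1238 / 8 == 583. That bit lives
  // in word OffsetToIndex(0x1238) == 583 / 64 == 9 of bitmap_begin_, under the
  // mask OffsetToMask(0x1238) == 1 << (583 % 64) == 1 << 7.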

  bool Set(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<true>(obj);
  }

  bool Clear(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<false>(obj);
  }

  // Returns true if the object was previously marked.
  bool AtomicTestAndSet(const mirror::Object* obj);

  // Fill the bitmap with zeroes.  Returns the bitmap's memory to the system as a side-effect.
  void Clear();

  bool Test(const mirror::Object* obj) const;

  // Return true iff <obj> is within the range of pointers that this bitmap could potentially cover,
  // even if a bit has not been set for it.
  bool HasAddress(const void* obj) const {
    // If obj < heap_begin_ then offset underflows to some very large value past the end of the
    // bitmap.
    const uintptr_t offset = reinterpret_cast<uintptr_t>(obj) - heap_begin_;
    const size_t index = OffsetToIndex(offset);
    return index < bitmap_size_ / sizeof(intptr_t);
  }
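
  // Illustrative note: the unsigned subtraction above is what makes a single
  // comparison sufficient. E.g. with heap_begin_ == 0x1000, obj == 0x800 yields
  // offset == 0xFFFFFFFFFFFFF800 on a 64-bit target, whose index is far past
  // bitmap_size_ / sizeof(intptr_t), so HasAddress() returns false without an
  // explicit obj >= heap_begin_ check.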

  void VisitRange(uintptr_t base, uintptr_t max, ObjectCallback* callback, void* arg) const;

  class ClearVisitor {
   public:
    explicit ClearVisitor(SpaceBitmap* const bitmap)
        : bitmap_(bitmap) {
    }

    void operator()(mirror::Object* obj) const {
      bitmap_->Clear(obj);
    }

   private:
    SpaceBitmap* const bitmap_;
  };

  template <typename Visitor>
  void VisitRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
    for (; visit_begin < visit_end; visit_begin += kAlignment) {
      visitor(reinterpret_cast<mirror::Object*>(visit_begin));
    }
  }
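
  // Illustrative usage sketch: this unconditional VisitRange applies the visitor
  // to every kAlignment-aligned slot in the range, whether or not its bit is set.
  // Combined with ClearVisitor it wipes a whole address range, assuming
  // hypothetical bounds range_begin/range_end within [HeapBegin(), HeapLimit()):
  //
  //   bitmap->VisitRange(range_begin, range_end,
  //                      ContinuousSpaceBitmap::ClearVisitor(bitmap));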

  // Visit the live objects in the range [visit_begin, visit_end).
  // TODO: Use lock annotations when clang is fixed.
  // EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  template <typename Visitor>
  void VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const
      NO_THREAD_SAFETY_ANALYSIS;

  // Visits set bits in address order.  The callback is not permitted to change the bitmap bits or
  // max during the traversal.
  void Walk(ObjectCallback* callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Visits set bits with an in order traversal.  The callback is not permitted to change the bitmap
  // bits or max during the traversal.
  void InOrderWalk(ObjectCallback* callback, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Walk through the bitmaps in increasing address order, and find the object pointers that
  // correspond to garbage objects.  Call <callback> zero or more times with lists of these object
  // pointers. The callback is not permitted to increase the max of either bitmap.
  static void SweepWalk(const SpaceBitmap& live, const SpaceBitmap& mark, uintptr_t base,
                        uintptr_t max, SweepCallback* thunk, void* arg);
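
  // Illustrative sketch of a SweepCallback (hypothetical helper, not part of
  // this header). SweepWalk reports addresses whose bits are set in |live| but
  // not in |mark|, i.e. objects that were not reached during marking:
  //
  //   void FreeGarbage(size_t ptr_count, mirror::Object** ptrs, void* arg) {
  //     auto* allocator = reinterpret_cast<SomeAllocator*>(arg);  // hypothetical
  //     for (size_t i = 0; i < ptr_count; ++i) {
  //       allocator->Free(ptrs[i]);
  //     }
  //   }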

  void CopyFrom(SpaceBitmap* source_bitmap);

  // Starting address of our internal storage.
  uintptr_t* Begin() {
    return bitmap_begin_;
  }

  // Size in bytes of our internal storage.
  size_t Size() const {
    return bitmap_size_;
  }

  // Size in bytes of the memory that the bitmap spans.
  uint64_t HeapSize() const {
    return IndexToOffset<uint64_t>(Size() / sizeof(intptr_t));
  }

  void SetHeapSize(size_t bytes) {
    // TODO: Un-map the end of the mem map.
    bitmap_size_ = OffsetToIndex(bytes) * sizeof(intptr_t);
    CHECK_EQ(HeapSize(), bytes);
  }

  uintptr_t HeapBegin() const {
    return heap_begin_;
  }

  // The maximum address which the bitmap can span. (HeapBegin() <= object < HeapLimit()).
  uint64_t HeapLimit() const {
    return static_cast<uint64_t>(HeapBegin()) + HeapSize();
  }

  // Set the max address which can be covered by the bitmap.
  void SetHeapLimit(uintptr_t new_end);

  std::string GetName() const {
    return name_;
  }

  void SetName(const std::string& name) {
    name_ = name;
  }

  std::string Dump() const;

  // Helper functions for computing the bitmap size from a 64-bit capacity, and the reverse.
  static size_t ComputeBitmapSize(uint64_t capacity);
  static size_t ComputeHeapSize(uint64_t bitmap_bytes);

 private:
  // TODO: heap_end_ is initialized so that the heap bitmap is empty; this doesn't require the -1,
  // but we document that this is the expected behavior for heap_end_.
  SpaceBitmap(const std::string& name, MemMap* mem_map, uintptr_t* bitmap_begin, size_t bitmap_size,
              const void* heap_begin);

  template<bool kSetBit>
  bool Modify(const mirror::Object* obj);

  // For an unvisited object, visit it then all its children found via fields.
  static void WalkFieldsInOrder(SpaceBitmap* visited, ObjectCallback* callback, mirror::Object* obj,
                                void* arg) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  // Walk instance fields of the given Class. Separate function to allow recursion on the super
  // class.
  static void WalkInstanceFields(SpaceBitmap<kAlignment>* visited, ObjectCallback* callback,
                                 mirror::Object* obj, mirror::Class* klass, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Backing storage for bitmap.
  std::unique_ptr<MemMap> mem_map_;

  // The bitmap itself, word-sized for efficiency in scanning.
  uintptr_t* const bitmap_begin_;

  // Size of this bitmap, in bytes.
  size_t bitmap_size_;

  // The base address of the heap, which corresponds to the word containing the first bit in the
  // bitmap.
  const uintptr_t heap_begin_;

  // Name of this bitmap.
  std::string name_;
};

typedef SpaceBitmap<kObjectAlignment> ContinuousSpaceBitmap;
typedef SpaceBitmap<kLargeObjectAlignment> LargeObjectBitmap;

template<size_t kAlignment>
std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap);

}  // namespace accounting
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_