/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_
#define ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_

#include <limits.h>
#include <stdint.h>
#include <memory>
#include <set>
#include <string>
#include <vector>

#include "base/mutex.h"
#include "globals.h"
#include "object_callbacks.h"

namespace art {

namespace mirror {
class Class;
class Object;
}  // namespace mirror
class MemMap;

namespace gc {
namespace accounting {

template<size_t kAlignment>
class SpaceBitmap {
 public:
  typedef void ScanCallback(mirror::Object* obj, void* finger, void* arg);
  typedef void SweepCallback(size_t ptr_count, mirror::Object** ptrs, void* arg);

  // Initialize a space bitmap so that it points to a bitmap large enough to cover a heap at
  // heap_begin of heap_capacity bytes, where objects are guaranteed to be kAlignment-aligned.
  static SpaceBitmap* Create(const std::string& name, uint8_t* heap_begin, size_t heap_capacity);

  // Initialize a space bitmap using the provided mem_map as the live bits. Takes ownership of the
  // mem map. The address range covered starts at heap_begin and is of size equal to heap_capacity.
  // Objects are kAlignment-aligned.
  static SpaceBitmap* CreateFromMemMap(const std::string& name, MemMap* mem_map,
                                       uint8_t* heap_begin, size_t heap_capacity);

  ~SpaceBitmap();

  // <offset> is the difference from .base to a pointer address.
  // <index> is the index of .bits that contains the bit representing <offset>.
  static constexpr size_t OffsetToIndex(size_t offset) {
    return offset / kAlignment / kBitsPerIntPtrT;
  }

  template<typename T>
  static constexpr T IndexToOffset(T index) {
    return static_cast<T>(index * kAlignment * kBitsPerIntPtrT);
  }

  // Bits are packed in the obvious way.
  static constexpr uintptr_t OffsetToMask(uintptr_t offset) {
    return (static_cast<size_t>(1)) << ((offset / kAlignment) % kBitsPerIntPtrT);
  }

  bool Set(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<true>(obj);
  }

  bool Clear(const mirror::Object* obj) ALWAYS_INLINE {
    return Modify<false>(obj);
  }

  // Returns true if the object was previously marked.
  bool AtomicTestAndSet(const mirror::Object* obj);

  // Fill the bitmap with zeroes. Returns the bitmap's memory to the system as a side-effect.
  void Clear();

  bool Test(const mirror::Object* obj) const;
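  // Worked example of the index/mask math above (illustrative only; assumes kAlignment == 8 and
  // 64-bit bitmap words, i.e. kBitsPerIntPtrT == 64): an object at heap_begin_ + 0x1230 has
  // offset 0x1230 (4656 bytes), which is object slot 4656 / 8 == 582. It is therefore tracked in
  // bitmap word OffsetToIndex(0x1230) == 582 / 64 == 9, under the bit mask
  // OffsetToMask(0x1230) == 1 << (582 % 64) == 1 << 6. Conceptually, Set(), Clear() and Test()
  // all operate on the bit selected by this index/mask pair.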
  // Return true iff <obj> is within the range of pointers that this bitmap could potentially
  // cover, even if a bit has not been set for it.
  bool HasAddress(const void* obj) const {
    // If obj < heap_begin_ then offset underflows to some very large value past the end of the
    // bitmap.
    const uintptr_t offset = reinterpret_cast<uintptr_t>(obj) - heap_begin_;
    const size_t index = OffsetToIndex(offset);
    return index < bitmap_size_ / sizeof(intptr_t);
  }

  void VisitRange(uintptr_t base, uintptr_t max, ObjectCallback* callback, void* arg) const;

  class ClearVisitor {
   public:
    explicit ClearVisitor(SpaceBitmap* const bitmap)
        : bitmap_(bitmap) {
    }

    void operator()(mirror::Object* obj) const {
      bitmap_->Clear(obj);
    }

   private:
    SpaceBitmap* const bitmap_;
  };

  template <typename Visitor>
  void VisitRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const {
    for (; visit_begin < visit_end; visit_begin += kAlignment) {
      visitor(reinterpret_cast<mirror::Object*>(visit_begin));
    }
  }

  // Visit the live objects in the range [visit_begin, visit_end).
  // TODO: Use lock annotations when clang is fixed.
  // REQUIRES(Locks::heap_bitmap_lock_) SHARED_REQUIRES(Locks::mutator_lock_);
  template <typename Visitor>
  void VisitMarkedRange(uintptr_t visit_begin, uintptr_t visit_end, const Visitor& visitor) const
      NO_THREAD_SAFETY_ANALYSIS;

  // Visits set bits in address order. The callback is not permitted to change the bitmap bits or
  // max during the traversal.
  void Walk(ObjectCallback* callback, void* arg)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_);

  // Visits set bits with an in order traversal. The callback is not permitted to change the
  // bitmap bits or max during the traversal.
  void InOrderWalk(ObjectCallback* callback, void* arg)
      SHARED_REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Walk through the bitmaps in increasing address order, and find the object pointers that
  // correspond to garbage objects. Call <callback> zero or more times with lists of these object
  // pointers. The callback is not permitted to increase the max of either bitmap.
  static void SweepWalk(const SpaceBitmap& live, const SpaceBitmap& mark, uintptr_t base,
                        uintptr_t max, SweepCallback* thunk, void* arg);

  void CopyFrom(SpaceBitmap* source_bitmap);

  // Starting address of our internal storage.
  uintptr_t* Begin() {
    return bitmap_begin_;
  }

  // Size of our internal storage.
  size_t Size() const {
    return bitmap_size_;
  }

  // Size in bytes of the memory that the bitmap spans.
  uint64_t HeapSize() const {
    return IndexToOffset<uint64_t>(Size() / sizeof(intptr_t));
  }

  void SetHeapSize(size_t bytes) {
    // TODO: Un-map the end of the mem map.
    bitmap_size_ = OffsetToIndex(bytes) * sizeof(intptr_t);
    CHECK_EQ(HeapSize(), bytes);
  }

  uintptr_t HeapBegin() const {
    return heap_begin_;
  }

  // The maximum address which the bitmap can span. (HeapBegin() <= object < HeapLimit()).
  uint64_t HeapLimit() const {
    return static_cast<uint64_t>(HeapBegin()) + HeapSize();
  }
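  // Sizing note (illustrative only; assumes a 64-bit build where sizeof(intptr_t) == 8 and
  // kBitsPerIntPtrT == 64): each bitmap word covers kAlignment * kBitsPerIntPtrT bytes of heap,
  // so with kAlignment == 8 a single 8-byte word maps 512 bytes of heap and, following the
  // definition of HeapSize() above, HeapSize() == Size() * 64.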
  // Set the max address which can be covered by the bitmap.
  void SetHeapLimit(uintptr_t new_end);

  std::string GetName() const {
    return name_;
  }

  void SetName(const std::string& name) {
    name_ = name;
  }

  std::string Dump() const;

  // Helper functions for computing the bitmap size based on a 64-bit capacity, and vice versa.
  static size_t ComputeBitmapSize(uint64_t capacity);
  static size_t ComputeHeapSize(uint64_t bitmap_bytes);

 private:
  // TODO: heap_end_ is initialized so that the heap bitmap is empty; this doesn't require the -1,
  // however, we document that this is expected on heap_end_.
  SpaceBitmap(const std::string& name, MemMap* mem_map, uintptr_t* bitmap_begin, size_t bitmap_size,
              const void* heap_begin);

  template<bool kSetBit>
  bool Modify(const mirror::Object* obj);

  // For an unvisited object, visit it then all its children found via fields.
  static void WalkFieldsInOrder(SpaceBitmap* visited, ObjectCallback* callback, mirror::Object* obj,
                                void* arg) SHARED_REQUIRES(Locks::mutator_lock_);

  // Walk instance fields of the given Class. Separate function to allow recursion on the super
  // class.
  static void WalkInstanceFields(SpaceBitmap<kAlignment>* visited, ObjectCallback* callback,
                                 mirror::Object* obj, mirror::Class* klass, void* arg)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Backing storage for the bitmap.
  std::unique_ptr<MemMap> mem_map_;

  // The bitmap itself, word sized for efficiency in scanning.
  uintptr_t* const bitmap_begin_;

  // Size of this bitmap.
  size_t bitmap_size_;

  // The base address of the heap, which corresponds to the word containing the first bit in the
  // bitmap.
  const uintptr_t heap_begin_;

  // Name of this bitmap.
  std::string name_;
};

typedef SpaceBitmap<kObjectAlignment> ContinuousSpaceBitmap;
typedef SpaceBitmap<kLargeObjectAlignment> LargeObjectBitmap;

template<size_t kAlignment>
std::ostream& operator << (std::ostream& stream, const SpaceBitmap<kAlignment>& bitmap);

}  // namespace accounting
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_ACCOUNTING_SPACE_BITMAP_H_
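// Usage sketch (hypothetical caller, shown for illustration only; the names space_begin,
// capacity and obj are assumptions, not part of this header): a space would typically create
// and own a ContinuousSpaceBitmap covering its reserved address range, e.g.
//
//   accounting::ContinuousSpaceBitmap* live_bitmap =
//       accounting::ContinuousSpaceBitmap::Create("live bitmap", space_begin, capacity);
//   if (live_bitmap->HasAddress(obj) && !live_bitmap->Test(obj)) {
//     live_bitmap->Set(obj);  // Mark obj as live.
//   }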