/*
 * Copyright (C) 2008 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "space_bitmap-inl.h"

#include "android-base/stringprintf.h"

#include "art_field-inl.h"
#include "dex_file-inl.h"
#include "mem_map.h"
#include "mirror/object-inl.h"
#include "mirror/class-inl.h"
#include "mirror/object_array.h"

namespace art {
namespace gc {
namespace accounting {

using android::base::StringPrintf;

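// One bitmap bit corresponds to kAlignment bytes of heap, so each intptr_t word of the bitmap
// covers kAlignment * kBitsPerIntPtrT bytes. The capacity is rounded up to a whole number of
// words (e.g. with 8-byte alignment and 64-bit words, one word covers 512 bytes of heap).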
template<size_t kAlignment>
size_t SpaceBitmap<kAlignment>::ComputeBitmapSize(uint64_t capacity) {
  const uint64_t kBytesCoveredPerWord = kAlignment * kBitsPerIntPtrT;
  return (RoundUp(capacity, kBytesCoveredPerWord) / kBytesCoveredPerWord) * sizeof(intptr_t);
}

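// Inverse of ComputeBitmapSize: the number of heap bytes covered by a bitmap of the given size.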
template<size_t kAlignment>
size_t SpaceBitmap<kAlignment>::ComputeHeapSize(uint64_t bitmap_bytes) {
  return bitmap_bytes * kBitsPerByte * kAlignment;
}

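// Reuse an already mapped region as the bitmap storage; the bitmap size is recomputed from
// heap_capacity rather than taken from the map.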
template<size_t kAlignment>
SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::CreateFromMemMap(
    const std::string& name, MemMap* mem_map, uint8_t* heap_begin, size_t heap_capacity) {
  CHECK(mem_map != nullptr);
  uintptr_t* bitmap_begin = reinterpret_cast<uintptr_t*>(mem_map->Begin());
  const size_t bitmap_size = ComputeBitmapSize(heap_capacity);
  return new SpaceBitmap(name, mem_map, bitmap_begin, bitmap_size, heap_begin);
}

template<size_t kAlignment>
SpaceBitmap<kAlignment>::SpaceBitmap(const std::string& name, MemMap* mem_map, uintptr_t* bitmap_begin,
                                     size_t bitmap_size, const void* heap_begin)
    : mem_map_(mem_map),
      bitmap_begin_(reinterpret_cast<Atomic<uintptr_t>*>(bitmap_begin)),
      bitmap_size_(bitmap_size),
      heap_begin_(reinterpret_cast<uintptr_t>(heap_begin)),
      name_(name) {
  CHECK(bitmap_begin_ != nullptr);
  CHECK_NE(bitmap_size, 0U);
}

template<size_t kAlignment>
SpaceBitmap<kAlignment>::~SpaceBitmap() {}

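// Allocate a fresh anonymous mapping large enough to cover heap_capacity bytes of heap and wrap
// it in a bitmap.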
template<size_t kAlignment>
SpaceBitmap<kAlignment>* SpaceBitmap<kAlignment>::Create(
    const std::string& name, uint8_t* heap_begin, size_t heap_capacity) {
  // Round up since heap_capacity is not necessarily a multiple of kAlignment * kBitsPerIntPtrT.
  const size_t bitmap_size = ComputeBitmapSize(heap_capacity);
  std::string error_msg;
  std::unique_ptr<MemMap> mem_map(MemMap::MapAnonymous(name.c_str(), nullptr, bitmap_size,
                                                       PROT_READ | PROT_WRITE, false, false,
                                                       &error_msg));
  if (UNLIKELY(mem_map.get() == nullptr)) {
    LOG(ERROR) << "Failed to allocate bitmap " << name << ": " << error_msg;
    return nullptr;
  }
  return CreateFromMemMap(name, mem_map.release(), heap_begin, heap_capacity);
}

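// Shrink the logical bitmap size so that it only covers the heap up to new_end; the underlying
// mapping is not unmapped or trimmed.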
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::SetHeapLimit(uintptr_t new_end) {
  DCHECK_ALIGNED(new_end, kBitsPerIntPtrT * kAlignment);
  size_t new_size = OffsetToIndex(new_end - heap_begin_) * sizeof(intptr_t);
  if (new_size < bitmap_size_) {
    bitmap_size_ = new_size;
  }
  // Not sure if doing this trim is necessary, since nothing past the end of the heap capacity
  // should be marked.
}

template<size_t kAlignment>
std::string SpaceBitmap<kAlignment>::Dump() const {
  return StringPrintf("%s: %p-%p", name_.c_str(), reinterpret_cast<void*>(HeapBegin()),
                      reinterpret_cast<void*>(HeapLimit()));
}

template<size_t kAlignment>
void SpaceBitmap<kAlignment>::Clear() {
  if (bitmap_begin_ != nullptr) {
    mem_map_->MadviseDontNeedAndZero();
  }
}

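// Clear all bits for objects in [begin, end). Bits in partially covered words at the edges are
// cleared one object at a time; the whole words in between are zeroed (and their pages released)
// in bulk.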
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::ClearRange(const mirror::Object* begin, const mirror::Object* end) {
  uintptr_t begin_offset = reinterpret_cast<uintptr_t>(begin) - heap_begin_;
  uintptr_t end_offset = reinterpret_cast<uintptr_t>(end) - heap_begin_;
  // Align begin and end to word boundaries.
  while (begin_offset < end_offset && OffsetBitIndex(begin_offset) != 0) {
    Clear(reinterpret_cast<mirror::Object*>(heap_begin_ + begin_offset));
    begin_offset += kAlignment;
  }
  while (begin_offset < end_offset && OffsetBitIndex(end_offset) != 0) {
    end_offset -= kAlignment;
    Clear(reinterpret_cast<mirror::Object*>(heap_begin_ + end_offset));
  }
  const uintptr_t start_index = OffsetToIndex(begin_offset);
  const uintptr_t end_index = OffsetToIndex(end_offset);
  ZeroAndReleasePages(reinterpret_cast<uint8_t*>(&bitmap_begin_[start_index]),
                      (end_index - start_index) * sizeof(*bitmap_begin_));
}

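// Copy the mark words from another bitmap of the same size, word by word with relaxed atomics.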
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::CopyFrom(SpaceBitmap* source_bitmap) {
  DCHECK_EQ(Size(), source_bitmap->Size());
  const size_t count = source_bitmap->Size() / sizeof(intptr_t);
  Atomic<uintptr_t>* const src = source_bitmap->Begin();
  Atomic<uintptr_t>* const dest = Begin();
  for (size_t i = 0; i < count; ++i) {
    dest[i].StoreRelaxed(src[i].LoadRelaxed());
  }
}

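// Visit every marked object: scan each bitmap word and, for non-zero words, use CTZ to jump
// directly to the set bits, converting each bit back to an object address.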
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::Walk(ObjectCallback* callback, void* arg) {
  CHECK(bitmap_begin_ != nullptr);
  CHECK(callback != nullptr);

  uintptr_t end = OffsetToIndex(HeapLimit() - heap_begin_ - 1);
  Atomic<uintptr_t>* bitmap_begin = bitmap_begin_;
  for (uintptr_t i = 0; i <= end; ++i) {
    uintptr_t w = bitmap_begin[i].LoadRelaxed();
    if (w != 0) {
      uintptr_t ptr_base = IndexToOffset(i) + heap_begin_;
      do {
        const size_t shift = CTZ(w);
        mirror::Object* obj = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
        (*callback)(obj, arg);
        w ^= (static_cast<uintptr_t>(1)) << shift;
      } while (w != 0);
    }
  }
}

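// Walk objects that are live but not marked (garbage = live & ~mark) in [sweep_begin, sweep_end)
// and hand them to the callback in batches buffered in pointer_buf.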
template<size_t kAlignment>
void SpaceBitmap<kAlignment>::SweepWalk(const SpaceBitmap<kAlignment>& live_bitmap,
                                        const SpaceBitmap<kAlignment>& mark_bitmap,
                                        uintptr_t sweep_begin, uintptr_t sweep_end,
                                        SpaceBitmap::SweepCallback* callback, void* arg) {
  CHECK(live_bitmap.bitmap_begin_ != nullptr);
  CHECK(mark_bitmap.bitmap_begin_ != nullptr);
  CHECK_EQ(live_bitmap.heap_begin_, mark_bitmap.heap_begin_);
  CHECK_EQ(live_bitmap.bitmap_size_, mark_bitmap.bitmap_size_);
  CHECK(callback != nullptr);
  CHECK_LE(sweep_begin, sweep_end);
  CHECK_GE(sweep_begin, live_bitmap.heap_begin_);

  if (sweep_end <= sweep_begin) {
    return;
  }

  // TODO: rewrite the callbacks to accept a std::vector<mirror::Object*> rather than a mirror::Object**?
  constexpr size_t buffer_size = sizeof(intptr_t) * kBitsPerIntPtrT;
#ifdef __LP64__
  // Heap-allocate for smaller stack frame.
  std::unique_ptr<mirror::Object*[]> pointer_buf_ptr(new mirror::Object*[buffer_size]);
  mirror::Object** pointer_buf = pointer_buf_ptr.get();
#else
  // Stack-allocate buffer as it's small enough.
  mirror::Object* pointer_buf[buffer_size];
#endif
  mirror::Object** pb = &pointer_buf[0];

  size_t start = OffsetToIndex(sweep_begin - live_bitmap.heap_begin_);
  size_t end = OffsetToIndex(sweep_end - live_bitmap.heap_begin_ - 1);
  CHECK_LT(end, live_bitmap.Size() / sizeof(intptr_t));
  Atomic<uintptr_t>* live = live_bitmap.bitmap_begin_;
  Atomic<uintptr_t>* mark = mark_bitmap.bitmap_begin_;
  for (size_t i = start; i <= end; i++) {
    uintptr_t garbage = live[i].LoadRelaxed() & ~mark[i].LoadRelaxed();
    if (UNLIKELY(garbage != 0)) {
      uintptr_t ptr_base = IndexToOffset(i) + live_bitmap.heap_begin_;
      do {
        const size_t shift = CTZ(garbage);
        garbage ^= (static_cast<uintptr_t>(1)) << shift;
        *pb++ = reinterpret_cast<mirror::Object*>(ptr_base + shift * kAlignment);
      } while (garbage != 0);
      // Make sure that there are always enough slots available for an
      // entire word of one bits.
      if (pb >= &pointer_buf[buffer_size - kBitsPerIntPtrT]) {
        (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
        pb = &pointer_buf[0];
      }
    }
  }
  if (pb > &pointer_buf[0]) {
    (*callback)(pb - &pointer_buf[0], &pointer_buf[0], arg);
  }
}

template class SpaceBitmap<kObjectAlignment>;
template class SpaceBitmap<kPageSize>;

}  // namespace accounting
}  // namespace gc
}  // namespace art