/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mark_compact.h"

#include "base/logging.h"
#include "base/mutex-inl.h"
#include "base/timing_logger.h"
#include "gc/accounting/heap_bitmap-inl.h"
#include "gc/accounting/mod_union_table.h"
#include "gc/accounting/remembered_set.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/reference_processor.h"
#include "gc/space/bump_pointer_space.h"
#include "gc/space/bump_pointer_space-inl.h"
#include "gc/space/image_space.h"
#include "gc/space/large_object_space.h"
#include "gc/space/space-inl.h"
#include "indirect_reference_table.h"
#include "intern_table.h"
#include "jni_internal.h"
#include "mark_sweep-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/dex_cache.h"
#include "mirror/object-inl.h"
#include "mirror/object_array.h"
#include "mirror/object_array-inl.h"
#include "mirror/reference-inl.h"
#include "monitor.h"
#include "runtime.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"

using ::art::mirror::Object;

namespace art {
namespace gc {
namespace collector {
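
// MarkCompact is a stop-the-world sliding compactor for a single bump pointer
// space. RunPhases() drives four steps, all within a single pause:
//   1. Mark all reachable objects (MarkingPhase).
//   2. Assign each marked object a forwarding address by walking the mark
//      bitmap in address order (CalculateObjectForwardingAddresses).
//   3. Rewrite every root and reference to point at the forwarding addresses
//      (UpdateReferences).
//   4. Slide the objects down to their forwarding addresses (MoveObjects).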

void MarkCompact::BindBitmaps() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Mark all of the spaces we never collect as immune.
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
        space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
      CHECK(immune_region_.AddContinuousSpace(space)) << "Failed to add space " << *space;
    }
  }
}

MarkCompact::MarkCompact(Heap* heap, const std::string& name_prefix)
    : GarbageCollector(heap, name_prefix + (name_prefix.empty() ? "" : " ") + "mark compact"),
      space_(nullptr), collector_name_(name_) {
}

void MarkCompact::RunPhases() {
  Thread* self = Thread::Current();
  InitializePhase();
  CHECK(!Locks::mutator_lock_->IsExclusiveHeld(self));
  {
    ScopedPause pause(this);
    GetHeap()->PreGcVerificationPaused(this);
    GetHeap()->PrePauseRosAllocVerification(this);
    MarkingPhase();
    ReclaimPhase();
  }
  GetHeap()->PostGcVerification(this);
  FinishPhase();
}

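// Compute and install the forwarding address for |obj|. The address is the
// current bump pointer, which then advances by the object's aligned size, so
// marked objects are packed contiguously from the start of the space. The
// forwarding address is stashed in the object's lock word; any pre-existing
// non-default lock word is saved in lock_words_to_restore_ (with a bit set in
// objects_with_lockword_) so MoveObject() can restore it after the copy.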
void MarkCompact::ForwardObject(mirror::Object* obj) {
  const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment);
  LockWord lock_word = obj->GetLockWord(false);
  // If we have a non-empty lock word, store it and restore it later.
  if (!LockWord::IsDefault(lock_word)) {
    // Set the bit in the bitmap so that we know to restore it later.
    objects_with_lockword_->Set(obj);
    lock_words_to_restore_.push_back(lock_word);
  }
  obj->SetLockWord(LockWord::FromForwardingAddress(reinterpret_cast<size_t>(bump_pointer_)),
                   false);
  bump_pointer_ += alloc_size;
  ++live_objects_in_space_;
}

class CalculateObjectForwardingAddressVisitor {
 public:
  explicit CalculateObjectForwardingAddressVisitor(MarkCompact* collector)
      : collector_(collector) {}
  void operator()(mirror::Object* obj) const EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_,
                                                                      Locks::heap_bitmap_lock_) {
    DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment);
    DCHECK(collector_->IsMarked(obj));
    collector_->ForwardObject(obj);
  }

 private:
  MarkCompact* const collector_;
};

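// Assign forwarding addresses to all marked objects. VisitMarkedRange walks
// the bitmap in increasing address order, so every object's destination is at
// or below its current address; this is what makes the later sliding copy in
// MoveObjects() safe.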
void MarkCompact::CalculateObjectForwardingAddresses() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // The bump pointer in the space where the next forwarding address will be.
  bump_pointer_ = reinterpret_cast<uint8_t*>(space_->Begin());
  // Visit all the marked objects in the bitmap.
  CalculateObjectForwardingAddressVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
}

void MarkCompact::InitializePhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  mark_stack_ = heap_->GetMarkStack();
  DCHECK(mark_stack_ != nullptr);
  immune_region_.Reset();
  CHECK(space_->CanMoveObjects()) << "Attempting to compact non-movable space from " << *space_;
  // TODO: I don't think we should need the heap bitmap lock to get the mark bitmap.
  ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  mark_bitmap_ = heap_->GetMarkBitmap();
  live_objects_in_space_ = 0;
}

void MarkCompact::ProcessReferences(Thread* self) {
  WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
  heap_->GetReferenceProcessor()->ProcessReferences(
      false, GetTimings(), GetCurrentIteration()->GetClearSoftReferences(),
      &HeapReferenceMarkedCallback, &MarkObjectCallback, &ProcessMarkStackCallback, this);
}

class BitmapSetSlowPathVisitor {
 public:
  void operator()(const mirror::Object* obj) const {
    // Marking a large object, make sure it's aligned as a sanity check.
    if (!IsAligned<kPageSize>(obj)) {
      Runtime::Current()->GetHeap()->DumpSpaces(LOG(ERROR));
      LOG(FATAL) << obj;
    }
  }
};

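// Mark |obj| if it has not been marked yet. Null and immune objects are
// ignored. Objects inside the space being compacted are tracked in
// objects_before_forwarding_; everything else goes through the heap's mark
// bitmap, with the slow path visitor above sanity checking that anything
// marked through the large object path is page aligned. Newly marked objects
// are pushed on the mark stack for recursive scanning.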
inline void MarkCompact::MarkObject(mirror::Object* obj) {
  if (obj == nullptr) {
    return;
  }
  if (kUseBakerOrBrooksReadBarrier) {
    // Verify all the objects have the correct forward pointer installed.
    obj->AssertReadBarrierPointer();
  }
  if (immune_region_.ContainsObject(obj)) {
    return;
  }
  if (objects_before_forwarding_->HasAddress(obj)) {
    if (!objects_before_forwarding_->Set(obj)) {
      MarkStackPush(obj);  // This object was not previously marked.
    }
  } else {
    DCHECK(!space_->HasAddress(obj));
    BitmapSetSlowPathVisitor visitor;
    if (!mark_bitmap_->Set(obj, visitor)) {
      // This object was not previously marked.
      MarkStackPush(obj);
    }
  }
}

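// Mark all reachable objects. Runs during the pause with the mutator lock
// held exclusively, so the cards cleared below cannot be re-dirtied by
// mutators while the GC runs.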
void MarkCompact::MarkingPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Thread* self = Thread::Current();
  // Bitmap which describes which objects we have to move.
  objects_before_forwarding_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects before forwarding", space_->Begin(), space_->Size()));
  // Bitmap which describes which lock words we need to restore.
  objects_with_lockword_.reset(accounting::ContinuousSpaceBitmap::Create(
      "objects with lock words", space_->Begin(), space_->Size()));
  CHECK(Locks::mutator_lock_->IsExclusiveHeld(self));
  // Assume the cleared space is already empty.
  BindBitmaps();
  t.NewTiming("ProcessCards");
  // Process dirty cards and add dirty cards to mod-union tables.
  heap_->ProcessCards(GetTimings(), false, false, true);
  // Clear the whole card table since we cannot get any additional dirty cards during the
  // paused GC. This saves memory but only works for pause-the-world collectors.
  t.NewTiming("ClearCardTable");
  heap_->GetCardTable()->ClearCardTable();
  // Need to do this before the checkpoint since we don't want any threads to add references to
  // the live stack during the recursive mark.
  if (kUseThreadLocalAllocationStack) {
    t.NewTiming("RevokeAllThreadLocalAllocationStacks");
    heap_->RevokeAllThreadLocalAllocationStacks(self);
  }
  t.NewTiming("SwapStacks");
  heap_->SwapStacks(self);
  {
    WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
    MarkRoots();
    // Mark roots of immune spaces.
    UpdateAndMarkModUnion();
    // Recursively mark remaining objects.
    MarkReachableObjects();
  }
  ProcessReferences(self);
  {
    ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
    SweepSystemWeaks();
  }
  // Revoke the TLABs before counting how many objects were moved, since allocations in
  // unrevoked TLABs are not yet properly counted.
  RevokeAllThreadLocalBuffers();
  // Disabled due to an issue where we have objects in the bump pointer space which reference dead
  // objects.
  // heap_->PreSweepingGcVerification(this);
}

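// For each immune space with a mod union table, mark the objects outside the
// immune region that the immune space references. This lets the collector
// avoid scanning the immune spaces in full while still finding every
// reference they hold into the collected spaces.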
void MarkCompact::UpdateAndMarkModUnion() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  for (auto& space : heap_->GetContinuousSpaces()) {
    // If the space is immune then we need to mark the references to other spaces.
    if (immune_region_.ContainsSpace(space)) {
      accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
      if (table != nullptr) {
        // TODO: Improve naming.
        TimingLogger::ScopedTiming t2(
            space->IsZygoteSpace() ? "UpdateAndMarkZygoteModUnionTable" :
                                     "UpdateAndMarkImageModUnionTable", GetTimings());
        table->UpdateAndMarkReferences(MarkHeapReferenceCallback, this);
      }
    }
  }
}

void MarkCompact::MarkReachableObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  accounting::ObjectStack* live_stack = heap_->GetLiveStack();
  {
    TimingLogger::ScopedTiming t2("MarkAllocStackAsLive", GetTimings());
    heap_->MarkAllocStackAsLive(live_stack);
  }
  live_stack->Reset();
  // Recursively process the mark stack.
  ProcessMarkStack();
}

void MarkCompact::ReclaimPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  // Reclaim unmarked objects.
  Sweep(false);
  // Swap the live and mark bitmaps for each space which we modified. This is an optimization
  // that enables us to not clear live bits inside of the sweep. Only swaps unbound bitmaps.
  SwapBitmaps();
  GetHeap()->UnBindBitmaps();  // Unbind the live and mark bitmaps.
  Compact();
}

void MarkCompact::ResizeMarkStack(size_t new_size) {
  std::vector<StackReference<Object>> temp(mark_stack_->Begin(), mark_stack_->End());
  CHECK_LE(mark_stack_->Size(), new_size);
  mark_stack_->Resize(new_size);
  for (auto& obj : temp) {
    mark_stack_->PushBack(obj.AsMirrorPtr());
  }
}

inline void MarkCompact::MarkStackPush(Object* obj) {
  if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {
    ResizeMarkStack(mark_stack_->Capacity() * 2);
  }
  // The object must be pushed onto the mark stack.
  mark_stack_->PushBack(obj);
}

void MarkCompact::ProcessMarkStackCallback(void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->ProcessMarkStack();
}

mirror::Object* MarkCompact::MarkObjectCallback(mirror::Object* root, void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->MarkObject(root);
  return root;
}

void MarkCompact::MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr,
                                            void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->MarkObject(obj_ptr->AsMirrorPtr());
}

void MarkCompact::DelayReferenceReferentCallback(mirror::Class* klass, mirror::Reference* ref,
                                                 void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->DelayReferenceReferent(klass, ref);
}

void MarkCompact::VisitRoots(
    mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED) {
  for (size_t i = 0; i < count; ++i) {
    MarkObject(*roots[i]);
  }
}

void MarkCompact::VisitRoots(
    mirror::CompressedReference<mirror::Object>** roots, size_t count,
    const RootInfo& info ATTRIBUTE_UNUSED) {
  for (size_t i = 0; i < count; ++i) {
    MarkObject(roots[i]->AsMirrorPtr());
  }
}

class UpdateRootVisitor : public RootVisitor {
 public:
  explicit UpdateRootVisitor(MarkCompact* collector) : collector_(collector) {
  }

  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
    for (size_t i = 0; i < count; ++i) {
      mirror::Object* obj = *roots[i];
      mirror::Object* new_obj = collector_->GetMarkedForwardAddress(obj);
      if (obj != new_obj) {
        *roots[i] = new_obj;
        DCHECK(new_obj != nullptr);
      }
    }
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                  const RootInfo& info ATTRIBUTE_UNUSED)
      OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
    for (size_t i = 0; i < count; ++i) {
      mirror::Object* obj = roots[i]->AsMirrorPtr();
      mirror::Object* new_obj = collector_->GetMarkedForwardAddress(obj);
      if (obj != new_obj) {
        roots[i]->Assign(new_obj);
        DCHECK(new_obj != nullptr);
      }
    }
  }

 private:
  MarkCompact* const collector_;
};

class UpdateObjectReferencesVisitor {
 public:
  explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) {
  }
  void operator()(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->UpdateObjectReferences(obj);
  }

 private:
  MarkCompact* const collector_;
};

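// Rewrite every reference in the heap to its forwarding address: runtime
// roots, references recorded in mod union tables, objects in spaces with live
// bitmaps, already-swept system weaks, objects in the compacted space itself
// (visited through objects_before_forwarding_ since the bump pointer space
// has no live bitmap), and finally the reference processor's cleared list.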
void MarkCompact::UpdateReferences() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime* runtime = Runtime::Current();
  // Update roots.
  UpdateRootVisitor update_root_visitor(this);
  runtime->VisitRoots(&update_root_visitor);
  // Update object references in mod union tables and spaces.
  for (const auto& space : heap_->GetContinuousSpaces()) {
    accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
    if (table != nullptr) {
      // The space has a mod union table (immune spaces), so update references through it.
      // TODO: Improve naming.
      TimingLogger::ScopedTiming t2(
          space->IsZygoteSpace() ? "UpdateZygoteModUnionTableReferences" :
                                   "UpdateImageModUnionTableReferences",
          GetTimings());
      table->UpdateAndMarkReferences(&UpdateHeapReferenceCallback, this);
    } else {
      // No mod union table, so we need to scan the space using a bitmap visit.
      accounting::ContinuousSpaceBitmap* bitmap = space->GetLiveBitmap();
      if (bitmap != nullptr) {
        UpdateObjectReferencesVisitor visitor(this);
        bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
                                 reinterpret_cast<uintptr_t>(space->End()),
                                 visitor);
      }
    }
  }
  CHECK(!kMovingClasses)
      << "Didn't update large object classes since they are assumed to not move.";
  // Update the system weaks, these should already have been swept.
  runtime->SweepSystemWeaks(&MarkedForwardingAddressCallback, this);
  // Update the objects in the bump pointer space last, these objects don't have a bitmap.
  UpdateObjectReferencesVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
  // Update the reference processor cleared list.
  heap_->GetReferenceProcessor()->UpdateRoots(&MarkedForwardingAddressCallback, this);
}

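// Perform the actual compaction, then shrink the space to the new top of the
// compacted region and zero the reclaimed tail so it can be reused for fresh
// allocation.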
void MarkCompact::Compact() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  CalculateObjectForwardingAddresses();
  UpdateReferences();
  MoveObjects();
  // Record how much the compaction freed, both against the space and the collector.
  int64_t objects_freed = space_->GetObjectsAllocated() - live_objects_in_space_;
  int64_t bytes_freed = reinterpret_cast<int64_t>(space_->End()) -
                        reinterpret_cast<int64_t>(bump_pointer_);
  t.NewTiming("RecordFree");
  space_->RecordFree(objects_freed, bytes_freed);
  RecordFree(ObjectBytePair(objects_freed, bytes_freed));
  space_->SetEnd(bump_pointer_);
  // Need to zero out the memory we freed. TODO: Use madvise for pages.
  memset(bump_pointer_, 0, bytes_freed);
}

// Marks all objects in the root set.
void MarkCompact::MarkRoots() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->VisitRoots(this);
}

mirror::Object* MarkCompact::MarkedForwardingAddressCallback(mirror::Object* obj, void* arg) {
  return reinterpret_cast<MarkCompact*>(arg)->GetMarkedForwardAddress(obj);
}

inline void MarkCompact::UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference) {
  mirror::Object* obj = reference->AsMirrorPtr();
  if (obj != nullptr) {
    mirror::Object* new_obj = GetMarkedForwardAddress(obj);
    if (obj != new_obj) {
      DCHECK(new_obj != nullptr);
      reference->Assign(new_obj);
    }
  }
}

void MarkCompact::UpdateHeapReferenceCallback(mirror::HeapReference<mirror::Object>* reference,
                                              void* arg) {
  reinterpret_cast<MarkCompact*>(arg)->UpdateHeapReference(reference);
}

class UpdateReferenceVisitor {
 public:
  explicit UpdateReferenceVisitor(MarkCompact* collector) : collector_(collector) {
  }

  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const
      ALWAYS_INLINE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(obj->GetFieldObjectReferenceAddr<kVerifyNone>(offset));
  }

  void operator()(mirror::Class* /*klass*/, mirror::Reference* ref) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(
        ref->GetFieldObjectReferenceAddr<kVerifyNone>(mirror::Reference::ReferentOffset()));
  }

 private:
  MarkCompact* const collector_;
};

void MarkCompact::UpdateObjectReferences(mirror::Object* obj) {
  UpdateReferenceVisitor visitor(this);
  obj->VisitReferences<kMovingClasses>(visitor, visitor);
}

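// Return the post-compaction address of a marked object: objects in the
// compacted space forward through their lock word, while marked objects
// anywhere else do not move and map to themselves.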
inline mirror::Object* MarkCompact::GetMarkedForwardAddress(mirror::Object* obj) const {
  DCHECK(obj != nullptr);
  if (objects_before_forwarding_->HasAddress(obj)) {
    DCHECK(objects_before_forwarding_->Test(obj));
    mirror::Object* ret =
        reinterpret_cast<mirror::Object*>(obj->GetLockWord(false).ForwardingAddress());
    DCHECK(ret != nullptr);
    return ret;
  }
  DCHECK(!space_->HasAddress(obj));
  DCHECK(IsMarked(obj));
  return obj;
}

inline bool MarkCompact::IsMarked(const Object* object) const {
  if (immune_region_.ContainsObject(object)) {
    return true;
  }
  if (objects_before_forwarding_->HasAddress(object)) {
    return objects_before_forwarding_->Test(object);
  }
  return mark_bitmap_->Test(object);
}

mirror::Object* MarkCompact::IsMarkedCallback(mirror::Object* object, void* arg) {
  return reinterpret_cast<MarkCompact*>(arg)->IsMarked(object) ? object : nullptr;
}

bool MarkCompact::HeapReferenceMarkedCallback(mirror::HeapReference<mirror::Object>* ref_ptr,
                                              void* arg) {
  // Side effect free since we call this before ever moving objects.
  return reinterpret_cast<MarkCompact*>(arg)->IsMarked(ref_ptr->AsMirrorPtr());
}

void MarkCompact::SweepSystemWeaks() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  Runtime::Current()->SweepSystemWeaks(IsMarkedCallback, this);
}

bool MarkCompact::ShouldSweepSpace(space::ContinuousSpace* space) const {
  return space != space_ && !immune_region_.ContainsSpace(space);
}

class MoveObjectVisitor {
 public:
  explicit MoveObjectVisitor(MarkCompact* collector) : collector_(collector) {
  }
  void operator()(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->MoveObject(obj, obj->SizeOf());
  }

 private:
  MarkCompact* const collector_;
};

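// Copy |obj| to its forwarding address. Since forwarding addresses were
// assigned in increasing address order, the destination never lies above the
// source, so a per-object memmove is safe even when the source and
// destination ranges overlap. Saved lock words are consumed in FIFO order,
// matching the order in which ForwardObject() saved them, because both passes
// visit the bitmap in the same address order.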
void MarkCompact::MoveObject(mirror::Object* obj, size_t len) {
  // Look at the forwarding address stored in the lock word to know where to copy.
  DCHECK(space_->HasAddress(obj)) << obj;
  uintptr_t dest_addr = obj->GetLockWord(false).ForwardingAddress();
  mirror::Object* dest_obj = reinterpret_cast<mirror::Object*>(dest_addr);
  DCHECK(space_->HasAddress(dest_obj)) << dest_obj;
  // Use memmove since there may be overlap.
  memmove(reinterpret_cast<void*>(dest_addr), reinterpret_cast<const void*>(obj), len);
  // Restore the saved lock word if needed.
  LockWord lock_word = LockWord::Default();
  if (UNLIKELY(objects_with_lockword_->Test(obj))) {
    lock_word = lock_words_to_restore_.front();
    lock_words_to_restore_.pop_front();
  }
  dest_obj->SetLockWord(lock_word, false);
}

void MarkCompact::MoveObjects() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  // Move the objects in the before forwarding bitmap.
  MoveObjectVisitor visitor(this);
  objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
                                               reinterpret_cast<uintptr_t>(space_->End()),
                                               visitor);
  CHECK(lock_words_to_restore_.empty());
}

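// Sweep dead objects out of every alloc space except the one being compacted
// (which is reclaimed wholesale in Compact()) and the immune spaces, then
// sweep the large object space.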
void MarkCompact::Sweep(bool swap_bitmaps) {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  DCHECK(mark_stack_->IsEmpty());
  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
    if (space->IsContinuousMemMapAllocSpace()) {
      space::ContinuousMemMapAllocSpace* alloc_space = space->AsContinuousMemMapAllocSpace();
      if (!ShouldSweepSpace(alloc_space)) {
        continue;
      }
      TimingLogger::ScopedTiming t2(
          alloc_space->IsZygoteSpace() ? "SweepZygoteSpace" : "SweepAllocSpace", GetTimings());
      RecordFree(alloc_space->Sweep(swap_bitmaps));
    }
  }
  SweepLargeObjects(swap_bitmaps);
}

void MarkCompact::SweepLargeObjects(bool swap_bitmaps) {
  space::LargeObjectSpace* los = heap_->GetLargeObjectsSpace();
  if (los != nullptr) {
    TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
    RecordFreeLOS(los->Sweep(swap_bitmaps));
  }
}


// Process the "referent" field in a java.lang.ref.Reference. If the referent has not yet been
// marked, put it on the appropriate list in the heap for later processing.
void MarkCompact::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) {
  heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference,
                                                         &HeapReferenceMarkedCallback, this);
}

class MarkCompactMarkObjectVisitor {
 public:
  explicit MarkCompactMarkObjectVisitor(MarkCompact* collector) : collector_(collector) {
  }

  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const ALWAYS_INLINE
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    // Object was already verified when we scanned it.
    collector_->MarkObject(obj->GetFieldObject<mirror::Object, kVerifyNone>(offset));
  }

  void operator()(mirror::Class* klass, mirror::Reference* ref) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
    collector_->DelayReferenceReferent(klass, ref);
  }

 private:
  MarkCompact* const collector_;
};

// Visit all of the references of an object and update.
void MarkCompact::ScanObject(Object* obj) {
  MarkCompactMarkObjectVisitor visitor(this);
  obj->VisitReferences<kMovingClasses>(visitor, visitor);
}

// Scan anything that's on the mark stack.
void MarkCompact::ProcessMarkStack() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  while (!mark_stack_->IsEmpty()) {
    Object* obj = mark_stack_->PopBack();
    DCHECK(obj != nullptr);
    ScanObject(obj);
  }
}

void MarkCompact::SetSpace(space::BumpPointerSpace* space) {
  DCHECK(space != nullptr);
  space_ = space;
}
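
// A minimal usage sketch (illustrative only; the real call site lives in the
// heap code, not in this file): the owning heap is expected to point the
// collector at the bump pointer space before each collection, e.g.
//   mark_compact->SetSpace(bump_pointer_space);
//   mark_compact->Run(kGcCauseCollectorTransition, /*clear_soft_references*/ false);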

void MarkCompact::FinishPhase() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  space_ = nullptr;
  CHECK(mark_stack_->IsEmpty());
  mark_stack_->Reset();
  // Clear all of the spaces' mark bitmaps.
  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
  heap_->ClearMarkedObjects();
  // Release our bitmaps.
  objects_before_forwarding_.reset(nullptr);
  objects_with_lockword_.reset(nullptr);
}

void MarkCompact::RevokeAllThreadLocalBuffers() {
  TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
  GetHeap()->RevokeAllThreadLocalBuffers();
}

}  // namespace collector
}  // namespace gc
}  // namespace art