1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 #ifndef V8_HEAP_MARK_COMPACT_INL_H_
6 #define V8_HEAP_MARK_COMPACT_INL_H_
7
8 #include "src/heap/mark-compact.h"
9 #include "src/heap/remembered-set.h"
10 #include "src/isolate.h"
11
12 namespace v8 {
13 namespace internal {
14
PushBlack(HeapObject * obj)15 void MarkCompactCollector::PushBlack(HeapObject* obj) {
16 DCHECK(ObjectMarking::IsBlack(obj));
17 if (!marking_deque()->Push(obj)) {
18 ObjectMarking::BlackToGrey(obj);
19 }
20 }
21
22
UnshiftBlack(HeapObject * obj)23 void MarkCompactCollector::UnshiftBlack(HeapObject* obj) {
24 DCHECK(ObjectMarking::IsBlack(obj));
25 if (!marking_deque()->Unshift(obj)) {
26 ObjectMarking::BlackToGrey(obj);
27 }
28 }
29
MarkObject(HeapObject * obj)30 void MarkCompactCollector::MarkObject(HeapObject* obj) {
31 if (ObjectMarking::IsWhite(obj)) {
32 ObjectMarking::WhiteToBlack(obj);
33 PushBlack(obj);
34 }
35 }
36
RecordSlot(HeapObject * object,Object ** slot,Object * target)37 void MarkCompactCollector::RecordSlot(HeapObject* object, Object** slot,
38 Object* target) {
39 Page* target_page = Page::FromAddress(reinterpret_cast<Address>(target));
40 Page* source_page = Page::FromAddress(reinterpret_cast<Address>(object));
41 if (target_page->IsEvacuationCandidate() &&
42 !ShouldSkipEvacuationSlotRecording(object)) {
43 DCHECK(ObjectMarking::IsBlackOrGrey(object));
44 RememberedSet<OLD_TO_OLD>::Insert(source_page,
45 reinterpret_cast<Address>(slot));
46 }
47 }
48
49
AddCandidate(SharedFunctionInfo * shared_info)50 void CodeFlusher::AddCandidate(SharedFunctionInfo* shared_info) {
51 if (GetNextCandidate(shared_info) == nullptr) {
52 SetNextCandidate(shared_info, shared_function_info_candidates_head_);
53 shared_function_info_candidates_head_ = shared_info;
54 }
55 }
56
57
AddCandidate(JSFunction * function)58 void CodeFlusher::AddCandidate(JSFunction* function) {
59 DCHECK(function->code() == function->shared()->code());
60 if (function->next_function_link()->IsUndefined(isolate_)) {
61 SetNextCandidate(function, jsfunction_candidates_head_);
62 jsfunction_candidates_head_ = function;
63 }
64 }
65
66
GetNextCandidateSlot(JSFunction * candidate)67 JSFunction** CodeFlusher::GetNextCandidateSlot(JSFunction* candidate) {
68 return reinterpret_cast<JSFunction**>(
69 HeapObject::RawField(candidate, JSFunction::kNextFunctionLinkOffset));
70 }
71
72
GetNextCandidate(JSFunction * candidate)73 JSFunction* CodeFlusher::GetNextCandidate(JSFunction* candidate) {
74 Object* next_candidate = candidate->next_function_link();
75 return reinterpret_cast<JSFunction*>(next_candidate);
76 }
77
78
// Stores |next_candidate| in |candidate|'s next_function_link field, linking
// the two in the candidate list. The weak write barrier mode suggests the
// list link is treated as a weak reference — confirm against GC handling of
// next_function_link.
void CodeFlusher::SetNextCandidate(JSFunction* candidate,
                                   JSFunction* next_candidate) {
  candidate->set_next_function_link(next_candidate, UPDATE_WEAK_WRITE_BARRIER);
}
83
84
// Resets the candidate's next_function_link to the undefined value, removing
// it from the candidate list. The caller supplies |undefined| so no heap
// lookup is needed here; writing the immortal undefined value is done with
// the write barrier skipped.
void CodeFlusher::ClearNextCandidate(JSFunction* candidate, Object* undefined) {
  DCHECK(undefined->IsUndefined(candidate->GetIsolate()));
  candidate->set_next_function_link(undefined, SKIP_WRITE_BARRIER);
}
89
90
GetNextCandidate(SharedFunctionInfo * candidate)91 SharedFunctionInfo* CodeFlusher::GetNextCandidate(
92 SharedFunctionInfo* candidate) {
93 Object* next_candidate = candidate->code()->gc_metadata();
94 return reinterpret_cast<SharedFunctionInfo*>(next_candidate);
95 }
96
97
// Links |next_candidate| after |candidate| by storing it in the gc_metadata
// field of the candidate's code object.
void CodeFlusher::SetNextCandidate(SharedFunctionInfo* candidate,
                                   SharedFunctionInfo* next_candidate) {
  candidate->code()->set_gc_metadata(next_candidate);
}
102
103
ClearNextCandidate(SharedFunctionInfo * candidate)104 void CodeFlusher::ClearNextCandidate(SharedFunctionInfo* candidate) {
105 candidate->code()->set_gc_metadata(NULL, SKIP_WRITE_BARRIER);
106 }
107
108
// Returns the next live object on the current page according to the
// iteration mode T (black objects, grey objects, or all live objects), or
// nullptr when the page's marking bitmap is exhausted. An object's start is
// marked by its first mark bit; the second, consecutive bit distinguishes
// black (set) from grey (clear) objects.
template <LiveObjectIterationMode T>
HeapObject* LiveObjectIterator<T>::Next() {
  while (!it_.Done()) {
    HeapObject* object = nullptr;
    // Consume the set mark bits of the current bitmap cell.
    while (current_cell_ != 0) {
      // The lowest set bit is the start of the next candidate object.
      uint32_t trailing_zeros = base::bits::CountTrailingZeros32(current_cell_);
      Address addr = cell_base_ + trailing_zeros * kPointerSize;

      // Clear the first bit of the found object.
      current_cell_ &= ~(1u << trailing_zeros);

      // Locate the second mark bit of the object, which may live in the next
      // bitmap cell if the first bit is the last bit of this cell.
      uint32_t second_bit_index = 0;
      if (trailing_zeros < Bitmap::kBitIndexMask) {
        second_bit_index = 1u << (trailing_zeros + 1);
      } else {
        second_bit_index = 0x1;
        // The overlapping case; there has to exist a cell after the current
        // cell.
        // However, if there is a black area at the end of the page, and the
        // last word is a one word filler, we are not allowed to advance. In
        // that case we can return immediately.
        if (it_.Done()) {
          DCHECK(HeapObject::FromAddress(addr)->map() ==
                 HeapObject::FromAddress(addr)
                     ->GetHeap()
                     ->one_pointer_filler_map());
          return nullptr;
        }
        it_.Advance();
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }

      Map* map = nullptr;
      if (current_cell_ & second_bit_index) {
        // We found a black object. If the black object is within a black area,
        // make sure that we skip all set bits in the black area until the
        // object ends.
        HeapObject* black_object = HeapObject::FromAddress(addr);
        // Read the map word without a barrier; it may be written concurrently.
        map = base::NoBarrierAtomicValue<Map*>::FromAddress(addr)->Value();
        Address end = addr + black_object->SizeFromMap(map) - kPointerSize;
        // One word filler objects do not borrow the second mark bit. We have
        // to jump over the advancing and clearing part.
        // Note that we know that we are at a one word filler when
        // object_start + object_size - kPointerSize == object_start.
        if (addr != end) {
          DCHECK_EQ(chunk_, MemoryChunk::FromAddress(end));
          uint32_t end_mark_bit_index = chunk_->AddressToMarkbitIndex(end);
          unsigned int end_cell_index =
              end_mark_bit_index >> Bitmap::kBitsPerCellLog2;
          MarkBit::CellType end_index_mask =
              1u << Bitmap::IndexInCell(end_mark_bit_index);
          // Jump the cell iterator forward to the cell containing the end of
          // the black area, reloading the cell contents on success.
          if (it_.Advance(end_cell_index)) {
            cell_base_ = it_.CurrentCellBase();
            current_cell_ = *it_.CurrentCell();
          }

          // Clear all bits in current_cell, including the end index.
          current_cell_ &= ~(end_index_mask + end_index_mask - 1);
        }

        if (T == kBlackObjects || T == kAllLiveObjects) {
          object = black_object;
        }
      } else if ((T == kGreyObjects || T == kAllLiveObjects)) {
        // Only the first mark bit is set: a grey object.
        map = base::NoBarrierAtomicValue<Map*>::FromAddress(addr)->Value();
        object = HeapObject::FromAddress(addr);
      }

      // We found a live object.
      if (object != nullptr) {
        if (map == heap()->one_pointer_filler_map()) {
          // Black areas together with slack tracking may result in black one
          // word filler objects. We filter these objects out in the iterator.
          object = nullptr;
        } else {
          break;
        }
      }
    }

    // Current cell exhausted; move to the next bitmap cell, if any.
    if (current_cell_ == 0) {
      if (!it_.Done()) {
        it_.Advance();
        cell_base_ = it_.CurrentCellBase();
        current_cell_ = *it_.CurrentCell();
      }
    }
    if (object != nullptr) return object;
  }
  return nullptr;
}
201
202 } // namespace internal
203 } // namespace v8
204
205 #endif // V8_HEAP_MARK_COMPACT_INL_H_
206