// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_HEAP_HEAP_INL_H_
#define V8_HEAP_HEAP_INL_H_

#include <cmath>

#include "src/base/platform/platform.h"
#include "src/counters.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/mark-compact.h"
#include "src/heap/spaces-inl.h"
#include "src/heap/store-buffer.h"
#include "src/heap/store-buffer-inl.h"
#include "src/isolate.h"
#include "src/list-inl.h"
#include "src/log.h"
#include "src/msan.h"
#include "src/objects-inl.h"
#include "src/type-feedback-vector-inl.h"

namespace v8 {
namespace internal {

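// Inserts a promoted object into the promotion queue. Entries are written
// downwards from rear_ toward limit_; once an emergency stack exists, or when
// the next entry would cross limit_ (in which case the queue head is
// relocated first), the entry goes onto the emergency stack instead.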
void PromotionQueue::insert(HeapObject* target, int size) {
  if (emergency_stack_ != NULL) {
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  if ((rear_ - 2) < limit_) {
    RelocateQueueHead();
    emergency_stack_->Add(Entry(target, size));
    return;
  }

  *(--rear_) = reinterpret_cast<intptr_t>(target);
  *(--rear_) = size;
// Assert no overflow into live objects.
#ifdef DEBUG
  SemiSpace::AssertValidRange(target->GetIsolate()->heap()->new_space()->top(),
                              reinterpret_cast<Address>(rear_));
#endif
}


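// The accessor macros below expand the corresponding root lists into typed
// getters (and, further down, checked setters) that read and write directly
// into the roots_ array.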
#define ROOT_ACCESSOR(type, name, camel_name) \
  type* Heap::name() { return type::cast(roots_[k##camel_name##RootIndex]); }
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR

#define STRUCT_MAP_ACCESSOR(NAME, Name, name) \
  Map* Heap::name##_map() { return Map::cast(roots_[k##Name##MapRootIndex]); }
STRUCT_LIST(STRUCT_MAP_ACCESSOR)
#undef STRUCT_MAP_ACCESSOR

#define STRING_ACCESSOR(name, str) \
  String* Heap::name() { return String::cast(roots_[k##name##RootIndex]); }
INTERNALIZED_STRING_LIST(STRING_ACCESSOR)
#undef STRING_ACCESSOR

#define SYMBOL_ACCESSOR(name) \
  Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
PRIVATE_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define SYMBOL_ACCESSOR(name, description) \
  Symbol* Heap::name() { return Symbol::cast(roots_[k##name##RootIndex]); }
PUBLIC_SYMBOL_LIST(SYMBOL_ACCESSOR)
WELL_KNOWN_SYMBOL_LIST(SYMBOL_ACCESSOR)
#undef SYMBOL_ACCESSOR

#define ROOT_ACCESSOR(type, name, camel_name)                                 \
  void Heap::set_##name(type* value) {                                        \
    /* The deserializer makes use of the fact that these common roots are */  \
    /* never in new space and never on a page that is being compacted.    */  \
    DCHECK(!deserialization_complete() ||                                     \
           RootCanBeWrittenAfterInitialization(k##camel_name##RootIndex));    \
    DCHECK(k##camel_name##RootIndex >= kOldSpaceRoots || !InNewSpace(value)); \
    roots_[k##camel_name##RootIndex] = value;                                 \
  }
ROOT_LIST(ROOT_ACCESSOR)
#undef ROOT_ACCESSOR


template <>
bool inline Heap::IsOneByte(Vector<const char> str, int chars) {
  // TODO(dcarney): incorporate Latin-1 check when Latin-1 is supported?
  return chars == str.length();
}


template <>
bool inline Heap::IsOneByte(String* str, int chars) {
  return str->IsOneByteRepresentation();
}


AllocationResult Heap::AllocateInternalizedStringFromUtf8(
    Vector<const char> str, int chars, uint32_t hash_field) {
  if (IsOneByte(str, chars)) {
    return AllocateOneByteInternalizedString(Vector<const uint8_t>::cast(str),
                                             hash_field);
  }
  return AllocateInternalizedStringImpl<false>(str, chars, hash_field);
}


template <typename T>
AllocationResult Heap::AllocateInternalizedStringImpl(T t, int chars,
                                                      uint32_t hash_field) {
  if (IsOneByte(t, chars)) {
    return AllocateInternalizedStringImpl<true>(t, chars, hash_field);
  }
  return AllocateInternalizedStringImpl<false>(t, chars, hash_field);
}


AllocationResult Heap::AllocateOneByteInternalizedString(
    Vector<const uint8_t> str, uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = one_byte_internalized_string_map();
  int size = SeqOneByteString::SizeFor(str.length());

  // Allocate string.
  HeapObject* result = nullptr;
  {
    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  // String maps are all immortal immovable objects.
  result->set_map_no_write_barrier(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqOneByteString::kHeaderSize, str.start(),
          str.length());

  return answer;
}


AllocationResult Heap::AllocateTwoByteInternalizedString(Vector<const uc16> str,
                                                         uint32_t hash_field) {
  CHECK_GE(String::kMaxLength, str.length());
  // Compute map and object size.
  Map* map = internalized_string_map();
  int size = SeqTwoByteString::SizeFor(str.length());

  // Allocate string.
  HeapObject* result = nullptr;
  {
    AllocationResult allocation = AllocateRaw(size, OLD_SPACE);
    if (!allocation.To(&result)) return allocation;
  }

  result->set_map(map);
  // Set length and hash fields of the allocated string.
  String* answer = String::cast(result);
  answer->set_length(str.length());
  answer->set_hash_field(hash_field);

  DCHECK_EQ(size, answer->Size());

  // Fill in the characters.
  MemCopy(answer->address() + SeqTwoByteString::kHeaderSize, str.start(),
          str.length() * kUC16Size);

  return answer;
}

AllocationResult Heap::CopyFixedArray(FixedArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedArrayWithMap(src, src->map());
}


AllocationResult Heap::CopyFixedDoubleArray(FixedDoubleArray* src) {
  if (src->length() == 0) return src;
  return CopyFixedDoubleArrayWithMap(src, src->map());
}


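// AllocateRaw is the central allocation routine: it dispatches the request to
// the requested space, redirecting objects larger than
// Page::kMaxRegularHeapObjectSize to the large-object space, reports
// successful allocations via OnAllocationEvent, and records old-generation
// exhaustion on failure.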
AllocationResult Heap::AllocateRaw(int size_in_bytes, AllocationSpace space,
                                   AllocationAlignment alignment) {
  DCHECK(AllowHandleAllocation::IsAllowed());
  DCHECK(AllowHeapAllocation::IsAllowed());
  DCHECK(gc_state_ == NOT_IN_GC);
#ifdef DEBUG
  if (FLAG_gc_interval >= 0 && !always_allocate() &&
      Heap::allocation_timeout_-- <= 0) {
    return AllocationResult::Retry(space);
  }
  isolate_->counters()->objs_since_last_full()->Increment();
  isolate_->counters()->objs_since_last_young()->Increment();
#endif

  bool large_object = size_in_bytes > Page::kMaxRegularHeapObjectSize;
  HeapObject* object = nullptr;
  AllocationResult allocation;
  if (NEW_SPACE == space) {
    if (large_object) {
      space = LO_SPACE;
    } else {
      allocation = new_space_.AllocateRaw(size_in_bytes, alignment);
      if (allocation.To(&object)) {
        OnAllocationEvent(object, size_in_bytes);
      }
      return allocation;
    }
  }

  // Here we only allocate in the old generation.
  if (OLD_SPACE == space) {
    if (large_object) {
      allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
    } else {
      allocation = old_space_->AllocateRaw(size_in_bytes, alignment);
    }
  } else if (CODE_SPACE == space) {
    if (size_in_bytes <= code_space()->AreaSize()) {
      allocation = code_space_->AllocateRawUnaligned(size_in_bytes);
    } else {
      allocation = lo_space_->AllocateRaw(size_in_bytes, EXECUTABLE);
    }
  } else if (LO_SPACE == space) {
    DCHECK(large_object);
    allocation = lo_space_->AllocateRaw(size_in_bytes, NOT_EXECUTABLE);
  } else if (MAP_SPACE == space) {
    allocation = map_space_->AllocateRawUnaligned(size_in_bytes);
  } else {
    // NEW_SPACE is not allowed here.
    UNREACHABLE();
  }
  if (allocation.To(&object)) {
    OnAllocationEvent(object, size_in_bytes);
  } else {
    old_gen_exhausted_ = true;
  }
  return allocation;
}


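// Notifies the heap profiler of a new allocation and, when
// --verify-predictable or --trace-allocation-stack-interval is active,
// advances the allocation counter and hash used for predictable-mode
// verification and periodic stack dumps.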
void Heap::OnAllocationEvent(HeapObject* object, int size_in_bytes) {
  HeapProfiler* profiler = isolate_->heap_profiler();
  if (profiler->is_tracking_allocations()) {
    profiler->AllocationEvent(object->address(), size_in_bytes);
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;
    // Advance synthetic time by making a time request.
    MonotonicallyIncreasingTimeInMs();

    UpdateAllocationsHash(object);
    UpdateAllocationsHash(size_in_bytes);

    if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) {
      PrintAlloctionsHash();
    }
  }

  if (FLAG_trace_allocation_stack_interval > 0) {
    if (!FLAG_verify_predictable) ++allocations_count_;
    if (allocations_count_ % FLAG_trace_allocation_stack_interval == 0) {
      isolate()->PrintStack(stdout, Isolate::kPrintStackConcise);
    }
  }
}


void Heap::OnMoveEvent(HeapObject* target, HeapObject* source,
                       int size_in_bytes) {
  HeapProfiler* heap_profiler = isolate_->heap_profiler();
  if (heap_profiler->is_tracking_object_moves()) {
    heap_profiler->ObjectMoveEvent(source->address(), target->address(),
                                   size_in_bytes);
  }
  if (target->IsSharedFunctionInfo()) {
    LOG_CODE_EVENT(isolate_, SharedFunctionInfoMoveEvent(source->address(),
                                                         target->address()));
  }

  if (FLAG_verify_predictable) {
    ++allocations_count_;
    // Advance synthetic time by making a time request.
    MonotonicallyIncreasingTimeInMs();

    UpdateAllocationsHash(source);
    UpdateAllocationsHash(target);
    UpdateAllocationsHash(size_in_bytes);

    if (allocations_count_ % FLAG_dump_allocations_digest_at_alloc == 0) {
      PrintAlloctionsHash();
    }
  }
}


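// Folds an object's identity into the running allocations hash used by
// --verify-predictable: the 32-bit value combines the object's offset within
// its page with the owning space's id shifted above kPageSizeBits.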
void Heap::UpdateAllocationsHash(HeapObject* object) {
  Address object_address = object->address();
  MemoryChunk* memory_chunk = MemoryChunk::FromAddress(object_address);
  AllocationSpace allocation_space = memory_chunk->owner()->identity();

  STATIC_ASSERT(kSpaceTagSize + kPageSizeBits <= 32);
  uint32_t value =
      static_cast<uint32_t>(object_address - memory_chunk->address()) |
      (static_cast<uint32_t>(allocation_space) << kPageSizeBits);

  UpdateAllocationsHash(value);
}


void Heap::UpdateAllocationsHash(uint32_t value) {
  uint16_t c1 = static_cast<uint16_t>(value);
  uint16_t c2 = static_cast<uint16_t>(value >> 16);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c1);
  raw_allocations_hash_ =
      StringHasher::AddCharacterCore(raw_allocations_hash_, c2);
}


void Heap::RegisterExternalString(String* string) {
  external_string_table_.AddString(string);
}


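// Called when an external string dies: reads the resource pointer straight
// out of the string's kResourceOffset slot, disposes of the external resource
// if it is still present, and clears the slot so disposal happens only once.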
void Heap::FinalizeExternalString(String* string) {
  DCHECK(string->IsExternalString());
  v8::String::ExternalStringResourceBase** resource_addr =
      reinterpret_cast<v8::String::ExternalStringResourceBase**>(
          reinterpret_cast<byte*>(string) + ExternalString::kResourceOffset -
          kHeapObjectTag);

  // Dispose of the C++ object if it has not already been disposed.
  if (*resource_addr != NULL) {
    (*resource_addr)->Dispose();
    *resource_addr = NULL;
  }
}


bool Heap::InNewSpace(Object* object) {
  bool result = new_space_.Contains(object);
  DCHECK(!result ||                 // Either not in new space
         gc_state_ != NOT_IN_GC ||  // ... or in the middle of GC
         InToSpace(object));        // ... or in to-space (where we allocate).
  return result;
}


bool Heap::InNewSpace(Address address) { return new_space_.Contains(address); }


bool Heap::InFromSpace(Object* object) {
  return new_space_.FromSpaceContains(object);
}


bool Heap::InToSpace(Object* object) {
  return new_space_.ToSpaceContains(object);
}


bool Heap::InOldSpace(Address address) { return old_space_->Contains(address); }


bool Heap::InOldSpace(Object* object) {
  return InOldSpace(reinterpret_cast<Address>(object));
}


bool Heap::OldGenerationAllocationLimitReached() {
  if (!incremental_marking()->IsStopped()) return false;
  return OldGenerationSpaceAvailable() < 0;
}


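// An object should be promoted if it lies below the new-space age mark, i.e.
// its page is flagged NEW_SPACE_BELOW_AGE_MARK and, if the page contains the
// age mark, the object's address is below it.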
bool Heap::ShouldBePromoted(Address old_address, int object_size) {
  NewSpacePage* page = NewSpacePage::FromAddress(old_address);
  Address age_mark = new_space_.age_mark();
  return page->IsFlagSet(MemoryChunk::NEW_SPACE_BELOW_AGE_MARK) &&
         (!page->ContainsLimit(age_mark) || old_address < age_mark);
}


void Heap::RecordWrite(Address address, int offset) {
  if (!InNewSpace(address)) store_buffer_.Mark(address + offset);
}


void Heap::RecordWrites(Address address, int start, int len) {
  if (!InNewSpace(address)) {
    for (int i = 0; i < len; i++) {
      store_buffer_.Mark(address + start + i * kPointerSize);
    }
  }
}


bool Heap::AllowedToBeMigrated(HeapObject* obj, AllocationSpace dst) {
  // Object migration is governed by the following rules:
  //
  // 1) Objects in new-space can be migrated to the old space
  //    that matches their target space or they stay in new-space.
  // 2) Objects in old-space stay in the same space when migrating.
  // 3) Fillers (two or more words) can migrate due to left-trimming of
  //    fixed arrays in new-space or old space.
  // 4) Fillers (one word) can never migrate, they are skipped by
  //    incremental marking explicitly to prevent invalid pattern.
  //
  // Since this function is used for debugging only, we do not place
  // asserts here, but check everything explicitly.
  if (obj->map() == one_pointer_filler_map()) return false;
  InstanceType type = obj->map()->instance_type();
  MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
  AllocationSpace src = chunk->owner()->identity();
  switch (src) {
    case NEW_SPACE:
      return dst == src || dst == OLD_SPACE;
    case OLD_SPACE:
      return dst == src &&
             (dst == OLD_SPACE || obj->IsFiller() || obj->IsExternalString());
    case CODE_SPACE:
      return dst == src && type == CODE_TYPE;
    case MAP_SPACE:
    case LO_SPACE:
      return false;
  }
  UNREACHABLE();
  return false;
}


void Heap::CopyBlock(Address dst, Address src, int byte_size) {
  CopyWords(reinterpret_cast<Object**>(dst), reinterpret_cast<Object**>(src),
            static_cast<size_t>(byte_size / kPointerSize));
}


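// Moves byte_size bytes from src to dst. When dst does not overlap the source
// range from above (dst < src or dst >= src + byte_size) a simple forward
// word-by-word copy is safe; otherwise MemMove handles the overlap.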
void Heap::MoveBlock(Address dst, Address src, int byte_size) {
  DCHECK(IsAligned(byte_size, kPointerSize));

  int size_in_words = byte_size / kPointerSize;

  if ((dst < src) || (dst >= (src + byte_size))) {
    Object** src_slot = reinterpret_cast<Object**>(src);
    Object** dst_slot = reinterpret_cast<Object**>(dst);
    Object** end_slot = src_slot + size_in_words;

    while (src_slot != end_slot) {
      *dst_slot++ = *src_slot++;
    }
  } else {
    MemMove(dst, src, static_cast<size_t>(byte_size));
  }
}


AllocationMemento* Heap::FindAllocationMemento(HeapObject* object) {
  // Check if there is potentially a memento behind the object. If
  // the last word of the memento is on another page we return
  // immediately.
  Address object_address = object->address();
  Address memento_address = object_address + object->Size();
  Address last_memento_word_address = memento_address + kPointerSize;
  if (!NewSpacePage::OnSamePage(object_address, last_memento_word_address)) {
    return NULL;
  }

  HeapObject* candidate = HeapObject::FromAddress(memento_address);
  Map* candidate_map = candidate->map();
  // This fast check may peek at an uninitialized word. However, the slow check
  // below (memento_address == top) ensures that this is safe. Mark the word as
  // initialized to silence MemorySanitizer warnings.
  MSAN_MEMORY_IS_INITIALIZED(&candidate_map, sizeof(candidate_map));
  if (candidate_map != allocation_memento_map()) return NULL;

  // Either the object is the last object in the new space, or there is another
  // object of at least word size (the header map word) following it, so it
  // suffices to compare ptr and top here. Note that technically we do not have
  // to compare with the current top pointer of the from space page during GC,
  // since we always install filler objects above the top pointer of a from
  // space page when performing a garbage collection. However, always performing
  // the test makes it possible to have a single, unified version of
  // FindAllocationMemento that is used both by the GC and the mutator.
  Address top = NewSpaceTop();
  DCHECK(memento_address == top ||
         memento_address + HeapObject::kHeaderSize <= top ||
         !NewSpacePage::OnSamePage(memento_address, top - 1));
  if (memento_address == top) return NULL;

  AllocationMemento* memento = AllocationMemento::cast(candidate);
  if (!memento->IsValid()) return NULL;
  return memento;
}


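// Records pretenuring feedback for an object that carries an allocation
// memento. Feedback either goes directly into the global pretenuring storage
// (bumping the memento-found count on the AllocationSite) or into a local
// HashMap cache whose counts are merged into the site later.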
void Heap::UpdateAllocationSite(HeapObject* object,
                                HashMap* pretenuring_feedback) {
  DCHECK(InFromSpace(object));
  if (!FLAG_allocation_site_pretenuring ||
      !AllocationSite::CanTrack(object->map()->instance_type()))
    return;
  AllocationMemento* memento = FindAllocationMemento(object);
  if (memento == nullptr) return;

  AllocationSite* key = memento->GetAllocationSite();
  DCHECK(!key->IsZombie());

  if (pretenuring_feedback == global_pretenuring_feedback_) {
    // For inserting in the global pretenuring storage we need to first
    // increment the memento found count on the allocation site.
    if (key->IncrementMementoFoundCount()) {
      global_pretenuring_feedback_->LookupOrInsert(
          key, static_cast<uint32_t>(bit_cast<uintptr_t>(key)));
    }
  } else {
    // Any other pretenuring storage than the global one is used as a cache,
    // where the count is later merged into the allocation site.
    HashMap::Entry* e = pretenuring_feedback->LookupOrInsert(
        key, static_cast<uint32_t>(bit_cast<uintptr_t>(key)));
    DCHECK(e != nullptr);
    (*bit_cast<intptr_t*>(&e->value))++;
  }
}


void Heap::RemoveAllocationSitePretenuringFeedback(AllocationSite* site) {
  global_pretenuring_feedback_->Remove(
      site, static_cast<uint32_t>(bit_cast<uintptr_t>(site)));
}


bool Heap::CollectGarbage(AllocationSpace space, const char* gc_reason,
                          const v8::GCCallbackFlags callbackFlags) {
  const char* collector_reason = NULL;
  GarbageCollector collector = SelectGarbageCollector(space, &collector_reason);
  return CollectGarbage(collector, gc_reason, collector_reason, callbackFlags);
}


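// Recovers the owning Isolate from a Heap* without a stored back pointer: the
// offset of the heap within Isolate is derived from a dummy Isolate* (16) and
// subtracted from this.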
Isolate* Heap::isolate() {
  return reinterpret_cast<Isolate*>(
      reinterpret_cast<intptr_t>(this) -
      reinterpret_cast<size_t>(reinterpret_cast<Isolate*>(16)->heap()) + 16);
}


void Heap::ExternalStringTable::AddString(String* string) {
  DCHECK(string->IsExternalString());
  if (heap_->InNewSpace(string)) {
    new_space_strings_.Add(string);
  } else {
    old_space_strings_.Add(string);
  }
}


void Heap::ExternalStringTable::Iterate(ObjectVisitor* v) {
  if (!new_space_strings_.is_empty()) {
    Object** start = &new_space_strings_[0];
    v->VisitPointers(start, start + new_space_strings_.length());
  }
  if (!old_space_strings_.is_empty()) {
    Object** start = &old_space_strings_[0];
    v->VisitPointers(start, start + old_space_strings_.length());
  }
}


// Verify() is inline to avoid ifdef-s around its calls in release
// mode.
void Heap::ExternalStringTable::Verify() {
#ifdef DEBUG
  for (int i = 0; i < new_space_strings_.length(); ++i) {
    Object* obj = Object::cast(new_space_strings_[i]);
    DCHECK(heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
  for (int i = 0; i < old_space_strings_.length(); ++i) {
    Object* obj = Object::cast(old_space_strings_[i]);
    DCHECK(!heap_->InNewSpace(obj));
    DCHECK(obj != heap_->the_hole_value());
  }
#endif
}


void Heap::ExternalStringTable::AddOldString(String* string) {
  DCHECK(string->IsExternalString());
  DCHECK(!heap_->InNewSpace(string));
  old_space_strings_.Add(string);
}


void Heap::ExternalStringTable::ShrinkNewStrings(int position) {
  new_space_strings_.Rewind(position);
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    Verify();
  }
#endif
}


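// The descriptor lookup cache maps (map, name) pairs to descriptor indices.
// Lookup returns kAbsent on a miss, and Update only caches results for unique
// names, for which comparing the cached key by pointer identity is valid.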
int DescriptorLookupCache::Lookup(Map* source, Name* name) {
  if (!name->IsUniqueName()) return kAbsent;
  int index = Hash(source, name);
  Key& key = keys_[index];
  if ((key.source == source) && (key.name == name)) return results_[index];
  return kAbsent;
}


void DescriptorLookupCache::Update(Map* source, Name* name, int result) {
  DCHECK(result != kAbsent);
  if (name->IsUniqueName()) {
    int index = Hash(source, name);
    Key& key = keys_[index];
    key.source = source;
    key.name = name;
    results_[index] = result;
  }
}


void Heap::ClearInstanceofCache() {
  set_instanceof_cache_function(Smi::FromInt(0));
}


Object* Heap::ToBoolean(bool condition) {
  return condition ? true_value() : false_value();
}


void Heap::CompletelyClearInstanceofCache() {
  set_instanceof_cache_map(Smi::FromInt(0));
  set_instanceof_cache_function(Smi::FromInt(0));
}


uint32_t Heap::HashSeed() {
  uint32_t seed = static_cast<uint32_t>(hash_seed()->value());
  DCHECK(FLAG_randomize_hashes || seed == 0);
  return seed;
}


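// Script ids are handed out from the last_script_id Smi root; when the
// counter reaches Smi::kMaxValue it wraps back to 1 instead of overflowing.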
int Heap::NextScriptId() {
  int last_id = last_script_id()->value();
  if (last_id == Smi::kMaxValue) {
    last_id = 1;
  } else {
    last_id++;
  }
  set_last_script_id(Smi::FromInt(last_id));
  return last_id;
}


void Heap::SetArgumentsAdaptorDeoptPCOffset(int pc_offset) {
  DCHECK(arguments_adaptor_deopt_pc_offset() == Smi::FromInt(0));
  set_arguments_adaptor_deopt_pc_offset(Smi::FromInt(pc_offset));
}


void Heap::SetConstructStubDeoptPCOffset(int pc_offset) {
  DCHECK(construct_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_construct_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}


void Heap::SetGetterStubDeoptPCOffset(int pc_offset) {
  DCHECK(getter_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_getter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}


void Heap::SetSetterStubDeoptPCOffset(int pc_offset) {
  DCHECK(setter_stub_deopt_pc_offset() == Smi::FromInt(0));
  set_setter_stub_deopt_pc_offset(Smi::FromInt(pc_offset));
}


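// AlwaysAllocateScope bumps the heap's always-allocate counter for its
// lifetime; always_allocate() is consulted e.g. by AllocateRaw above before
// honoring the --gc-interval allocation timeout.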
AlwaysAllocateScope::AlwaysAllocateScope(Isolate* isolate)
    : heap_(isolate->heap()) {
  heap_->always_allocate_scope_count_.Increment(1);
}


AlwaysAllocateScope::~AlwaysAllocateScope() {
  heap_->always_allocate_scope_count_.Increment(-1);
}


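// Debug visitors: VerifyPointersVisitor checks that every heap object slot
// points into the heap and carries a valid map; VerifySmisVisitor checks that
// every slot holds a Smi.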
void VerifyPointersVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    if ((*current)->IsHeapObject()) {
      HeapObject* object = HeapObject::cast(*current);
      CHECK(object->GetIsolate()->heap()->Contains(object));
      CHECK(object->map()->IsMap());
    }
  }
}


void VerifySmisVisitor::VisitPointers(Object** start, Object** end) {
  for (Object** current = start; current < end; current++) {
    CHECK((*current)->IsSmi());
  }
}
}  // namespace internal
}  // namespace v8

#endif  // V8_HEAP_HEAP_INL_H_