1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // Review notes:
6 //
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
10 //
11 
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
14 
15 #include "src/base/atomicops.h"
16 #include "src/base/bits.h"
17 #include "src/contexts-inl.h"
18 #include "src/conversions-inl.h"
19 #include "src/factory.h"
20 #include "src/field-index-inl.h"
21 #include "src/heap/heap-inl.h"
22 #include "src/heap/heap.h"
23 #include "src/isolate.h"
24 #include "src/layout-descriptor-inl.h"
25 #include "src/lookup.h"
26 #include "src/objects.h"
27 #include "src/property.h"
28 #include "src/prototype.h"
29 #include "src/transitions-inl.h"
30 #include "src/type-feedback-vector-inl.h"
31 #include "src/types-inl.h"
32 #include "src/v8memory.h"
33 
34 namespace v8 {
35 namespace internal {
36 
// Decodes property details previously packed into a Smi (see AsSmi below).
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}
40 
41 
// Packs the details into a Smi, the inverse of the constructor above.
// NOTE(review): relies on arithmetic (sign-propagating) right shift of a
// signed int, which is the behavior of all supported compilers.
Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
48 
49 
// Number of pointer-sized words an in-object field with these details
// occupies. Only unboxed doubles can be wider than one word, and only when
// a double is larger than a pointer on the target.
int PropertyDetails::field_width_in_words() const {
  DCHECK(location() == kField);
  if (!FLAG_unbox_double_fields) return 1;
  if (kDoubleSize == kPointerSize) return 1;
  return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
}
56 
57 
// Generates an Object::Is<type>() predicate that compares the heap object's
// instance type against a single expected instance type.
#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() const {                                       \
  return Object::IsHeapObject() &&                                      \
      HeapObject::cast(this)->map()->instance_type() == instancetype;   \
  }


// Generates checked down-casts (const and non-const overloads). The type
// check only runs in slow-DCHECK builds.
#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }


// Getter/setter pair for a raw (untagged) int field at a fixed offset.
#define INT_ACCESSORS(holder, name, offset)                                   \
  int holder::name() const { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }


// Getter/setter pair for a tagged-pointer field. The setter emits a
// conditional write barrier so the GC observes the new reference.
#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }


// Getter that returns a Smi as an int and writes an int as a Smi.
// No write barrier is needed: Smis are not heap references.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() const {                            \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }

// Like SMI_ACCESSORS, but with acquire-load / release-store ordering for
// fields accessed concurrently from multiple threads.
#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }

// Like SMI_ACCESSORS, but with relaxed (no-barrier) atomic accesses.
#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)          \
  int holder::nobarrier_##name() const {                       \
    Object* value = NOBARRIER_READ_FIELD(this, offset);        \
    return Smi::cast(value)->value();                          \
  }                                                            \
  void holder::nobarrier_set_##name(int value) {               \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value));  \
  }

// Read-only accessor for a single bit of an int-typed field.
#define BOOL_GETTER(holder, field, name, offset)           \
  bool holder::name() const {                              \
    return BooleanBit::get(field(), offset);               \
  }                                                        \


// Read/write accessors for a single bit of an int-typed field.
#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() const {                              \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
130 
131 
IsFixedArrayBase()132 bool Object::IsFixedArrayBase() const {
133   return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
134 }
135 
136 
IsFixedArray()137 bool Object::IsFixedArray() const {
138   if (!IsHeapObject()) return false;
139   InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
140   return instance_type == FIXED_ARRAY_TYPE ||
141          instance_type == TRANSITION_ARRAY_TYPE;
142 }
143 
144 
145 // External objects are not extensible, so the map check is enough.
IsExternal()146 bool Object::IsExternal() const {
147   return Object::IsHeapObject() &&
148       HeapObject::cast(this)->map() ==
149       HeapObject::cast(this)->GetHeap()->external_map();
150 }
151 
152 
// Currently the executable variant is the only kind of AccessorInfo.
bool Object::IsAccessorInfo() const { return IsExecutableAccessorInfo(); }
154 
155 
// Instance-type based predicates for the number-like heap objects.
TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
TYPE_CHECKER(Simd128Value, SIMD128_VALUE_TYPE)


// All SIMD128 kinds share one instance type; the specific kind is
// distinguished by the map, so these predicates compare maps directly.
#define SIMD128_TYPE_CHECKER(TYPE, Type, type, lane_count, lane_type) \
  bool Object::Is##Type() const {                                     \
    return Object::IsHeapObject() &&                                  \
           HeapObject::cast(this)->map() ==                           \
               HeapObject::cast(this)->GetHeap()->type##_map();       \
  }
SIMD128_TYPES(SIMD128_TYPE_CHECKER)
#undef SIMD128_TYPE_CHECKER
170 
171 
172 bool Object::IsString() const {
173   return Object::IsHeapObject()
174     && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
175 }
176 
177 
// A Name is a String or a Symbol; names occupy the lowest instance types.
bool Object::IsName() const {
  STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() <= LAST_NAME_TYPE;
}


// Unique names can be compared by identity: internalized strings and symbols.
bool Object::IsUniqueName() const {
  return IsInternalizedString() || IsSymbol();
}


// Functions occupy the highest instance types.
bool Object::IsFunction() const {
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->instance_type() >= FIRST_FUNCTION_TYPE;
}
195 
196 
// Callability is a map bit rather than an instance type, so proxies and
// bound functions can participate.
bool Object::IsCallable() const {
  return Object::IsHeapObject() && HeapObject::cast(this)->map()->is_callable();
}


// Constructability is likewise tracked on the map.
bool Object::IsConstructor() const {
  return Object::IsHeapObject() &&
         HeapObject::cast(this)->map()->is_constructor();
}


// TemplateInfo covers both kinds of API templates.
bool Object::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}


// True for strings that live in the internalized-string table. Tests the
// "is a string" bits and the "is internalized" bit with one mask compare.
bool Object::IsInternalizedString() const {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
220 
221 
// String-representation predicates: first rule out non-strings, then let
// StringShape decode the representation bits from the instance type.
bool Object::IsConsString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}


bool Object::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


bool Object::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}
238 
239 
IsSeqOneByteString()240 bool Object::IsSeqOneByteString() const {
241   if (!IsString()) return false;
242   return StringShape(String::cast(this)).IsSequential() &&
243          String::cast(this)->IsOneByteRepresentation();
244 }
245 
246 
IsSeqTwoByteString()247 bool Object::IsSeqTwoByteString() const {
248   if (!IsString()) return false;
249   return StringShape(String::cast(this)).IsSequential() &&
250          String::cast(this)->IsTwoByteRepresentation();
251 }
252 
253 
// External strings store their characters outside the V8 heap; the
// one-/two-byte variants additionally check the encoding bit.
bool Object::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


bool Object::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


bool Object::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}
272 
273 
HasValidElements()274 bool Object::HasValidElements() {
275   // Dictionary is covered under FixedArray.
276   return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
277 }
278 
279 
// Compares two property keys (numbers or names). Numbers compare by value;
// a number and a name compare equal if the name is the array index spelling
// of the number; otherwise both must be names and compare with Equals.
bool Object::KeyEquals(Object* second) {
  Object* first = this;
  if (second->IsNumber()) {
    if (first->IsNumber()) return first->Number() == second->Number();
    // Normalize so that the number, if any, is in |first|.
    Object* temp = first;
    first = second;
    second = temp;
  }
  if (first->IsNumber()) {
    DCHECK_LE(0, first->Number());
    uint32_t expected = static_cast<uint32_t>(first->Number());
    uint32_t index;
    return Name::cast(second)->AsArrayIndex(&index) && index == expected;
  }
  return Name::cast(first)->Equals(Name::cast(second));
}
296 
297 
FilterKey(PropertyFilter filter)298 bool Object::FilterKey(PropertyFilter filter) {
299   if (IsSymbol()) {
300     if (filter & SKIP_SYMBOLS) return true;
301     if (Symbol::cast(this)->is_private()) return true;
302   } else {
303     if (filter & SKIP_STRINGS) return true;
304   }
305   return false;
306 }
307 
308 
// Returns the value to store for |object| in a field with the given
// representation. Uninitialized Smi fields get Smi 0; double fields always
// get a fresh MUTABLE heap number so the slot can be updated in place.
Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  }
  if (!representation.IsDouble()) return object;
  double value;
  if (object->IsUninitialized()) {
    value = 0;
  } else if (object->IsMutableHeapNumber()) {
    // Copy the payload out of the existing mutable box.
    value = HeapNumber::cast(*object)->value();
  } else {
    value = object->Number();
  }
  return isolate->factory()->NewHeapNumber(value, MUTABLE);
}
326 
327 
// Wraps a raw field value for external consumption: double-representation
// values are boxed into a fresh (immutable) heap number; everything else is
// returned unchanged.
Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized());
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
}
338 
339 
// StringShape snapshots a string's instance type so the representation and
// encoding bits can be queried cheaply. All constructors assert the type
// really is a string type.
StringShape::StringShape(const String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


// Same bit test as Object::IsInternalizedString, on the cached type word.
bool StringShape::IsInternalized() {
  DCHECK(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
367 
368 
IsOneByteRepresentation()369 bool String::IsOneByteRepresentation() const {
370   uint32_t type = map()->instance_type();
371   return (type & kStringEncodingMask) == kOneByteStringTag;
372 }
373 
374 
IsTwoByteRepresentation()375 bool String::IsTwoByteRepresentation() const {
376   uint32_t type = map()->instance_type();
377   return (type & kStringEncodingMask) == kTwoByteStringTag;
378 }
379 
380 
// Like IsOneByteRepresentation, but for flat indirect strings (cons/sliced)
// it reports the encoding of the underlying string instead.
bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


// Two-byte analogue of the function above.
bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}


// True if the string is known to contain only one-byte characters, either
// via the one-byte-data hint bit or the representation itself.
bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}
418 
419 
// Representation predicates: each compares the representation bits of the
// cached type word against one representation tag.
bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


// Indirect strings (cons and sliced) point at other strings.
bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}


// Raw accessors for the representation bits, the encoding bit, and both
// together.
StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}
459 
460 
461 STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
462              Internals::kFullStringRepresentationMask);
463 
464 STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
465              Internals::kStringEncodingMask);
466 
467 
IsSequentialOneByte()468 bool StringShape::IsSequentialOneByte() {
469   return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
470 }
471 
472 
IsSequentialTwoByte()473 bool StringShape::IsSequentialTwoByte() {
474   return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
475 }
476 
477 
IsExternalOneByte()478 bool StringShape::IsExternalOneByte() {
479   return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
480 }
481 
482 
483 STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
484               Internals::kExternalOneByteRepresentationTag);
485 
486 STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);
487 
488 
IsExternalTwoByte()489 bool StringShape::IsExternalTwoByte() {
490   return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
491 }
492 
493 
494 STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
495              Internals::kExternalTwoByteRepresentationTag);
496 
497 STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
498 
499 
// Reads the character at |index| from the flat string, dispatching on the
// encoding captured when the reader was constructed.
uc32 FlatStringReader::Get(int index) {
  if (is_one_byte_) {
    return Get<uint8_t>(index);
  } else {
    return Get<uc16>(index);
  }
}


// Typed variant: Char must match the captured encoding (checked below).
// NOTE(review): the bound check allows index == length_ — presumably so the
// position one past the last character can be probed; confirm with callers.
template <typename Char>
Char FlatStringReader::Get(int index) {
  DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
  DCHECK(0 <= index && index <= length_);
  if (sizeof(Char) == 1) {
    return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
  } else {
    return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
  }
}
519 
520 
// These shape classes all delegate handle creation to the key itself.
Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}
536 
// Base class for string-table keys backed by a flat character vector.
// Hash() caches the full hash field in hash_field_ for use by AsHandle.
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  uint32_t Hash() override {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }


  // Existing table entries are strings; reuse their cached hash.
  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;   // The characters being looked up.
  uint32_t hash_field_;         // Cached full hash field (0 = not computed).
  uint32_t seed_;               // Hash seed of the target heap.
};
562 
563 
// String-table key over a one-byte (Latin-1) character vector.
class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  // Matches against existing table entries by character content.
  bool IsMatch(Object* string) override {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};
575 
576 
// String-table key over a substring [from, from + length) of an existing
// sequential one-byte string, avoiding a copy of the characters.
class SeqOneByteSubStringKey : public HashTableKey {
 public:
  SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    DCHECK(string_->IsSeqOneByteString());
  }

  uint32_t Hash() override {
    DCHECK(length_ >= 0);
    DCHECK(from_ + length_ <= string_->length());
    // Hash directly over the parent string's character storage.
    const uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  bool IsMatch(Object* string) override;
  Handle<Object> AsHandle(Isolate* isolate) override;

 private:
  Handle<SeqOneByteString> string_;  // Parent string owning the characters.
  int from_;                         // Start offset of the substring.
  int length_;                       // Length of the substring.
  uint32_t hash_field_;              // Cached full hash field.
};
608 
609 
// String-table key over a two-byte (UTF-16 code unit) character vector.
class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};
621 
622 
// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  // Lazily computes and caches the hash; subsequent calls reuse it.
  uint32_t Hash() override {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  // Materializes an internalized string; forces the hash first because the
  // factory needs the hash field and the decoded character count.
  Handle<Object> AsHandle(Isolate* isolate) override {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
656 
657 
IsNumber()658 bool Object::IsNumber() const {
659   return IsSmi() || IsHeapNumber();
660 }
661 
662 
// Raw-data array predicates.
TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)
666 
667 
668 bool Object::IsFiller() const {
669   if (!Object::IsHeapObject()) return false;
670   InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
671   return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
672 }
673 
674 
675 
// Expands to an Is<FixedTypeArray>() predicate for every typed-array kind.
#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)               \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER
681 
682 
683 bool Object::IsFixedTypedArrayBase() const {
684   if (!Object::IsHeapObject()) return false;
685 
686   InstanceType instance_type =
687       HeapObject::cast(this)->map()->instance_type();
688   return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
689           instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
690 }
691 
692 
// JSReceivers (objects and proxies) occupy the highest instance types.
bool Object::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


bool Object::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() && HeapObject::cast(this)->map()->IsJSObjectMap();
}


bool Object::IsJSProxy() const {
  if (!Object::IsHeapObject()) return false;
  return  HeapObject::cast(this)->map()->IsJSProxyMap();
}


// Single-instance-type predicates for collections and a few array kinds.
// Note: WeakFixedArray shares FIXED_ARRAY_TYPE with plain fixed arrays, so
// IsWeakFixedArray() is true for any fixed array.
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSIteratorResult, JS_ITERATOR_RESULT_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(TransitionArray, TRANSITION_ARRAY_TYPE)
724 
725 
bool Object::IsJSWeakCollection() const {
  return IsJSWeakMap() || IsJSWeakSet();
}


// The following types are FixedArrays with a conventional layout; there is
// no dedicated instance type, so these checks are necessarily imprecise.
bool Object::IsDescriptorArray() const {
  return IsFixedArray();
}


bool Object::IsArrayList() const { return IsFixedArray(); }


// Layout descriptors are either bit-field Smis or typed arrays of bits.
bool Object::IsLayoutDescriptor() const {
  return IsSmi() || IsFixedTypedArrayBase();
}


bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }


bool Object::IsTypeFeedbackMetadata() const { return IsFixedArray(); }


bool Object::IsLiteralsArray() const { return IsFixedArray(); }
751 
752 
// Heuristic check used by asserts only: a deopt input data array is a fixed
// array whose length is zero or the fixed header plus a whole number of
// deopt entries.
bool Object::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
}
767 
768 
IsDeoptimizationOutputData()769 bool Object::IsDeoptimizationOutputData() const {
770   if (!IsFixedArray()) return false;
771   // There's actually no way to see the difference between a fixed array and
772   // a deoptimization data array.  Since this is used for asserts we can check
773   // that the length is plausible though.
774   if (FixedArray::cast(this)->length() % 2 != 0) return false;
775   return true;
776 }
777 
778 
IsHandlerTable()779 bool Object::IsHandlerTable() const {
780   if (!IsFixedArray()) return false;
781   // There's actually no way to see the difference between a fixed array and
782   // a handler table array.
783   return true;
784 }
785 
786 
IsDependentCode()787 bool Object::IsDependentCode() const {
788   if (!IsFixedArray()) return false;
789   // There's actually no way to see the difference between a fixed array and
790   // a dependent codes array.
791   return true;
792 }
793 
794 
// Contexts are identified by their map: one dedicated map per context kind.
bool Object::IsContext() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
      map == heap->catch_context_map() ||
      map == heap->with_context_map() ||
      map == heap->native_context_map() ||
      map == heap->block_context_map() ||
      map == heap->module_context_map() ||
      map == heap->script_context_map());
}
807 
808 
IsNativeContext()809 bool Object::IsNativeContext() const {
810   return Object::IsHeapObject() &&
811       HeapObject::cast(this)->map() ==
812       HeapObject::cast(this)->GetHeap()->native_context_map();
813 }
814 
815 
// Both of these are map-identity checks against dedicated heap maps.
bool Object::IsScriptContextTable() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return map == heap->script_context_table_map();
}


bool Object::IsScopeInfo() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}
829 
830 
TYPE_CHECKER(JSBoundFunction, JS_BOUND_FUNCTION_TYPE)
TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


// Specialization used by the generic Is<T>() template dispatch.
template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}


TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
851 
852 
853 bool Object::IsStringWrapper() const {
854   return IsJSValue() && JSValue::cast(this)->value()->IsString();
855 }
856 
857 
TYPE_CHECKER(Foreign, FOREIGN_TYPE)


// Booleans are the two oddballs whose kind has no non-boolean bits set.
bool Object::IsBoolean() const {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}
865 
866 
TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)


// Array-buffer views are typed arrays and data views.
bool Object::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


// Specialization used by the generic Is<T>() template dispatch.
template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
884 
885 
IsHashTable()886 bool Object::IsHashTable() const {
887   return Object::IsHeapObject() &&
888       HeapObject::cast(this)->map() ==
889       HeapObject::cast(this)->GetHeap()->hash_table_map();
890 }
891 
892 
IsWeakHashTable()893 bool Object::IsWeakHashTable() const {
894   return IsHashTable();
895 }
896 
897 
IsDictionary()898 bool Object::IsDictionary() const {
899   return IsHashTable() &&
900       this != HeapObject::cast(this)->GetHeap()->string_table();
901 }
902 
903 
// The predicates below cannot tell the concrete dictionary/table kind from
// the map alone, so each reduces to IsDictionary()/IsHashTable(); callers
// are expected to know which concrete kind they hold.
bool Object::IsNameDictionary() const {
  return IsDictionary();
}


bool Object::IsGlobalDictionary() const { return IsDictionary(); }


bool Object::IsSeededNumberDictionary() const {
  return IsDictionary();
}


bool Object::IsUnseededNumberDictionary() const {
  return IsDictionary();
}


// Note: the string table *is* a hash table, so this is also true for plain
// hash tables (see IsDictionary above for the complementary check).
bool Object::IsStringTable() const {
  return IsHashTable();
}
925 
926 
bool Object::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}


// Maps a map's hash into one of the cache's kEntries slots.
int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}
935 
936 
IsNormalizedMapCache(const Object * obj)937 bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
938   if (!obj->IsFixedArray()) return false;
939   if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
940     return false;
941   }
942 #ifdef VERIFY_HEAP
943   if (FLAG_verify_heap) {
944     reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
945         NormalizedMapCacheVerify();
946   }
947 #endif
948   return true;
949 }
950 
951 
// Like the dictionary predicates above, the specialized hash table kinds
// below are indistinguishable from a generic hash table by map alone.
bool Object::IsCompilationCacheTable() const {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() const {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() const {
  return IsHashTable();
}


bool Object::IsMapCache() const {
  return IsHashTable();
}


bool Object::IsObjectHashTable() const {
  return IsHashTable();
}


// Ordered hash tables have their own dedicated map, distinct from the
// plain hash table map checked by IsHashTable().
bool Object::IsOrderedHashTable() const {
  return IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
}


// Sets and maps share the ordered hash table map.
bool Object::IsOrderedHashSet() const {
  return IsOrderedHashTable();
}


bool Object::IsOrderedHashMap() const {
  return IsOrderedHashTable();
}
992 
993 
IsPrimitive()994 bool Object::IsPrimitive() const {
995   return IsSmi() || HeapObject::cast(this)->map()->IsPrimitiveMap();
996 }
997 
998 
IsJSGlobalProxy()999 bool Object::IsJSGlobalProxy() const {
1000   bool result = IsHeapObject() &&
1001                 (HeapObject::cast(this)->map()->instance_type() ==
1002                  JS_GLOBAL_PROXY_TYPE);
1003   DCHECK(!result ||
1004          HeapObject::cast(this)->map()->is_access_check_needed());
1005   return result;
1006 }
1007 
1008 
TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)


// Undetectable objects are marked by a bit on their map.
bool Object::IsUndetectableObject() const {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_undetectable();
}
1016 
1017 
// Whether property access on this object must go through an access check.
// A global proxy only needs the check once it has been detached from the
// current context's global object; all other heap objects consult the
// access-check bit on their map directly.
bool Object::IsAccessCheckNeeded() const {
  if (!IsHeapObject()) return false;
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    JSGlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return HeapObject::cast(this)->map()->is_access_check_needed();
}
1027 
1028 
// True iff this heap object's instance type is any of the struct types
// enumerated in STRUCT_LIST.
bool Object::IsStruct() const {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}


// Generates an Object::Is##Name() predicate for every struct type in
// STRUCT_LIST, each checking for its specific instance type.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                         \
  bool Object::Is##Name() const {                                       \
    return Object::IsHeapObject()                                       \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
1047 
1048 
// Each of the singleton oddballs (undefined, null, the hole, ...) is
// identified by the kind() field of the Oddball that represents it.
bool Object::IsUndefined() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsException() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
}


bool Object::IsUninitialized() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}


bool Object::IsTrue() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


// The arguments marker is a sentinel oddball used internally (not exposed
// to JavaScript).
bool Object::IsArgumentsMarker() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}
1087 
1088 
Number()1089 double Object::Number() const {
1090   DCHECK(IsNumber());
1091   return IsSmi()
1092              ? static_cast<double>(reinterpret_cast<const Smi*>(this)->value())
1093              : reinterpret_cast<const HeapNumber*>(this)->value();
1094 }
1095 
1096 
// NaN and -0 can only be represented by heap numbers, never by Smis.
bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}


bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(this)->value());
}
1106 
1107 
// Picks the most specific in-object field representation this value can be
// stored with, gated on the field-tracking flags. The check order matters:
// Smi before Double before None before the generic HeapObject fallback.
Representation Object::OptimalRepresentation() {
  if (!FLAG_track_fields) return Representation::Tagged();
  if (IsSmi()) {
    return Representation::Smi();
  } else if (FLAG_track_double_fields && IsHeapNumber()) {
    return Representation::Double();
  } else if (FLAG_track_computed_fields && IsUninitialized()) {
    return Representation::None();
  } else if (FLAG_track_heap_object_fields) {
    DCHECK(IsHeapObject());
    return Representation::HeapObject();
  } else {
    return Representation::Tagged();
  }
}
1123 
1124 
OptimalElementsKind()1125 ElementsKind Object::OptimalElementsKind() {
1126   if (IsSmi()) return FAST_SMI_ELEMENTS;
1127   if (IsNumber()) return FAST_DOUBLE_ELEMENTS;
1128   return FAST_ELEMENTS;
1129 }
1130 
1131 
// Whether this value can be stored into a field of the given representation
// without generalizing the field. Mirrors the flag gating used by
// OptimalRepresentation(); anything fits a Tagged field (final return).
bool Object::FitsRepresentation(Representation representation) {
  if (FLAG_track_fields && representation.IsNone()) {
    return false;
  } else if (FLAG_track_fields && representation.IsSmi()) {
    return IsSmi();
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    return IsMutableHeapNumber() || IsNumber();
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    return IsHeapObject();
  }
  return true;
}
1144 
1145 
1146 // static
// static
// ES ToObject, using the current native context for wrapper creation.
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  return ToObject(
      isolate, object, handle(isolate->context()->native_context(), isolate));
}


// static
// ES ToPrimitive: primitives are returned unchanged; receivers are
// converted via JSReceiver::ToPrimitive with the given hint.
MaybeHandle<Object> Object::ToPrimitive(Handle<Object> input,
                                        ToPrimitiveHint hint) {
  if (input->IsPrimitive()) return input;
  return JSReceiver::ToPrimitive(Handle<JSReceiver>::cast(input), hint);
}
1160 
1161 
HasSpecificClassOf(String * name)1162 bool Object::HasSpecificClassOf(String* name) {
1163   return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
1164 }
1165 
1166 
// Named property lookup starting at |object|, following the lookup chain.
MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name,
                                        LanguageMode language_mode) {
  LookupIterator it(object, name);
  return GetProperty(&it, language_mode);
}


// Indexed (element) lookup; the isolate is passed explicitly because
// |object| need not be a heap object.
MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  return GetProperty(&it, language_mode);
}


// Stores |value| at |index|. Returns an empty handle if the store failed
// (via MAYBE_RETURN_NULL), otherwise echoes |value| back to the caller.
MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index, Handle<Object> value,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  MAYBE_RETURN_NULL(
      SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED));
  return value;
}


// Advances past proxies and hidden prototypes to the receiver's first
// non-hidden prototype; returns an empty handle on failure (e.g. a proxy
// trap threw).
MaybeHandle<Object> Object::GetPrototype(Isolate* isolate,
                                         Handle<Object> receiver) {
  // We don't expect access checks to be needed on JSProxy objects.
  DCHECK(!receiver->IsAccessCheckNeeded() || receiver->IsJSObject());
  PrototypeIterator iter(isolate, receiver,
                         PrototypeIterator::START_AT_RECEIVER);
  do {
    if (!iter.AdvanceFollowingProxies()) return MaybeHandle<Object>();
  } while (!iter.IsAtEnd(PrototypeIterator::END_AT_NON_HIDDEN));
  return PrototypeIterator::GetCurrent(iter);
}


// Convenience overload: internalizes a C string property name first.
MaybeHandle<Object> Object::GetProperty(Isolate* isolate, Handle<Object> object,
                                        const char* name,
                                        LanguageMode language_mode) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  return GetProperty(object, str, language_mode);
}
1212 
1213 
// Raw field access. FIELD_ADDR turns a tagged heap object pointer plus a
// byte offset into the untagged address of the field (heap object pointers
// carry kHeapObjectTag, which must be subtracted).
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

// Plain (non-atomic) tagged-pointer field read.
#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

// Atomic tagged-pointer reads: ACQUIRE pairs with RELEASE_WRITE_FIELD for
// synchronized accesses; NOBARRIER is atomic but unordered.
#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

// Plain (non-atomic) tagged-pointer field write; callers must pair this
// with WRITE_BARRIER / CONDITIONAL_WRITE_BARRIER as appropriate.
#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

// GC write barriers: notify the incremental marker of the store and record
// old-to-new pointers for the remembered set. The conditional variant lets
// callers skip the barrier (SKIP_WRITE_BARRIER) when provably unnecessary.
#define WRITE_BARRIER(heap, object, offset, value)                      \
  heap->incremental_marking()->RecordWrite(                             \
      object, HeapObject::RawField(object, offset), value);             \
  if (heap->InNewSpace(value)) {                                        \
    heap->RecordWrite(object->address(), offset);                       \
  }

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode != SKIP_WRITE_BARRIER) {                                  \
    if (mode == UPDATE_WRITE_BARRIER) {                              \
      heap->incremental_marking()->RecordWrite(                      \
          object, HeapObject::RawField(object, offset), value);      \
    }                                                                \
    if (heap->InNewSpace(value)) {                                   \
      heap->RecordWrite(object->address(), offset);                  \
    }                                                                \
  }

// Untagged typed field accessors (non-atomic); doubles go through the
// ReadDoubleValue/WriteDoubleValue helpers to cope with alignment.
#define READ_DOUBLE_FIELD(p, offset) \
  ReadDoubleValue(FIELD_ADDR_CONST(p, offset))

#define WRITE_DOUBLE_FIELD(p, offset, value) \
  WriteDoubleValue(FIELD_ADDR(p, offset), value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT8_FIELD(p, offset) \
  (*reinterpret_cast<const uint8_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT8_FIELD(p, offset, value) \
  (*reinterpret_cast<uint8_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT8_FIELD(p, offset) \
  (*reinterpret_cast<const int8_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT8_FIELD(p, offset, value) \
  (*reinterpret_cast<int8_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT16_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT16_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT16_FIELD(p, offset) \
  (*reinterpret_cast<const int16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT16_FIELD(p, offset, value) \
  (*reinterpret_cast<int16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT_FIELD(p, offset) \
  (*reinterpret_cast<const float*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_FLOAT_FIELD(p, offset, value) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT64_FIELD(p, offset) \
  (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT64_FIELD(p, offset, value) \
  (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

// Byte accessors, with atomic no-barrier variants.
#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));
1348 
// Address of the field at byte_offset, viewed as a tagged-pointer slot.
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}


// A MapWord holds either a map pointer or, during scavenge/compaction, a
// forwarding address.
MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


// The two cases are told apart by the tag: map pointers are heap-object
// tagged, forwarding addresses are smi-tagged (see FromForwardingAddress).
bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


// Strips kHeapObjectTag so the stored word carries a smi tag pattern.
MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
1379 
1380 
#ifdef VERIFY_HEAP
// Debug-heap helpers: check that a field holds a valid pointer / a Smi.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif
1390 
1391 
// The owning heap is recovered from the object's address via the header of
// the MemoryChunk (page) it lives on.
Heap* HeapObject::GetHeap() const {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
  SLOW_DCHECK(heap != NULL);
  return heap;
}


Isolate* HeapObject::GetIsolate() const {
  return GetHeap()->isolate();
}
1403 
1404 
// Decodes the object's map from its map word.
Map* HeapObject::map() const {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}
1415 
1416 
// Installs a new map and informs the incremental marker of the store.
void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


// "synchronized" variants use acquire/release map-word accesses so that
// concurrent readers observe a consistent map pointer.
Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}


void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


// Release-stores the map without notifying the incremental marker.
void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}
1445 
1446 
// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}


// Raw map-word accessors. The plain variants are atomic but unordered
// (NoBarrier); the synchronized variants form an acquire/release pair.
MapWord HeapObject::map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


MapWord HeapObject::synchronized_map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


// Size in bytes, derived from the instance type/layout described by the map.
int HeapObject::Size() {
  return SizeFromMap(map());
}
1480 
1481 
double HeapNumber::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


// Extracts the IEEE-754 exponent bits from the high word and removes the
// bias, yielding the unbiased binary exponent.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


// Non-zero iff the sign bit is set (i.e. the number is negative or -0).
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
1501 
1502 
// Dispatches to the concrete SIMD type's Equals; values of different SIMD
// types never compare equal.
bool Simd128Value::Equals(Simd128Value* that) {
#define SIMD128_VALUE(TYPE, Type, type, lane_count, lane_type) \
  if (this->Is##Type()) {                                      \
    if (!that->Is##Type()) return false;                       \
    return Type::cast(this)->Equals(Type::cast(that));         \
  }
  SIMD128_TYPES(SIMD128_VALUE)
#undef SIMD128_VALUE
  return false;
}


// static
bool Simd128Value::Equals(Handle<Simd128Value> one, Handle<Simd128Value> two) {
  return one->Equals(*two);
}


// Generates lane-wise Equals for each concrete SIMD128 type.
#define SIMD128_VALUE_EQUALS(TYPE, Type, type, lane_count, lane_type) \
  bool Type::Equals(Type* that) {                                     \
    for (int lane = 0; lane < lane_count; ++lane) {                   \
      if (this->get_lane(lane) != that->get_lane(lane)) return false; \
    }                                                                 \
    return true;                                                      \
  }
SIMD128_TYPES(SIMD128_VALUE_EQUALS)
#undef SIMD128_VALUE_EQUALS
1530 
1531 
// Lane accessors store lane 0 at the lowest address on little-endian
// targets; on big-endian targets the lane order within the 16-byte payload
// is reversed so the in-memory layout matches.
#if defined(V8_TARGET_LITTLE_ENDIAN)
#define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
  lane_type value =                                                      \
      READ_##field_type##_FIELD(this, kValueOffset + lane * field_size);
#elif defined(V8_TARGET_BIG_ENDIAN)
#define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
  lane_type value = READ_##field_type##_FIELD(                           \
      this, kValueOffset + (lane_count - lane - 1) * field_size);
#else
#error Unknown byte ordering
#endif

#if defined(V8_TARGET_LITTLE_ENDIAN)
#define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
  WRITE_##field_type##_FIELD(this, kValueOffset + lane * field_size, value);
#elif defined(V8_TARGET_BIG_ENDIAN)
#define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
  WRITE_##field_type##_FIELD(                                         \
      this, kValueOffset + (lane_count - lane - 1) * field_size, value);
#else
#error Unknown byte ordering
#endif

// get_lane/set_lane for the numeric SIMD types.
#define SIMD128_NUMERIC_LANE_FNS(type, lane_type, lane_count, field_type, \
                                 field_size)                              \
  lane_type type::get_lane(int lane) const {                              \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size)      \
    return value;                                                         \
  }                                                                       \
                                                                          \
  void type::set_lane(int lane, lane_type value) {                        \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_WRITE_LANE(lane_count, field_type, field_size, value)         \
  }

SIMD128_NUMERIC_LANE_FNS(Float32x4, float, 4, FLOAT, kFloatSize)
SIMD128_NUMERIC_LANE_FNS(Int32x4, int32_t, 4, INT32, kInt32Size)
SIMD128_NUMERIC_LANE_FNS(Uint32x4, uint32_t, 4, UINT32, kInt32Size)
SIMD128_NUMERIC_LANE_FNS(Int16x8, int16_t, 8, INT16, kShortSize)
SIMD128_NUMERIC_LANE_FNS(Uint16x8, uint16_t, 8, UINT16, kShortSize)
SIMD128_NUMERIC_LANE_FNS(Int8x16, int8_t, 16, INT8, kCharSize)
SIMD128_NUMERIC_LANE_FNS(Uint8x16, uint8_t, 16, UINT8, kCharSize)
#undef SIMD128_NUMERIC_LANE_FNS


// get_lane/set_lane for the boolean SIMD types; lanes are stored as
// all-ones (-1) for true and 0 for false.
#define SIMD128_BOOLEAN_LANE_FNS(type, lane_type, lane_count, field_type, \
                                 field_size)                              \
  bool type::get_lane(int lane) const {                                   \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size)      \
    DCHECK(value == 0 || value == -1);                                    \
    return value != 0;                                                    \
  }                                                                       \
                                                                          \
  void type::set_lane(int lane, bool value) {                             \
    DCHECK(lane < lane_count && lane >= 0);                               \
    int32_t int_val = value ? -1 : 0;                                     \
    SIMD128_WRITE_LANE(lane_count, field_type, field_size, int_val)       \
  }

SIMD128_BOOLEAN_LANE_FNS(Bool32x4, int32_t, 4, INT32, kInt32Size)
SIMD128_BOOLEAN_LANE_FNS(Bool16x8, int16_t, 8, INT16, kShortSize)
SIMD128_BOOLEAN_LANE_FNS(Bool8x16, int8_t, 16, INT8, kCharSize)
#undef SIMD128_BOOLEAN_LANE_FNS

#undef SIMD128_READ_LANE
#undef SIMD128_WRITE_LANE
1600 
1601 
// Generated getter/setter for the properties backing store of JSReceivers.
ACCESSORS(JSReceiver, properties, FixedArray, kPropertiesOffset)


// Address of element 0 as a raw tagged-pointer slot.
Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}
1608 
1609 
ContainsOnlySmisOrHoles()1610 bool FixedArray::ContainsOnlySmisOrHoles() {
1611   Object* the_hole = GetHeap()->the_hole_value();
1612   Object** current = GetFirstElementAddress();
1613   for (int i = 0; i < length(); ++i) {
1614     Object* candidate = *current++;
1615     if (!candidate->IsSmi() && candidate != the_hole) return false;
1616   }
1617   return true;
1618 }
1619 
1620 
// Elements backing store; the concrete FixedArrayBase subtype depends on
// the object's elements kind.
FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}
1625 
1626 
// Resets the site to its pristine state (also used when zombifying, see
// MarkZombie below).
void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(0);
  set_pretenure_create_count(0);
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}


bool AllocationSite::IsZombie() { return pretenure_decision() == kZombie; }


bool AllocationSite::IsMaybeTenure() {
  return pretenure_decision() == kMaybeTenure;
}


bool AllocationSite::PretenuringDecisionMade() {
  return pretenure_decision() != kUndecided;
}


// Zombie sites are reset and excluded from further memento counting (see
// IncrementMementoFoundCount).
void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}


// Only valid while transition_info holds a packed ElementsKind Smi rather
// than a boilerplate object (see SitePointsToLiteral).
ElementsKind AllocationSite::GetElementsKind() {
  DCHECK(!SitePointsToLiteral());
  int value = Smi::cast(transition_info())->value();
  return ElementsKindBits::decode(value);
}


void AllocationSite::SetElementsKind(ElementsKind kind) {
  int value = Smi::cast(transition_info())->value();
  set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
                      SKIP_WRITE_BARRIER);
}


// The do-not-inline bit shares the transition_info Smi with the elements
// kind bits.
bool AllocationSite::CanInlineCall() {
  int value = Smi::cast(transition_info())->value();
  return DoNotInlineBit::decode(value) == 0;
}


void AllocationSite::SetDoNotInlineCall() {
  int value = Smi::cast(transition_info())->value();
  set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
                      SKIP_WRITE_BARRIER);
}


bool AllocationSite::SitePointsToLiteral() {
  // If transition_info is a smi, then it represents an ElementsKind
  // for a constructed array. Otherwise, it must be a boilerplate
  // for an object or array literal.
  return transition_info()->IsJSArray() || transition_info()->IsJSObject();
}
1691 
1692 
1693 // Heuristic: We only need to create allocation site info if the boilerplate
1694 // elements kind is the initial elements kind.
GetMode(ElementsKind boilerplate_elements_kind)1695 AllocationSiteMode AllocationSite::GetMode(
1696     ElementsKind boilerplate_elements_kind) {
1697   if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
1698     return TRACK_ALLOCATION_SITE;
1699   }
1700 
1701   return DONT_TRACK_ALLOCATION_SITE;
1702 }
1703 
1704 
GetMode(ElementsKind from,ElementsKind to)1705 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1706                                            ElementsKind to) {
1707   if (IsFastSmiElementsKind(from) &&
1708       IsMoreGeneralElementsKindTransition(from, to)) {
1709     return TRACK_ALLOCATION_SITE;
1710   }
1711 
1712   return DONT_TRACK_ALLOCATION_SITE;
1713 }
1714 
1715 
CanTrack(InstanceType type)1716 inline bool AllocationSite::CanTrack(InstanceType type) {
1717   if (FLAG_allocation_site_pretenuring) {
1718     return type == JS_ARRAY_TYPE ||
1719         type == JS_OBJECT_TYPE ||
1720         type < FIRST_NONSTRING_TYPE;
1721   }
1722   return type == JS_ARRAY_TYPE;
1723 }
1724 
1725 
pretenure_decision()1726 AllocationSite::PretenureDecision AllocationSite::pretenure_decision() {
1727   int value = pretenure_data();
1728   return PretenureDecisionBits::decode(value);
1729 }
1730 
1731 
// Read-modify-write of the decision bits; other bits in pretenure_data are
// preserved.
void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
  int value = pretenure_data();
  set_pretenure_data(PretenureDecisionBits::update(value, decision));
}
1736 
1737 
// True when dependent code must be deoptimized (bit in pretenure_data).
bool AllocationSite::deopt_dependent_code() {
  int value = pretenure_data();
  return DeoptDependentCodeBit::decode(value);
}
1742 
1743 
// Read-modify-write of the deopt flag; other pretenure_data bits preserved.
void AllocationSite::set_deopt_dependent_code(bool deopt) {
  int value = pretenure_data();
  set_pretenure_data(DeoptDependentCodeBit::update(value, deopt));
}
1748 
1749 
// Number of mementos found for this site, decoded from pretenure_data.
int AllocationSite::memento_found_count() {
  int value = pretenure_data();
  return MementoFoundCountBits::decode(value);
}
1754 
1755 
// Stores the found-count in the packed pretenure_data bitfield.
inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (Heap::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
  set_pretenure_data(MementoFoundCountBits::update(value, count));
}
1766 
1767 
// Create-count is stored in its own field, not bit-packed like found-count.
int AllocationSite::memento_create_count() { return pretenure_create_count(); }
1769 
1770 
// Forwards to the dedicated pretenure_create_count field.
void AllocationSite::set_memento_create_count(int count) {
  set_pretenure_create_count(count);
}
1774 
1775 
// Bumps the found-count by |increment|; returns true once enough mementos
// have been found to make a pretenuring decision. Zombie sites are ignored.
bool AllocationSite::IncrementMementoFoundCount(int increment) {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + increment);
  return memento_found_count() >= kPretenureMinimumCreated;
}
1783 
1784 
// Bumps the create-count by one; only meaningful when pretenuring is on.
inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}
1790 
1791 
// State machine for the pretenuring decision. Returns true iff a transition
// to kTenure was made, which requires deoptimizing dependent code.
inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}
1816 
1817 
DigestPretenuringFeedback(bool maximum_size_scavenge)1818 inline bool AllocationSite::DigestPretenuringFeedback(
1819     bool maximum_size_scavenge) {
1820   bool deopt = false;
1821   int create_count = memento_create_count();
1822   int found_count = memento_found_count();
1823   bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
1824   double ratio =
1825       minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
1826           static_cast<double>(found_count) / create_count : 0.0;
1827   PretenureDecision current_decision = pretenure_decision();
1828 
1829   if (minimum_mementos_created) {
1830     deopt = MakePretenureDecision(
1831         current_decision, ratio, maximum_size_scavenge);
1832   }
1833 
1834   if (FLAG_trace_pretenuring_statistics) {
1835     PrintIsolate(GetIsolate(),
1836                  "pretenuring: AllocationSite(%p): (created, found, ratio) "
1837                  "(%d, %d, %f) %s => %s\n",
1838                  this, create_count, found_count, ratio,
1839                  PretenureDecisionName(current_decision),
1840                  PretenureDecisionName(pretenure_decision()));
1841   }
1842 
1843   // Clear feedback calculation fields until the next gc.
1844   set_memento_found_count(0);
1845   set_memento_create_count(0);
1846   return deopt;
1847 }
1848 
1849 
// A memento is valid when it points at a live (non-zombie) AllocationSite.
bool AllocationMemento::IsValid() {
  return allocation_site()->IsAllocationSite() &&
         !AllocationSite::cast(allocation_site())->IsZombie();
}
1854 
1855 
// Caller must have checked IsValid() first.
AllocationSite* AllocationMemento::GetAllocationSite() {
  DCHECK(IsValid());
  return AllocationSite::cast(allocation_site());
}
1860 
1861 
EnsureCanContainHeapObjectElements(Handle<JSObject> object)1862 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1863   JSObject::ValidateElements(object);
1864   ElementsKind elements_kind = object->map()->elements_kind();
1865   if (!IsFastObjectElementsKind(elements_kind)) {
1866     if (IsFastHoleyElementsKind(elements_kind)) {
1867       TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1868     } else {
1869       TransitionElementsKind(object, FAST_ELEMENTS);
1870     }
1871   }
1872 }
1873 
1874 
// Scans |count| raw element values and transitions |object|'s elements kind
// just enough to hold them (smi -> double -> object, packed -> holey).
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->map()->elements_kind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    // FAST_HOLEY_ELEMENTS is the most general fast kind; nothing to do.
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Heap* heap = object->GetHeap();
    Object* the_hole = heap->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          // Holey object elements is maximal; no further scanning needed.
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}
1915 
1916 
// Variant taking a backing store. Non-double stores are delegated to the
// raw-pointer overload; double stores transition the receiver to a double
// elements kind (holey if any hole is present).
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    DCHECK(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    // A single hole forces the holey double kind.
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}
1949 
1950 
// Installs |new_map| and then the elements store, asserting that the map's
// elements kind is consistent with the backing store's map/type.
void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array())) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}
1964 
1965 
// Raw field write plus (conditional) GC write barrier.
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}
1970 
1971 
// No write barrier needed: the initial elements store is an immortal
// immovable root object.
void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}
1976 
1977 
// Fetches the indexed-property interceptor from the API function data of the
// object's constructor. Only valid when the map declares one.
InterceptorInfo* JSObject::GetIndexedInterceptor() {
  DCHECK(map()->has_indexed_interceptor());
  JSFunction* constructor = JSFunction::cast(map()->GetConstructor());
  DCHECK(constructor->shared()->IsApiFunction());
  Object* result =
      constructor->shared()->get_api_func_data()->indexed_property_handler();
  return InterceptorInfo::cast(result);
}
1986 
1987 
// Generated getters/setters for the Oddball string/number/typeof fields.
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
ACCESSORS(Oddball, type_of, String, kTypeOfOffset)
1991 
1992 
// Kind is stored as a smi in the kKindOffset field.
byte Oddball::kind() const {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}
1996 
1997 
// Smi store; no write barrier needed for smis.
void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}
2001 
2002 
2003 // static
// static
// Returns the oddball's cached numeric value as a handle.
Handle<Object> Oddball::ToNumber(Handle<Oddball> input) {
  return handle(input->to_number(), input->GetIsolate());
}
2007 
2008 
// Generated getters/setters for Cell and PropertyCell fields.
ACCESSORS(Cell, value, Object, kValueOffset)
ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
ACCESSORS(PropertyCell, value, Object, kValueOffset)
2013 
2014 
// Details are stored raw as a smi; decode into PropertyDetails.
PropertyDetails PropertyCell::property_details() {
  return PropertyDetails(Smi::cast(property_details_raw()));
}
2018 
2019 
// Encode PropertyDetails back into the raw smi field.
void PropertyCell::set_property_details(PropertyDetails details) {
  set_property_details_raw(details.AsSmi());
}
2023 
2024 
// Raw read of the (possibly cleared) weak referent.
Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }
2026 
2027 
// Clears the referent by storing Smi 0 (see cleared()).
void WeakCell::clear() {
  // Either the garbage collector is clearing the cell or we are simply
  // initializing the root empty weak cell.
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT ||
         this == GetHeap()->empty_weak_cell());
  WRITE_FIELD(this, kValueOffset, Smi::FromInt(0));
}
2035 
2036 
// Stores the referent with only the generational write barrier.
void WeakCell::initialize(HeapObject* val) {
  WRITE_FIELD(this, kValueOffset, val);
  Heap* heap = GetHeap();
  // We just have to execute the generational barrier here because we never
  // mark through a weak cell and collect evacuation candidates when we process
  // all weak cells.
  if (heap->InNewSpace(val)) {
    heap->RecordWrite(address(), kValueOffset);
  }
}
2047 
2048 
// Smi 0 is the sentinel written by clear().
bool WeakCell::cleared() const { return value() == Smi::FromInt(0); }
2050 
2051 
// Link in the heap's intrusive weak-cell list.
Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }
2053 
2054 
// Stores the next-link; barrier only when the caller requests it.
void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
  WRITE_FIELD(this, kNextOffset, val);
  if (mode == UPDATE_WRITE_BARRIER) {
    WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
  }
}
2061 
2062 
// Unlinks the cell; the-hole marks "not in any list". Caller passes the hole
// so no heap access is needed here beyond the DCHECK.
void WeakCell::clear_next(Object* the_hole_value) {
  DCHECK_EQ(GetHeap()->the_hole_value(), the_hole_value);
  set_next(the_hole_value, SKIP_WRITE_BARRIER);
}
2067 
2068 
// True when the cell is not linked into a weak-cell list.
bool WeakCell::next_cleared() { return next()->IsTheHole(); }
2070 
2071 
// Convenience overload dispatching on this object's instance type.
int JSObject::GetHeaderSize() { return GetHeaderSize(map()->instance_type()); }
2073 
2074 
// Maps an instance type to the fixed header size of the corresponding
// JSObject subclass; internal fields and in-object properties follow it.
int JSObject::GetHeaderSize(InstanceType type) {
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BOUND_FUNCTION_TYPE:
      return JSBoundFunction::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_ITERATOR_RESULT_TYPE:
      return JSIteratorResult::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_PROMISE_TYPE:
      // Promises and context-extension objects have no extra header fields.
      return JSObject::kHeaderSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      UNREACHABLE();
      return 0;
  }
}
2132 
2133 
// Internal field count = (instance size - header) in words, minus the
// in-object property slots. Variable-size objects have none.
int JSObject::GetInternalFieldCount(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size == kVariableSizeSentinel) return 0;
  InstanceType instance_type = map->instance_type();
  return ((instance_size - GetHeaderSize(instance_type)) >> kPointerSizeLog2) -
         map->GetInObjectProperties();
}
2141 
2142 
// Convenience overload using this object's map.
int JSObject::GetInternalFieldCount() { return GetInternalFieldCount(map()); }
2144 
2145 
// Byte offset of internal field |index|, right after the header.
int JSObject::GetInternalFieldOffset(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}
2150 
2151 
// Reads internal field |index|.
Object* JSObject::GetInternalField(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}
2159 
2160 
// Writes internal field |index| with a full write barrier.
void JSObject::SetInternalField(int index, Object* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
2170 
2171 
// Smi overload: smis never need a write barrier.
void JSObject::SetInternalField(int index, Smi* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}
2180 
2181 
// Delegates to the map; always false when double unboxing is disabled.
bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  return map()->IsUnboxedDoubleField(index);
}
2186 
2187 
// Only in-object, non-hidden fields can be unboxed; the layout descriptor
// records which of those hold raw doubles (untagged).
bool Map::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  if (index.is_hidden_field() || !index.is_inobject()) return false;
  return !layout_descriptor()->IsTagged(index.property_index());
}
2193 
2194 
2195 // Access fast-case object properties at index. The use of these routines
2196 // is needed to correctly distinguish between properties stored in-object and
2197 // properties stored in the properties array.
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(FieldIndex index) {
  DCHECK(!IsUnboxedDoubleField(index));
  if (index.is_inobject()) {
    return READ_FIELD(this, index.offset());
  } else {
    return properties()->get(index.outobject_array_index());
  }
}
2206 
2207 
// Reads a raw (unboxed) double field; only valid for unboxed double fields.
double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
  DCHECK(IsUnboxedDoubleField(index));
  return READ_DOUBLE_FIELD(this, index.offset());
}
2212 
2213 
// Writes a tagged fast property, in-object or in the properties array,
// with the appropriate write barrier.
void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
  if (index.is_inobject()) {
    int offset = index.offset();
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    properties()->set(index.outobject_array_index(), value);
  }
}
2223 
2224 
// Raw double store; no barrier needed for untagged data.
void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
  WRITE_DOUBLE_FIELD(this, index.offset(), value);
}
2228 
2229 
// Stores |value|, unwrapping mutable heap numbers into unboxed double fields.
void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
  if (IsUnboxedDoubleField(index)) {
    DCHECK(value->IsMutableHeapNumber());
    RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
  } else {
    RawFastPropertyAtPut(index, value);
  }
}
2238 
2239 
// Writes |value| into the DATA field described by |descriptor|, respecting
// the field's representation (double fields are stored unboxed or in a
// mutable HeapNumber box).
void JSObject::WriteToField(int descriptor, Object* value) {
  DisallowHeapAllocation no_gc;

  DescriptorArray* desc = map()->instance_descriptors();
  PropertyDetails details = desc->GetDetails(descriptor);

  DCHECK(details.type() == DATA);

  FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
  if (details.representation().IsDouble()) {
    // Nothing more to be done.
    if (value->IsUninitialized()) return;
    if (IsUnboxedDoubleField(index)) {
      RawFastDoublePropertyAtPut(index, value->Number());
    } else {
      // Update the existing box in place rather than allocating a new one.
      HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
      DCHECK(box->IsMutableHeapNumber());
      box->set_value(value->Number());
    }
  } else {
    RawFastPropertyAtPut(index, value);
  }
}
2263 
2264 
// Byte offset of in-object property |index|, computed by the map.
int JSObject::GetInObjectPropertyOffset(int index) {
  return map()->GetInObjectPropertyOffset(index);
}
2268 
2269 
// Reads in-object property |index|.
Object* JSObject::InObjectPropertyAt(int index) {
  int offset = GetInObjectPropertyOffset(index);
  return READ_FIELD(this, offset);
}
2274 
2275 
// Writes in-object property |index| with a conditional barrier; returns the
// stored value for caller convenience.
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  int offset = GetInObjectPropertyOffset(index);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}
2285 
2286 
// Initializes the object body from |start_offset| to the instance size:
// pre-allocated property slots get |pre_allocated_value|, the rest (unused
// fields) get |filler_value|. Both values must be new-space safe because the
// stores skip the write barrier.
void JSObject::InitializeBody(Map* map, int start_offset,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  DCHECK(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  DCHECK(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = start_offset;
  if (filler_value != pre_allocated_value) {
    int end_of_pre_allocated_offset =
        size - (map->unused_property_fields() * kPointerSize);
    DCHECK_LE(kHeaderSize, end_of_pre_allocated_offset);
    while (offset < end_of_pre_allocated_offset) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}
2310 
2311 
// Heuristic deciding when a fast-mode object should go to dictionary mode:
// too many out-of-object properties relative to a store-mode-dependent limit.
// Prototype maps never convert here.
bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
  if (unused_property_fields() != 0) return false;
  if (is_prototype_map()) return false;
  int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
  int limit = Max(minimum, GetInObjectProperties());
  int external = NumberOfFields() - GetInObjectProperties();
  return external > limit;
}
2320 
2321 
// Fills every field slot of a Struct with undefined.
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
2328 
2329 
// Array lengths are any uint32 (including kMaxUInt32).
bool Object::ToArrayLength(uint32_t* index) { return Object::ToUint32(index); }
2331 
2332 
// Array indices exclude kMaxUInt32 (which is a valid length but not index).
bool Object::ToArrayIndex(uint32_t* index) {
  return Object::ToUint32(index) && *index != kMaxUInt32;
}
2336 
2337 
IsStringObjectWithCharacterAt(uint32_t index)2338 bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
2339   if (!this->IsJSValue()) return false;
2340 
2341   JSValue* js_value = JSValue::cast(this);
2342   if (!js_value->value()->IsString()) return false;
2343 
2344   String* str = String::cast(js_value->value());
2345   if (index >= static_cast<uint32_t>(str->length())) return false;
2346 
2347   return true;
2348 }
2349 
2350 
// Debug-only sanity check that embedder callbacks returned a legal V8 value.
void Object::VerifyApiCallResultType() {
#if DEBUG
  if (!(IsSmi() || IsString() || IsSymbol() || IsJSReceiver() ||
        IsHeapNumber() || IsSimd128Value() || IsUndefined() || IsTrue() ||
        IsFalse() || IsNull())) {
    FATAL("API call returned invalid object");
  }
#endif  // DEBUG
}
2360 
2361 
// Bounds-checked (slow DCHECK only) tagged element read.
Object* FixedArray::get(int index) const {
  SLOW_DCHECK(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}
2366 
2367 
// Handlified read.
Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
  return handle(array->get(index), array->GetIsolate());
}
2371 
2372 
// Identity comparison against the canonical the-hole value.
bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();
}
2376 
2377 
// Smi store; no write barrier needed. COW arrays must not be mutated.
void FixedArray::set(int index, Smi* value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}
2385 
2386 
// Tagged store with full write barrier. COW arrays must not be mutated.
void FixedArray::set(int index, Object* value) {
  DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
  DCHECK(IsFixedArray());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
2395 
2396 
// Reads element |index| as a double; must not be a hole (holes share the
// storage as a NaN bit pattern, see is_the_hole()).
double FixedDoubleArray::get_scalar(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!is_the_hole(index));
  return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
}
2404 
2405 
// Raw bit pattern of element |index|; usable even for hole slots.
uint64_t FixedDoubleArray::get_representation(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kDoubleSize;
  return READ_UINT64_FIELD(this, offset);
}
2413 
2414 
get(Handle<FixedDoubleArray> array,int index)2415 Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
2416                                      int index) {
2417   if (array->is_the_hole(index)) {
2418     return array->GetIsolate()->factory()->the_hole_value();
2419   } else {
2420     return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
2421   }
2422 }
2423 
2424 
// Stores |value|, canonicalizing every NaN to the quiet NaN so that no
// stored bit pattern collides with the hole NaN.
void FixedDoubleArray::set(int index, double value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) {
    WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
  } else {
    WRITE_DOUBLE_FIELD(this, offset, value);
  }
  DCHECK(!is_the_hole(index));
}
2436 
2437 
// Marks the slot as a hole by writing the dedicated hole NaN bit pattern.
void FixedDoubleArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
}
2444 
2445 
// Bitwise comparison against the hole NaN (value comparison would fail for
// NaN).
bool FixedDoubleArray::is_the_hole(int index) {
  return get_representation(index) == kHoleNanInt64;
}
2449 
2450 
// Pointer to the first (unboxed) double element.
double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}
2454 
2455 
FillWithHoles(int from,int to)2456 void FixedDoubleArray::FillWithHoles(int from, int to) {
2457   for (int i = from; i < to; i++) {
2458     set_the_hole(i);
2459   }
2460 }
2461 
2462 
// Reads logical slot |index| (offset past the header slots). Smis mark empty
// slots; otherwise the slot is a WeakCell whose value is returned.
Object* WeakFixedArray::Get(int index) const {
  Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
  if (raw->IsSmi()) return raw;
  DCHECK(raw->IsWeakCell());
  return WeakCell::cast(raw)->value();
}
2469 
2470 
// Empty slots hold a smi sentinel (see Clear()).
bool WeakFixedArray::IsEmptySlot(int index) const {
  DCHECK(index < Length());
  return Get(index)->IsSmi();
}
2475 
2476 
// Empties logical slot |index| by storing Smi 0.
void WeakFixedArray::Clear(int index) {
  FixedArray::cast(this)->set(index + kFirstIndex, Smi::FromInt(0));
}
2480 
2481 
// Logical length excludes the bookkeeping header slots.
int WeakFixedArray::Length() const {
  return FixedArray::cast(this)->length() - kFirstIndex;
}
2485 
2486 
// Bookkeeping: index of the last used logical slot, stored as a smi.
int WeakFixedArray::last_used_index() const {
  return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
}
2490 
2491 
// Updates the last-used-index bookkeeping slot.
void WeakFixedArray::set_last_used_index(int index) {
  FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
}
2495 
2496 
// Returns the next non-empty element cast to T, or NULL when exhausted.
// Skips cleared/empty slots; NULLs out list_ at the end so subsequent calls
// return NULL cheaply.
template <class T>
T* WeakFixedArray::Iterator::Next() {
  if (list_ != NULL) {
    // Assert that list did not change during iteration.
    DCHECK_EQ(last_used_index_, list_->last_used_index());
    while (index_ < list_->Length()) {
      Object* item = list_->Get(index_++);
      if (item != Empty()) return T::cast(item);
    }
    list_ = NULL;
  }
  return NULL;
}
2510 
2511 
// Logical length stored as a smi in the first backing slot; an empty backing
// store means length 0.
int ArrayList::Length() {
  if (FixedArray::cast(this)->length() == 0) return 0;
  return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
}
2516 
2517 
SetLength(int length)2518 void ArrayList::SetLength(int length) {
2519   return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
2520 }
2521 
2522 
// Logical element access, offset past the length slot.
Object* ArrayList::Get(int index) {
  return FixedArray::cast(this)->get(kFirstIndex + index);
}
2526 
2527 
// Raw address of logical slot |index| (no barrier; use with care).
Object** ArrayList::Slot(int index) {
  return data_start() + kFirstIndex + index;
}
2531 
2532 
// Logical element store with the default write barrier.
void ArrayList::Set(int index, Object* obj) {
  FixedArray::cast(this)->set(kFirstIndex + index, obj);
}
2536 
2537 
// Clears a slot with undefined; barrier skipped since undefined is immortal.
void ArrayList::Clear(int index, Object* undefined) {
  DCHECK(undefined->IsUndefined());
  FixedArray::cast(this)
      ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
}
2543 
2544 
// Barrier can be skipped only for new-space objects outside incremental
// marking. The DisallowHeapAllocation witness guarantees the answer stays
// valid (no GC can move the object meanwhile).
WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
2552 
2553 
// On 32-bit hosts, double/SIMD payloads need stricter-than-word alignment;
// on 64-bit hosts word alignment always suffices.
AllocationAlignment HeapObject::RequiredAlignment() {
#ifdef V8_HOST_ARCH_32_BIT
  if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
      FixedArrayBase::cast(this)->length() != 0) {
    return kDoubleAligned;
  }
  if (IsHeapNumber()) return kDoubleUnaligned;
  if (IsSimd128Value()) return kSimd128Unaligned;
#endif  // V8_HOST_ARCH_32_BIT
  return kWordAligned;
}
2565 
2566 
// Generic element store with caller-controlled write-barrier mode.
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());  // COW arrays are not
                                                      // mutated in place.
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}
2576 
2577 
// Barrier-free element store.  Only valid when |value| is not in new
// space (asserted below), so no remembered-set update can be required.
void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  DCHECK(!array->GetHeap()->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
2586 
2587 
// Stores the undefined sentinel.  It lives outside new space (asserted
// below), so the write barrier can be skipped.
void FixedArray::set_undefined(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}
2596 
2597 
// Stores the null sentinel.  It lives outside new space (asserted below),
// so the write barrier can be skipped.
// NOTE(review): unlike set_undefined()/set_the_hole() this omits the
// fixed_cow_array_map DCHECK -- confirm whether that is intentional.
void FixedArray::set_null(int index) {
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->null_value());
}
2605 
2606 
// Stores the hole sentinel.  It lives outside new space (asserted below),
// so the write barrier can be skipped.
void FixedArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}
2615 
2616 
FillWithHoles(int from,int to)2617 void FixedArray::FillWithHoles(int from, int to) {
2618   for (int i = from; i < to; i++) {
2619     set_the_hole(i);
2620   }
2621 }
2622 
2623 
data_start()2624 Object** FixedArray::data_start() {
2625   return HeapObject::RawField(this, kHeaderSize);
2626 }
2627 
2628 
RawFieldOfElementAt(int index)2629 Object** FixedArray::RawFieldOfElementAt(int index) {
2630   return HeapObject::RawField(this, OffsetOfElementAt(index));
2631 }
2632 
2633 
IsEmpty()2634 bool DescriptorArray::IsEmpty() {
2635   DCHECK(length() >= kFirstIndex ||
2636          this == GetHeap()->empty_descriptor_array());
2637   return length() < kFirstIndex;
2638 }
2639 
2640 
number_of_descriptors()2641 int DescriptorArray::number_of_descriptors() {
2642   DCHECK(length() >= kFirstIndex || IsEmpty());
2643   int len = length();
2644   return len == 0 ? 0 : Smi::cast(get(kDescriptorLengthIndex))->value();
2645 }
2646 
2647 
number_of_descriptors_storage()2648 int DescriptorArray::number_of_descriptors_storage() {
2649   int len = length();
2650   return len == 0 ? 0 : (len - kFirstIndex) / kDescriptorSize;
2651 }
2652 
2653 
NumberOfSlackDescriptors()2654 int DescriptorArray::NumberOfSlackDescriptors() {
2655   return number_of_descriptors_storage() - number_of_descriptors();
2656 }
2657 
2658 
// Raw header-field write; the value is a Smi, so no write barrier is
// needed.
void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}
2663 
2664 
number_of_entries()2665 inline int DescriptorArray::number_of_entries() {
2666   return number_of_descriptors();
2667 }
2668 
2669 
HasEnumCache()2670 bool DescriptorArray::HasEnumCache() {
2671   return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
2672 }
2673 
2674 
CopyEnumCacheFrom(DescriptorArray * array)2675 void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
2676   set(kEnumCacheIndex, array->get(kEnumCacheIndex));
2677 }
2678 
2679 
GetEnumCache()2680 FixedArray* DescriptorArray::GetEnumCache() {
2681   DCHECK(HasEnumCache());
2682   FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
2683   return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
2684 }
2685 
2686 
HasEnumIndicesCache()2687 bool DescriptorArray::HasEnumIndicesCache() {
2688   if (IsEmpty()) return false;
2689   Object* object = get(kEnumCacheIndex);
2690   if (object->IsSmi()) return false;
2691   FixedArray* bridge = FixedArray::cast(object);
2692   return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
2693 }
2694 
2695 
GetEnumIndicesCache()2696 FixedArray* DescriptorArray::GetEnumIndicesCache() {
2697   DCHECK(HasEnumIndicesCache());
2698   FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
2699   return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
2700 }
2701 
2702 
GetEnumCacheSlot()2703 Object** DescriptorArray::GetEnumCacheSlot() {
2704   DCHECK(HasEnumCache());
2705   return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
2706                               kEnumCacheOffset);
2707 }
2708 
2709 
2710 // Perform a binary search in a fixed array. Low and high are entry indices. If
2711 // there are three entries in this array it should be called with low=0 and
2712 // high=2.
2713 template <SearchMode search_mode, typename T>
BinarySearch(T * array,Name * name,int low,int high,int valid_entries,int * out_insertion_index)2714 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries,
2715                  int* out_insertion_index) {
2716   DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
2717   uint32_t hash = name->Hash();
2718   int limit = high;
2719 
2720   DCHECK(low <= high);
2721 
2722   while (low != high) {
2723     int mid = low + (high - low) / 2;
2724     Name* mid_name = array->GetSortedKey(mid);
2725     uint32_t mid_hash = mid_name->Hash();
2726 
2727     if (mid_hash >= hash) {
2728       high = mid;
2729     } else {
2730       low = mid + 1;
2731     }
2732   }
2733 
2734   for (; low <= limit; ++low) {
2735     int sort_index = array->GetSortedKeyIndex(low);
2736     Name* entry = array->GetKey(sort_index);
2737     uint32_t current_hash = entry->Hash();
2738     if (current_hash != hash) {
2739       if (out_insertion_index != NULL) {
2740         *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
2741       }
2742       return T::kNotFound;
2743     }
2744     if (entry->Equals(name)) {
2745       if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2746         return sort_index;
2747       }
2748       return T::kNotFound;
2749     }
2750   }
2751 
2752   if (out_insertion_index != NULL) *out_insertion_index = limit + 1;
2753   return T::kNotFound;
2754 }
2755 
2756 
2757 // Perform a linear search in this fixed array. len is the number of entry
2758 // indices that are valid.
2759 template <SearchMode search_mode, typename T>
LinearSearch(T * array,Name * name,int len,int valid_entries,int * out_insertion_index)2760 int LinearSearch(T* array, Name* name, int len, int valid_entries,
2761                  int* out_insertion_index) {
2762   uint32_t hash = name->Hash();
2763   if (search_mode == ALL_ENTRIES) {
2764     for (int number = 0; number < len; number++) {
2765       int sorted_index = array->GetSortedKeyIndex(number);
2766       Name* entry = array->GetKey(sorted_index);
2767       uint32_t current_hash = entry->Hash();
2768       if (current_hash > hash) {
2769         if (out_insertion_index != NULL) *out_insertion_index = sorted_index;
2770         return T::kNotFound;
2771       }
2772       if (current_hash == hash && entry->Equals(name)) return sorted_index;
2773     }
2774     if (out_insertion_index != NULL) *out_insertion_index = len;
2775     return T::kNotFound;
2776   } else {
2777     DCHECK(len >= valid_entries);
2778     DCHECK_NULL(out_insertion_index);  // Not supported here.
2779     for (int number = 0; number < valid_entries; number++) {
2780       Name* entry = array->GetKey(number);
2781       uint32_t current_hash = entry->Hash();
2782       if (current_hash == hash && entry->Equals(name)) return number;
2783     }
2784     return T::kNotFound;
2785   }
2786 }
2787 
2788 
2789 template <SearchMode search_mode, typename T>
Search(T * array,Name * name,int valid_entries,int * out_insertion_index)2790 int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
2791   if (search_mode == VALID_ENTRIES) {
2792     SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2793   } else {
2794     SLOW_DCHECK(array->IsSortedNoDuplicates());
2795   }
2796 
2797   int nof = array->number_of_entries();
2798   if (nof == 0) {
2799     if (out_insertion_index != NULL) *out_insertion_index = 0;
2800     return T::kNotFound;
2801   }
2802 
2803   // Fast case: do linear search for small arrays.
2804   const int kMaxElementsForLinearSearch = 8;
2805   if ((search_mode == ALL_ENTRIES &&
2806        nof <= kMaxElementsForLinearSearch) ||
2807       (search_mode == VALID_ENTRIES &&
2808        valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2809     return LinearSearch<search_mode>(array, name, nof, valid_entries,
2810                                      out_insertion_index);
2811   }
2812 
2813   // Slow case: perform binary search.
2814   return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries,
2815                                    out_insertion_index);
2816 }
2817 
2818 
Search(Name * name,int valid_descriptors)2819 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2820   return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
2821 }
2822 
2823 
SearchWithCache(Name * name,Map * map)2824 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2825   int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2826   if (number_of_own_descriptors == 0) return kNotFound;
2827 
2828   DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2829   int number = cache->Lookup(map, name);
2830 
2831   if (number == DescriptorLookupCache::kAbsent) {
2832     number = Search(name, number_of_own_descriptors);
2833     cache->Update(map, name, number);
2834   }
2835 
2836   return number;
2837 }
2838 
2839 
GetLastDescriptorDetails()2840 PropertyDetails Map::GetLastDescriptorDetails() {
2841   return instance_descriptors()->GetDetails(LastAdded());
2842 }
2843 
2844 
LastAdded()2845 int Map::LastAdded() {
2846   int number_of_own_descriptors = NumberOfOwnDescriptors();
2847   DCHECK(number_of_own_descriptors > 0);
2848   return number_of_own_descriptors - 1;
2849 }
2850 
2851 
NumberOfOwnDescriptors()2852 int Map::NumberOfOwnDescriptors() {
2853   return NumberOfOwnDescriptorsBits::decode(bit_field3());
2854 }
2855 
2856 
SetNumberOfOwnDescriptors(int number)2857 void Map::SetNumberOfOwnDescriptors(int number) {
2858   DCHECK(number <= instance_descriptors()->number_of_descriptors());
2859   set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
2860 }
2861 
2862 
EnumLength()2863 int Map::EnumLength() { return EnumLengthBits::decode(bit_field3()); }
2864 
2865 
// Stores the cached enum length; kInvalidEnumCacheSentinel bypasses the
// consistency checks below.
void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK(length >= 0);
    // A non-zero length implies the descriptor array carries an enum cache.
    DCHECK(length == 0 || instance_descriptors()->HasEnumCache());
    DCHECK(length <= NumberOfOwnDescriptors());
  }
  set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}
2874 
2875 
// Returns the canonical empty backing store matching this map's elements
// kind.  Results are asserted to be outside new space so callers can
// store them without a write barrier.
FixedArrayBase* Map::GetInitialElements() {
  if (has_fast_smi_or_object_elements() ||
      has_fast_double_elements()) {
    DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
    return GetHeap()->empty_fixed_array();
  } else if (has_fixed_typed_array_elements()) {
    // Typed arrays have a per-type canonical empty array.
    FixedTypedArrayBase* empty_array =
        GetHeap()->EmptyFixedTypedArrayForMap(this);
    DCHECK(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else {
    UNREACHABLE();
  }
  return NULL;  // Unreachable; placates compilers that require a return.
}
2891 
2892 
GetKeySlot(int descriptor_number)2893 Object** DescriptorArray::GetKeySlot(int descriptor_number) {
2894   DCHECK(descriptor_number < number_of_descriptors());
2895   return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
2896 }
2897 
2898 
GetDescriptorStartSlot(int descriptor_number)2899 Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
2900   return GetKeySlot(descriptor_number);
2901 }
2902 
2903 
GetDescriptorEndSlot(int descriptor_number)2904 Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
2905   return GetValueSlot(descriptor_number - 1) + 1;
2906 }
2907 
2908 
GetKey(int descriptor_number)2909 Name* DescriptorArray::GetKey(int descriptor_number) {
2910   DCHECK(descriptor_number < number_of_descriptors());
2911   return Name::cast(get(ToKeyIndex(descriptor_number)));
2912 }
2913 
2914 
GetSortedKeyIndex(int descriptor_number)2915 int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
2916   return GetDetails(descriptor_number).pointer();
2917 }
2918 
2919 
GetSortedKey(int descriptor_number)2920 Name* DescriptorArray::GetSortedKey(int descriptor_number) {
2921   return GetKey(GetSortedKeyIndex(descriptor_number));
2922 }
2923 
2924 
SetSortedKey(int descriptor_index,int pointer)2925 void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
2926   PropertyDetails details = GetDetails(descriptor_index);
2927   set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
2928 }
2929 
2930 
SetRepresentation(int descriptor_index,Representation representation)2931 void DescriptorArray::SetRepresentation(int descriptor_index,
2932                                         Representation representation) {
2933   DCHECK(!representation.IsNone());
2934   PropertyDetails details = GetDetails(descriptor_index);
2935   set(ToDetailsIndex(descriptor_index),
2936       details.CopyWithRepresentation(representation).AsSmi());
2937 }
2938 
2939 
GetValueSlot(int descriptor_number)2940 Object** DescriptorArray::GetValueSlot(int descriptor_number) {
2941   DCHECK(descriptor_number < number_of_descriptors());
2942   return RawFieldOfElementAt(ToValueIndex(descriptor_number));
2943 }
2944 
2945 
GetValueOffset(int descriptor_number)2946 int DescriptorArray::GetValueOffset(int descriptor_number) {
2947   return OffsetOfElementAt(ToValueIndex(descriptor_number));
2948 }
2949 
2950 
GetValue(int descriptor_number)2951 Object* DescriptorArray::GetValue(int descriptor_number) {
2952   DCHECK(descriptor_number < number_of_descriptors());
2953   return get(ToValueIndex(descriptor_number));
2954 }
2955 
2956 
SetValue(int descriptor_index,Object * value)2957 void DescriptorArray::SetValue(int descriptor_index, Object* value) {
2958   set(ToValueIndex(descriptor_index), value);
2959 }
2960 
2961 
GetDetails(int descriptor_number)2962 PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
2963   DCHECK(descriptor_number < number_of_descriptors());
2964   Object* details = get(ToDetailsIndex(descriptor_number));
2965   return PropertyDetails(Smi::cast(details));
2966 }
2967 
2968 
GetType(int descriptor_number)2969 PropertyType DescriptorArray::GetType(int descriptor_number) {
2970   return GetDetails(descriptor_number).type();
2971 }
2972 
2973 
GetFieldIndex(int descriptor_number)2974 int DescriptorArray::GetFieldIndex(int descriptor_number) {
2975   DCHECK(GetDetails(descriptor_number).location() == kField);
2976   return GetDetails(descriptor_number).field_index();
2977 }
2978 
2979 
// Field type of a field-located descriptor.  The type may be held through
// a WeakCell; a cleared cell degrades to HeapType::None().
HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).location() == kField);
  Object* value = GetValue(descriptor_number);
  if (value->IsWeakCell()) {
    if (WeakCell::cast(value)->cleared()) return HeapType::None();
    value = WeakCell::cast(value)->value();
  }
  return HeapType::cast(value);
}
2989 
2990 
GetConstant(int descriptor_number)2991 Object* DescriptorArray::GetConstant(int descriptor_number) {
2992   return GetValue(descriptor_number);
2993 }
2994 
2995 
GetCallbacksObject(int descriptor_number)2996 Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
2997   DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
2998   return GetValue(descriptor_number);
2999 }
3000 
3001 
// The callbacks are stored as a Foreign wrapping the address of the
// native AccessorDescriptor struct.
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}
3007 
3008 
Get(int descriptor_number,Descriptor * desc)3009 void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
3010   desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
3011              handle(GetValue(descriptor_number), GetIsolate()),
3012              GetDetails(descriptor_number));
3013 }
3014 
3015 
SetDescriptor(int descriptor_number,Descriptor * desc)3016 void DescriptorArray::SetDescriptor(int descriptor_number, Descriptor* desc) {
3017   // Range check.
3018   DCHECK(descriptor_number < number_of_descriptors());
3019   set(ToKeyIndex(descriptor_number), *desc->GetKey());
3020   set(ToValueIndex(descriptor_number), *desc->GetValue());
3021   set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
3022 }
3023 
3024 
Set(int descriptor_number,Descriptor * desc)3025 void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
3026   // Range check.
3027   DCHECK(descriptor_number < number_of_descriptors());
3028 
3029   set(ToKeyIndex(descriptor_number), *desc->GetKey());
3030   set(ToValueIndex(descriptor_number), *desc->GetValue());
3031   set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
3032 }
3033 
3034 
// Appends |desc| as the last descriptor and inserts it into the
// hash-sorted index chain so the sorted order stays ordered by key hash.
// Must not allocate: raw pointers are held across the loop.
void DescriptorArray::Append(Descriptor* desc) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  // Shift sorted entries with a larger hash one position right.
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}
3053 
3054 
SwapSortedKeys(int first,int second)3055 void DescriptorArray::SwapSortedKeys(int first, int second) {
3056   int first_key = GetSortedKeyIndex(first);
3057   SetSortedKey(first, GetSortedKeyIndex(second));
3058   SetSortedKey(second, first_key);
3059 }
3060 
3061 
type()3062 PropertyType DescriptorArray::Entry::type() { return descs_->GetType(index_); }
3063 
3064 
GetCallbackObject()3065 Object* DescriptorArray::Entry::GetCallbackObject() {
3066   return descs_->GetValue(index_);
3067 }
3068 
3069 
NumberOfElements()3070 int HashTableBase::NumberOfElements() {
3071   return Smi::cast(get(kNumberOfElementsIndex))->value();
3072 }
3073 
3074 
NumberOfDeletedElements()3075 int HashTableBase::NumberOfDeletedElements() {
3076   return Smi::cast(get(kNumberOfDeletedElementsIndex))->value();
3077 }
3078 
3079 
Capacity()3080 int HashTableBase::Capacity() {
3081   return Smi::cast(get(kCapacityIndex))->value();
3082 }
3083 
3084 
ElementAdded()3085 void HashTableBase::ElementAdded() {
3086   SetNumberOfElements(NumberOfElements() + 1);
3087 }
3088 
3089 
ElementRemoved()3090 void HashTableBase::ElementRemoved() {
3091   SetNumberOfElements(NumberOfElements() - 1);
3092   SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
3093 }
3094 
3095 
ElementsRemoved(int n)3096 void HashTableBase::ElementsRemoved(int n) {
3097   SetNumberOfElements(NumberOfElements() - n);
3098   SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
3099 }
3100 
3101 
3102 // static
ComputeCapacity(int at_least_space_for)3103 int HashTableBase::ComputeCapacity(int at_least_space_for) {
3104   const int kMinCapacity = 4;
3105   int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
3106   return Max(capacity, kMinCapacity);
3107 }
3108 
3109 
IsKey(Object * k)3110 bool HashTableBase::IsKey(Object* k) {
3111   return !k->IsTheHole() && !k->IsUndefined();
3112 }
3113 
3114 
SetNumberOfElements(int nof)3115 void HashTableBase::SetNumberOfElements(int nof) {
3116   set(kNumberOfElementsIndex, Smi::FromInt(nof));
3117 }
3118 
3119 
SetNumberOfDeletedElements(int nod)3120 void HashTableBase::SetNumberOfDeletedElements(int nod) {
3121   set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
3122 }
3123 
3124 
3125 template <typename Derived, typename Shape, typename Key>
FindEntry(Key key)3126 int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
3127   return FindEntry(GetIsolate(), key);
3128 }
3129 
3130 
3131 template<typename Derived, typename Shape, typename Key>
FindEntry(Isolate * isolate,Key key)3132 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3133   return FindEntry(isolate, key, HashTable::Hash(key));
3134 }
3135 
3136 
3137 // Find entry for key otherwise return kNotFound.
template <typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key,
                                              int32_t hash) {
  uint32_t capacity = Capacity();
  uint32_t entry = FirstProbe(hash, capacity);
  uint32_t count = 1;
  // EnsureCapacity will guarantee the hash table is never full.
  while (true) {
    Object* element = KeyAt(entry);
    // Empty entry. Uses raw unchecked accessors because it is called by the
    // string table during bootstrapping.
    if (element == isolate->heap()->root(Heap::kUndefinedValueRootIndex)) break;
    // Deleted entries (the hole) are probed past, never matched.
    if (element != isolate->heap()->root(Heap::kTheHoleValueRootIndex) &&
        Shape::IsMatch(key, element)) return entry;
    entry = NextProbe(entry, count++, capacity);
  }
  return kNotFound;
}
3156 
3157 
requires_slow_elements()3158 bool SeededNumberDictionary::requires_slow_elements() {
3159   Object* max_index_object = get(kMaxNumberKeyIndex);
3160   if (!max_index_object->IsSmi()) return false;
3161   return 0 !=
3162       (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3163 }
3164 
3165 
max_number_key()3166 uint32_t SeededNumberDictionary::max_number_key() {
3167   DCHECK(!requires_slow_elements());
3168   Object* max_index_object = get(kMaxNumberKeyIndex);
3169   if (!max_index_object->IsSmi()) return 0;
3170   uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3171   return value >> kRequiresSlowElementsTagSize;
3172 }
3173 
3174 
set_requires_slow_elements()3175 void SeededNumberDictionary::set_requires_slow_elements() {
3176   set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
3177 }
3178 
3179 
3180 // ------------------------------------
3181 // Cast operations
3182 
3183 
// Define Type::cast(Object*) helpers (with debug-mode layout checks) for
// each of the following heap object types.
CAST_ACCESSOR(AccessorInfo)
CAST_ACCESSOR(ArrayList)
CAST_ACCESSOR(Bool16x8)
CAST_ACCESSOR(Bool32x4)
CAST_ACCESSOR(Bool8x16)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(BytecodeArray)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(ExternalOneByteString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedArrayBase)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
CAST_ACCESSOR(Float32x4)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(GlobalDictionary)
CAST_ACCESSOR(HandlerTable)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(Int16x8)
CAST_ACCESSOR(Int32x4)
CAST_ACCESSOR(Int8x16)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSBoundFunction)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSDate)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(JSGeneratorObject)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSMapIterator)
CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSModule)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSIteratorResult)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSValue)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(LayoutDescriptor)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(NameDictionary)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(Object)
CAST_ACCESSOR(ObjectHashTable)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(OrderedHashMap)
CAST_ACCESSOR(OrderedHashSet)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(SeededNumberDictionary)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(Simd128Value)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(String)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(Uint16x8)
CAST_ACCESSOR(Uint32x4)
CAST_ACCESSOR(Uint8x16)
CAST_ACCESSOR(UnseededNumberDictionary)
CAST_ACCESSOR(WeakCell)
CAST_ACCESSOR(WeakFixedArray)
CAST_ACCESSOR(WeakHashTable)
3272 
3273 
// static
// Out-of-class definition for the in-class-initialized constant, needed
// for ODR-uses under pre-C++17 rules.
template <class Traits>
STATIC_CONST_MEMBER_DEFINITION const InstanceType
    FixedTypedArray<Traits>::kInstanceType;
3278 
3279 
// Unchecked cast in release builds; debug builds verify the object's
// instance type matches this typed array's Traits.
template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
              Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}
3287 
3288 
3289 template <class Traits>
3290 const FixedTypedArray<Traits>*
cast(const Object * object)3291 FixedTypedArray<Traits>::cast(const Object* object) {
3292   SLOW_DCHECK(object->IsHeapObject() &&
3293               HeapObject::cast(object)->map()->instance_type() ==
3294               Traits::kInstanceType);
3295   return reinterpret_cast<FixedTypedArray<Traits>*>(object);
3296 }
3297 
3298 
// Accessors for the fixed header elements of DeoptimizationInputData;
// each element lives at the compile-time constant index k<name>Index.
#define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type)       \
  type* DeoptimizationInputData::name() {                \
    return type::cast(get(k##name##Index));              \
  }                                                      \
  void DeoptimizationInputData::Set##name(type* value) { \
    set(k##name##Index, value);                          \
  }
3306 
// Instantiate the header-element accessors declared above.
DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrAstId, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
DEFINE_DEOPT_ELEMENT_ACCESSORS(WeakCellCache, Object)

#undef DEFINE_DEOPT_ELEMENT_ACCESSORS
3317 
3318 
// Accessors for the per-entry fields of DeoptimizationInputData; entry
// i's field lives at IndexForEntry(i) + k<name>Offset.
#define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type)                \
  type* DeoptimizationInputData::name(int i) {                  \
    return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
  }                                                             \
  void DeoptimizationInputData::Set##name(int i, type* value) { \
    set(IndexForEntry(i) + k##name##Offset, value);             \
  }

DEFINE_DEOPT_ENTRY_ACCESSORS(AstIdRaw, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)

#undef DEFINE_DEOPT_ENTRY_ACCESSORS
3333 
3334 
3335 BailoutId DeoptimizationInputData::AstId(int i) {
3336   return BailoutId(AstIdRaw(i)->value());
3337 }
3338 
3339 
SetAstId(int i,BailoutId value)3340 void DeoptimizationInputData::SetAstId(int i, BailoutId value) {
3341   SetAstIdRaw(i, Smi::FromInt(value.ToInt()));
3342 }
3343 
3344 
DeoptCount()3345 int DeoptimizationInputData::DeoptCount() {
3346   return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
3347 }
3348 
3349 
DeoptPoints()3350 int DeoptimizationOutputData::DeoptPoints() { return length() / 2; }
3351 
3352 
AstId(int index)3353 BailoutId DeoptimizationOutputData::AstId(int index) {
3354   return BailoutId(Smi::cast(get(index * 2))->value());
3355 }
3356 
3357 
SetAstId(int index,BailoutId id)3358 void DeoptimizationOutputData::SetAstId(int index, BailoutId id) {
3359   set(index * 2, Smi::FromInt(id.ToInt()));
3360 }
3361 
3362 
PcAndState(int index)3363 Smi* DeoptimizationOutputData::PcAndState(int index) {
3364   return Smi::cast(get(1 + index * 2));
3365 }
3366 
3367 
SetPcAndState(int index,Smi * offset)3368 void DeoptimizationOutputData::SetPcAndState(int index, Smi* offset) {
3369   set(1 + index * 2, offset);
3370 }
3371 
3372 
get(int index)3373 Object* LiteralsArray::get(int index) const { return FixedArray::get(index); }
3374 
3375 
set(int index,Object * value)3376 void LiteralsArray::set(int index, Object* value) {
3377   FixedArray::set(index, value);
3378 }
3379 
3380 
set(int index,Smi * value)3381 void LiteralsArray::set(int index, Smi* value) {
3382   FixedArray::set(index, value);
3383 }
3384 
3385 
set(int index,Object * value,WriteBarrierMode mode)3386 void LiteralsArray::set(int index, Object* value, WriteBarrierMode mode) {
3387   FixedArray::set(index, value, mode);
3388 }
3389 
3390 
// Unchecked reinterpretation; debug builds verify the layout first.
LiteralsArray* LiteralsArray::cast(Object* object) {
  SLOW_DCHECK(object->IsLiteralsArray());
  return reinterpret_cast<LiteralsArray*>(object);
}
3395 
3396 
feedback_vector()3397 TypeFeedbackVector* LiteralsArray::feedback_vector() const {
3398   return TypeFeedbackVector::cast(get(kVectorIndex));
3399 }
3400 
3401 
set_feedback_vector(TypeFeedbackVector * vector)3402 void LiteralsArray::set_feedback_vector(TypeFeedbackVector* vector) {
3403   set(kVectorIndex, vector);
3404 }
3405 
3406 
literal(int literal_index)3407 Object* LiteralsArray::literal(int literal_index) const {
3408   return get(kFirstLiteralIndex + literal_index);
3409 }
3410 
3411 
set_literal(int literal_index,Object * literal)3412 void LiteralsArray::set_literal(int literal_index, Object* literal) {
3413   set(kFirstLiteralIndex + literal_index, literal);
3414 }
3415 
3416 
literals_count()3417 int LiteralsArray::literals_count() const {
3418   return length() - kFirstLiteralIndex;
3419 }
3420 
3421 
SetRangeStart(int index,int value)3422 void HandlerTable::SetRangeStart(int index, int value) {
3423   set(index * kRangeEntrySize + kRangeStartIndex, Smi::FromInt(value));
3424 }
3425 
3426 
SetRangeEnd(int index,int value)3427 void HandlerTable::SetRangeEnd(int index, int value) {
3428   set(index * kRangeEntrySize + kRangeEndIndex, Smi::FromInt(value));
3429 }
3430 
3431 
SetRangeHandler(int index,int offset,CatchPrediction prediction)3432 void HandlerTable::SetRangeHandler(int index, int offset,
3433                                    CatchPrediction prediction) {
3434   int value = HandlerOffsetField::encode(offset) |
3435               HandlerPredictionField::encode(prediction);
3436   set(index * kRangeEntrySize + kRangeHandlerIndex, Smi::FromInt(value));
3437 }
3438 
3439 
SetRangeDepth(int index,int value)3440 void HandlerTable::SetRangeDepth(int index, int value) {
3441   set(index * kRangeEntrySize + kRangeDepthIndex, Smi::FromInt(value));
3442 }
3443 
3444 
SetReturnOffset(int index,int value)3445 void HandlerTable::SetReturnOffset(int index, int value) {
3446   set(index * kReturnEntrySize + kReturnOffsetIndex, Smi::FromInt(value));
3447 }
3448 
3449 
SetReturnHandler(int index,int offset,CatchPrediction prediction)3450 void HandlerTable::SetReturnHandler(int index, int offset,
3451                                     CatchPrediction prediction) {
3452   int value = HandlerOffsetField::encode(offset) |
3453               HandlerPredictionField::encode(prediction);
3454   set(index * kReturnEntrySize + kReturnHandlerIndex, Smi::FromInt(value));
3455 }
3456 
3457 
// Expands CAST_ACCESSOR for every struct type in STRUCT_LIST, generating
// the corresponding Name::cast() definitions.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST
3461 
3462 
// Unchecked downcast to a HashTable instantiation; the type is verified
// only in slow-DCHECK builds, so callers are responsible for correctness.
template <typename Derived, typename Shape, typename Key>
HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}


// Const overload of the cast above.
template <typename Derived, typename Shape, typename Key>
const HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(const Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<const HashTable*>(obj);
}
3477 
3478 
// Smi-encoded length/size fields. The SYNCHRONIZED_/NOBARRIER_ variants
// generate additional accessors with the corresponding atomic-access
// semantics alongside the plain ones.
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3487 
3488 
// Size in bytes of this free-space block, as recorded in its size field.
int FreeSpace::Size() { return size(); }
3490 
3491 
// Returns the next block in the free list. The link is stored as a raw
// (untagged) pointer at kNextOffset inside the block's payload. The map
// may still be NULL while heap deserialization is in progress.
FreeSpace* FreeSpace::next() {
  DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  // The block must be large enough to hold the link pointer.
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  return reinterpret_cast<FreeSpace*>(
      Memory::Address_at(address() + kNextOffset));
}
3499 
3500 
// Stores the free-list link. Written with a relaxed atomic store because
// the sweeper may access free-list links concurrently -- TODO confirm the
// exact concurrent reader against the heap code.
void FreeSpace::set_next(FreeSpace* next) {
  DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  base::NoBarrier_Store(
      reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
      reinterpret_cast<base::AtomicWord>(next));
}
3509 
3510 
// Downcast helper; the type check is skipped during deserialization when
// maps may not be set up yet.
FreeSpace* FreeSpace::cast(HeapObject* o) {
  SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
  return reinterpret_cast<FreeSpace*>(o);
}
3515 
3516 
// Raw accessor for the 32-bit hash field.
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  // On 64-bit hosts the hash shares a pointer-sized slot with a padding
  // word; zero the other half so the full slot has a deterministic value.
#if V8_TARGET_LITTLE_ENDIAN
  WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
#else
  WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
#endif
#endif
}
3532 
3533 
Equals(Name * other)3534 bool Name::Equals(Name* other) {
3535   if (other == this) return true;
3536   if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3537       this->IsSymbol() || other->IsSymbol()) {
3538     return false;
3539   }
3540   return String::cast(this)->SlowEquals(String::cast(other));
3541 }
3542 
3543 
Equals(Handle<Name> one,Handle<Name> two)3544 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3545   if (one.is_identical_to(two)) return true;
3546   if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3547       one->IsSymbol() || two->IsSymbol()) {
3548     return false;
3549   }
3550   return String::SlowEquals(Handle<String>::cast(one),
3551                             Handle<String>::cast(two));
3552 }
3553 
3554 
// Symbol fields: optional description object plus a Smi flag word with
// the private / well-known bits.
ACCESSORS(Symbol, name, Object, kNameOffset)
SMI_ACCESSORS(Symbol, flags, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
BOOL_ACCESSORS(Symbol, flags, is_well_known_symbol, kWellKnownSymbolBit)
3559 
3560 
3561 bool String::Equals(String* other) {
3562   if (other == this) return true;
3563   if (this->IsInternalizedString() && other->IsInternalizedString()) {
3564     return false;
3565   }
3566   return SlowEquals(other);
3567 }
3568 
3569 
Equals(Handle<String> one,Handle<String> two)3570 bool String::Equals(Handle<String> one, Handle<String> two) {
3571   if (one.is_identical_to(two)) return true;
3572   if (one->IsInternalizedString() && two->IsInternalizedString()) {
3573     return false;
3574   }
3575   return SlowEquals(one, two);
3576 }
3577 
3578 
Flatten(Handle<String> string,PretenureFlag pretenure)3579 Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
3580   if (!string->IsConsString()) return string;
3581   Handle<ConsString> cons = Handle<ConsString>::cast(string);
3582   if (cons->IsFlat()) return handle(cons->first());
3583   return SlowFlatten(cons, pretenure);
3584 }
3585 
3586 
Flatten(Handle<Name> name,PretenureFlag pretenure)3587 Handle<Name> Name::Flatten(Handle<Name> name, PretenureFlag pretenure) {
3588   if (name->IsSymbol()) return name;
3589   return String::Flatten(Handle<String>::cast(name));
3590 }
3591 
3592 
// Reads the character at |index|, dispatching on the string's combined
// representation and encoding tag.
uint16_t String::Get(int index) {
  DCHECK(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  // All valid tag combinations are handled above.
  UNREACHABLE();
  return 0;
}
3617 
3618 
Set(int index,uint16_t value)3619 void String::Set(int index, uint16_t value) {
3620   DCHECK(index >= 0 && index < length());
3621   DCHECK(StringShape(this).IsSequential());
3622 
3623   return this->IsOneByteRepresentation()
3624       ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3625       : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3626 }
3627 
3628 
IsFlat()3629 bool String::IsFlat() {
3630   if (!StringShape(this).IsCons()) return true;
3631   return ConsString::cast(this)->second()->length() == 0;
3632 }
3633 
3634 
// Returns the string an indirect (cons or sliced) string points at.
String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  DCHECK(this->IsFlat());
  DCHECK(StringShape(this).IsIndirect());
  // Cons and sliced strings store their target at the same offset, so a
  // single read works for both representations.
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}
3644 
3645 
// Walks from |string| down to its flat content starting at |offset| and
// hands the character data to |visitor|. Sliced strings are unwrapped
// (accumulating their offsets), returning NULL once a sequential or
// external segment has been visited; hitting a cons string aborts the
// walk and returns it so the caller can iterate its parts.
template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
                              String* string,
                              const int offset) {
  int slice_offset = offset;
  const int length = string->length();
  DCHECK(offset <= length);
  while (true) {
    int32_t type = string->map()->instance_type();
    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSeqStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            ExternalOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        // Unwrap the slice and continue with its parent, shifted by the
        // slice's start offset.
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
        continue;
      }

      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        // Not flat: return the cons string for the caller to handle.
        return ConsString::cast(string);

      default:
        UNREACHABLE();
        return NULL;
    }
  }
}
3698 
3699 
// One-byte specialization: the string must already be flat and one-byte.
template <>
inline Vector<const uint8_t> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsOneByte());
  return flat.ToOneByteVector();
}


// Two-byte specialization: the string must already be flat and two-byte.
template <>
inline Vector<const uc16> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsTwoByte());
  return flat.ToUC16Vector();
}
3714 
3715 
// One-byte characters are stored directly after the header.
uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  // The value must fit in one byte for a one-byte string.
  DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}
3727 
3728 
// Address of the first character (payload starts right after the header).
Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
}


Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
}
3747 
3748 
// Two-byte characters are stored directly after the header.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_UINT16_FIELD(this, kHeaderSize + index * kShortSize);
}


void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  WRITE_UINT16_FIELD(this, kHeaderSize + index * kShortSize, value);
}


// Object size helpers; the instance_type parameter is unused here but
// kept for signature compatibility with the size-computation dispatch.
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
3769 
3770 
// The string a slice is a view into.
String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
}


void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  // Slices may only point at flat backing stores, never at other
  // indirect strings.
  DCHECK(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}


// Start offset of the slice within its parent.
SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3784 
3785 
// Left part of the cons pair.
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


// Unchecked read; used where the field may not be a String yet.
Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
}


// Right part of the cons pair; empty after flattening.
String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
}
3816 
3817 
// True for "short" external strings, which do not cache the resource's
// data pointer inline (see update_data_cache below).
bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
}
3822 
3823 
// The embedder-provided resource backing this external string.
const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Refreshes the inline copy of the resource's data pointer; short
// external strings have no cache slot and are skipped.
void ExternalOneByteString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


void ExternalOneByteString::set_resource(
    const ExternalOneByteString::Resource* resource) {
  // The resource pointer lives in an object field and must be aligned.
  DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint8_t* ExternalOneByteString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());
}


uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}
3855 
3856 
// The embedder-provided resource backing this external string.
const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Refreshes the inline copy of the resource's data pointer; short
// external strings have no cache slot and are skipped.
void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}
3868 
3869 
set_resource(const ExternalTwoByteString::Resource * resource)3870 void ExternalTwoByteString::set_resource(
3871     const ExternalTwoByteString::Resource* resource) {
3872   *reinterpret_cast<const Resource**>(
3873       FIELD_ADDR(this, kResourceOffset)) = resource;
3874   if (resource != NULL) update_data_cache();
3875 }
3876 
3877 
// Direct access to the resource's character data.
const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
}


uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}


// Pointer into the character data starting at |start|.
const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
      unsigned start) {
  return GetChars() + start;
}
3893 
3894 
OffsetForDepth(int depth)3895 int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }
3896 
3897 
PushLeft(ConsString * string)3898 void ConsStringIterator::PushLeft(ConsString* string) {
3899   frames_[depth_++ & kDepthMask] = string;
3900 }
3901 
3902 
PushRight(ConsString * string)3903 void ConsStringIterator::PushRight(ConsString* string) {
3904   // Inplace update.
3905   frames_[(depth_-1) & kDepthMask] = string;
3906 }
3907 
3908 
AdjustMaximumDepth()3909 void ConsStringIterator::AdjustMaximumDepth() {
3910   if (depth_ > maximum_depth_) maximum_depth_ = depth_;
3911 }
3912 
3913 
Pop()3914 void ConsStringIterator::Pop() {
3915   DCHECK(depth_ > 0);
3916   DCHECK(depth_ <= maximum_depth_);
3917   depth_--;
3918 }
3919 
3920 
// Returns the next character, refilling the buffer from the cons-string
// iterator when the current flat segment is exhausted. buffer8_ and
// buffer16_ appear to alias the same cursor storage -- see the Visit*
// callbacks below.
uint16_t StringCharacterStream::GetNext() {
  DCHECK(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  if (buffer8_ == end_) HasMore();
  DCHECK(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
}
3928 
3929 
// Positions the stream at |offset| within |string|.
StringCharacterStream::StringCharacterStream(String* string, int offset)
    : is_one_byte_(false) {
  Reset(string, offset);
}


void StringCharacterStream::Reset(String* string, int offset) {
  buffer8_ = NULL;
  end_ = NULL;
  // VisitFlat fills the buffer via the Visit* callbacks and returns a
  // cons string when the target is not flat; in that case descend via
  // the cons-string iterator to the first flat segment.
  ConsString* cons_string = String::VisitFlat(this, string, offset);
  iter_.Reset(cons_string, offset);
  if (cons_string != NULL) {
    string = iter_.Next(&offset);
    if (string != NULL) String::VisitFlat(this, string, offset);
  }
}
3946 
3947 
// True if another character is available; advances to the next flat
// segment of the cons tree when the current one is exhausted.
bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  int offset;
  String* string = iter_.Next(&offset);
  DCHECK_EQ(offset, 0);  // Subsequent segments always start at 0.
  if (string == NULL) return false;
  String::VisitFlat(this, string);
  DCHECK(buffer8_ != end_);  // A non-NULL segment is never empty here.
  return true;
}
3958 
3959 
// VisitFlat callback for one-byte segments: point the cursor at the data.
void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, int length) {
  is_one_byte_ = true;
  buffer8_ = chars;
  end_ = chars + length;
}


// VisitFlat callback for two-byte segments. end_ is kept as a byte
// pointer regardless of encoding, hence the cast.
void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, int length) {
  is_one_byte_ = false;
  buffer16_ = chars;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
}
3974 
3975 
Size()3976 int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kPointerSize); }
3977 
3978 
get(int index)3979 byte ByteArray::get(int index) {
3980   DCHECK(index >= 0 && index < this->length());
3981   return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
3982 }
3983 
3984 
set(int index,byte value)3985 void ByteArray::set(int index, byte value) {
3986   DCHECK(index >= 0 && index < this->length());
3987   WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
3988 }
3989 
3990 
get_int(int index)3991 int ByteArray::get_int(int index) {
3992   DCHECK(index >= 0 && (index * kIntSize) < this->length());
3993   return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
3994 }
3995 
3996 
FromDataStartAddress(Address address)3997 ByteArray* ByteArray::FromDataStartAddress(Address address) {
3998   DCHECK_TAG_ALIGNED(address);
3999   return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
4000 }
4001 
4002 
ByteArraySize()4003 int ByteArray::ByteArraySize() { return SizeFor(this->length()); }
4004 
4005 
GetDataStartAddress()4006 Address ByteArray::GetDataStartAddress() {
4007   return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
4008 }
4009 
4010 
get(int index)4011 byte BytecodeArray::get(int index) {
4012   DCHECK(index >= 0 && index < this->length());
4013   return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
4014 }
4015 
4016 
set(int index,byte value)4017 void BytecodeArray::set(int index, byte value) {
4018   DCHECK(index >= 0 && index < this->length());
4019   WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
4020 }
4021 
4022 
set_frame_size(int frame_size)4023 void BytecodeArray::set_frame_size(int frame_size) {
4024   DCHECK_GE(frame_size, 0);
4025   DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
4026   WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
4027 }
4028 
4029 
frame_size()4030 int BytecodeArray::frame_size() const {
4031   return READ_INT_FIELD(this, kFrameSizeOffset);
4032 }
4033 
4034 
register_count()4035 int BytecodeArray::register_count() const {
4036   return frame_size() / kPointerSize;
4037 }
4038 
4039 
set_parameter_count(int number_of_parameters)4040 void BytecodeArray::set_parameter_count(int number_of_parameters) {
4041   DCHECK_GE(number_of_parameters, 0);
4042   // Parameter count is stored as the size on stack of the parameters to allow
4043   // it to be used directly by generated code.
4044   WRITE_INT_FIELD(this, kParameterSizeOffset,
4045                   (number_of_parameters << kPointerSizeLog2));
4046 }
4047 
4048 
parameter_count()4049 int BytecodeArray::parameter_count() const {
4050   // Parameter count is stored as the size on stack of the parameters to allow
4051   // it to be used directly by generated code.
4052   return READ_INT_FIELD(this, kParameterSizeOffset) >> kPointerSizeLog2;
4053 }
4054 
4055 
// Constant pool referenced by the bytecodes.
ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
4057 
4058 
// Untagged address of the first bytecode.
Address BytecodeArray::GetFirstBytecodeAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}


int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }
4065 
4066 
// Base pointer; combined with external_pointer to locate the data (see
// DataPtr below).
ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)
4068 
4069 
// Raw external pointer component of the data address.
void* FixedTypedArrayBase::external_pointer() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


// NOTE(review): the WriteBarrierMode parameter is unused -- the stored
// value is a raw pointer, not a heap object, so no barrier is needed.
void FixedTypedArrayBase::set_external_pointer(void* value,
                                               WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}


// Effective data address: the sum of base_pointer and external_pointer.
void* FixedTypedArrayBase::DataPtr() {
  return reinterpret_cast<void*>(
      reinterpret_cast<intptr_t>(base_pointer()) +
      reinterpret_cast<intptr_t>(external_pointer()));
}
4088 
4089 
// Element size in bytes for a typed-array instance type, generated from
// the TYPED_ARRAYS list.
int FixedTypedArrayBase::ElementSize(InstanceType type) {
  int element_size;
  switch (type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case FIXED_##TYPE##_ARRAY_TYPE:                                           \
      element_size = size;                                                    \
      break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      UNREACHABLE();
      return 0;
  }
  return element_size;
}
4106 
4107 
// Payload size in bytes. A Smi-zero base pointer marks an array with no
// on-heap payload, which reports zero.
int FixedTypedArrayBase::DataSize(InstanceType type) {
  if (base_pointer() == Smi::FromInt(0)) return 0;
  return length() * ElementSize(type);
}


int FixedTypedArrayBase::DataSize() {
  return DataSize(map()->instance_type());
}


// Total object size: header plus payload, pointer aligned.
int FixedTypedArrayBase::size() {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}


int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}


// Variant with an explicit length, usable before the array exists.
int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
  return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
}
4132 
4133 
// Default element values (used e.g. when storing undefined, see
// FixedTypedArray::SetValue): integral types default to zero, floating
// point types to quiet NaN.
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }


uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }


int8_t Int8ArrayTraits::defaultValue() { return 0; }


uint16_t Uint16ArrayTraits::defaultValue() { return 0; }


int16_t Int16ArrayTraits::defaultValue() { return 0; }


uint32_t Uint32ArrayTraits::defaultValue() { return 0; }


int32_t Int32ArrayTraits::defaultValue() { return 0; }


float Float32ArrayTraits::defaultValue() {
  return std::numeric_limits<float>::quiet_NaN();
}


double Float64ArrayTraits::defaultValue() {
  return std::numeric_limits<double>::quiet_NaN();
}
4163 
4164 
// Reads the raw element at |index| from the array's data area.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
  return ptr[index];
}


// Writes the raw element at |index| into the array's data area.
template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
  ptr[index] = value;
}
4179 
4180 
4181 template <class Traits>
from_int(int value)4182 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
4183   return static_cast<ElementType>(value);
4184 }
4185 
4186 
4187 template <> inline
from_int(int value)4188 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4189   if (value < 0) return 0;
4190   if (value > 0xFF) return 0xFF;
4191   return static_cast<uint8_t>(value);
4192 }
4193 
4194 
// Converts a double to the element type; the generic version truncates
// through DoubleToInt32 (integral element types).
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
    double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}


// Uint8Clamped: saturating conversion with round-to-nearest-even.
template<> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
  // Handle NaNs and less than zero values which clamp to zero. The
  // negated comparison is deliberate: it is also false for NaN.
  if (!(value > 0)) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}


// Float element types keep the value (narrowed for Float32).
template<> inline
float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
  return static_cast<float>(value);
}


template<> inline
double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
  return value;
}
4221 
4222 
// Boxes the element at |index| as a heap object via the trait's ToHandle.
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(
    Handle<FixedTypedArray<Traits> > array,
    int index) {
  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
}
4229 
4230 
4231 template <class Traits>
SetValue(uint32_t index,Object * value)4232 void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
4233   ElementType cast_value = Traits::defaultValue();
4234   if (value->IsSmi()) {
4235     int int_value = Smi::cast(value)->value();
4236     cast_value = from_int(int_value);
4237   } else if (value->IsHeapNumber()) {
4238     double double_value = HeapNumber::cast(value)->value();
4239     cast_value = from_double(double_value);
4240   } else {
4241     // Clamp undefined to the default value. All other types have been
4242     // converted to a number type further up in the call chain.
4243     DCHECK(value->IsUndefined());
4244   }
4245   set(index, cast_value);
4246 }
4247 
4248 
// Boxing helpers. Values that always fit in a Smi are wrapped directly;
// 32-bit and floating point values go through the number factory, which
// may allocate a HeapNumber.
Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}


Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}


Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}


Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}
4293 
4294 
// One-byte visitor id stored in the map.
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  DCHECK(0 <= id && id < 256);  // Must fit in a single byte.
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}
4304 
4305 
instance_size()4306 int Map::instance_size() {
4307   return NOBARRIER_READ_BYTE_FIELD(
4308       this, kInstanceSizeOffset) << kPointerSizeLog2;
4309 }
4310 
4311 
inobject_properties_or_constructor_function_index()4312 int Map::inobject_properties_or_constructor_function_index() {
4313   return READ_BYTE_FIELD(this,
4314                          kInObjectPropertiesOrConstructorFunctionIndexOffset);
4315 }
4316 
4317 
set_inobject_properties_or_constructor_function_index(int value)4318 void Map::set_inobject_properties_or_constructor_function_index(int value) {
4319   DCHECK(0 <= value && value < 256);
4320   WRITE_BYTE_FIELD(this, kInObjectPropertiesOrConstructorFunctionIndexOffset,
4321                    static_cast<byte>(value));
4322 }
4323 
4324 
// Typed view of the overloaded byte field: valid only for JSObject maps.
int Map::GetInObjectProperties() {
  DCHECK(IsJSObjectMap());
  return inobject_properties_or_constructor_function_index();
}


void Map::SetInObjectProperties(int value) {
  DCHECK(IsJSObjectMap());
  set_inobject_properties_or_constructor_function_index(value);
}


// Typed view of the overloaded byte field: valid only for primitive maps.
int Map::GetConstructorFunctionIndex() {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_or_constructor_function_index();
}


void Map::SetConstructorFunctionIndex(int value) {
  DCHECK(IsPrimitiveMap());
  set_inobject_properties_or_constructor_function_index(value);
}


// Byte offset of in-object property |index| within the instance. In-object
// properties occupy the last GetInObjectProperties() words of the instance,
// so after the adjustment |index| is a non-positive word delta from the end.
int Map::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= GetInObjectProperties();
  DCHECK(index <= 0);
  return instance_size() + (index * kPointerSize);
}
4355 
4356 
// Test-only public wrapper exposing the private AddMissingTransitions.
Handle<Map> Map::AddMissingTransitionsForTesting(
    Handle<Map> split_map, Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> full_layout_descriptor) {
  return AddMissingTransitions(split_map, descriptors, full_layout_descriptor);
}
4362 
4363 
// Computes this object's size in bytes given its map. Fixed-size instance
// types store the size in the map directly; kVariableSizeSentinel signals
// that the size must be derived from the object's own length fields, which
// is done inline here for the most frequent variable-size types.
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  InstanceType instance_type = map->instance_type();
  if (instance_type == FIXED_ARRAY_TYPE ||
      instance_type == TRANSITION_ARRAY_TYPE) {
    // synchronized_length: the array may be shrunk concurrently (e.g. by
    // left-trimming), so the length read must be ordered.
    return FixedArray::SizeFor(
        reinterpret_cast<FixedArray*>(this)->synchronized_length());
  }
  if (instance_type == ONE_BYTE_STRING_TYPE ||
      instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to access its
    // length synchronized.
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == BYTECODE_ARRAY_TYPE) {
    return reinterpret_cast<BytecodeArray*>(this)->BytecodeArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
  }
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to access its
    // length synchronized.
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
      instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return reinterpret_cast<FixedTypedArrayBase*>(
        this)->TypedArraySize(instance_type);
  }
  // Code is the only remaining variable-size instance type.
  DCHECK(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
4409 
4410 
// Stores the instance size; the value is a byte count that must be
// pointer-aligned and, after conversion to words, fit in one byte.
void Map::set_instance_size(int value) {
  DCHECK_EQ(0, value & (kPointerSize - 1));  // Must be pointer-aligned.
  value >>= kPointerSizeLog2;
  DCHECK(0 <= value && value < 256);
  // No-barrier write paired with the no-barrier read in instance_size().
  NOBARRIER_WRITE_BYTE_FIELD(
      this, kInstanceSizeOffset, static_cast<byte>(value));
}


// Zeroes the currently unused padding byte in the map layout.
void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }


InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}


// Number of unused in-object property slots on instances of this map.
int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


void Map::set_unused_property_fields(int value) {
  // Clamped to 255 to fit the single-byte field.
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}


// Raw accessors for the first byte-sized flag word (kHas* / kIs* bits).
byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


// Raw accessors for the second byte-sized flag word (elements kind etc.).
byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
4457 
4458 
set_non_instance_prototype(bool value)4459 void Map::set_non_instance_prototype(bool value) {
4460   if (value) {
4461     set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4462   } else {
4463     set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4464   }
4465 }
4466 
4467 
has_non_instance_prototype()4468 bool Map::has_non_instance_prototype() {
4469   return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4470 }
4471 
4472 
set_is_constructor()4473 void Map::set_is_constructor() {
4474   set_bit_field(bit_field() | (1 << kIsConstructor));
4475 }
4476 
4477 
is_constructor()4478 bool Map::is_constructor() const {
4479   return ((1 << kIsConstructor) & bit_field()) != 0;
4480 }
4481 
4482 
set_is_hidden_prototype()4483 void Map::set_is_hidden_prototype() {
4484   set_bit_field3(IsHiddenPrototype::update(bit_field3(), true));
4485 }
4486 
4487 
is_hidden_prototype()4488 bool Map::is_hidden_prototype() const {
4489   return IsHiddenPrototype::decode(bit_field3());
4490 }
4491 
4492 
set_has_indexed_interceptor()4493 void Map::set_has_indexed_interceptor() {
4494   set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
4495 }
4496 
4497 
has_indexed_interceptor()4498 bool Map::has_indexed_interceptor() {
4499   return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
4500 }
4501 
4502 
set_is_undetectable()4503 void Map::set_is_undetectable() {
4504   set_bit_field(bit_field() | (1 << kIsUndetectable));
4505 }
4506 
4507 
is_undetectable()4508 bool Map::is_undetectable() {
4509   return ((1 << kIsUndetectable) & bit_field()) != 0;
4510 }
4511 
4512 
set_is_observed()4513 void Map::set_is_observed() { set_bit_field(bit_field() | (1 << kIsObserved)); }
4514 
is_observed()4515 bool Map::is_observed() { return ((1 << kIsObserved) & bit_field()) != 0; }
4516 
4517 
set_has_named_interceptor()4518 void Map::set_has_named_interceptor() {
4519   set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
4520 }
4521 
4522 
has_named_interceptor()4523 bool Map::has_named_interceptor() {
4524   return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
4525 }
4526 
4527 
set_is_access_check_needed(bool access_check_needed)4528 void Map::set_is_access_check_needed(bool access_check_needed) {
4529   if (access_check_needed) {
4530     set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4531   } else {
4532     set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4533   }
4534 }
4535 
4536 
is_access_check_needed()4537 bool Map::is_access_check_needed() {
4538   return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4539 }
4540 
4541 
set_is_extensible(bool value)4542 void Map::set_is_extensible(bool value) {
4543   if (value) {
4544     set_bit_field2(bit_field2() | (1 << kIsExtensible));
4545   } else {
4546     set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4547   }
4548 }
4549 
is_extensible()4550 bool Map::is_extensible() {
4551   return ((1 << kIsExtensible) & bit_field2()) != 0;
4552 }
4553 
4554 
set_is_prototype_map(bool value)4555 void Map::set_is_prototype_map(bool value) {
4556   set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
4557 }
4558 
is_prototype_map()4559 bool Map::is_prototype_map() const {
4560   return IsPrototypeMapBits::decode(bit_field2());
4561 }
4562 
4563 
// Stores the elements kind in its bit_field2 slice; the DCHECKs verify the
// value fits the field and round-trips through the encoding.
void Map::set_elements_kind(ElementsKind elements_kind) {
  DCHECK(static_cast<int>(elements_kind) < kElementsKindCount);
  DCHECK(kElementsKindCount <= (1 << Map::ElementsKindBits::kSize));
  set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
  DCHECK(this->elements_kind() == elements_kind);
}


ElementsKind Map::elements_kind() {
  return Map::ElementsKindBits::decode(bit_field2());
}
4575 
4576 
// Convenience predicates over elements_kind(); each delegates to the
// corresponding ElementsKind classifier.
bool Map::has_fast_smi_elements() {
  return IsFastSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() {
  return IsFastObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() {
  return IsFastSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() {
  return IsFastDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() { return IsFastElementsKind(elements_kind()); }

bool Map::has_sloppy_arguments_elements() {
  return IsSloppyArgumentsElements(elements_kind());
}

bool Map::has_fixed_typed_array_elements() {
  return IsFixedTypedArrayElementsKind(elements_kind());
}

bool Map::has_dictionary_elements() {
  return IsDictionaryElementsKind(elements_kind());
}
4606 
4607 
// Marks the map as using dictionary-mode (slow) properties. A dictionary
// map is also flagged unstable in the same bit_field3 write, since its
// property layout can change without a map transition.
void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}


bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}
4618 
4619 
// Raw accessor for the packed flags word of a Code object.
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}


// Whether this map owns its descriptor array (as opposed to sharing a
// larger array with maps further down a transition chain).
void Map::set_owns_descriptors(bool owns_descriptors) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}


bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}
4633 
4634 
// One-way flag: instances of this map are callable.
void Map::set_is_callable() { set_bit_field(bit_field() | (1 << kIsCallable)); }


bool Map::is_callable() const {
  return ((1 << kIsCallable) & bit_field()) != 0;
}


// One-way flag: marks this map as deprecated so instances migrate to a
// newer map on next access.
void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}


bool Map::is_deprecated() {
  return Deprecated::decode(bit_field3());
}


// Flags this map as the target other maps migrate to.
void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}


bool Map::is_migration_target() {
  return IsMigrationTarget::decode(bit_field3());
}


// One-way flag: this map belongs to a strong-mode object.
void Map::set_is_strong() {
  set_bit_field3(IsStrong::update(bit_field3(), true));
}


bool Map::is_strong() {
  return IsStrong::decode(bit_field3());
}
4671 
4672 
void Map::set_new_target_is_base(bool value) {
  set_bit_field3(NewTargetIsBase::update(bit_field3(), value));
}


bool Map::new_target_is_base() { return NewTargetIsBase::decode(bit_field3()); }


// Counter used for in-object slack tracking of freshly created objects.
void Map::set_construction_counter(int value) {
  set_bit_field3(ConstructionCounter::update(bit_field3(), value));
}


int Map::construction_counter() {
  return ConstructionCounter::decode(bit_field3());
}


// One-way transition: once a map is unstable it never becomes stable again.
void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}


bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}


// True when the map's code cache holds entries (i.e. is not the shared
// empty fixed array).
bool Map::has_code_cache() {
  return code_cache() != GetIsolate()->heap()->empty_fixed_array();
}
4704 
4705 
CanBeDeprecated()4706 bool Map::CanBeDeprecated() {
4707   int descriptor = LastAdded();
4708   for (int i = 0; i <= descriptor; i++) {
4709     PropertyDetails details = instance_descriptors()->GetDetails(i);
4710     if (details.representation().IsNone()) return true;
4711     if (details.representation().IsSmi()) return true;
4712     if (details.representation().IsDouble()) return true;
4713     if (details.representation().IsHeapObject()) return true;
4714     if (details.type() == DATA_CONSTANT) return true;
4715   }
4716   return false;
4717 }
4718 
4719 
// Called when a leaf map's layout changes: flips it to unstable and
// deoptimizes code that depended on its prototype-check group.
void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}


bool Map::CanTransition() {
  // Only JSObject and subtypes have map transitions and back pointers.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
  return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
4735 
4736 
// Instance-type classification predicates. The STATIC_ASSERTs pin the
// instance-type enum layout that the range comparisons rely on.
bool Map::IsBooleanMap() { return this == GetHeap()->boolean_map(); }
bool Map::IsPrimitiveMap() {
  STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
  return instance_type() <= LAST_PRIMITIVE_TYPE;
}
bool Map::IsJSReceiverMap() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return instance_type() >= FIRST_JS_RECEIVER_TYPE;
}
bool Map::IsJSObjectMap() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
bool Map::IsJSArrayMap() { return instance_type() == JS_ARRAY_TYPE; }
bool Map::IsJSFunctionMap() { return instance_type() == JS_FUNCTION_TYPE; }
bool Map::IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
bool Map::IsJSProxyMap() { return instance_type() == JS_PROXY_TYPE; }
bool Map::IsJSGlobalProxyMap() {
  return instance_type() == JS_GLOBAL_PROXY_TYPE;
}
bool Map::IsJSGlobalObjectMap() {
  return instance_type() == JS_GLOBAL_OBJECT_TYPE;
}
bool Map::IsJSTypedArrayMap() { return instance_type() == JS_TYPED_ARRAY_TYPE; }
bool Map::IsJSDataViewMap() { return instance_type() == JS_DATA_VIEW_TYPE; }


// Map checks can be omitted only for stable (leaf) maps, and only when the
// corresponding flag is on.
bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}
4767 
4768 
// DependentCode is a FixedArray-based list; these accessors address its
// fixed slots (next-link, flags) and the code entries that follow.
DependentCode* DependentCode::next_link() {
  return DependentCode::cast(get(kNextLinkIndex));
}


void DependentCode::set_next_link(DependentCode* next) {
  set(kNextLinkIndex, next);
}


// Packed Smi holding the entry count and dependency group.
int DependentCode::flags() { return Smi::cast(get(kFlagsIndex))->value(); }


void DependentCode::set_flags(int flags) {
  set(kFlagsIndex, Smi::FromInt(flags));
}


int DependentCode::count() { return CountField::decode(flags()); }

void DependentCode::set_count(int value) {
  set_flags(CountField::update(flags(), value));
}


DependentCode::DependencyGroup DependentCode::group() {
  return static_cast<DependencyGroup>(GroupField::decode(flags()));
}


void DependentCode::set_group(DependentCode::DependencyGroup group) {
  set_flags(GroupField::update(flags(), static_cast<int>(group)));
}


// Code entries start at kCodesStartIndex; |i| is an entry index, not a raw
// array index.
void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}


Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}


void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}


void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}
4822 
4823 
// Stores the packed flags word; the assert guards that every Code::Kind
// value is representable in the KindField slice.
void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}


Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}
4833 
4834 
IsCodeStubOrIC()4835 bool Code::IsCodeStubOrIC() {
4836   return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
4837          kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
4838          kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
4839          kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
4840          kind() == TO_BOOLEAN_IC;
4841 }
4842 
4843 
// True for code produced from JavaScript: full-codegen, optimized code, or
// the interpreter entry trampoline.
bool Code::IsJavaScriptCode() {
  return kind() == FUNCTION || kind() == OPTIMIZED_FUNCTION ||
         is_interpreter_entry_trampoline();
}


InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to code object has been replaced with a debug break call.
  DCHECK(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_STUB);
  return result;
}


// Extra state is only meaningful for IC stubs (or debug stubs).
ExtraICState Code::extra_ic_state() {
  DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
  return ExtractExtraICStateFromFlags(flags());
}


Code::StubType Code::type() {
  return ExtractTypeFromFlags(flags());
}
4871 
4872 
// For initialization.
// Raw writes used only while a Code object is being set up, before the
// typed bit-field accessors below are valid.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}


void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}


// True when the code was produced by an optimizing backend (Crankshaft or
// any pipeline that sets this bit).
inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


// Optimized but not an optimized JS function, i.e. a Hydrogen code stub.
inline bool Code::is_hydrogen_stub() {
  return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
}


// Identity check against the isolate's interpreter entry builtin; the
// location() null check guards against the builtin not being set up yet.
inline bool Code::is_interpreter_entry_trampoline() {
  Handle<Code> interpreter_entry =
      GetIsolate()->builtins()->InterpreterEntryTrampoline();
  return interpreter_entry.location() != nullptr && *interpreter_entry == this;
}
4900 
inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


// True when the code was produced by TurboFan.
inline bool Code::is_turbofanned() {
  return IsTurbofannedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_is_turbofanned(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsTurbofannedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


// Whether embedded objects may be weak references; only meaningful for
// optimized function code.
inline bool Code::can_have_weak_objects() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return CanHaveWeakObjectsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = CanHaveWeakObjectsField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
4934 
4935 
// The accessors below read/write bits of kFullCodeFlags and are only valid
// for full-codegen (FUNCTION-kind) code, as the DCHECKs enforce.
bool Code::has_deoptimization_support() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}


void Code::set_has_deoptimization_support(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_debug_break_slots() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}


void Code::set_has_debug_break_slots(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}


bool Code::has_reloc_info_for_serialization() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
}


void Code::set_has_reloc_info_for_serialization(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}
4979 
4980 
// Loop nesting level up to which on-stack replacement is allowed; only
// meaningful for full-codegen code.
int Code::allow_osr_at_loop_nesting_level() {
  DCHECK_EQ(FUNCTION, kind());
  int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  return AllowOSRAtLoopNestingLevelField::decode(fields);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


int Code::profiler_ticks() {
  DCHECK_EQ(FUNCTION, kind());
  return ProfilerTicksField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


// Silently ignored for non-FUNCTION code, so callers need not check kind.
void Code::set_profiler_ticks(int ticks) {
  if (kind() == FUNCTION) {
    unsigned previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
    unsigned updated = ProfilerTicksField::update(previous, ticks);
    WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
  }
}


// Builtins reuse the whole kKindSpecificFlags1 word as their index, so this
// aliases the bit fields used by other code kinds.
int Code::builtin_index() {
  return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
}


void Code::set_builtin_index(int index) {
  WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
}
5021 
5022 
// Number of stack slots; only optimized (crankshafted) code records this.
unsigned Code::stack_slots() {
  DCHECK(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_stack_slots(unsigned slots) {
  // NOTE(review): the bound looks off by one — a value of exactly
  // (1 << kStackSlotsBitCount) cannot fit the bit field; the field's own
  // encode-time check catches that in debug builds. Confirm intent.
  CHECK(slots <= (1 << kStackSlotsBitCount));
  DCHECK(is_crankshafted());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = StackSlotsField::update(previous, slots);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


// Byte offset of the safepoint table within the instruction area.
unsigned Code::safepoint_table_offset() {
  DCHECK(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


void Code::set_safepoint_table_offset(unsigned offset) {
  // NOTE(review): same <= vs < boundary question as set_stack_slots above.
  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
  DCHECK(is_crankshafted());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = SafepointTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
5054 
5055 
// Byte offset of the back-edge table. Stored compressed as a pointer-word
// count, hence the shift on read and the inverse shift on write.
unsigned Code::back_edge_table_offset() {
  DCHECK_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
}


void Code::set_back_edge_table_offset(unsigned offset) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
  offset = offset >> kPointerSizeLog2;
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


// A non-zero OSR nesting level is used as the marker for patched back edges.
bool Code::back_edges_patched_for_osr() {
  DCHECK_EQ(FUNCTION, kind());
  return allow_osr_at_loop_nesting_level() > 0;
}


// ToBoolean ICs encode their state in the extra IC state bits.
uint16_t Code::to_boolean_state() { return extra_ic_state(); }
5080 
5081 
// Deoptimization mark; only meaningful for optimized function code.
bool Code::marked_for_deoptimization() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


// True for every kind listed in IC_KIND_LIST; the macro expands one case
// per IC kind.
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}


bool Code::is_keyed_stub() {
  return is_keyed_load_stub() || is_keyed_store_stub();
}
5112 
5113 
// One-line kind/state predicates over the packed flags word.
bool Code::is_debug_stub() { return ic_state() == DEBUG_STUB; }
bool Code::is_handler() { return kind() == HANDLER; }
bool Code::is_load_stub() { return kind() == LOAD_IC; }
bool Code::is_keyed_load_stub() { return kind() == KEYED_LOAD_IC; }
bool Code::is_store_stub() { return kind() == STORE_IC; }
bool Code::is_keyed_store_stub() { return kind() == KEYED_STORE_IC; }
bool Code::is_call_stub() { return kind() == CALL_IC; }
bool Code::is_binary_op_stub() { return kind() == BINARY_OP_IC; }
bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
bool Code::is_compare_nil_ic_stub() { return kind() == COMPARE_NIL_IC; }
bool Code::is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }


// Monomorphic ICs of these kinds embed maps as weak references.
bool Code::embeds_maps_weakly() {
  Kind k = kind();
  return (k == LOAD_IC || k == STORE_IC || k == KEYED_LOAD_IC ||
          k == KEYED_STORE_IC || k == COMPARE_NIL_IC) &&
         ic_state() == MONOMORPHIC;
}
5134 
5135 
constant_pool()5136 Address Code::constant_pool() {
5137   Address constant_pool = NULL;
5138   if (FLAG_enable_embedded_constant_pool) {
5139     int offset = constant_pool_offset();
5140     if (offset < instruction_size()) {
5141       constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
5142     }
5143   }
5144   return constant_pool;
5145 }
5146 
5147 
ComputeFlags(Kind kind,InlineCacheState ic_state,ExtraICState extra_ic_state,StubType type,CacheHolderFlag holder)5148 Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
5149                                ExtraICState extra_ic_state, StubType type,
5150                                CacheHolderFlag holder) {
5151   // Compute the bit mask.
5152   unsigned int bits = KindField::encode(kind)
5153       | ICStateField::encode(ic_state)
5154       | TypeField::encode(type)
5155       | ExtraICStateField::encode(extra_ic_state)
5156       | CacheHolderField::encode(holder);
5157   return static_cast<Flags>(bits);
5158 }
5159 
5160 
// Convenience wrapper: flags for a stub that is already MONOMORPHIC.
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          CacheHolderFlag holder,
                                          StubType type) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
}


// Flags for a HANDLER code object. Note that the handler's own kind is
// smuggled through the extra-IC-state argument of ComputeFlags.
Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
                                      CacheHolderFlag holder) {
  return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
}
5173 
5174 
// Decoders: each extracts one bit-field component from a packed Flags word
// (the inverse of the corresponding encode in ComputeFlags).
Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}


InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}


ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}


Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}


CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}
5198 
5199 
RemoveTypeFromFlags(Flags flags)5200 Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
5201   int bits = flags & ~TypeField::kMask;
5202   return static_cast<Flags>(bits);
5203 }
5204 
5205 
RemoveTypeAndHolderFromFlags(Flags flags)5206 Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
5207   int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
5208   return static_cast<Flags>(bits);
5209 }
5210 
5211 
// Maps a code-entry address (the first instruction) back to the enclosing
// Code object by stepping back over the header.
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
5221 
5222 
GetObjectFromEntryAddress(Address location_of_address)5223 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5224   return HeapObject::
5225       FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
5226 }
5227 
5228 
// Only optimized code with its can_have_weak_objects flag set may hold weak
// references.
bool Code::CanContainWeakObjects() {
  return is_optimized_code() && can_have_weak_objects();
}


// True iff |object| is treated as a weak reference when embedded in this
// code object.
bool Code::IsWeakObject(Object* object) {
  return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
}
5237 
5238 
IsWeakObjectInOptimizedCode(Object * object)5239 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5240   if (object->IsMap()) {
5241     return Map::cast(object)->CanTransition() &&
5242            FLAG_weak_embedded_maps_in_optimized_code;
5243   }
5244   if (object->IsCell()) {
5245     object = Cell::cast(object)->value();
5246   } else if (object->IsPropertyCell()) {
5247     object = PropertyCell::cast(object)->value();
5248   }
5249   if (object->IsJSReceiver()) {
5250     return FLAG_weak_embedded_objects_in_optimized_code;
5251   }
5252   if (object->IsContext()) {
5253     // Contexts of inlined functions are embedded in optimized code.
5254     return FLAG_weak_embedded_objects_in_optimized_code;
5255   }
5256   return false;
5257 }
5258 
5259 
// Collects up to kMaxCount (map-to-find -> replacement object) pairs.
// Code (a friend) reads find_/replace_ directly — presumably in a
// find-and-replace pass over the code's embedded objects; confirm at the
// use site.
class Code::FindAndReplacePattern {
 public:
  FindAndReplacePattern() : count_(0) { }
  // Appends one pair; DCHECKs that capacity is not exceeded.
  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
    DCHECK(count_ < kMaxCount);
    find_[count_] = map_to_find;
    replace_[count_] = obj_to_replace;
    ++count_;
  }
 private:
  static const int kMaxCount = 4;
  int count_;
  Handle<Map> find_[kMaxCount];
  Handle<Object> replace_[kMaxCount];
  friend class Code;
};
5276 
5277 
Object* Map::prototype() const {
  return READ_FIELD(this, kPrototypeOffset);
}


// Sets the prototype; per the DCHECK, only null or a JSReceiver is allowed.
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  DCHECK(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}
5288 
5289 
// Reads the layout descriptor via the GC-safe cast variant, so it can be
// used while the heap is in an inconsistent state (e.g. during marking).
LayoutDescriptor* Map::layout_descriptor_gc_safe() {
  Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}


// True iff this map uses the shared all-pointers (fast) layout.
bool Map::HasFastPointerLayout() const {
  Object* layout_desc = READ_FIELD(this, kLayoutDecriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}
5300 
5301 
// Installs a new descriptor array and, when double fields are unboxed, keeps
// the layout descriptor consistent with it.
void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor* layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    // Only slow layout descriptors are replaced here; fast (shared) layouts
    // are left untouched.
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
      CHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    DCHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
#endif
  }
}
5321 
5322 
// First-time installation of descriptors: also adopts the descriptor count
// as the number of own descriptors and (with unboxed doubles) installs the
// layout descriptor and refreshes the static visitor id.
void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor* layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
#endif
    set_visitor_id(Heap::GetStaticVisitorIdForMap(this));
  }
}
5342 
5343 
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
// NOTE(review): "Decriptor" is a long-standing spelling in the offset
// constant; it must match the declaration elsewhere, so it is kept as-is.
ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDecriptorOffset)


// Writes bit_field3. When int32 and pointer sizes differ (64-bit targets,
// where the field presumably occupies a pointer-sized slot — confirm against
// the field layout), the unused upper half of the word is zeroed first.
void Map::set_bit_field3(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}


uint32_t Map::bit_field3() const {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}
5359 
5360 
GetLayoutDescriptor()5361 LayoutDescriptor* Map::GetLayoutDescriptor() {
5362   return FLAG_unbox_double_fields ? layout_descriptor()
5363                                   : LayoutDescriptor::FastPointerLayout();
5364 }
5365 
5366 
// Appends |desc| to the map's descriptor array and bumps the own-descriptor
// count. The array must currently hold exactly the map's own descriptors.
void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

// This function does not support appending double field descriptors and
// it should never try to (otherwise, layout descriptor must be updated too).
#ifdef DEBUG
  PropertyDetails details = desc->GetDetails();
  CHECK(details.type() != DATA || !details.representation().IsDouble());
#endif
}
5381 
5382 
GetBackPointer()5383 Object* Map::GetBackPointer() {
5384   Object* object = constructor_or_backpointer();
5385   if (object->IsMap()) {
5386     return object;
5387   }
5388   return GetIsolate()->heap()->undefined_value();
5389 }
5390 
5391 
// Looks up the special elements-kind transition for this map via the
// transition array.
Map* Map::ElementsTransitionMap() {
  return TransitionArray::SearchSpecial(
      this, GetHeap()->elements_transition_symbol());
}


// Shares its slot with prototype_info (kTransitionsOrPrototypeInfoOffset);
// which of the two is stored depends on whether this is a prototype map.
ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
5399 
5400 
// prototype_info is only valid on prototype maps; it reuses the
// kTransitionsOrPrototypeInfoOffset slot otherwise used for transitions.
Object* Map::prototype_info() const {
  DCHECK(is_prototype_map());
  return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
}


void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
  DCHECK(is_prototype_map());
  WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
}
5413 
5414 
// Installs |value| as this map's back pointer in the shared
// constructor/backpointer slot.
void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  // NOTE(review): this DCHECK already requires value->IsMap(), which makes
  // the !value->IsMap() clause of the next DCHECK vacuous — confirm intent.
  DCHECK((value->IsMap() && GetBackPointer()->IsUndefined()));
  DCHECK(!value->IsMap() ||
         Map::cast(value)->GetConstructor() == constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}
5422 
5423 
// Remaining Map fields; note constructor_or_backpointer is the dual-purpose
// slot used by GetConstructor/GetBackPointer above.
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
          kConstructorOrBackPointerOffset)
5429 
5430 
5431 Object* Map::GetConstructor() const {
5432   Object* maybe_constructor = constructor_or_backpointer();
5433   // Follow any back pointers.
5434   while (maybe_constructor->IsMap()) {
5435     maybe_constructor =
5436         Map::cast(maybe_constructor)->constructor_or_backpointer();
5437   }
5438   return maybe_constructor;
5439 }
5440 
5441 
// Installs |constructor| into the shared constructor/backpointer slot.
void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  DCHECK(!constructor_or_backpointer()->IsMap());
  set_constructor_or_backpointer(constructor, mode);
}


// Copies an initial map, preserving its instance size and property counts.
Handle<Map> Map::CopyInitialMap(Handle<Map> map) {
  return CopyInitialMap(map, map->instance_size(), map->GetInObjectProperties(),
                        map->unused_property_fields());
}
5453 
5454 
// Bound-function bookkeeping: target, receiver, arguments, and the context
// the bound function was created in.
ACCESSORS(JSBoundFunction, length, Object, kLengthOffset)
ACCESSORS(JSBoundFunction, name, Object, kNameOffset)
ACCESSORS(JSBoundFunction, bound_target_function, JSReceiver,
          kBoundTargetFunctionOffset)
ACCESSORS(JSBoundFunction, bound_this, Object, kBoundThisOffset)
ACCESSORS(JSBoundFunction, bound_arguments, FixedArray, kBoundArgumentsOffset)
ACCESSORS(JSBoundFunction, creation_context, Context, kCreationContextOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals, LiteralsArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(JSGlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(JSGlobalObject, global_proxy, JSObject, kGlobalProxyOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
SMI_ACCESSORS(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

// Prototype maps track their users and a validity cell.
ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
SMI_ACCESSORS(PrototypeInfo, registry_slot, kRegistrySlotOffset)
ACCESSORS(PrototypeInfo, validity_cell, Object, kValidityCellOffset)

ACCESSORS(SloppyBlockWithEvalContextExtension, scope_info, ScopeInfo,
          kScopeInfoOffset)
ACCESSORS(SloppyBlockWithEvalContextExtension, extension, JSObject,
          kExtensionOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, callback, Object, kCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
               kCanInterceptSymbolsBit)
BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
ACCESSORS(CallHandlerInfo, fast_handler, Object, kFastHandlerOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
SMI_ACCESSORS(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

// Allocation sites carry transition/pretenuring feedback and are chained
// through weak_next.
ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
SMI_ACCESSORS(AllocationSite, pretenure_data, kPretenureDataOffset)
SMI_ACCESSORS(AllocationSite, pretenure_create_count,
              kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
SMI_ACCESSORS(Script, id, kIdOffset)
SMI_ACCESSORS(Script, line_offset, kLineOffsetOffset)
SMI_ACCESSORS(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
SMI_ACCESSORS(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
SMI_ACCESSORS(Script, eval_from_instructions_offset,
              kEvalFrominstructionsOffsetOffset)
ACCESSORS(Script, shared_function_infos, Object, kSharedFunctionInfosOffset)
SMI_ACCESSORS(Script, flags, kFlagsOffset)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
5571 
// The helpers below pack single-bit and multi-bit Script properties into the
// flags() smi field.

Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}
void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}
bool Script::hide_source() { return BooleanBit::get(flags(), kHideSourceBit); }
void Script::set_hide_source(bool value) {
  set_flags(BooleanBit::set(flags(), kHideSourceBit, value));
}
Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}
void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}
// Origin options occupy a multi-bit range [kOriginOptionsShift, +Size).
ScriptOriginOptions Script::origin_options() {
  return ScriptOriginOptions((flags() & kOriginOptionsMask) >>
                             kOriginOptionsShift);
}
void Script::set_origin_options(ScriptOriginOptions origin_options) {
  DCHECK(!(origin_options.Flags() & ~((1 << kOriginOptionsSize) - 1)));
  set_flags((flags() & ~kOriginOptionsMask) |
            (origin_options.Flags() << kOriginOptionsShift));
}
5601 
5602 
// Debugger bookkeeping objects.
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, code, Code, kCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

SMI_ACCESSORS(BreakPointInfo, code_position, kCodePositionIndex)
SMI_ACCESSORS(BreakPointInfo, source_position, kSourcePositionIndex)
SMI_ACCESSORS(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, FixedArray,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
          kFeedbackVectorOffset)
#if TRACE_MAPS
SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
#endif
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)


// FunctionTemplateInfo boolean properties, packed into its flag smi.
SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, instantiated, kInstantiatedBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
               kAcceptAnyReceiver)
// SharedFunctionInfo booleans, packed into start_position_and_type and
// compiler_hints respectively.
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, never_compiled,
               kNeverCompiled)
5667 
5668 
#if V8_HOST_ARCH_32_BIT
// On 32-bit hosts each of these counters fits in an ordinary Smi field.
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else

// On 64-bit hosts two ints are packed per pointer-sized slot; which int is
// the "low" half of the word depends on target endianness.
#if V8_TARGET_LITTLE_ENDIAN
#define PSEUDO_SMI_LO_ALIGN 0
#define PSEUDO_SMI_HI_ALIGN kIntSize
#else
#define PSEUDO_SMI_LO_ALIGN kIntSize
#define PSEUDO_SMI_HI_ALIGN 0
#endif

// The LO accessor stores the value shifted left by one with the heap-object
// tag bit kept clear (see the DCHECKs) — presumably so the containing word
// is never mistaken for a tagged pointer; confirm against the GC's slot
// scanning rules.
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)                          \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN);         \
  int holder::name() const {                                                   \
    int value = READ_INT_FIELD(this, offset);                                  \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & kHeapObjectTag) == 0);                                     \
    return value >> 1;                                                         \
  }                                                                            \
  void holder::set_##name(int value) {                                         \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
    WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag);             \
  }

// The HI half never starts a word, so it is stored as a plain int.
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)                  \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
  INT_ACCESSORS(holder, name, offset)


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif
5752 
5753 
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)


// Hand-written setter counterpart to the BOOL_GETTER directly above.
void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
}
5765 
5766 
// Reconstructs the language mode from the strict/strong bits packed into
// compiler_hints().
LanguageMode SharedFunctionInfo::language_mode() {
  STATIC_ASSERT(LANGUAGE_END == 3);
  return construct_language_mode(
      BooleanBit::get(compiler_hints(), kStrictModeFunction),
      BooleanBit::get(compiler_hints(), kStrongModeFunction));
}


void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
  STATIC_ASSERT(LANGUAGE_END == 3);
  // We only allow language mode transitions that set the same language mode
  // again or go up in the chain:
  DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
  int hints = compiler_hints();
  hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
  hints = BooleanBit::set(hints, kStrongModeFunction, is_strong(language_mode));
  set_compiler_hints(hints);
}
5785 
5786 
kind()5787 FunctionKind SharedFunctionInfo::kind() {
5788   return FunctionKindBits::decode(compiler_hints());
5789 }
5790 
5791 
set_kind(FunctionKind kind)5792 void SharedFunctionInfo::set_kind(FunctionKind kind) {
5793   DCHECK(IsValidFunctionKind(kind));
5794   int hints = compiler_hints();
5795   hints = FunctionKindBits::update(hints, kind);
5796   set_compiler_hints(hints);
5797 }
5798 
5799 
// Remaining single-bit SharedFunctionInfo properties in compiler_hints.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, needs_home_object,
               kNeedsHomeObject)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline, kForceInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
               kDontCrankshaft)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
               kIsConciseMethod)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_accessor_function,
               kIsAccessorFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_default_constructor,
               kIsDefaultConstructor)

ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5825 
5826 bool Script::HasValidSource() {
5827   Object* src = this->source();
5828   if (!src->IsString()) return true;
5829   String* src_str = String::cast(src);
5830   if (!StringShape(src_str).IsExternal()) return true;
5831   if (src_str->IsOneByteRepresentation()) {
5832     return ExternalOneByteString::cast(src)->resource() != NULL;
5833   } else if (src_str->IsTwoByteRepresentation()) {
5834     return ExternalTwoByteString::cast(src)->resource() != NULL;
5835   }
5836   return true;
5837 }
5838 
5839 
// Marks this (builtin-only, per the DCHECK) function as not using the
// arguments adaptor, via the sentinel parameter count.
void SharedFunctionInfo::DontAdaptArguments() {
  DCHECK(code()->kind() == Code::BUILTIN);
  set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


// The start position shares its field with type bits; the position occupies
// the bits at and above kStartPositionShift.
int SharedFunctionInfo::start_position() const {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  // Preserve the non-position bits outside kStartPositionMask.
  set_start_position_and_type((start_position << kStartPositionShift)
    | (start_position_and_type() & ~kStartPositionMask));
}
5855 
5856 
Code* SharedFunctionInfo::code() const {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


// Installs |value| as this function's code. The DCHECK rejects optimized
// code — NOTE(review): presumably optimized code belongs on the JSFunction
// rather than the shared info; confirm at the callers.
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}
5867 
5868 
ReplaceCode(Code * value)5869 void SharedFunctionInfo::ReplaceCode(Code* value) {
5870   // If the GC metadata field is already used then the function was
5871   // enqueued as a code flushing candidate and we remove it now.
5872   if (code()->gc_metadata() != NULL) {
5873     CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
5874     flusher->EvictCandidate(this);
5875   }
5876 
5877   DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
5878 #ifdef DEBUG
5879   Code::VerifyRecompiledCode(code(), value);
5880 #endif  // DEBUG
5881 
5882   set_code(value);
5883 
5884   if (is_compiled()) set_never_compiled(false);
5885 }
5886 
5887 
scope_info()5888 ScopeInfo* SharedFunctionInfo::scope_info() const {
5889   return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
5890 }
5891 
5892 
set_scope_info(ScopeInfo * value,WriteBarrierMode mode)5893 void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
5894                                         WriteBarrierMode mode) {
5895   WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
5896   CONDITIONAL_WRITE_BARRIER(GetHeap(),
5897                             this,
5898                             kScopeInfoOffset,
5899                             reinterpret_cast<Object*>(value),
5900                             mode);
5901 }
5902 
5903 
is_compiled()5904 bool SharedFunctionInfo::is_compiled() {
5905   Builtins* builtins = GetIsolate()->builtins();
5906   DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
5907   DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
5908   return code() != builtins->builtin(Builtins::kCompileLazy);
5909 }
5910 
5911 
has_simple_parameters()5912 bool SharedFunctionInfo::has_simple_parameters() {
5913   return scope_info()->HasSimpleParameters();
5914 }
5915 
5916 
HasDebugInfo()5917 bool SharedFunctionInfo::HasDebugInfo() {
5918   bool has_debug_info = debug_info()->IsStruct();
5919   DCHECK(!has_debug_info || HasDebugCode());
5920   return has_debug_info;
5921 }
5922 
5923 
GetDebugInfo()5924 DebugInfo* SharedFunctionInfo::GetDebugInfo() {
5925   DCHECK(HasDebugInfo());
5926   return DebugInfo::cast(debug_info());
5927 }
5928 
5929 
HasDebugCode()5930 bool SharedFunctionInfo::HasDebugCode() {
5931   return code()->kind() == Code::FUNCTION && code()->has_debug_break_slots();
5932 }
5933 
5934 
IsApiFunction()5935 bool SharedFunctionInfo::IsApiFunction() {
5936   return function_data()->IsFunctionTemplateInfo();
5937 }
5938 
5939 
get_api_func_data()5940 FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
5941   DCHECK(IsApiFunction());
5942   return FunctionTemplateInfo::cast(function_data());
5943 }
5944 
5945 
HasBuiltinFunctionId()5946 bool SharedFunctionInfo::HasBuiltinFunctionId() {
5947   return function_data()->IsSmi();
5948 }
5949 
5950 
builtin_function_id()5951 BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
5952   DCHECK(HasBuiltinFunctionId());
5953   return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
5954 }
5955 
5956 
HasBytecodeArray()5957 bool SharedFunctionInfo::HasBytecodeArray() {
5958   return function_data()->IsBytecodeArray();
5959 }
5960 
5961 
bytecode_array()5962 BytecodeArray* SharedFunctionInfo::bytecode_array() {
5963   DCHECK(HasBytecodeArray());
5964   return BytecodeArray::cast(function_data());
5965 }
5966 
5967 
ic_age()5968 int SharedFunctionInfo::ic_age() {
5969   return ICAgeBits::decode(counters());
5970 }
5971 
5972 
set_ic_age(int ic_age)5973 void SharedFunctionInfo::set_ic_age(int ic_age) {
5974   set_counters(ICAgeBits::update(counters(), ic_age));
5975 }
5976 
5977 
deopt_count()5978 int SharedFunctionInfo::deopt_count() {
5979   return DeoptCountBits::decode(counters());
5980 }
5981 
5982 
set_deopt_count(int deopt_count)5983 void SharedFunctionInfo::set_deopt_count(int deopt_count) {
5984   set_counters(DeoptCountBits::update(counters(), deopt_count));
5985 }
5986 
5987 
increment_deopt_count()5988 void SharedFunctionInfo::increment_deopt_count() {
5989   int value = counters();
5990   int deopt_count = DeoptCountBits::decode(value);
5991   deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
5992   set_counters(DeoptCountBits::update(value, deopt_count));
5993 }
5994 
5995 
opt_reenable_tries()5996 int SharedFunctionInfo::opt_reenable_tries() {
5997   return OptReenableTriesBits::decode(counters());
5998 }
5999 
6000 
set_opt_reenable_tries(int tries)6001 void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
6002   set_counters(OptReenableTriesBits::update(counters(), tries));
6003 }
6004 
6005 
opt_count()6006 int SharedFunctionInfo::opt_count() {
6007   return OptCountBits::decode(opt_count_and_bailout_reason());
6008 }
6009 
6010 
set_opt_count(int opt_count)6011 void SharedFunctionInfo::set_opt_count(int opt_count) {
6012   set_opt_count_and_bailout_reason(
6013       OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
6014 }
6015 
6016 
disable_optimization_reason()6017 BailoutReason SharedFunctionInfo::disable_optimization_reason() {
6018   return static_cast<BailoutReason>(
6019       DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
6020 }
6021 
6022 
has_deoptimization_support()6023 bool SharedFunctionInfo::has_deoptimization_support() {
6024   Code* code = this->code();
6025   return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
6026 }
6027 
6028 
TryReenableOptimization()6029 void SharedFunctionInfo::TryReenableOptimization() {
6030   int tries = opt_reenable_tries();
6031   set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
6032   // We reenable optimization whenever the number of tries is a large
6033   // enough power of 2.
6034   if (tries >= 16 && (((tries - 1) & tries) == 0)) {
6035     set_optimization_disabled(false);
6036     set_opt_count(0);
6037     set_deopt_count(0);
6038   }
6039 }
6040 
6041 
set_disable_optimization_reason(BailoutReason reason)6042 void SharedFunctionInfo::set_disable_optimization_reason(BailoutReason reason) {
6043   set_opt_count_and_bailout_reason(DisabledOptimizationReasonBits::update(
6044       opt_count_and_bailout_reason(), reason));
6045 }
6046 
6047 
IsBuiltin()6048 bool SharedFunctionInfo::IsBuiltin() {
6049   Object* script_obj = script();
6050   if (script_obj->IsUndefined()) return true;
6051   Script* script = Script::cast(script_obj);
6052   Script::Type type = static_cast<Script::Type>(script->type());
6053   return type != Script::TYPE_NORMAL;
6054 }
6055 
6056 
IsSubjectToDebugging()6057 bool SharedFunctionInfo::IsSubjectToDebugging() { return !IsBuiltin(); }
6058 
6059 
OptimizedCodeMapIsCleared()6060 bool SharedFunctionInfo::OptimizedCodeMapIsCleared() const {
6061   return optimized_code_map() == GetHeap()->cleared_optimized_code_map();
6062 }
6063 
6064 
6065 // static
AddToOptimizedCodeMap(Handle<SharedFunctionInfo> shared,Handle<Context> native_context,Handle<Code> code,Handle<LiteralsArray> literals,BailoutId osr_ast_id)6066 void SharedFunctionInfo::AddToOptimizedCodeMap(
6067     Handle<SharedFunctionInfo> shared, Handle<Context> native_context,
6068     Handle<Code> code, Handle<LiteralsArray> literals, BailoutId osr_ast_id) {
6069   AddToOptimizedCodeMapInternal(shared, native_context, code, literals,
6070                                 osr_ast_id);
6071 }
6072 
6073 
6074 // static
AddLiteralsToOptimizedCodeMap(Handle<SharedFunctionInfo> shared,Handle<Context> native_context,Handle<LiteralsArray> literals)6075 void SharedFunctionInfo::AddLiteralsToOptimizedCodeMap(
6076     Handle<SharedFunctionInfo> shared, Handle<Context> native_context,
6077     Handle<LiteralsArray> literals) {
6078   Isolate* isolate = shared->GetIsolate();
6079   Handle<Oddball> undefined = isolate->factory()->undefined_value();
6080   AddToOptimizedCodeMapInternal(shared, native_context, undefined, literals,
6081                                 BailoutId::None());
6082 }
6083 
6084 
IsOptimized()6085 bool JSFunction::IsOptimized() {
6086   return code()->kind() == Code::OPTIMIZED_FUNCTION;
6087 }
6088 
6089 
IsMarkedForOptimization()6090 bool JSFunction::IsMarkedForOptimization() {
6091   return code() == GetIsolate()->builtins()->builtin(
6092       Builtins::kCompileOptimized);
6093 }
6094 
6095 
IsMarkedForConcurrentOptimization()6096 bool JSFunction::IsMarkedForConcurrentOptimization() {
6097   return code() == GetIsolate()->builtins()->builtin(
6098       Builtins::kCompileOptimizedConcurrent);
6099 }
6100 
6101 
IsInOptimizationQueue()6102 bool JSFunction::IsInOptimizationQueue() {
6103   return code() == GetIsolate()->builtins()->builtin(
6104       Builtins::kInOptimizationQueue);
6105 }
6106 
6107 
CompleteInobjectSlackTrackingIfActive()6108 void JSFunction::CompleteInobjectSlackTrackingIfActive() {
6109   if (has_initial_map() && initial_map()->IsInobjectSlackTrackingInProgress()) {
6110     initial_map()->CompleteInobjectSlackTracking();
6111   }
6112 }
6113 
6114 
IsInobjectSlackTrackingInProgress()6115 bool Map::IsInobjectSlackTrackingInProgress() {
6116   return construction_counter() != Map::kNoSlackTracking;
6117 }
6118 
6119 
InobjectSlackTrackingStep()6120 void Map::InobjectSlackTrackingStep() {
6121   if (!IsInobjectSlackTrackingInProgress()) return;
6122   int counter = construction_counter();
6123   set_construction_counter(counter - 1);
6124   if (counter == kSlackTrackingCounterEnd) {
6125     CompleteInobjectSlackTracking();
6126   }
6127 }
6128 
6129 
code()6130 Code* JSFunction::code() {
6131   return Code::cast(
6132       Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
6133 }
6134 
6135 
set_code(Code * value)6136 void JSFunction::set_code(Code* value) {
6137   DCHECK(!GetHeap()->InNewSpace(value));
6138   Address entry = value->entry();
6139   WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6140   GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
6141       this,
6142       HeapObject::RawField(this, kCodeEntryOffset),
6143       value);
6144 }
6145 
6146 
set_code_no_write_barrier(Code * value)6147 void JSFunction::set_code_no_write_barrier(Code* value) {
6148   DCHECK(!GetHeap()->InNewSpace(value));
6149   Address entry = value->entry();
6150   WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
6151 }
6152 
6153 
ReplaceCode(Code * code)6154 void JSFunction::ReplaceCode(Code* code) {
6155   bool was_optimized = IsOptimized();
6156   bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;
6157 
6158   if (was_optimized && is_optimized) {
6159     shared()->EvictFromOptimizedCodeMap(this->code(),
6160         "Replacing with another optimized code");
6161   }
6162 
6163   set_code(code);
6164 
6165   // Add/remove the function from the list of optimized functions for this
6166   // context based on the state change.
6167   if (!was_optimized && is_optimized) {
6168     context()->native_context()->AddOptimizedFunction(this);
6169   }
6170   if (was_optimized && !is_optimized) {
6171     // TODO(titzer): linear in the number of optimized functions; fix!
6172     context()->native_context()->RemoveOptimizedFunction(this);
6173   }
6174 }
6175 
6176 
context()6177 Context* JSFunction::context() {
6178   return Context::cast(READ_FIELD(this, kContextOffset));
6179 }
6180 
6181 
global_proxy()6182 JSObject* JSFunction::global_proxy() {
6183   return context()->global_proxy();
6184 }
6185 
6186 
native_context()6187 Context* JSFunction::native_context() { return context()->native_context(); }
6188 
6189 
set_context(Object * value)6190 void JSFunction::set_context(Object* value) {
6191   DCHECK(value->IsUndefined() || value->IsContext());
6192   WRITE_FIELD(this, kContextOffset, value);
6193   WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
6194 }
6195 
ACCESSORS(JSFunction,prototype_or_initial_map,Object,kPrototypeOrInitialMapOffset)6196 ACCESSORS(JSFunction, prototype_or_initial_map, Object,
6197           kPrototypeOrInitialMapOffset)
6198 
6199 
6200 Map* JSFunction::initial_map() {
6201   return Map::cast(prototype_or_initial_map());
6202 }
6203 
6204 
has_initial_map()6205 bool JSFunction::has_initial_map() {
6206   return prototype_or_initial_map()->IsMap();
6207 }
6208 
6209 
has_instance_prototype()6210 bool JSFunction::has_instance_prototype() {
6211   return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
6212 }
6213 
6214 
has_prototype()6215 bool JSFunction::has_prototype() {
6216   return map()->has_non_instance_prototype() || has_instance_prototype();
6217 }
6218 
6219 
instance_prototype()6220 Object* JSFunction::instance_prototype() {
6221   DCHECK(has_instance_prototype());
6222   if (has_initial_map()) return initial_map()->prototype();
6223   // When there is no initial map and the prototype is a JSObject, the
6224   // initial map field is used for the prototype field.
6225   return prototype_or_initial_map();
6226 }
6227 
6228 
prototype()6229 Object* JSFunction::prototype() {
6230   DCHECK(has_prototype());
6231   // If the function's prototype property has been set to a non-JSObject
6232   // value, that value is stored in the constructor field of the map.
6233   if (map()->has_non_instance_prototype()) {
6234     Object* prototype = map()->GetConstructor();
6235     // The map must have a prototype in that field, not a back pointer.
6236     DCHECK(!prototype->IsMap());
6237     return prototype;
6238   }
6239   return instance_prototype();
6240 }
6241 
6242 
is_compiled()6243 bool JSFunction::is_compiled() {
6244   Builtins* builtins = GetIsolate()->builtins();
6245   return code() != builtins->builtin(Builtins::kCompileLazy) &&
6246          code() != builtins->builtin(Builtins::kCompileOptimized) &&
6247          code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
6248 }
6249 
6250 
NumberOfLiterals()6251 int JSFunction::NumberOfLiterals() {
6252   return literals()->length();
6253 }
6254 
6255 
// JSProxy field accessors; a proxy is revoked once its handler is no longer
// a JSReceiver.
ACCESSORS(JSProxy, target, JSReceiver, kTargetOffset)
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)

bool JSProxy::IsRevoked() const { return !handler()->IsJSReceiver(); }

ACCESSORS(JSCollection, table, Object, kTableOffset)
6263 
6264 
6265 #define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
6266   template<class Derived, class TableType>                           \
6267   type* OrderedHashTableIterator<Derived, TableType>::name() const { \
6268     return type::cast(READ_FIELD(this, offset));                     \
6269   }                                                                  \
6270   template<class Derived, class TableType>                           \
6271   void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
6272       type* value, WriteBarrierMode mode) {                          \
6273     WRITE_FIELD(this, offset, value);                                \
6274     CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
6275   }
6276 
6277 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
6278 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
6279 ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)
6280 
6281 #undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS
6282 
6283 
6284 ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
6285 ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
6286 
6287 
6288 Address Foreign::foreign_address() {
6289   return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
6290 }
6291 
6292 
set_foreign_address(Address value)6293 void Foreign::set_foreign_address(Address value) {
6294   WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
6295 }
6296 
6297 
ACCESSORS(JSGeneratorObject,function,JSFunction,kFunctionOffset)6298 ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
6299 ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
6300 ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
6301 SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
6302 ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
6303 
6304 bool JSGeneratorObject::is_suspended() {
6305   DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
6306   DCHECK_EQ(kGeneratorClosed, 0);
6307   return continuation() > 0;
6308 }
6309 
is_closed()6310 bool JSGeneratorObject::is_closed() {
6311   return continuation() == kGeneratorClosed;
6312 }
6313 
is_executing()6314 bool JSGeneratorObject::is_executing() {
6315   return continuation() == kGeneratorExecuting;
6316 }
6317 
ACCESSORS(JSModule,context,Object,kContextOffset)6318 ACCESSORS(JSModule, context, Object, kContextOffset)
6319 ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)
6320 
6321 
6322 ACCESSORS(JSValue, value, Object, kValueOffset)
6323 
6324 
6325 HeapNumber* HeapNumber::cast(Object* object) {
6326   SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6327   return reinterpret_cast<HeapNumber*>(object);
6328 }
6329 
6330 
cast(const Object * object)6331 const HeapNumber* HeapNumber::cast(const Object* object) {
6332   SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
6333   return reinterpret_cast<const HeapNumber*>(object);
6334 }
6335 
6336 
// JSDate value and cached date-component fields.
ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


// JSMessageObject fields.
SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


// Code header fields.
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
6364 
6365 
6366 void Code::WipeOutHeader() {
6367   WRITE_FIELD(this, kRelocationInfoOffset, NULL);
6368   WRITE_FIELD(this, kHandlerTableOffset, NULL);
6369   WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
6370   // Do not wipe out major/minor keys on a code stub or IC
6371   if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
6372     WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
6373   }
6374   WRITE_FIELD(this, kNextCodeLinkOffset, NULL);
6375   WRITE_FIELD(this, kGCMetadataOffset, NULL);
6376 }
6377 
6378 
type_feedback_info()6379 Object* Code::type_feedback_info() {
6380   DCHECK(kind() == FUNCTION);
6381   return raw_type_feedback_info();
6382 }
6383 
6384 
set_type_feedback_info(Object * value,WriteBarrierMode mode)6385 void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
6386   DCHECK(kind() == FUNCTION);
6387   set_raw_type_feedback_info(value, mode);
6388   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
6389                             value, mode);
6390 }
6391 
6392 
stub_key()6393 uint32_t Code::stub_key() {
6394   DCHECK(IsCodeStubOrIC());
6395   Smi* smi_key = Smi::cast(raw_type_feedback_info());
6396   return static_cast<uint32_t>(smi_key->value());
6397 }
6398 
6399 
set_stub_key(uint32_t key)6400 void Code::set_stub_key(uint32_t key) {
6401   DCHECK(IsCodeStubOrIC());
6402   set_raw_type_feedback_info(Smi::FromInt(key));
6403 }
6404 
6405 
ACCESSORS(Code,gc_metadata,Object,kGCMetadataOffset)6406 ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
6407 INT_ACCESSORS(Code, ic_age, kICAgeOffset)
6408 
6409 
6410 byte* Code::instruction_start()  {
6411   return FIELD_ADDR(this, kHeaderSize);
6412 }
6413 
6414 
instruction_end()6415 byte* Code::instruction_end()  {
6416   return instruction_start() + instruction_size();
6417 }
6418 
6419 
body_size()6420 int Code::body_size() {
6421   return RoundUp(instruction_size(), kObjectAlignment);
6422 }
6423 
6424 
unchecked_relocation_info()6425 ByteArray* Code::unchecked_relocation_info() {
6426   return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
6427 }
6428 
6429 
relocation_start()6430 byte* Code::relocation_start() {
6431   return unchecked_relocation_info()->GetDataStartAddress();
6432 }
6433 
6434 
relocation_size()6435 int Code::relocation_size() {
6436   return unchecked_relocation_info()->length();
6437 }
6438 
6439 
entry()6440 byte* Code::entry() {
6441   return instruction_start();
6442 }
6443 
6444 
contains(byte * inner_pointer)6445 bool Code::contains(byte* inner_pointer) {
6446   return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6447 }
6448 
6449 
ExecutableSize()6450 int Code::ExecutableSize() {
6451   // Check that the assumptions about the layout of the code object holds.
6452   DCHECK_EQ(static_cast<int>(instruction_start() - address()),
6453             Code::kHeaderSize);
6454   return instruction_size() + Code::kHeaderSize;
6455 }
6456 
6457 
CodeSize()6458 int Code::CodeSize() { return SizeFor(body_size()); }
6459 
6460 
ACCESSORS(JSArray,length,Object,kLengthOffset)6461 ACCESSORS(JSArray, length, Object, kLengthOffset)
6462 
6463 
6464 void* JSArrayBuffer::backing_store() const {
6465   intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
6466   return reinterpret_cast<void*>(ptr);
6467 }
6468 
6469 
set_backing_store(void * value,WriteBarrierMode mode)6470 void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
6471   intptr_t ptr = reinterpret_cast<intptr_t>(value);
6472   WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
6473 }
6474 
6475 
ACCESSORS(JSArrayBuffer,byte_length,Object,kByteLengthOffset)6476 ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
6477 
6478 
6479 void JSArrayBuffer::set_bit_field(uint32_t bits) {
6480   if (kInt32Size != kPointerSize) {
6481 #if V8_TARGET_LITTLE_ENDIAN
6482     WRITE_UINT32_FIELD(this, kBitFieldSlot + kInt32Size, 0);
6483 #else
6484     WRITE_UINT32_FIELD(this, kBitFieldSlot, 0);
6485 #endif
6486   }
6487   WRITE_UINT32_FIELD(this, kBitFieldOffset, bits);
6488 }
6489 
6490 
bit_field()6491 uint32_t JSArrayBuffer::bit_field() const {
6492   return READ_UINT32_FIELD(this, kBitFieldOffset);
6493 }
6494 
6495 
is_external()6496 bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }
6497 
6498 
set_is_external(bool value)6499 void JSArrayBuffer::set_is_external(bool value) {
6500   set_bit_field(IsExternal::update(bit_field(), value));
6501 }
6502 
6503 
is_neuterable()6504 bool JSArrayBuffer::is_neuterable() {
6505   return IsNeuterable::decode(bit_field());
6506 }
6507 
6508 
set_is_neuterable(bool value)6509 void JSArrayBuffer::set_is_neuterable(bool value) {
6510   set_bit_field(IsNeuterable::update(bit_field(), value));
6511 }
6512 
6513 
was_neutered()6514 bool JSArrayBuffer::was_neutered() { return WasNeutered::decode(bit_field()); }
6515 
6516 
set_was_neutered(bool value)6517 void JSArrayBuffer::set_was_neutered(bool value) {
6518   set_bit_field(WasNeutered::update(bit_field(), value));
6519 }
6520 
6521 
is_shared()6522 bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); }
6523 
6524 
set_is_shared(bool value)6525 void JSArrayBuffer::set_is_shared(bool value) {
6526   set_bit_field(IsShared::update(bit_field(), value));
6527 }
6528 
6529 
byte_offset()6530 Object* JSArrayBufferView::byte_offset() const {
6531   if (WasNeutered()) return Smi::FromInt(0);
6532   return Object::cast(READ_FIELD(this, kByteOffsetOffset));
6533 }
6534 
6535 
set_byte_offset(Object * value,WriteBarrierMode mode)6536 void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
6537   WRITE_FIELD(this, kByteOffsetOffset, value);
6538   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode);
6539 }
6540 
6541 
byte_length()6542 Object* JSArrayBufferView::byte_length() const {
6543   if (WasNeutered()) return Smi::FromInt(0);
6544   return Object::cast(READ_FIELD(this, kByteLengthOffset));
6545 }
6546 
6547 
set_byte_length(Object * value,WriteBarrierMode mode)6548 void JSArrayBufferView::set_byte_length(Object* value, WriteBarrierMode mode) {
6549   WRITE_FIELD(this, kByteLengthOffset, value);
6550   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteLengthOffset, value, mode);
6551 }
6552 
6553 
ACCESSORS(JSArrayBufferView,buffer,Object,kBufferOffset)6554 ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
6555 #ifdef VERIFY_HEAP
6556 ACCESSORS(JSArrayBufferView, raw_byte_offset, Object, kByteOffsetOffset)
6557 ACCESSORS(JSArrayBufferView, raw_byte_length, Object, kByteLengthOffset)
6558 #endif
6559 
6560 
6561 bool JSArrayBufferView::WasNeutered() const {
6562   return JSArrayBuffer::cast(buffer())->was_neutered();
6563 }
6564 
6565 
length()6566 Object* JSTypedArray::length() const {
6567   if (WasNeutered()) return Smi::FromInt(0);
6568   return Object::cast(READ_FIELD(this, kLengthOffset));
6569 }
6570 
6571 
length_value()6572 uint32_t JSTypedArray::length_value() const {
6573   if (WasNeutered()) return 0;
6574   uint32_t index = 0;
6575   CHECK(Object::cast(READ_FIELD(this, kLengthOffset))->ToArrayLength(&index));
6576   return index;
6577 }
6578 
6579 
set_length(Object * value,WriteBarrierMode mode)6580 void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
6581   WRITE_FIELD(this, kLengthOffset, value);
6582   CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
6583 }
6584 
6585 
6586 #ifdef VERIFY_HEAP
ACCESSORS(JSTypedArray,raw_length,Object,kLengthOffset)6587 ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
6588 #endif
6589 
6590 
6591 ACCESSORS(JSRegExp, data, Object, kDataOffset)
6592 ACCESSORS(JSRegExp, flags, Object, kFlagsOffset)
6593 ACCESSORS(JSRegExp, source, Object, kSourceOffset)
6594 
6595 
6596 JSRegExp::Type JSRegExp::TypeTag() {
6597   Object* data = this->data();
6598   if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
6599   Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
6600   return static_cast<JSRegExp::Type>(smi->value());
6601 }
6602 
6603 
CaptureCount()6604 int JSRegExp::CaptureCount() {
6605   switch (TypeTag()) {
6606     case ATOM:
6607       return 0;
6608     case IRREGEXP:
6609       return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6610     default:
6611       UNREACHABLE();
6612       return -1;
6613   }
6614 }
6615 
6616 
GetFlags()6617 JSRegExp::Flags JSRegExp::GetFlags() {
6618   DCHECK(this->data()->IsFixedArray());
6619   Object* data = this->data();
6620   Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
6621   return Flags(smi->value());
6622 }
6623 
6624 
Pattern()6625 String* JSRegExp::Pattern() {
6626   DCHECK(this->data()->IsFixedArray());
6627   Object* data = this->data();
6628   String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
6629   return pattern;
6630 }
6631 
6632 
DataAt(int index)6633 Object* JSRegExp::DataAt(int index) {
6634   DCHECK(TypeTag() != NOT_COMPILED);
6635   return FixedArray::cast(data())->get(index);
6636 }
6637 
6638 
SetDataAt(int index,Object * value)6639 void JSRegExp::SetDataAt(int index, Object* value) {
6640   DCHECK(TypeTag() != NOT_COMPILED);
6641   DCHECK(index >= kDataIndex);  // Only implementation data can be set this way.
6642   FixedArray::cast(data())->set(index, value);
6643 }
6644 
6645 
GetElementsKind()6646 ElementsKind JSObject::GetElementsKind() {
6647   ElementsKind kind = map()->elements_kind();
6648 #if VERIFY_HEAP && DEBUG
6649   FixedArrayBase* fixed_array =
6650       reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));
6651 
6652   // If a GC was caused while constructing this object, the elements
6653   // pointer may point to a one pointer filler map.
6654   if (ElementsAreSafeToExamine()) {
6655     Map* map = fixed_array->map();
6656     DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
6657             (map == GetHeap()->fixed_array_map() ||
6658              map == GetHeap()->fixed_cow_array_map())) ||
6659            (IsFastDoubleElementsKind(kind) &&
6660             (fixed_array->IsFixedDoubleArray() ||
6661              fixed_array == GetHeap()->empty_fixed_array())) ||
6662            (kind == DICTIONARY_ELEMENTS &&
6663             fixed_array->IsFixedArray() &&
6664             fixed_array->IsDictionary()) ||
6665            (kind > DICTIONARY_ELEMENTS));
6666     DCHECK(!IsSloppyArgumentsElements(kind) ||
6667            (elements()->IsFixedArray() && elements()->length() >= 2));
6668   }
6669 #endif
6670   return kind;
6671 }
6672 
6673 
HasFastObjectElements()6674 bool JSObject::HasFastObjectElements() {
6675   return IsFastObjectElementsKind(GetElementsKind());
6676 }
6677 
6678 
HasFastSmiElements()6679 bool JSObject::HasFastSmiElements() {
6680   return IsFastSmiElementsKind(GetElementsKind());
6681 }
6682 
6683 
HasFastSmiOrObjectElements()6684 bool JSObject::HasFastSmiOrObjectElements() {
6685   return IsFastSmiOrObjectElementsKind(GetElementsKind());
6686 }
6687 
6688 
HasFastDoubleElements()6689 bool JSObject::HasFastDoubleElements() {
6690   return IsFastDoubleElementsKind(GetElementsKind());
6691 }
6692 
6693 
HasFastHoleyElements()6694 bool JSObject::HasFastHoleyElements() {
6695   return IsFastHoleyElementsKind(GetElementsKind());
6696 }
6697 
6698 
HasFastElements()6699 bool JSObject::HasFastElements() {
6700   return IsFastElementsKind(GetElementsKind());
6701 }
6702 
6703 
HasDictionaryElements()6704 bool JSObject::HasDictionaryElements() {
6705   return GetElementsKind() == DICTIONARY_ELEMENTS;
6706 }
6707 
6708 
HasFastArgumentsElements()6709 bool JSObject::HasFastArgumentsElements() {
6710   return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
6711 }
6712 
6713 
HasSlowArgumentsElements()6714 bool JSObject::HasSlowArgumentsElements() {
6715   return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
6716 }
6717 
6718 
HasSloppyArgumentsElements()6719 bool JSObject::HasSloppyArgumentsElements() {
6720   return IsSloppyArgumentsElements(GetElementsKind());
6721 }
6722 
6723 
HasFixedTypedArrayElements()6724 bool JSObject::HasFixedTypedArrayElements() {
6725   HeapObject* array = elements();
6726   DCHECK(array != NULL);
6727   return array->IsFixedTypedArrayBase();
6728 }
6729 
6730 
// Defines JSObject::HasFixed<Type>Elements() for one typed-array element
// type: reports whether the elements backing store's instance type is the
// matching FIXED_<TYPE>_ARRAY_TYPE. (Comments cannot appear inside the
// macro body because of the line continuations.)
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)         \
bool JSObject::HasFixed##Type##Elements() {                               \
  HeapObject* array = elements();                                         \
  DCHECK(array != NULL);                                                  \
  if (!array->IsHeapObject())                                             \
    return false;                                                         \
  return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE;      \
}

// Instantiate one checker per typed-array kind (Uint8, Int8, ...).
TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK
6743 
6744 
6745 bool JSObject::HasNamedInterceptor() {
6746   return map()->has_named_interceptor();
6747 }
6748 
6749 
HasIndexedInterceptor()6750 bool JSObject::HasIndexedInterceptor() {
6751   return map()->has_indexed_interceptor();
6752 }
6753 
6754 
global_dictionary()6755 GlobalDictionary* JSObject::global_dictionary() {
6756   DCHECK(!HasFastProperties());
6757   DCHECK(IsJSGlobalObject());
6758   return GlobalDictionary::cast(properties());
6759 }
6760 
6761 
element_dictionary()6762 SeededNumberDictionary* JSObject::element_dictionary() {
6763   DCHECK(HasDictionaryElements());
6764   return SeededNumberDictionary::cast(elements());
6765 }
6766 
6767 
IsHashFieldComputed(uint32_t field)6768 bool Name::IsHashFieldComputed(uint32_t field) {
6769   return (field & kHashNotComputedMask) == 0;
6770 }
6771 
6772 
HasHashCode()6773 bool Name::HasHashCode() {
6774   return IsHashFieldComputed(hash_field());
6775 }
6776 
6777 
Hash()6778 uint32_t Name::Hash() {
6779   // Fast case: has hash code already been computed?
6780   uint32_t field = hash_field();
6781   if (IsHashFieldComputed(field)) return field >> kHashShift;
6782   // Slow case: compute hash code and set it. Has to be a string.
6783   return String::cast(this)->ComputeAndSetHash();
6784 }
6785 
6786 
IsPrivate()6787 bool Name::IsPrivate() {
6788   return this->IsSymbol() && Symbol::cast(this)->is_private();
6789 }
6790 
6791 
// Seeds the running hash and decides up front whether the string could
// still be a valid array index (non-empty and within the index length cap).
StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true) {
  // With hash randomization disabled the seed is required to be zero.
  DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
}
6800 
6801 
has_trivial_hash()6802 bool StringHasher::has_trivial_hash() {
6803   return length_ > String::kMaxHashCalcLength;
6804 }
6805 
6806 
AddCharacterCore(uint32_t running_hash,uint16_t c)6807 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
6808   running_hash += c;
6809   running_hash += (running_hash << 10);
6810   running_hash ^= (running_hash >> 6);
6811   return running_hash;
6812 }
6813 
6814 
GetHashCore(uint32_t running_hash)6815 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
6816   running_hash += (running_hash << 3);
6817   running_hash ^= (running_hash >> 11);
6818   running_hash += (running_hash << 15);
6819   if ((running_hash & String::kHashBitMask) == 0) {
6820     return kZeroHash;
6821   }
6822   return running_hash;
6823 }
6824 
6825 
ComputeRunningHash(uint32_t running_hash,const uc16 * chars,int length)6826 uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
6827                                           const uc16* chars, int length) {
6828   DCHECK_NOT_NULL(chars);
6829   DCHECK(length >= 0);
6830   for (int i = 0; i < length; ++i) {
6831     running_hash = AddCharacterCore(running_hash, *chars++);
6832   }
6833   return running_hash;
6834 }
6835 
6836 
ComputeRunningHashOneByte(uint32_t running_hash,const char * chars,int length)6837 uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
6838                                                  const char* chars,
6839                                                  int length) {
6840   DCHECK_NOT_NULL(chars);
6841   DCHECK(length >= 0);
6842   for (int i = 0; i < length; ++i) {
6843     uint16_t c = static_cast<uint16_t>(*chars++);
6844     running_hash = AddCharacterCore(running_hash, c);
6845   }
6846   return running_hash;
6847 }
6848 
6849 
AddCharacter(uint16_t c)6850 void StringHasher::AddCharacter(uint16_t c) {
6851   // Use the Jenkins one-at-a-time hash function to update the hash
6852   // for the given character.
6853   raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
6854 }
6855 
6856 
// Consumes one character of a candidate array index. Returns false (and
// permanently clears is_array_index_) as soon as the string can no longer
// be a valid array index; otherwise accumulates the digit and returns true.
bool StringHasher::UpdateIndex(uint16_t c) {
  DCHECK(is_array_index_);
  // Any non-digit disqualifies the string.
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    // A leading zero is only valid for the one-character string "0".
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  // Overflow guard before array_index_ * 10 + d: 429496729 is
  // 0xFFFFFFFF / 10; the ((d + 3) >> 3) term adjusts the bound at the
  // exact boundary without a division. NOTE(review): boundary-case
  // adjustment inferred from the constant — confirm against the
  // kMaxArrayIndex definition.
  if (array_index_ > 429496729U - ((d + 3) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}
6878 
6879 
// Feeds |length| characters into the hash. While the prefix still looks
// like an array index, each character is additionally run through
// UpdateIndex(); once that fails, the remaining characters only update
// the hash.
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        // chars[i] has already been hashed above; advance so the tail
        // loop does not hash it a second time.
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    DCHECK(!is_array_index_);
    AddCharacter(chars[i]);
  }
}
6898 
6899 
6900 template <typename schar>
HashSequentialString(const schar * chars,int length,uint32_t seed)6901 uint32_t StringHasher::HashSequentialString(const schar* chars,
6902                                             int length,
6903                                             uint32_t seed) {
6904   StringHasher hasher(length, seed);
6905   if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
6906   return hasher.GetHashField();
6907 }
6908 
6909 
IteratingStringHasher(int len,uint32_t seed)6910 IteratingStringHasher::IteratingStringHasher(int len, uint32_t seed)
6911     : StringHasher(len, seed) {}
6912 
6913 
Hash(String * string,uint32_t seed)6914 uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
6915   IteratingStringHasher hasher(string->length(), seed);
6916   // Nothing to do.
6917   if (hasher.has_trivial_hash()) return hasher.GetHashField();
6918   ConsString* cons_string = String::VisitFlat(&hasher, string);
6919   if (cons_string == nullptr) return hasher.GetHashField();
6920   hasher.VisitConsString(cons_string);
6921   return hasher.GetHashField();
6922 }
6923 
6924 
VisitOneByteString(const uint8_t * chars,int length)6925 void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
6926                                                int length) {
6927   AddCharacters(chars, length);
6928 }
6929 
6930 
VisitTwoByteString(const uint16_t * chars,int length)6931 void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
6932                                                int length) {
6933   AddCharacters(chars, length);
6934 }
6935 
6936 
AsArrayIndex(uint32_t * index)6937 bool Name::AsArrayIndex(uint32_t* index) {
6938   return IsString() && String::cast(this)->AsArrayIndex(index);
6939 }
6940 
6941 
AsArrayIndex(uint32_t * index)6942 bool String::AsArrayIndex(uint32_t* index) {
6943   uint32_t field = hash_field();
6944   if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
6945     return false;
6946   }
6947   return SlowAsArrayIndex(index);
6948 }
6949 
6950 
// Redirects this internalized string to its canonical duplicate by storing
// a tagged pointer to |canonical| in the hash field slot. Afterwards
// HasHashCode() is false, which is how forwarded strings are recognized.
void String::SetForwardedInternalizedString(String* canonical) {
  DCHECK(IsInternalizedString());
  DCHECK(HasHashCode());
  if (canonical == this) return;  // No need to forward.
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(canonical->HasHashCode());
  WRITE_FIELD(this, kHashFieldSlot, canonical);
  // Setting the hash field to a tagged value sets the LSB, causing the hash
  // code to be interpreted as uninitialized.  We use this fact to recognize
  // that we have a forwarded string.
  DCHECK(!HasHashCode());
}
6964 
6965 
// Returns the canonical string this internalized string forwards to, or
// |this| itself when it is not forwarded. Forwarding is detected by the
// hash field holding a tagged pointer instead of a computed hash (see
// SetForwardedInternalizedString).
String* String::GetForwardedInternalizedString() {
  DCHECK(IsInternalizedString());
  if (HasHashCode()) return this;
  String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->HasHashCode());
  return canonical;
}
6975 
6976 
6977 // static
GreaterThan(Handle<Object> x,Handle<Object> y,Strength strength)6978 Maybe<bool> Object::GreaterThan(Handle<Object> x, Handle<Object> y,
6979                                 Strength strength) {
6980   Maybe<ComparisonResult> result = Compare(x, y, strength);
6981   if (result.IsJust()) {
6982     switch (result.FromJust()) {
6983       case ComparisonResult::kGreaterThan:
6984         return Just(true);
6985       case ComparisonResult::kLessThan:
6986       case ComparisonResult::kEqual:
6987       case ComparisonResult::kUndefined:
6988         return Just(false);
6989     }
6990   }
6991   return Nothing<bool>();
6992 }
6993 
6994 
6995 // static
GreaterThanOrEqual(Handle<Object> x,Handle<Object> y,Strength strength)6996 Maybe<bool> Object::GreaterThanOrEqual(Handle<Object> x, Handle<Object> y,
6997                                        Strength strength) {
6998   Maybe<ComparisonResult> result = Compare(x, y, strength);
6999   if (result.IsJust()) {
7000     switch (result.FromJust()) {
7001       case ComparisonResult::kEqual:
7002       case ComparisonResult::kGreaterThan:
7003         return Just(true);
7004       case ComparisonResult::kLessThan:
7005       case ComparisonResult::kUndefined:
7006         return Just(false);
7007     }
7008   }
7009   return Nothing<bool>();
7010 }
7011 
7012 
7013 // static
LessThan(Handle<Object> x,Handle<Object> y,Strength strength)7014 Maybe<bool> Object::LessThan(Handle<Object> x, Handle<Object> y,
7015                              Strength strength) {
7016   Maybe<ComparisonResult> result = Compare(x, y, strength);
7017   if (result.IsJust()) {
7018     switch (result.FromJust()) {
7019       case ComparisonResult::kLessThan:
7020         return Just(true);
7021       case ComparisonResult::kEqual:
7022       case ComparisonResult::kGreaterThan:
7023       case ComparisonResult::kUndefined:
7024         return Just(false);
7025     }
7026   }
7027   return Nothing<bool>();
7028 }
7029 
7030 
7031 // static
LessThanOrEqual(Handle<Object> x,Handle<Object> y,Strength strength)7032 Maybe<bool> Object::LessThanOrEqual(Handle<Object> x, Handle<Object> y,
7033                                     Strength strength) {
7034   Maybe<ComparisonResult> result = Compare(x, y, strength);
7035   if (result.IsJust()) {
7036     switch (result.FromJust()) {
7037       case ComparisonResult::kEqual:
7038       case ComparisonResult::kLessThan:
7039         return Just(true);
7040       case ComparisonResult::kGreaterThan:
7041       case ComparisonResult::kUndefined:
7042         return Just(false);
7043     }
7044   }
7045   return Nothing<bool>();
7046 }
7047 
7048 
GetPropertyOrElement(Handle<Object> object,Handle<Name> name,LanguageMode language_mode)7049 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
7050                                                  Handle<Name> name,
7051                                                  LanguageMode language_mode) {
7052   LookupIterator it =
7053       LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
7054   return GetProperty(&it, language_mode);
7055 }
7056 
7057 
GetPropertyOrElement(Handle<JSReceiver> holder,Handle<Name> name,Handle<Object> receiver,LanguageMode language_mode)7058 MaybeHandle<Object> Object::GetPropertyOrElement(Handle<JSReceiver> holder,
7059                                                  Handle<Name> name,
7060                                                  Handle<Object> receiver,
7061                                                  LanguageMode language_mode) {
7062   LookupIterator it = LookupIterator::PropertyOrElement(
7063       name->GetIsolate(), receiver, name, holder);
7064   return GetProperty(&it, language_mode);
7065 }
7066 
7067 
// Installs the canonical empty property backing store: the empty properties
// dictionary for dictionary-mode maps, the empty fixed array otherwise.
void JSReceiver::initialize_properties() {
  // Both canonical empties live outside new space, so the plain WRITE_FIELD
  // below needs no write barrier.
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_properties_dictionary()));
  if (map()->is_dictionary_map()) {
    WRITE_FIELD(this, kPropertiesOffset,
                GetHeap()->empty_properties_dictionary());
  } else {
    WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
  }
}
7078 
7079 
HasFastProperties()7080 bool JSReceiver::HasFastProperties() {
7081   DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
7082   return !properties()->IsDictionary();
7083 }
7084 
7085 
property_dictionary()7086 NameDictionary* JSReceiver::property_dictionary() {
7087   DCHECK(!HasFastProperties());
7088   DCHECK(!IsJSGlobalObject());
7089   return NameDictionary::cast(properties());
7090 }
7091 
7092 
HasProperty(Handle<JSReceiver> object,Handle<Name> name)7093 Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
7094                                     Handle<Name> name) {
7095   LookupIterator it =
7096       LookupIterator::PropertyOrElement(object->GetIsolate(), object, name);
7097   return HasProperty(&it);
7098 }
7099 
7100 
HasOwnProperty(Handle<JSReceiver> object,Handle<Name> name)7101 Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
7102                                        Handle<Name> name) {
7103   if (object->IsJSObject()) {  // Shortcut
7104     LookupIterator it = LookupIterator::PropertyOrElement(
7105         object->GetIsolate(), object, name, LookupIterator::HIDDEN);
7106     return HasProperty(&it);
7107   }
7108 
7109   Maybe<PropertyAttributes> attributes =
7110       JSReceiver::GetOwnPropertyAttributes(object, name);
7111   MAYBE_RETURN(attributes, Nothing<bool>());
7112   return Just(attributes.FromJust() != ABSENT);
7113 }
7114 
7115 
GetPropertyAttributes(Handle<JSReceiver> object,Handle<Name> name)7116 Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
7117     Handle<JSReceiver> object, Handle<Name> name) {
7118   LookupIterator it =
7119       LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
7120   return GetPropertyAttributes(&it);
7121 }
7122 
7123 
GetOwnPropertyAttributes(Handle<JSReceiver> object,Handle<Name> name)7124 Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
7125     Handle<JSReceiver> object, Handle<Name> name) {
7126   LookupIterator it = LookupIterator::PropertyOrElement(
7127       name->GetIsolate(), object, name, LookupIterator::HIDDEN);
7128   return GetPropertyAttributes(&it);
7129 }
7130 
7131 
HasElement(Handle<JSReceiver> object,uint32_t index)7132 Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
7133   LookupIterator it(object->GetIsolate(), object, index);
7134   return HasProperty(&it);
7135 }
7136 
7137 
GetElementAttributes(Handle<JSReceiver> object,uint32_t index)7138 Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
7139     Handle<JSReceiver> object, uint32_t index) {
7140   Isolate* isolate = object->GetIsolate();
7141   LookupIterator it(isolate, object, index);
7142   return GetPropertyAttributes(&it);
7143 }
7144 
7145 
GetOwnElementAttributes(Handle<JSReceiver> object,uint32_t index)7146 Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
7147     Handle<JSReceiver> object, uint32_t index) {
7148   Isolate* isolate = object->GetIsolate();
7149   LookupIterator it(isolate, object, index, LookupIterator::HIDDEN);
7150   return GetPropertyAttributes(&it);
7151 }
7152 
7153 
IsDetached()7154 bool JSGlobalObject::IsDetached() {
7155   return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
7156 }
7157 
7158 
IsDetachedFrom(JSGlobalObject * global)7159 bool JSGlobalProxy::IsDetachedFrom(JSGlobalObject* global) const {
7160   const PrototypeIterator iter(this->GetIsolate(),
7161                                const_cast<JSGlobalProxy*>(this));
7162   return iter.GetCurrent() != global;
7163 }
7164 
7165 
GetOrCreateIdentityHash(Handle<JSReceiver> object)7166 Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
7167   return object->IsJSProxy()
7168       ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
7169       : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
7170 }
7171 
7172 
GetIdentityHash()7173 Object* JSReceiver::GetIdentityHash() {
7174   return IsJSProxy()
7175       ? JSProxy::cast(this)->GetIdentityHash()
7176       : JSObject::cast(this)->GetIdentityHash();
7177 }
7178 
7179 
all_can_read()7180 bool AccessorInfo::all_can_read() {
7181   return BooleanBit::get(flag(), kAllCanReadBit);
7182 }
7183 
7184 
set_all_can_read(bool value)7185 void AccessorInfo::set_all_can_read(bool value) {
7186   set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
7187 }
7188 
7189 
all_can_write()7190 bool AccessorInfo::all_can_write() {
7191   return BooleanBit::get(flag(), kAllCanWriteBit);
7192 }
7193 
7194 
set_all_can_write(bool value)7195 void AccessorInfo::set_all_can_write(bool value) {
7196   set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
7197 }
7198 
7199 
is_special_data_property()7200 bool AccessorInfo::is_special_data_property() {
7201   return BooleanBit::get(flag(), kSpecialDataProperty);
7202 }
7203 
7204 
set_is_special_data_property(bool value)7205 void AccessorInfo::set_is_special_data_property(bool value) {
7206   set_flag(BooleanBit::set(flag(), kSpecialDataProperty, value));
7207 }
7208 
7209 
property_attributes()7210 PropertyAttributes AccessorInfo::property_attributes() {
7211   return AttributesField::decode(static_cast<uint32_t>(flag()));
7212 }
7213 
7214 
set_property_attributes(PropertyAttributes attributes)7215 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
7216   set_flag(AttributesField::update(flag(), attributes));
7217 }
7218 
7219 
IsCompatibleReceiver(Object * receiver)7220 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
7221   if (!HasExpectedReceiverType()) return true;
7222   if (!receiver->IsJSObject()) return false;
7223   return FunctionTemplateInfo::cast(expected_receiver_type())
7224       ->IsTemplateFor(JSObject::cast(receiver)->map());
7225 }
7226 
7227 
HasExpectedReceiverType()7228 bool AccessorInfo::HasExpectedReceiverType() {
7229   return expected_receiver_type()->IsFunctionTemplateInfo();
7230 }
7231 
7232 
get(AccessorComponent component)7233 Object* AccessorPair::get(AccessorComponent component) {
7234   return component == ACCESSOR_GETTER ? getter() : setter();
7235 }
7236 
7237 
set(AccessorComponent component,Object * value)7238 void AccessorPair::set(AccessorComponent component, Object* value) {
7239   if (component == ACCESSOR_GETTER) {
7240     set_getter(value);
7241   } else {
7242     set_setter(value);
7243   }
7244 }
7245 
7246 
SetComponents(Object * getter,Object * setter)7247 void AccessorPair::SetComponents(Object* getter, Object* setter) {
7248   if (!getter->IsNull()) set_getter(getter);
7249   if (!setter->IsNull()) set_setter(setter);
7250 }
7251 
7252 
Equals(AccessorPair * pair)7253 bool AccessorPair::Equals(AccessorPair* pair) {
7254   return (this == pair) || pair->Equals(getter(), setter());
7255 }
7256 
7257 
Equals(Object * getter_value,Object * setter_value)7258 bool AccessorPair::Equals(Object* getter_value, Object* setter_value) {
7259   return (getter() == getter_value) && (setter() == setter_value);
7260 }
7261 
7262 
ContainsAccessor()7263 bool AccessorPair::ContainsAccessor() {
7264   return IsJSAccessor(getter()) || IsJSAccessor(setter());
7265 }
7266 
7267 
IsJSAccessor(Object * obj)7268 bool AccessorPair::IsJSAccessor(Object* obj) {
7269   return obj->IsCallable() || obj->IsUndefined();
7270 }
7271 
7272 
7273 template<typename Derived, typename Shape, typename Key>
SetEntry(int entry,Handle<Object> key,Handle<Object> value)7274 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7275                                                Handle<Object> key,
7276                                                Handle<Object> value) {
7277   this->SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
7278 }
7279 
7280 
7281 template<typename Derived, typename Shape, typename Key>
SetEntry(int entry,Handle<Object> key,Handle<Object> value,PropertyDetails details)7282 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7283                                                Handle<Object> key,
7284                                                Handle<Object> value,
7285                                                PropertyDetails details) {
7286   Shape::SetEntry(static_cast<Derived*>(this), entry, key, value, details);
7287 }
7288 
7289 
// Writes a (key, value, details) triple into the three consecutive slots
// of dictionary entry |entry|, under a single write-barrier mode computed
// while allocation is disallowed.
template <typename Key>
template <typename Dictionary>
void BaseDictionaryShape<Key>::SetEntry(Dictionary* dict, int entry,
                                        Handle<Object> key,
                                        Handle<Object> value,
                                        PropertyDetails details) {
  STATIC_ASSERT(Dictionary::kEntrySize == 3);
  // Named properties must already carry an enumeration index.
  DCHECK(!key->IsName() || details.dictionary_index() > 0);
  int index = dict->EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
  dict->set(index, *key, mode);
  dict->set(index + 1, *value, mode);
  // Details are a Smi, so no barrier-relevant pointer is written here.
  dict->set(index + 2, details.AsSmi());
}
7305 
7306 
// Writes a (key, cell) pair for a global dictionary entry. Globals store
// values in PropertyCells, and the property details live on the cell
// itself rather than in a third dictionary slot.
template <typename Dictionary>
void GlobalDictionaryShape::SetEntry(Dictionary* dict, int entry,
                                     Handle<Object> key, Handle<Object> value,
                                     PropertyDetails details) {
  STATIC_ASSERT(Dictionary::kEntrySize == 2);
  // Named properties must already carry an enumeration index.
  DCHECK(!key->IsName() || details.dictionary_index() > 0);
  DCHECK(value->IsPropertyCell());
  int index = dict->EntryToIndex(entry);
  DisallowHeapAllocation no_gc;
  WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
  dict->set(index, *key, mode);
  dict->set(index + 1, *value, mode);
  PropertyCell::cast(*value)->set_property_details(details);
}
7321 
7322 
IsMatch(uint32_t key,Object * other)7323 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
7324   DCHECK(other->IsNumber());
7325   return key == static_cast<uint32_t>(other->Number());
7326 }
7327 
7328 
Hash(uint32_t key)7329 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
7330   return ComputeIntegerHash(key, 0);
7331 }
7332 
7333 
HashForObject(uint32_t key,Object * other)7334 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
7335                                                       Object* other) {
7336   DCHECK(other->IsNumber());
7337   return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
7338 }
7339 
7340 
SeededHash(uint32_t key,uint32_t seed)7341 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
7342   return ComputeIntegerHash(key, seed);
7343 }
7344 
7345 
SeededHashForObject(uint32_t key,uint32_t seed,Object * other)7346 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
7347                                                           uint32_t seed,
7348                                                           Object* other) {
7349   DCHECK(other->IsNumber());
7350   return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
7351 }
7352 
7353 
AsHandle(Isolate * isolate,uint32_t key)7354 Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
7355   return isolate->factory()->NewNumberFromUint(key);
7356 }
7357 
7358 
IsMatch(Handle<Name> key,Object * other)7359 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
7360   // We know that all entries in a hash table had their hash keys created.
7361   // Use that knowledge to have fast failure.
7362   if (key->Hash() != Name::cast(other)->Hash()) return false;
7363   return key->Equals(Name::cast(other));
7364 }
7365 
7366 
Hash(Handle<Name> key)7367 uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
7368   return key->Hash();
7369 }
7370 
7371 
HashForObject(Handle<Name> key,Object * other)7372 uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
7373   return Name::cast(other)->Hash();
7374 }
7375 
7376 
AsHandle(Isolate * isolate,Handle<Name> key)7377 Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
7378                                              Handle<Name> key) {
7379   DCHECK(key->IsUniqueName());
7380   return key;
7381 }
7382 
7383 
DoGenerateNewEnumerationIndices(Handle<NameDictionary> dictionary)7384 Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
7385     Handle<NameDictionary> dictionary) {
7386   return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
7387 }
7388 
7389 
7390 template <typename Dictionary>
DetailsAt(Dictionary * dict,int entry)7391 PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
7392   DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
7393   Object* raw_value = dict->ValueAt(entry);
7394   DCHECK(raw_value->IsPropertyCell());
7395   PropertyCell* cell = PropertyCell::cast(raw_value);
7396   return cell->property_details();
7397 }
7398 
7399 
7400 template <typename Dictionary>
DetailsAtPut(Dictionary * dict,int entry,PropertyDetails value)7401 void GlobalDictionaryShape::DetailsAtPut(Dictionary* dict, int entry,
7402                                          PropertyDetails value) {
7403   DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
7404   Object* raw_value = dict->ValueAt(entry);
7405   DCHECK(raw_value->IsPropertyCell());
7406   PropertyCell* cell = PropertyCell::cast(raw_value);
7407   cell->set_property_details(value);
7408 }
7409 
7410 
7411 template <typename Dictionary>
IsDeleted(Dictionary * dict,int entry)7412 bool GlobalDictionaryShape::IsDeleted(Dictionary* dict, int entry) {
7413   DCHECK(dict->ValueAt(entry)->IsPropertyCell());
7414   return PropertyCell::cast(dict->ValueAt(entry))->value()->IsTheHole();
7415 }
7416 
7417 
IsMatch(Handle<Object> key,Object * other)7418 bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
7419   return key->SameValue(other);
7420 }
7421 
7422 
Hash(Handle<Object> key)7423 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
7424   return Smi::cast(key->GetHash())->value();
7425 }
7426 
7427 
HashForObject(Handle<Object> key,Object * other)7428 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
7429                                              Object* other) {
7430   return Smi::cast(other->GetHash())->value();
7431 }
7432 
7433 
AsHandle(Isolate * isolate,Handle<Object> key)7434 Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
7435                                               Handle<Object> key) {
7436   return key;
7437 }
7438 
7439 
Shrink(Handle<ObjectHashTable> table,Handle<Object> key)7440 Handle<ObjectHashTable> ObjectHashTable::Shrink(
7441     Handle<ObjectHashTable> table, Handle<Object> key) {
7442   return DerivedHashTable::Shrink(table, key);
7443 }
7444 
7445 
ValueAt(int entry)7446 Object* OrderedHashMap::ValueAt(int entry) {
7447   return get(EntryToIndex(entry) + kValueOffset);
7448 }
7449 
7450 
7451 template <int entrysize>
IsMatch(Handle<Object> key,Object * other)7452 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
7453   if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7454   return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
7455                            : *key == other;
7456 }
7457 
7458 
7459 template <int entrysize>
Hash(Handle<Object> key)7460 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
7461   intptr_t hash =
7462       key->IsWeakCell()
7463           ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
7464           : reinterpret_cast<intptr_t>(*key);
7465   return (uint32_t)(hash & 0xFFFFFFFF);
7466 }
7467 
7468 
7469 template <int entrysize>
HashForObject(Handle<Object> key,Object * other)7470 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
7471                                                       Object* other) {
7472   if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
7473   intptr_t hash = reinterpret_cast<intptr_t>(other);
7474   return (uint32_t)(hash & 0xFFFFFFFF);
7475 }
7476 
7477 
7478 template <int entrysize>
AsHandle(Isolate * isolate,Handle<Object> key)7479 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
7480                                                        Handle<Object> key) {
7481   return key;
7482 }
7483 
7484 
IsAsmModule()7485 bool ScopeInfo::IsAsmModule() { return AsmModuleField::decode(Flags()); }
7486 
7487 
IsAsmFunction()7488 bool ScopeInfo::IsAsmFunction() { return AsmFunctionField::decode(Flags()); }
7489 
7490 
HasSimpleParameters()7491 bool ScopeInfo::HasSimpleParameters() {
7492   return HasSimpleParametersField::decode(Flags());
7493 }
7494 
7495 
// Generates a Set<name>/<name> accessor pair for one numeric ScopeInfo
// field stored as a Smi at slot k<name>. The getter returns 0 for an
// empty (zero-length) scope info. (Comments cannot appear inside the
// macro body because of the line continuations.)
#define SCOPE_INFO_FIELD_ACCESSORS(name)                                      \
  void ScopeInfo::Set##name(int value) { set(k##name, Smi::FromInt(value)); } \
  int ScopeInfo::name() {                                                     \
    if (length() > 0) {                                                       \
      return Smi::cast(get(k##name))->value();                                \
    } else {                                                                  \
      return 0;                                                               \
    }                                                                         \
  }
// Instantiate the accessors for every numeric scope-info field.
FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(SCOPE_INFO_FIELD_ACCESSORS)
#undef SCOPE_INFO_FIELD_ACCESSORS
7507 
7508 
// Resets this map's code cache to the canonical empty fixed array.
void Map::ClearCodeCache(Heap* heap) {
  // No write barrier is needed since empty_fixed_array is not in new space.
  // Please note this function is used during marking:
  //  - MarkCompactCollector::MarkUnmarkedObject
  //  - IncrementalMarking::Step
  DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
  WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
}
7517 
7518 
SlackForArraySize(int old_size,int size_limit)7519 int Map::SlackForArraySize(int old_size, int size_limit) {
7520   const int max_slack = size_limit - old_size;
7521   CHECK_LE(0, max_slack);
7522   if (old_size < 4) {
7523     DCHECK_LE(1, max_slack);
7524     return 1;
7525   }
7526   return Min(max_slack, old_size / 4);
7527 }
7528 
7529 
set_length(Smi * length)7530 void JSArray::set_length(Smi* length) {
7531   // Don't need a write barrier for a Smi.
7532   set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
7533 }
7534 
7535 
SetLengthWouldNormalize(Heap * heap,uint32_t new_length)7536 bool JSArray::SetLengthWouldNormalize(Heap* heap, uint32_t new_length) {
7537   // If the new array won't fit in a some non-trivial fraction of the max old
7538   // space size, then force it to go dictionary mode.
7539   uint32_t max_fast_array_size =
7540       static_cast<uint32_t>((heap->MaxOldGenerationSize() / kDoubleSize) / 4);
7541   return new_length >= max_fast_array_size;
7542 }
7543 
7544 
// Returns true if this array's length may be set directly; only arrays backed
// by a FixedArray or FixedDoubleArray allow it.
bool JSArray::AllowsSetLength() {
  bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
  // The two fast backing-store types are exactly the non-typed-array case.
  DCHECK(result == !HasFixedTypedArrayElements());
  return result;
}
7550 
7551 
// Installs |storage| as |array|'s elements backing store and updates the
// array length to match the storage length.
void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  // The storage type must agree with the array's elements kind: a
  // FixedDoubleArray requires a fast-double kind; any other storage requires
  // a fast-object kind, or a fast-Smi kind if it holds only Smis/holes.
  DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}
7566 
7567 
// Returns the total IC count, decoded from the bit field packed into the
// storage1 Smi word.
int TypeFeedbackInfo::ic_total_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return ICTotalCountField::decode(current);
}
7572 
7573 
// Stores |count| into the ICTotalCountField of storage1, preserving the other
// bit fields packed into the same word.
// NOTE(review): |count| is passed through decode() before update(), which
// appears to truncate it to the field's bit width — presumably intentional
// to keep the packed word in range; confirm against ICTotalCountField layout.
void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
7580 
7581 
// Returns the count of ICs that have type information, decoded from the bit
// field packed into the storage2 Smi word.
int TypeFeedbackInfo::ic_with_type_info_count() {
  int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  return ICsWithTypeInfoCountField::decode(current);
}
7586 
7587 
// Adjusts the with-type-info IC count by |delta|, repacking the bit field
// into storage2. Negative resulting counts are silently skipped (see below).
void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
  if (delta == 0) return;
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
  // We can get negative count here when the type-feedback info is
  // shared between two code objects. This can only happen when
  // the debugger made a shallow copy of code object (see Heap::CopyCode).
  // Since we do not optimize when the debugger is active, we can skip
  // this counter update.
  if (new_count >= 0) {
    // Mask to the field width so update() receives an in-range value.
    new_count &= ICsWithTypeInfoCountField::kMask;
    value = ICsWithTypeInfoCountField::update(value, new_count);
    WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
  }
}
7603 
7604 
// Returns the generic-IC count; storage3 holds the bare count as a Smi.
int TypeFeedbackInfo::ic_generic_count() {
  return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
}
7608 
7609 
// Adjusts the generic-IC count by |delta|. A negative result is skipped for
// the same reason as in change_ic_with_type_info_count (shared feedback info
// after a debugger code copy).
void TypeFeedbackInfo::change_ic_generic_count(int delta) {
  if (delta == 0) return;
  int new_count = ic_generic_count() + delta;
  if (new_count >= 0) {
    // Clear the sign bit so the value stays within Smi range.
    new_count &= ~Smi::kMinValue;
    WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
  }
}
7618 
7619 
initialize_storage()7620 void TypeFeedbackInfo::initialize_storage() {
7621   WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
7622   WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
7623   WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
7624 }
7625 
7626 
// Increments the own-type-change checksum modulo 2^kTypeChangeChecksumBits
// and repacks it into the storage1 word.
void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
7637 
7638 
// Stores the low kTypeChangeChecksumBits bits of |checksum| into the
// InlinedTypeChangeChecksum field packed into storage2.
void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}
7648 
7649 
// Returns the own-type-change checksum, decoded from storage1.
int TypeFeedbackInfo::own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  return OwnTypeChangeChecksum::decode(value);
}
7654 
7655 
// Compares the stored inlined checksum against the low
// kTypeChangeChecksumBits bits of |checksum|.
bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
}
7661 
7662 
// Generates the Smi getter/setter for
// AliasedArgumentsEntry::aliased_context_slot.
SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
7664 
7665 
// Pushes this object onto the isolate's linked stack of Relocatables
// (prev_ links to the previous top; presumably so they can be visited when
// the GC relocates objects — confirm against Relocatable's declaration).
Relocatable::Relocatable(Isolate* isolate) {
  isolate_ = isolate;
  prev_ = isolate->relocatable_top();
  isolate->set_relocatable_top(this);
}
7671 
7672 
// Pops this object from the isolate's Relocatable stack; destruction must be
// strictly LIFO, enforced by the DCHECK.
Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
7677 
7678 
// Returns the key at the iterator's current index in the underlying table.
// The current entry must be live: deleted entries are represented by the
// hole, which the DCHECK rules out.
template<class Derived, class TableType>
Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
  TableType* table(TableType::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* key = table->KeyAt(index);
  DCHECK(!key->IsTheHole());
  return key;
}
7687 
7688 
// A set iterator yields a single value per step: the key itself.
void JSSetIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
}
7692 
7693 
// A map iterator yields a (key, value) pair per step.
void JSMapIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
  array->set(1, CurrentValue());
}
7698 
7699 
// Returns the value at the iterator's current index in the underlying
// OrderedHashMap. As with CurrentKey, the current entry must be live.
Object* JSMapIterator::CurrentValue() {
  OrderedHashMap* table(OrderedHashMap::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* value = table->ValueAt(index);
  DCHECK(!value->IsTheHole());
  return value;
}
7707 
7708 
// Generates getters/setters for JSIteratorResult's {done, value} fields.
ACCESSORS(JSIteratorResult, done, Object, kDoneOffset)
ACCESSORS(JSIteratorResult, value, Object, kValueOffset)
7711 
7712 
// Constructs a range over [first, first + length) of |string|; a length of
// -1 means "to the end of the string".
String::SubStringRange::SubStringRange(String* string, int first, int length)
    : string_(string),
      first_(first),
      length_(length == -1 ? string->length() : length) {}
7717 
7718 
7719 class String::SubStringRange::iterator final {
7720  public:
7721   typedef std::forward_iterator_tag iterator_category;
7722   typedef int difference_type;
7723   typedef uc16 value_type;
7724   typedef uc16* pointer;
7725   typedef uc16& reference;
7726 
iterator(const iterator & other)7727   iterator(const iterator& other)
7728       : content_(other.content_), offset_(other.offset_) {}
7729 
7730   uc16 operator*() { return content_.Get(offset_); }
7731   bool operator==(const iterator& other) const {
7732     return content_.UsesSameString(other.content_) && offset_ == other.offset_;
7733   }
7734   bool operator!=(const iterator& other) const {
7735     return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
7736   }
7737   iterator& operator++() {
7738     ++offset_;
7739     return *this;
7740   }
7741   iterator operator++(int);
7742 
7743  private:
7744   friend class String;
iterator(String * from,int offset)7745   iterator(String* from, int offset)
7746       : content_(from->GetFlatContent()), offset_(offset) {}
7747   String::FlatContent content_;
7748   int offset_;
7749 };
7750 
7751 
// Returns an iterator positioned at the first character of the range.
String::SubStringRange::iterator String::SubStringRange::begin() {
  return String::SubStringRange::iterator(string_, first_);
}
7755 
7756 
// Returns an iterator positioned one past the last character of the range.
String::SubStringRange::iterator String::SubStringRange::end() {
  return String::SubStringRange::iterator(string_, first_ + length_);
}
7760 
7761 
7762 // Predictably converts HeapObject* or Address to uint32 by calculating
7763 // offset of the address in respective MemoryChunk.
ObjectAddressForHashing(void * object)7764 static inline uint32_t ObjectAddressForHashing(void* object) {
7765   uint32_t value = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(object));
7766   return value & MemoryChunk::kAlignmentMask;
7767 }
7768 
7769 
7770 #undef TYPE_CHECKER
7771 #undef CAST_ACCESSOR
7772 #undef INT_ACCESSORS
7773 #undef ACCESSORS
7774 #undef SMI_ACCESSORS
7775 #undef SYNCHRONIZED_SMI_ACCESSORS
7776 #undef NOBARRIER_SMI_ACCESSORS
7777 #undef BOOL_GETTER
7778 #undef BOOL_ACCESSORS
7779 #undef FIELD_ADDR
7780 #undef FIELD_ADDR_CONST
7781 #undef READ_FIELD
7782 #undef NOBARRIER_READ_FIELD
7783 #undef WRITE_FIELD
7784 #undef NOBARRIER_WRITE_FIELD
7785 #undef WRITE_BARRIER
7786 #undef CONDITIONAL_WRITE_BARRIER
7787 #undef READ_DOUBLE_FIELD
7788 #undef WRITE_DOUBLE_FIELD
7789 #undef READ_INT_FIELD
7790 #undef WRITE_INT_FIELD
7791 #undef READ_INTPTR_FIELD
7792 #undef WRITE_INTPTR_FIELD
7793 #undef READ_UINT8_FIELD
7794 #undef WRITE_UINT8_FIELD
7795 #undef READ_INT8_FIELD
7796 #undef WRITE_INT8_FIELD
7797 #undef READ_UINT16_FIELD
7798 #undef WRITE_UINT16_FIELD
7799 #undef READ_INT16_FIELD
7800 #undef WRITE_INT16_FIELD
7801 #undef READ_UINT32_FIELD
7802 #undef WRITE_UINT32_FIELD
7803 #undef READ_INT32_FIELD
7804 #undef WRITE_INT32_FIELD
7805 #undef READ_FLOAT_FIELD
7806 #undef WRITE_FLOAT_FIELD
7807 #undef READ_UINT64_FIELD
7808 #undef WRITE_UINT64_FIELD
7809 #undef READ_INT64_FIELD
7810 #undef WRITE_INT64_FIELD
7811 #undef READ_BYTE_FIELD
7812 #undef WRITE_BYTE_FIELD
7813 #undef NOBARRIER_READ_BYTE_FIELD
7814 #undef NOBARRIER_WRITE_BYTE_FIELD
7815 
7816 }  // namespace internal
7817 }  // namespace v8
7818 
7819 #endif  // V8_OBJECTS_INL_H_
7820