1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // Review notes:
6 //
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
10 //
11 
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
14 
15 #include "src/base/atomicops.h"
16 #include "src/base/bits.h"
17 #include "src/contexts.h"
18 #include "src/conversions-inl.h"
19 #include "src/elements.h"
20 #include "src/factory.h"
21 #include "src/field-index-inl.h"
22 #include "src/heap/heap-inl.h"
23 #include "src/heap/heap.h"
24 #include "src/heap/incremental-marking.h"
25 #include "src/heap/objects-visiting.h"
26 #include "src/heap/spaces.h"
27 #include "src/heap/store-buffer.h"
28 #include "src/isolate.h"
29 #include "src/lookup.h"
30 #include "src/objects.h"
31 #include "src/property.h"
32 #include "src/prototype.h"
33 #include "src/transitions-inl.h"
34 #include "src/type-feedback-vector-inl.h"
35 #include "src/v8memory.h"
36 
37 namespace v8 {
38 namespace internal {
39 
// Reconstructs PropertyDetails from its Smi-encoded form (inverse of
// AsSmi() below).
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}
43 
44 
// Encodes these details as a Smi.
Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
51 
52 
// Returns a copy of these details with the DeletedField bit set.
PropertyDetails PropertyDetails::AsDeleted() const {
  Smi* smi = Smi::FromInt(value_ | DeletedField::encode(1));
  return PropertyDetails(smi);
}
57 
58 
// Defines Object::Is##type() as an exact instance-type comparison against
// |instancetype|.  Smis fail the IsHeapObject() guard.
#define TYPE_CHECKER(type, instancetype)                                \
  bool Object::Is##type() const {                                       \
  return Object::IsHeapObject() &&                                      \
      HeapObject::cast(this)->map()->instance_type() == instancetype;   \
  }
64 
65 
// Defines type::cast() (mutable and const overloads).  The cast itself is a
// plain reinterpret_cast; the Is##type() check only runs in slow-DCHECK
// builds.
#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }
75 
76 
// Raw (untagged) int field getter/setter; no write barrier is needed for
// non-pointer data.
#define INT_ACCESSORS(holder, name, offset)                                   \
  int holder::name() const { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
80 
81 
// Tagged-pointer field accessors; the setter emits a conditional write
// barrier so the GC sees the new reference.
#define ACCESSORS(holder, name, type, offset)                                 \
  type* holder::name() const { return type::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(type* value, WriteBarrierMode mode) {               \
    WRITE_FIELD(this, offset, value);                                         \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);          \
  }
88 
89 
// Getter that returns a tagged Smi and setter that writes a tagged Smi.
// The setter deliberately ignores |mode|: Smis are immediates, so no write
// barrier is ever needed, but the signature stays parallel to ACCESSORS.
#define ACCESSORS_TO_SMI(holder, name, offset)                              \
  Smi* holder::name() const { return Smi::cast(READ_FIELD(this, offset)); } \
  void holder::set_##name(Smi* value, WriteBarrierMode mode) {              \
    WRITE_FIELD(this, offset, value);                                       \
  }
96 
97 
// Getter that returns a Smi as an int and writes an int as a Smi.
// No write barrier needed: the stored value is always a Smi.
#define SMI_ACCESSORS(holder, name, offset)             \
  int holder::name() const {                            \
    Object* value = READ_FIELD(this, offset);           \
    return Smi::cast(value)->value();                   \
  }                                                     \
  void holder::set_##name(int value) {                  \
    WRITE_FIELD(this, offset, Smi::FromInt(value));     \
  }
107 
// Smi accessors with acquire-load / release-store ordering, for fields that
// may be accessed from more than one thread.
#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
116 
// Smi accessors using relaxed (no memory-ordering) atomic accesses.
#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)          \
  int holder::nobarrier_##name() const {                       \
    Object* value = NOBARRIER_READ_FIELD(this, offset);        \
    return Smi::cast(value)->value();                          \
  }                                                            \
  void holder::nobarrier_set_##name(int value) {               \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value));  \
  }
125 
// Getter-only boolean accessor: reads bit |offset| of the Smi-like |field|.
// Fix: removed a stray trailing line-continuation after the closing brace,
// which made the macro definition swallow the following (blank) line.
#define BOOL_GETTER(holder, field, name, offset)           \
  bool holder::name() const {                              \
    return BooleanBit::get(field(), offset);               \
  }

131 
// Read/write accessors for a boolean stored as bit |offset| of the Smi-like
// |field|.
#define BOOL_ACCESSORS(holder, field, name, offset)        \
  bool holder::name() const {                              \
    return BooleanBit::get(field(), offset);               \
  }                                                        \
  void holder::set_##name(bool value) {                    \
    set_##field(BooleanBit::set(field(), offset, value));  \
  }
139 
140 
IsFixedArrayBase()141 bool Object::IsFixedArrayBase() const {
142   return IsFixedArray() || IsFixedDoubleArray() || IsConstantPoolArray() ||
143          IsFixedTypedArrayBase() || IsExternalArray();
144 }
145 
146 
// External objects are not extensible, so the map check is enough.
bool Object::IsExternal() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->external_map();
}


// AccessorInfo covers both accessor-info kinds.
bool Object::IsAccessorInfo() const {
  return IsExecutableAccessorInfo() || IsDeclaredAccessorInfo();
}
158 
159 
// Smi check: inspects the tag bits of the pointer value itself; no
// dereference happens.
bool Object::IsSmi() const {
  return HAS_SMI_TAG(this);
}


// Anything that is not a Smi is a heap object.
bool Object::IsHeapObject() const {
  return Internals::HasHeapObjectTag(this);
}
168 
169 
TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)


// All string instance types are allocated below FIRST_NONSTRING_TYPE, so a
// single comparison suffices.
bool Object::IsString() const {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() < FIRST_NONSTRING_TYPE;
}
179 
180 
// Names (property keys) are strings or symbols.
bool Object::IsName() const {
  return IsString() || IsSymbol();
}


// Unique names can be compared by identity: internalized strings and symbols.
bool Object::IsUniqueName() const {
  return IsInternalizedString() || IsSymbol();
}


// "Spec objects": everything at or above FIRST_SPEC_OBJECT_TYPE.
bool Object::IsSpecObject() const {
  return Object::IsHeapObject()
    && HeapObject::cast(this)->map()->instance_type() >= FIRST_SPEC_OBJECT_TYPE;
}
195 
196 
IsSpecFunction()197 bool Object::IsSpecFunction() const {
198   if (!Object::IsHeapObject()) return false;
199   InstanceType type = HeapObject::cast(this)->map()->instance_type();
200   return type == JS_FUNCTION_TYPE || type == JS_FUNCTION_PROXY_TYPE;
201 }
202 
203 
// Template info as used by the API: object or function templates.
bool Object::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}


// True only when the instance type carries both the string tag and the
// internalized tag.
bool Object::IsInternalizedString() const {
  if (!this->IsHeapObject()) return false;
  uint32_t type = HeapObject::cast(this)->map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
216 
217 
IsConsString()218 bool Object::IsConsString() const {
219   if (!IsString()) return false;
220   return StringShape(String::cast(this)).IsCons();
221 }
222 
223 
// String whose shape bits say "sliced" (a view into another string).
bool Object::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}


// String with sequential (flat, in-heap) character storage.
bool Object::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}
234 
235 
IsSeqOneByteString()236 bool Object::IsSeqOneByteString() const {
237   if (!IsString()) return false;
238   return StringShape(String::cast(this)).IsSequential() &&
239          String::cast(this)->IsOneByteRepresentation();
240 }
241 
242 
// Sequential string with two-byte (UC16) characters.
bool Object::IsSeqTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}


// String whose payload lives outside the V8 heap.
bool Object::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}


// External string with one-byte characters.
bool Object::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}


// External string with two-byte characters.
bool Object::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}


// True when this object is a kind that can serve as a JSObject elements
// backing store.
bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsExternalArray() ||
         IsFixedTypedArrayBase();
}
275 
276 
// Produces the storage to put into an in-object field for |object| with the
// given |representation|:
//  - Smi representation of an uninitialized value starts out as Smi 0.
//  - Non-double representations store the value itself; no box needed.
//  - Double representation always gets a fresh MUTABLE HeapNumber box so the
//    field can later be updated in place.
Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized()) {
    return handle(Smi::FromInt(0), isolate);
  }
  if (!representation.IsDouble()) return object;
  double value;
  if (object->IsUninitialized()) {
    value = 0;
  } else if (object->IsMutableHeapNumber()) {
    // Unwrap the existing mutable box; a new box is allocated below.
    value = HeapNumber::cast(*object)->value();
  } else {
    value = object->Number();
  }
  return isolate->factory()->NewHeapNumber(value, MUTABLE);
}
294 
295 
// Wraps a field value for external consumption.  Double fields live in
// MUTABLE HeapNumber boxes that may change in place, so an immutable
// HeapNumber copy is returned instead of the box itself.
Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized());
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
}
306 
307 
// Snapshot of a string's instance-type bits; all three constructors require
// the type to actually be a string.
StringShape::StringShape(const String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


// Same bit test as Object::IsInternalizedString, but on the captured type.
bool StringShape::IsInternalized() {
  DCHECK(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
      (kStringTag | kInternalizedTag);
}
335 
336 
IsOneByteRepresentation()337 bool String::IsOneByteRepresentation() const {
338   uint32_t type = map()->instance_type();
339   return (type & kStringEncodingMask) == kOneByteStringTag;
340 }
341 
342 
// True when this string's own encoding bit says two-byte (does not look
// through cons/sliced indirections).
bool String::IsTwoByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}
347 
348 
// Like IsOneByteRepresentation, but follows one level of indirection for
// cons/sliced strings.  Requires the string to be flat, so one level is
// enough.
bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}


// Two-byte counterpart of the function above.
bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}
379 
380 
// True when every character fits in one byte: either the data hint bit is
// set or the representation itself is one-byte.
bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}


// Representation predicates: each tests the representation bits of the
// captured instance type against one representation tag.
bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


// Indirect = cons or sliced (points at another string).
bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}
411 
412 
representation_tag()413 StringRepresentationTag StringShape::representation_tag() {
414   uint32_t tag = (type_ & kStringRepresentationMask);
415   return static_cast<StringRepresentationTag>(tag);
416 }
417 
418 
// Encoding bits (one-byte vs. two-byte) only.
uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


// Representation and encoding bits combined.
uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


// Keep the internal bit layout in sync with the constants exposed to the
// inline API via Internals.
STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
             Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
             Internals::kStringEncodingMask);
434 
435 
// Combined representation+encoding predicates.
bool StringShape::IsSequentialOneByte() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalOneByte() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


// These combined tags are mirrored in the public API's Internals constants;
// keep them in sync.
STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalOneByteRepresentationTag);

STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
             Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
466 
// Reads character |index| from the flattened character data.
// NOTE(review): the DCHECK permits index == length_ (one past the end);
// looks intentional for peeking at a terminator position — confirm.
uc32 FlatStringReader::Get(int index) {
  DCHECK(0 <= index && index <= length_);
  if (is_one_byte_) {
    return static_cast<const byte*>(start_)[index];
  } else {
    return static_cast<const uc16*>(start_)[index];
  }
}
475 
476 
// The hash-table "shape" classes delegate key materialization to the key
// object itself.
Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> MapCacheShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}
497 
// Hash-table key wrapping a sequential character vector; base for the
// one-byte and two-byte string keys below.
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  // Computes the string hash and stores the raw hash field in hash_field_.
  virtual uint32_t Hash() OVERRIDE {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }


  // Hash of an existing table entry (always a String here).
  virtual uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;  // Raw hash field; valid after Hash() has run.
  uint32_t seed_;
};
523 
524 
// Key for looking up / internalizing a one-byte character sequence.
class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  virtual bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
};
536 
537 
// Key for a substring [from_, from_ + length_) of a sequential one-byte
// string, avoiding an up-front copy of the characters.
class SeqOneByteSubStringKey : public HashTableKey {
 public:
  SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
      : string_(string), from_(from), length_(length) {
    DCHECK(string_->IsSeqOneByteString());
  }

  // Hashes the substring characters directly out of the parent string.
  virtual uint32_t Hash() OVERRIDE {
    DCHECK(length_ >= 0);
    DCHECK(from_ + length_ <= string_->length());
    const uint8_t* chars = string_->GetChars() + from_;
    hash_field_ = StringHasher::HashSequentialString(
        chars, length_, string_->GetHeap()->HashSeed());
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  virtual bool IsMatch(Object* string) OVERRIDE;
  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;

 private:
  Handle<SeqOneByteString> string_;
  int from_;
  int length_;
  // Raw hash field; only meaningful once Hash() has been called (presumably
  // AsHandle's out-of-line body ensures this — confirm).
  uint32_t hash_field_;
};
569 
570 
// Key for looking up / internalizing a two-byte (UC16) character sequence.
class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  virtual bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE;
};
582 
583 
// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  virtual bool IsMatch(Object* string) OVERRIDE {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  // Lazily computes and caches the hash (0 is never a valid raw hash field,
  // so it doubles as the "not yet computed" marker).
  virtual uint32_t Hash() OVERRIDE {
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  virtual uint32_t HashForObject(Object* other) OVERRIDE {
    return String::cast(other)->Hash();
  }

  // Materializes an internalized string; forces the hash first so the new
  // string is created with its hash field already set.
  virtual Handle<Object> AsHandle(Isolate* isolate) OVERRIDE {
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
617 
618 
IsNumber()619 bool Object::IsNumber() const {
620   return IsSmi() || IsHeapNumber();
621 }
622 
623 
TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)


// Filler objects pad the heap: free-space blocks and one/two-word fillers.
bool Object::IsFiller() const {
  if (!Object::IsHeapObject()) return false;
  InstanceType instance_type = HeapObject::cast(this)->map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}
633 
634 
IsExternalArray()635 bool Object::IsExternalArray() const {
636   if (!Object::IsHeapObject())
637     return false;
638   InstanceType instance_type =
639       HeapObject::cast(this)->map()->instance_type();
640   return (instance_type >= FIRST_EXTERNAL_ARRAY_TYPE &&
641           instance_type <= LAST_EXTERNAL_ARRAY_TYPE);
642 }
643 
644 
// For each typed-array element kind, define checkers for both the external
// and the on-heap (fixed) array variants.
#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size)               \
  TYPE_CHECKER(External##Type##Array, EXTERNAL_##TYPE##_ARRAY_TYPE)           \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER
651 
652 
// On-heap typed arrays occupy a contiguous instance-type range.
bool Object::IsFixedTypedArrayBase() const {
  if (!Object::IsHeapObject()) return false;

  InstanceType instance_type =
      HeapObject::cast(this)->map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}


// JSReceivers sit at the top of the instance-type range, so one lower-bound
// comparison suffices.
bool Object::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}


// Same trick for JSObjects (a sub-range of the receivers).
bool Object::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return IsHeapObject() &&
      HeapObject::cast(this)->map()->instance_type() >= FIRST_JS_OBJECT_TYPE;
}
675 
676 
IsJSProxy()677 bool Object::IsJSProxy() const {
678   if (!Object::IsHeapObject()) return false;
679   return  HeapObject::cast(this)->map()->IsJSProxyMap();
680 }
681 
682 
// Exact instance-type checkers for assorted heap object kinds.
TYPE_CHECKER(JSFunctionProxy, JS_FUNCTION_PROXY_TYPE)
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(ConstantPoolArray, CONSTANT_POOL_ARRAY_TYPE)
695 
696 
697 bool Object::IsJSWeakCollection() const {
698   return IsJSWeakMap() || IsJSWeakSet();
699 }
700 
701 
// DescriptorArray/TransitionArray/TypeFeedbackVector are plain FixedArrays
// at runtime, so these checks are necessarily approximate.
bool Object::IsDescriptorArray() const {
  return IsFixedArray();
}


bool Object::IsTransitionArray() const {
  return IsFixedArray();
}


bool Object::IsTypeFeedbackVector() const { return IsFixedArray(); }
713 
714 
// Heuristic check used by asserts only; see comment in the body.
bool Object::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array.  Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
}
729 
730 
IsDeoptimizationOutputData()731 bool Object::IsDeoptimizationOutputData() const {
732   if (!IsFixedArray()) return false;
733   // There's actually no way to see the difference between a fixed array and
734   // a deoptimization data array.  Since this is used for asserts we can check
735   // that the length is plausible though.
736   if (FixedArray::cast(this)->length() % 2 != 0) return false;
737   return true;
738 }
739 
740 
IsDependentCode()741 bool Object::IsDependentCode() const {
742   if (!IsFixedArray()) return false;
743   // There's actually no way to see the difference between a fixed array and
744   // a dependent codes array.
745   return true;
746 }
747 
748 
// Contexts are recognized by their map being one of the dedicated
// context maps owned by the heap.
bool Object::IsContext() const {
  if (!Object::IsHeapObject()) return false;
  Map* map = HeapObject::cast(this)->map();
  Heap* heap = map->GetHeap();
  return (map == heap->function_context_map() ||
      map == heap->catch_context_map() ||
      map == heap->with_context_map() ||
      map == heap->native_context_map() ||
      map == heap->block_context_map() ||
      map == heap->module_context_map() ||
      map == heap->global_context_map());
}
761 
762 
// Native contexts have their own dedicated map.
bool Object::IsNativeContext() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->native_context_map();
}


// ScopeInfos likewise are identified by a dedicated map.
bool Object::IsScopeInfo() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->scope_info_map();
}
775 
776 
TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


// Specialization used by the templated Is<T>() dispatch.
template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}
783 
784 
// Exact instance-type checkers for code/metadata and wrapper object kinds.
TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSModule, JS_MODULE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
795 
796 
// A string wrapper is a JSValue whose wrapped primitive is a string.
bool Object::IsStringWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)


// Booleans are the oddballs whose kind has no "not boolean" bits set.
bool Object::IsBoolean() const {
  return IsOddball() &&
      ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}
809 
810 
TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)


// ArrayBuffer views: DataViews and typed arrays.
bool Object::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


// Specialization used by the templated Is<T>() dispatch.
template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
828 
829 
// All hash tables share one dedicated map.
bool Object::IsHashTable() const {
  return Object::IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->hash_table_map();
}


// Weak hash tables are indistinguishable from ordinary ones by map.
bool Object::IsWeakHashTable() const {
  return IsHashTable();
}


// Dictionaries are hash tables, except for the (unique) string table.
bool Object::IsDictionary() const {
  return IsHashTable() &&
      this != HeapObject::cast(this)->GetHeap()->string_table();
}


// The specific dictionary kinds cannot be told apart at runtime; these are
// approximate checks for asserts.
bool Object::IsNameDictionary() const {
  return IsDictionary();
}


bool Object::IsSeededNumberDictionary() const {
  return IsDictionary();
}


bool Object::IsUnseededNumberDictionary() const {
  return IsDictionary();
}


bool Object::IsStringTable() const {
  return IsHashTable();
}
866 
867 
// Heuristic check: a result cache is a FixedArray whose length is the fixed
// header (kEntriesIndex) plus a whole number of entries.
bool Object::IsJSFunctionResultCache() const {
  if (!IsFixedArray()) return false;
  const FixedArray* self = FixedArray::cast(this);
  int length = self->length();
  if (length < JSFunctionResultCache::kEntriesIndex) return false;
  if ((length - JSFunctionResultCache::kEntriesIndex)
      % JSFunctionResultCache::kEntrySize != 0) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    // TODO(svenpanne) We use const_cast here and below to break our dependency
    // cycle between the predicates and the verifiers. This can be removed when
    // the verifiers are const-correct, too.
    reinterpret_cast<JSFunctionResultCache*>(const_cast<Object*>(this))->
        JSFunctionResultCacheVerify();
  }
#endif
  return true;
}
888 
889 
bool Object::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}


// Cache slot for |map|, derived from its hash.
int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}


// Heuristic check: the cache is a FixedArray of exactly kEntries elements.
bool NormalizedMapCache::IsNormalizedMapCache(const Object* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(const_cast<Object*>(obj))->
        NormalizedMapCacheVerify();
  }
#endif
  return true;
}
913 
914 
// The cache-table flavours below all share the generic hash table layout
// and cannot be distinguished further at runtime.
bool Object::IsCompilationCacheTable() const {
  return IsHashTable();
}


bool Object::IsCodeCacheHashTable() const {
  return IsHashTable();
}


bool Object::IsPolymorphicCodeCacheHashTable() const {
  return IsHashTable();
}


bool Object::IsMapCache() const {
  return IsHashTable();
}


bool Object::IsObjectHashTable() const {
  return IsHashTable();
}
938 
939 
// Ordered hash tables have a dedicated map, so this is an exact check
// (unlike the IsHashTable-based predicates above).
bool Object::IsOrderedHashTable() const {
  return IsHeapObject() &&
      HeapObject::cast(this)->map() ==
      HeapObject::cast(this)->GetHeap()->ordered_hash_table_map();
}


// Sets and maps share the ordered-hash-table map and cannot be told apart
// here.
bool Object::IsOrderedHashSet() const {
  return IsOrderedHashTable();
}


bool Object::IsOrderedHashMap() const {
  return IsOrderedHashTable();
}


// Primitives are oddballs (undefined/null/booleans/...), numbers and
// strings.
bool Object::IsPrimitive() const {
  return IsOddball() || IsNumber() || IsString();
}
960 
961 
// Instance-type check for the global proxy. Global proxies are always
// access-checked; the DCHECK enforces that invariant.
bool Object::IsJSGlobalProxy() const {
  bool result = IsHeapObject() &&
                (HeapObject::cast(this)->map()->instance_type() ==
                 JS_GLOBAL_PROXY_TYPE);
  DCHECK(!result ||
         HeapObject::cast(this)->map()->is_access_check_needed());
  return result;
}


// True for both kinds of global object: the JS global object and the
// builtins object.
bool Object::IsGlobalObject() const {
  if (!IsHeapObject()) return false;

  InstanceType type = HeapObject::cast(this)->map()->instance_type();
  return type == JS_GLOBAL_OBJECT_TYPE ||
         type == JS_BUILTINS_OBJECT_TYPE;
}
979 
980 
// Exact instance-type predicates generated by the TYPE_CHECKER macro
// (defined earlier in this file).
TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)
TYPE_CHECKER(JSBuiltinsObject, JS_BUILTINS_OBJECT_TYPE)


// Undetectability is a map bit, not an instance type.
bool Object::IsUndetectableObject() const {
  return IsHeapObject()
    && HeapObject::cast(this)->map()->is_undetectable();
}
989 
990 
// Whether property access on this object must go through an access check.
// A global proxy only needs the check while it is detached from the
// current context's global object; otherwise the map bit decides.
bool Object::IsAccessCheckNeeded() const {
  if (!IsHeapObject()) return false;
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    GlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return HeapObject::cast(this)->map()->is_access_check_needed();
}


// True if the instance type is any of the struct types in STRUCT_LIST.
bool Object::IsStruct() const {
  if (!IsHeapObject()) return false;
  switch (HeapObject::cast(this)->map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
  STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}
1011 
1012 
// Generates one exact instance-type predicate (Object::Is<Name>) for each
// struct type in STRUCT_LIST.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                         \
  bool Object::Is##Name() const {                                       \
    return Object::IsHeapObject()                                       \
      && HeapObject::cast(this)->map()->instance_type() == NAME##_TYPE; \
  }
  STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
1020 
1021 
// Oddball singletons (undefined, null, the hole, ...) are distinguished by
// the kind() field rather than by separate instance types.
bool Object::IsUndefined() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUndefined;
}


bool Object::IsNull() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kNull;
}


bool Object::IsTheHole() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTheHole;
}


bool Object::IsException() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kException;
}


bool Object::IsUninitialized() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kUninitialized;
}


bool Object::IsTrue() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kTrue;
}


bool Object::IsFalse() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kFalse;
}


bool Object::IsArgumentsMarker() const {
  return IsOddball() && Oddball::cast(this)->kind() == Oddball::kArgumentMarker;
}
1060 
1061 
// Numeric value of a Smi or HeapNumber. Caller must ensure IsNumber();
// anything else is a bug caught by the DCHECK.
double Object::Number() {
  DCHECK(IsNumber());
  return IsSmi()
    ? static_cast<double>(reinterpret_cast<Smi*>(this)->value())
    : reinterpret_cast<HeapNumber*>(this)->value();
}


// NaN can only live in a HeapNumber; Smis are always finite integers.
bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}


// -0.0 likewise can only be represented as a HeapNumber.
bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(this)->value());
}
1079 
1080 
// Converts |object| to a Smi if it already is one, or if it is a HeapNumber
// holding an integer in Smi range (the FastI2D round-trip check rejects
// fractional values, -0 and out-of-range doubles). Returns an empty handle
// otherwise.
MaybeHandle<Smi> Object::ToSmi(Isolate* isolate, Handle<Object> object) {
  if (object->IsSmi()) return Handle<Smi>::cast(object);
  if (object->IsHeapNumber()) {
    double value = Handle<HeapNumber>::cast(object)->value();
    int int_value = FastD2I(value);
    if (value == FastI2D(int_value) && Smi::IsValid(int_value)) {
      return handle(Smi::FromInt(int_value), isolate);
    }
  }
  return Handle<Smi>();
}
1092 
1093 
// Convenience overload: ToObject against the current native context.
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  return ToObject(
      isolate, object, handle(isolate->context()->native_context(), isolate));
}


// True if this is a JSObject whose [[Class]] name equals |name|
// (pointer comparison; class names are internalized strings).
bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}


// Named property lookup via a fresh LookupIterator over the full chain.
MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(object, name);
  return GetProperty(&it);
}
1111 
1112 
// Indexed property lookup with |object| as its own receiver.
MaybeHandle<Object> Object::GetElement(Isolate* isolate,
                                       Handle<Object> object,
                                       uint32_t index) {
  // GetElement can trigger a getter which can cause allocation.
  // This was not always the case. This DCHECK is here to catch
  // leftover incorrect uses.
  DCHECK(AllowHeapAllocation::IsAllowed());
  return Object::GetElementWithReceiver(isolate, object, object, index);
}


// Dispatches to element or named lookup depending on whether |name| parses
// as an array index (per the ES spec's array-index rules).
MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name) {
  uint32_t index;
  Isolate* isolate = name->GetIsolate();
  if (name->AsArrayIndex(&index)) return GetElement(isolate, object, index);
  return GetProperty(object, name);
}


// C-string convenience overload; the name must not be an array index
// (callers needing indices use GetElement directly).
MaybeHandle<Object> Object::GetProperty(Isolate* isolate,
                                        Handle<Object> object,
                                        const char* name) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  DCHECK(!str.is_null());
#ifdef DEBUG
  uint32_t index;  // Assert that the name is not an array index.
  DCHECK(!str->AsArrayIndex(&index));
#endif  // DEBUG
  return GetProperty(object, str);
}
1144 
1145 
// Proxy traps only speak named properties, so each element operation below
// stringifies the index and forwards to the named-property handler path.
MaybeHandle<Object> JSProxy::GetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<Object> receiver,
                                                   uint32_t index) {
  return GetPropertyWithHandler(
      proxy, receiver, proxy->GetIsolate()->factory()->Uint32ToString(index));
}


MaybeHandle<Object> JSProxy::SetElementWithHandler(Handle<JSProxy> proxy,
                                                   Handle<JSReceiver> receiver,
                                                   uint32_t index,
                                                   Handle<Object> value,
                                                   StrictMode strict_mode) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return SetPropertyWithHandler(proxy, receiver, name, value, strict_mode);
}


Maybe<bool> JSProxy::HasElementWithHandler(Handle<JSProxy> proxy,
                                           uint32_t index) {
  Isolate* isolate = proxy->GetIsolate();
  Handle<String> name = isolate->factory()->Uint32ToString(index);
  return HasPropertyWithHandler(proxy, name);
}
1171 
1172 
// Raw address of the field at |offset| inside the tagged object |p|.
// HeapObject pointers carry kHeapObjectTag, hence the subtraction.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

// Plain (non-atomic) tagged-pointer read.
#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

// Atomic tagged-pointer read with acquire semantics.
#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

// Atomic tagged-pointer read without ordering guarantees.
#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

// Plain (non-atomic) tagged-pointer write. Callers are responsible for the
// write barrier.
#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

// NOTE: the statement-like macros below deliberately contain no trailing
// semicolon — the caller supplies it. An internal semicolon would expand to
// an extra empty statement and break use in an unbraced if/else.
#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value))

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value))

// Multi-statement barriers are wrapped in do { } while (false) so that a
// call followed by ';' is exactly one statement (safe in unbraced if/else).
#define WRITE_BARRIER(heap, object, offset, value)                      \
  do {                                                                  \
    heap->incremental_marking()->RecordWrite(                           \
        object, HeapObject::RawField(object, offset), value);           \
    if (heap->InNewSpace(value)) {                                      \
      heap->RecordWrite(object->address(), offset);                     \
    }                                                                   \
  } while (false)

#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode)    \
  do {                                                                  \
    if (mode == UPDATE_WRITE_BARRIER) {                                 \
      heap->incremental_marking()->RecordWrite(                         \
          object, HeapObject::RawField(object, offset), value);         \
      if (heap->InNewSpace(value)) {                                    \
        heap->RecordWrite(object->address(), offset);                   \
      }                                                                 \
    }                                                                   \
  } while (false)
1218 
#ifndef V8_TARGET_ARCH_MIPS
  // Direct 64-bit load; safe on targets without alignment restrictions on
  // double accesses.
  #define READ_DOUBLE_FIELD(p, offset) \
    (*reinterpret_cast<const double*>(FIELD_ADDR_CONST(p, offset)))
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using load-double (mips ldc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  // The union splits the access into two 32-bit loads; union type-punning
  // is well-defined under gcc (this file targets gcc, see the file header).
  static inline double read_double_field(const void* p, int offset) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.u[0] = (*reinterpret_cast<const uint32_t*>(
        FIELD_ADDR_CONST(p, offset)));
    c.u[1] = (*reinterpret_cast<const uint32_t*>(
        FIELD_ADDR_CONST(p, offset + 4)));
    return c.d;
  }
  #define READ_DOUBLE_FIELD(p, offset) read_double_field(p, offset)
#endif  // V8_TARGET_ARCH_MIPS

#ifndef V8_TARGET_ARCH_MIPS
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    (*reinterpret_cast<double*>(FIELD_ADDR(p, offset)) = value)
#else  // V8_TARGET_ARCH_MIPS
  // Prevent gcc from using store-double (mips sdc1) on (possibly)
  // non-64-bit aligned HeapNumber::value.
  static inline void write_double_field(void* p, int offset,
                                        double value) {
    union conversion {
      double d;
      uint32_t u[2];
    } c;
    c.d = value;
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset))) = c.u[0];
    (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset + 4))) = c.u[1];
  }
  #define WRITE_DOUBLE_FIELD(p, offset, value) \
    write_double_field(p, offset, value)
#endif  // V8_TARGET_ARCH_MIPS
1258 
1259 
// Typed raw-field accessors. Reads take a const pointer (FIELD_ADDR_CONST);
// writes require a mutable one (FIELD_ADDR).
#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_SHORT_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_SHORT_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

// Atomic byte read without ordering guarantees. Uses the const field
// address so it works on const receivers, consistent with READ_BYTE_FIELD
// (the previous version needlessly required a mutable pointer).
#define NOBARRIER_READ_BYTE_FIELD(p, offset)      \
  static_cast<byte>(base::NoBarrier_Load(         \
      reinterpret_cast<const base::Atomic8*>(FIELD_ADDR_CONST(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

// Statement-like macro: no internal trailing semicolon (the caller supplies
// it), so it is safe in an unbraced if/else.
#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value))
1310 
// Pointer to the Object* slot at |byte_offset| inside |obj|.
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}


// Decodes the integer payload of this Smi.
int Smi::value() const {
  return Internals::SmiValue(this);
}


// Encodes |value| as a Smi; |value| must already be in Smi range.
Smi* Smi::FromInt(int value) {
  DCHECK(Smi::IsValid(value));
  return reinterpret_cast<Smi*>(Internals::IntToSmi(value));
}


// Same as FromInt but for pointer-width integers.
// NOTE(review): the left shift of a signed intptr_t relies on
// implementation-defined/gcc-sanctioned behavior, matching Internals::
// IntToSmi; kept as-is for codegen parity.
Smi* Smi::FromIntptr(intptr_t value) {
  DCHECK(Smi::IsValid(value));
  int smi_shift_bits = kSmiTagSize + kSmiShiftSize;
  return reinterpret_cast<Smi*>((value << smi_shift_bits) | kSmiTag);
}


// Range check for Smi encoding; the DCHECK cross-validates the fast
// Internals check against the min/max bounds.
bool Smi::IsValid(intptr_t value) {
  bool result = Internals::IsValidSmi(value);
  DCHECK_EQ(result, value >= kMinValue && value <= kMaxValue);
  return result;
}
1339 
1340 
// A MapWord is the first word of a heap object: either a (tagged) map
// pointer, or during GC a forwarding address.
MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}


Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}


// A forwarding address is an untagged (aligned) address, so its low bits
// look like a Smi tag — distinguishing it from a tagged map pointer.
bool MapWord::IsForwardingAddress() {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}


// Stores the new location of |object|, stripping the heap-object tag so the
// word passes the IsForwardingAddress (smi-tag) test above.
MapWord MapWord::FromForwardingAddress(HeapObject* object) {
  Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
  return MapWord(reinterpret_cast<uintptr_t>(raw));
}


HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
1366 
1367 
#ifdef VERIFY_HEAP
// Heap-verifier helpers: check that the slot at |offset| holds a valid
// pointer / a Smi respectively.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif
1377 
1378 
// The owning Heap is recovered from the memory chunk (page) header that
// contains this object's address.
Heap* HeapObject::GetHeap() const {
  Heap* heap =
      MemoryChunk::FromAddress(reinterpret_cast<const byte*>(this))->heap();
  SLOW_DCHECK(heap != NULL);
  return heap;
}


Isolate* HeapObject::GetIsolate() const {
  return GetHeap()->isolate();
}


Map* HeapObject::map() const {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}


// Sets the map and informs the incremental marker of the pointer update.
void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}
1412 
1413 
// Acquire-load variant of map(), for readers racing with concurrent map
// updates.
Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}


// Release-store variant of set_map(); same write-barrier bookkeeping.
void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}


// Release-store map update without the marking write barrier.
void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}


// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}
1438 
1439 
// Raw first-word accessors. The plain variants use relaxed atomics (no
// ordering); the synchronized variants pair acquire loads with release
// stores.
MapWord HeapObject::map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}


void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}


MapWord HeapObject::synchronized_map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}


void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}
1462 
1463 
// Converts an untagged, word-aligned address into a tagged HeapObject
// pointer (and back, below).
HeapObject* HeapObject::FromAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<HeapObject*>(address + kHeapObjectTag);
}


Address HeapObject::address() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag;
}


// Size in bytes, derived from the object's map.
int HeapObject::Size() {
  return SizeFromMap(map());
}
1478 
1479 
// Whether this object's body may contain non-pointer (raw) data that the GC
// must not interpret as heap pointers.
bool HeapObject::MayContainRawValues() {
  InstanceType type = map()->instance_type();
  if (type <= LAST_NAME_TYPE) {
    if (type == SYMBOL_TYPE) {
      // Symbols contain only tagged fields.
      return false;
    }
    DCHECK(type < FIRST_NONSTRING_TYPE);
    // There are four string representations: sequential strings, external
    // strings, cons strings, and sliced strings.
    // Only the former two contain raw values and no heap pointers (besides the
    // map-word).
    return ((type & kIsIndirectStringMask) != kIsIndirectStringTag);
  }
  // The ConstantPoolArray contains heap pointers, but also raw values.
  if (type == CONSTANT_POOL_ARRAY_TYPE) return true;
  // Everything at or below LAST_DATA_TYPE is raw data.
  return (type <= LAST_DATA_TYPE);
}
1497 
1498 
// Visits every pointer slot in the byte range [start, end) of this object.
void HeapObject::IteratePointers(ObjectVisitor* v, int start, int end) {
  v->VisitPointers(reinterpret_cast<Object**>(FIELD_ADDR(this, start)),
                   reinterpret_cast<Object**>(FIELD_ADDR(this, end)));
}


// Visits the single pointer slot at |offset|.
void HeapObject::IteratePointer(ObjectVisitor* v, int offset) {
  v->VisitPointer(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}


// Visits the next-code-link slot (special-cased by some visitors).
void HeapObject::IterateNextCodeLink(ObjectVisitor* v, int offset) {
  v->VisitNextCodeLink(reinterpret_cast<Object**>(FIELD_ADDR(this, offset)));
}
1513 
1514 
// The boxed double payload of a HeapNumber.
double HeapNumber::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}


void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}


// Unbiased IEEE-754 exponent, extracted from the high word of the double.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}


// Raw (unshifted) sign bit of the double's high word; nonzero if negative.
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
1534 
1535 
// Generates JSObject::properties()/set_properties() with write barrier.
ACCESSORS(JSObject, properties, FixedArray, kPropertiesOffset)


// Address of element 0; elements are laid out contiguously after it.
Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}


// True if every element is a Smi or the hole (i.e. the array could use a
// FAST_SMI / FAST_HOLEY_SMI elements kind).
bool FixedArray::ContainsOnlySmisOrHoles() {
  Object* the_hole = GetHeap()->the_hole_value();
  Object** current = GetFirstElementAddress();
  for (int i = 0; i < length(); ++i) {
    Object* candidate = *current++;
    if (!candidate->IsSmi() && candidate != the_hole) return false;
  }
  return true;
}
1553 
1554 
// The elements backing store (FixedArray, FixedDoubleArray, ...).
FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}


// Runs the elements-accessor consistency check, but only in slow-DCHECK
// builds with the flag enabled; a no-op otherwise.
void JSObject::ValidateElements(Handle<JSObject> object) {
#ifdef ENABLE_SLOW_DCHECKS
  if (FLAG_enable_slow_asserts) {
    ElementsAccessor* accessor = object->GetElementsAccessor();
    accessor->Validate(object);
  }
#endif
}
1569 
1570 
// Resets all AllocationSite fields to their pristine state. The empty
// fixed array stands in for "no dependent code" (hence the cast and skipped
// barrier — it is an immortal immovable root).
void AllocationSite::Initialize() {
  set_transition_info(Smi::FromInt(0));
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::FromInt(0));
  set_pretenure_data(Smi::FromInt(0));
  set_pretenure_create_count(Smi::FromInt(0));
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}


// Marks the site dead: wipes its state and records the zombie decision so
// it is ignored by future pretenuring feedback.
void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}
1587 
1588 
1589 // Heuristic: We only need to create allocation site info if the boilerplate
1590 // elements kind is the initial elements kind.
GetMode(ElementsKind boilerplate_elements_kind)1591 AllocationSiteMode AllocationSite::GetMode(
1592     ElementsKind boilerplate_elements_kind) {
1593   if (FLAG_pretenuring_call_new ||
1594       IsFastSmiElementsKind(boilerplate_elements_kind)) {
1595     return TRACK_ALLOCATION_SITE;
1596   }
1597 
1598   return DONT_TRACK_ALLOCATION_SITE;
1599 }
1600 
1601 
GetMode(ElementsKind from,ElementsKind to)1602 AllocationSiteMode AllocationSite::GetMode(ElementsKind from,
1603                                            ElementsKind to) {
1604   if (FLAG_pretenuring_call_new ||
1605       (IsFastSmiElementsKind(from) &&
1606        IsMoreGeneralElementsKindTransition(from, to))) {
1607     return TRACK_ALLOCATION_SITE;
1608   }
1609 
1610   return DONT_TRACK_ALLOCATION_SITE;
1611 }
1612 
1613 
CanTrack(InstanceType type)1614 inline bool AllocationSite::CanTrack(InstanceType type) {
1615   if (FLAG_allocation_site_pretenuring) {
1616     return type == JS_ARRAY_TYPE ||
1617         type == JS_OBJECT_TYPE ||
1618         type < FIRST_NONSTRING_TYPE;
1619   }
1620   return type == JS_ARRAY_TYPE;
1621 }
1622 
1623 
ToDependencyGroup(Reason reason)1624 inline DependentCode::DependencyGroup AllocationSite::ToDependencyGroup(
1625     Reason reason) {
1626   switch (reason) {
1627     case TENURING:
1628       return DependentCode::kAllocationSiteTenuringChangedGroup;
1629       break;
1630     case TRANSITIONS:
1631       return DependentCode::kAllocationSiteTransitionChangedGroup;
1632       break;
1633   }
1634   UNREACHABLE();
1635   return DependentCode::kAllocationSiteTransitionChangedGroup;
1636 }
1637 
1638 
// Stores the per-GC memento-found counter into the packed pretenure_data
// Smi field.
inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data()->value();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (StaticVisitorBase::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
  set_pretenure_data(
      Smi::FromInt(MementoFoundCountBits::update(value, count)),
      SKIP_WRITE_BARRIER);
}

// Bumps the found counter; returns true exactly when the counter reaches
// the minimum threshold (zombie sites are ignored entirely).
inline bool AllocationSite::IncrementMementoFoundCount() {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + 1);
  return memento_found_count() == kPretenureMinimumCreated;
}


// Bumps the created counter; only meaningful with site pretenuring enabled.
inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}
1666 
1667 
// State machine for the pretenuring decision. Returns true iff the site
// transitions into kTenure (the only transition requiring a deopt).
inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure. Terminal states (kTenure,
  // kDontTenure after this call, kZombie) are never revisited here.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}
1692 
1693 
// Consumes this GC cycle's memento statistics: updates the pretenuring
// decision when enough mementos were created, optionally traces, and resets
// the counters. Returns whether dependent code must be deoptimized.
inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  // NOTE(review): with FLAG_trace_pretenuring_statistics set and
  // create_count == 0 this computes 0/0 (NaN); the value is only used for
  // the trace line in that case, but worth confirming it is intentional.
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintF(
        "AllocationSite(%p): (created, found, ratio) (%d, %d, %f) %s => %s\n",
         static_cast<void*>(this), create_count, found_count, ratio,
         PretenureDecisionName(current_decision),
         PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}
1723 
1724 
// Transitions the elements kind so arbitrary heap objects can be stored:
// anything not already a fast-object kind becomes FAST_ELEMENTS, preserving
// holeyness.
void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
  JSObject::ValidateElements(object);
  ElementsKind elements_kind = object->map()->elements_kind();
  if (!IsFastObjectElementsKind(elements_kind)) {
    if (IsFastHoleyElementsKind(elements_kind)) {
      TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
    } else {
      TransitionElementsKind(object, FAST_ELEMENTS);
    }
  }
}
1736 
1737 
// Computes the most general elements kind needed to store |count| values
// from |objects| and transitions to it if broader than the current kind.
// Holes widen to a holey kind; non-smi numbers may widen to a double kind
// (only under ALLOW_CONVERTED_DOUBLE_ELEMENTS); any other heap object
// forces an object kind.
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->map()->elements_kind();
  ElementsKind target_kind = current_kind;
  {
    // Raw Object** scan below must not be interrupted by allocation.
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    // FAST_HOLEY_ELEMENTS is already the most general fast kind.
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Heap* heap = object->GetHeap();
    Object* the_hole = heap->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          // Holey object elements is maximal; stop scanning early.
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}
1778 
1779 
// FixedArrayBase variant. For plain (or copy-on-write) FixedArrays the
// element-by-element overload above is used. For FixedDoubleArrays (which
// require mode == ALLOW_COPIED_DOUBLE_ELEMENTS) a smi-kinded object is
// transitioned to the matching double kind, holey if any copied element is
// the hole.
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    DCHECK(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    // Copied doubles cannot come from a non-double backing store.
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}
1812 
1813 
// Migrates |object| to |new_map| and installs |value| as its elements
// backing store. The DCHECKs assert that the new map's elements kind agrees
// with the concrete type of |value| (FixedArray for smi/object kinds,
// FixedDoubleArray for double kinds), with the empty fixed array allowed
// in either case.
void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array())) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}
1827 
1828 
// Stores the elements backing-store pointer, emitting a write barrier only
// when |mode| requires it.
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}
1833 
1834 
// Resets the properties backing store to the canonical empty fixed array.
// No write barrier: the DCHECK establishes the array is not in new space.
void JSObject::initialize_properties() {
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
}
1839 
1840 
// Installs the map's initial elements object as the backing store.
// NOTE(review): no write barrier is emitted here — presumably
// Map::GetInitialElements returns an immortal/immovable array; confirm.
void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}
1845 
1846 
// If |map| has exactly one ("simple") transition, and that transition adds a
// FIELD property with no attributes and a string key, returns that key.
// Returns a null handle in every other case.
Handle<String> Map::ExpectedTransitionKey(Handle<Map> map) {
  DisallowHeapAllocation no_gc;
  if (!map->HasTransitionArray()) return Handle<String>::null();
  TransitionArray* transitions = map->transitions();
  if (!transitions->IsSimpleTransition()) return Handle<String>::null();
  int transition = TransitionArray::kSimpleTransitionIndex;
  PropertyDetails details = transitions->GetTargetDetails(transition);
  Name* name = transitions->GetKey(transition);
  if (details.type() != FIELD) return Handle<String>::null();
  if (details.attributes() != NONE) return Handle<String>::null();
  if (!name->IsString()) return Handle<String>::null();
  return Handle<String>(String::cast(name));
}
1860 
1861 
// Target map of the simple transition; only valid when
// ExpectedTransitionKey(map) returned a non-null handle (asserted).
Handle<Map> Map::ExpectedTransitionTarget(Handle<Map> map) {
  DCHECK(!ExpectedTransitionKey(map).is_null());
  return Handle<Map>(map->transitions()->GetTarget(
      TransitionArray::kSimpleTransitionIndex));
}
1867 
1868 
// Looks up |key| in |map|'s transition array and returns the target map if
// the transition introduces a FIELD property with no attributes; otherwise
// returns a null handle.
Handle<Map> Map::FindTransitionToField(Handle<Map> map, Handle<Name> key) {
  DisallowHeapAllocation no_allocation;
  if (!map->HasTransitionArray()) return Handle<Map>::null();
  TransitionArray* transitions = map->transitions();
  int transition = transitions->Search(*key);
  if (transition == TransitionArray::kNotFound) return Handle<Map>::null();
  PropertyDetails target_details = transitions->GetTargetDetails(transition);
  if (target_details.type() != FIELD) return Handle<Map>::null();
  if (target_details.attributes() != NONE) return Handle<Map>::null();
  return Handle<Map>(transitions->GetTarget(transition));
}
1880 
1881 
// Macro-generated getter/setter pairs (setter includes a write barrier).
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
1884 
1885 
// Reads the oddball kind, stored as a smi in the kind field.
byte Oddball::kind() const {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}
1889 
1890 
// Stores the oddball kind as a smi; smis never need a write barrier.
void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}
1894 
1895 
// Returns the cell's payload.
Object* Cell::value() const {
  return READ_FIELD(this, kValueOffset);
}
1899 
1900 
// Stores the cell's payload without a write barrier; the DCHECK forbids
// storing cells inside cells (which would need barrier treatment).
void Cell::set_value(Object* val, WriteBarrierMode ignored) {
  // The write barrier is not used for global property cells.
  DCHECK(!val->IsPropertyCell() && !val->IsCell());
  WRITE_FIELD(this, kValueOffset, val);
}
1906 
// Macro-generated dependent_code accessor pair for PropertyCell.
ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
1908 
// Raw (untyped) read of the property cell's type field.
Object* PropertyCell::type_raw() const {
  return READ_FIELD(this, kTypeOffset);
}
1912 
1913 
// Raw write of the type field; deliberately skips the write barrier
// (|ignored| documents that callers cannot request one).
void PropertyCell::set_type_raw(Object* val, WriteBarrierMode ignored) {
  WRITE_FIELD(this, kTypeOffset, val);
}
1917 
1918 
// Returns the fixed header size (in bytes) for this object's instance type;
// internal fields start immediately after the header.
int JSObject::GetHeaderSize() {
  InstanceType type = map()->instance_type();
  // Check for the most common kind of JavaScript object before
  // falling into the generic switch. This speeds up the internal
  // field operations considerably on average.
  if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
  switch (type) {
    case JS_GENERATOR_OBJECT_TYPE:
      return JSGeneratorObject::kSize;
    case JS_MODULE_TYPE:
      return JSModule::kSize;
    case JS_GLOBAL_PROXY_TYPE:
      return JSGlobalProxy::kSize;
    case JS_GLOBAL_OBJECT_TYPE:
      return JSGlobalObject::kSize;
    case JS_BUILTINS_OBJECT_TYPE:
      return JSBuiltinsObject::kSize;
    case JS_FUNCTION_TYPE:
      return JSFunction::kSize;
    case JS_VALUE_TYPE:
      return JSValue::kSize;
    case JS_DATE_TYPE:
      return JSDate::kSize;
    case JS_ARRAY_TYPE:
      return JSArray::kSize;
    case JS_ARRAY_BUFFER_TYPE:
      return JSArrayBuffer::kSize;
    case JS_TYPED_ARRAY_TYPE:
      return JSTypedArray::kSize;
    case JS_DATA_VIEW_TYPE:
      return JSDataView::kSize;
    case JS_SET_TYPE:
      return JSSet::kSize;
    case JS_MAP_TYPE:
      return JSMap::kSize;
    case JS_SET_ITERATOR_TYPE:
      return JSSetIterator::kSize;
    case JS_MAP_ITERATOR_TYPE:
      return JSMapIterator::kSize;
    case JS_WEAK_MAP_TYPE:
      return JSWeakMap::kSize;
    case JS_WEAK_SET_TYPE:
      return JSWeakSet::kSize;
    case JS_REGEXP_TYPE:
      return JSRegExp::kSize;
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
      return JSObject::kHeaderSize;
    case JS_MESSAGE_OBJECT_TYPE:
      return JSMessageObject::kSize;
    default:
      // TODO(jkummerow): Re-enable this. Blink currently hits this
      // from its CustomElementConstructorBuilder.
      // UNREACHABLE();
      return 0;  // Deliberate fallback instead of UNREACHABLE(); see TODO.
  }
}
1975 
1976 
// Number of internal fields: total pointer slots past the header, minus the
// in-object property slots that live at the end of the object.
int JSObject::GetInternalFieldCount() {
  DCHECK(1 << kPointerSizeLog2 == kPointerSize);
  // Make sure to adjust for the number of in-object properties. These
  // properties do contribute to the size, but are not internal fields.
  return ((Size() - GetHeaderSize()) >> kPointerSizeLog2) -
         map()->inobject_properties();
}
1984 
1985 
// Byte offset of internal field |index|, which sits right after the header.
int JSObject::GetInternalFieldOffset(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}
1990 
1991 
// Reads internal field |index|.
Object* JSObject::GetInternalField(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}
1999 
2000 
// Writes internal field |index| with a write barrier (value may be any
// heap object).
void JSObject::SetInternalField(int index, Object* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
2010 
2011 
// Smi overload: no write barrier needed since smis are not heap pointers.
void JSObject::SetInternalField(int index, Smi* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}
2020 
2021 
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(FieldIndex index) {
  if (index.is_inobject()) {
    // In-object property: read directly from the object body.
    return READ_FIELD(this, index.offset());
  } else {
    // Out-of-object property: stored in the properties backing array.
    return properties()->get(index.outobject_array_index());
  }
}
2032 
2033 
// Writes a fast-case property, dispatching between the in-object slot
// (with write barrier) and the properties backing array.
void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
  if (index.is_inobject()) {
    int offset = index.offset();
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    // FixedArray::set performs its own write barrier.
    properties()->set(index.outobject_array_index(), value);
  }
}
2043 
2044 
// Byte offset of in-object property |index|, delegated to the map.
int JSObject::GetInObjectPropertyOffset(int index) {
  return map()->GetInObjectPropertyOffset(index);
}
2048 
2049 
// Reads in-object property |index|.
Object* JSObject::InObjectPropertyAt(int index) {
  int offset = GetInObjectPropertyOffset(index);
  return READ_FIELD(this, offset);
}
2054 
2055 
// Writes in-object property |index| with a conditional write barrier and
// returns the stored value.
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  int offset = GetInObjectPropertyOffset(index);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}
2065 
2066 
2067 
// Initializes the object body: the map's pre-allocated property slots get
// |pre_allocated_value|, all remaining slots up to the instance size get
// |filler_value|. Both values must not live in new space (asserted) because
// the raw WRITE_FIELD stores emit no write barrier.
void JSObject::InitializeBody(Map* map,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  DCHECK(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  DCHECK(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = kHeaderSize;
  if (filler_value != pre_allocated_value) {
    int pre_allocated = map->pre_allocated_property_fields();
    DCHECK(pre_allocated * kPointerSize + kHeaderSize <= size);
    for (int i = 0; i < pre_allocated; i++) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  // Fill the rest of the body (continues from wherever the loop above left
  // off, or from the header when both values are the same).
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}
2090 
2091 
// True when properties are stored in fast (descriptor-based) mode, i.e. the
// backing store is not a dictionary; asserted consistent with the map bit.
bool JSObject::HasFastProperties() {
  DCHECK(properties()->IsDictionary() == map()->is_dictionary_map());
  return !properties()->IsDictionary();
}
2096 
2097 
// Heuristic deciding when an object should leave fast-properties mode:
// only when all unused fields are exhausted, the map is not a prototype
// map, and the number of out-of-object fields exceeds a store-mode
// dependent limit (higher tolerance for definitely-named stores).
bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
  if (unused_property_fields() != 0) return false;
  if (is_prototype_map()) return false;
  int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
  int limit = Max(minimum, inobject_properties());
  int external = NumberOfFields() - inobject_properties();
  return external > limit;
}
2106 
2107 
// Fills every pointer slot of a Struct body with undefined (no write
// barrier is used; undefined is a root value).
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
2114 
2115 
// Converts this object to a uint32 array index. Succeeds for non-negative
// smis, and for heap numbers whose value round-trips exactly through a
// uint32 cast (i.e. an integral value in [0, 2^32)). Returns false
// otherwise, leaving |*index| untouched.
bool Object::ToArrayIndex(uint32_t* index) {
  if (IsSmi()) {
    int value = Smi::cast(this)->value();
    if (value < 0) return false;
    *index = value;
    return true;
  }
  if (IsHeapNumber()) {
    double value = HeapNumber::cast(this)->value();
    uint32_t uint_value = static_cast<uint32_t>(value);
    // Exact round-trip check rejects fractions, negatives and overflow.
    if (value == static_cast<double>(uint_value)) {
      *index = uint_value;
      return true;
    }
  }
  return false;
}
2133 
2134 
// True iff this is a JSValue wrapping a String and |index| is a valid
// character position within that string.
bool Object::IsStringObjectWithCharacterAt(uint32_t index) {
  if (!this->IsJSValue()) return false;

  JSValue* js_value = JSValue::cast(this);
  if (!js_value->value()->IsString()) return false;

  String* str = String::cast(js_value->value());
  if (index >= static_cast<uint32_t>(str->length())) return false;

  return true;
}
2146 
2147 
// Debug-only sanity check (compiled in only with ENABLE_EXTRA_CHECKS):
// aborts if an embedder API callback returned an object outside the set of
// value types the API is allowed to produce.
void Object::VerifyApiCallResultType() {
#if ENABLE_EXTRA_CHECKS
  if (!(IsSmi() ||
        IsString() ||
        IsSymbol() ||
        IsSpecObject() ||
        IsHeapNumber() ||
        IsUndefined() ||
        IsTrue() ||
        IsFalse() ||
        IsNull())) {
    FATAL("API call returned invalid object");
  }
#endif  // ENABLE_EXTRA_CHECKS
}
2163 
2164 
// Reads element |index| (bounds checked only in slow-DCHECK builds).
Object* FixedArray::get(int index) {
  SLOW_DCHECK(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}
2169 
2170 
// Handle-returning variant of get() for use in allocating code paths.
Handle<Object> FixedArray::get(Handle<FixedArray> array, int index) {
  return handle(array->get(index), array->GetIsolate());
}
2174 
2175 
// True iff element |index| is the hole sentinel.
bool FixedArray::is_the_hole(int index) {
  return get(index) == GetHeap()->the_hole_value();
}
2179 
2180 
// Smi store: no write barrier required. COW arrays must not be mutated
// (asserted).
void FixedArray::set(int index, Smi* value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}
2188 
2189 
// General object store with write barrier. COW arrays must not be mutated,
// and the receiver must really be a FixedArray (asserted).
void FixedArray::set(int index, Object* value) {
  DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
  DCHECK_EQ(FIXED_ARRAY_TYPE, map()->instance_type());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
2198 
2199 
// True iff |value|'s bit pattern is the dedicated hole NaN
// (kHoleNanInt64); ordinary NaNs compare false.
inline bool FixedDoubleArray::is_the_hole_nan(double value) {
  return bit_cast<uint64_t, double>(value) == kHoleNanInt64;
}
2203 
2204 
// The hole sentinel as a double (the hole-NaN bit pattern).
inline double FixedDoubleArray::hole_nan_as_double() {
  return bit_cast<double, uint64_t>(kHoleNanInt64);
}
2208 
2209 
// A NaN that is guaranteed not to collide with the hole sentinel; the
// DCHECKs verify the platform's canonical NaN differs from the hole NaN in
// both full-width and upper-32-bit comparisons.
inline double FixedDoubleArray::canonical_not_the_hole_nan_as_double() {
  DCHECK(bit_cast<uint64_t>(base::OS::nan_value()) != kHoleNanInt64);
  DCHECK((bit_cast<uint64_t>(base::OS::nan_value()) >> 32) != kHoleNanUpper32);
  return base::OS::nan_value();
}
2215 
2216 
// Reads element |index| as a double; the caller must ensure it is not the
// hole (asserted).
double FixedDoubleArray::get_scalar(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  double result = READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
  DCHECK(!is_the_hole_nan(result));
  return result;
}
2225 
// Reads element |index| as its raw 64-bit representation (valid for holes
// too, unlike get_scalar).
int64_t FixedDoubleArray::get_representation(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  return READ_INT64_FIELD(this, kHeaderSize + index * kDoubleSize);
}
2232 
2233 
// Handle-returning read: the hole maps to the_hole_value, otherwise a fresh
// HeapNumber is allocated for the scalar.
Handle<Object> FixedDoubleArray::get(Handle<FixedDoubleArray> array,
                                     int index) {
  if (array->is_the_hole(index)) {
    return array->GetIsolate()->factory()->the_hole_value();
  } else {
    return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
  }
}
2242 
2243 
// Stores |value| at |index|, canonicalizing NaNs so that no ordinary NaN
// can alias the hole sentinel's bit pattern.
void FixedDoubleArray::set(int index, double value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) value = canonical_not_the_hole_nan_as_double();
  WRITE_DOUBLE_FIELD(this, offset, value);
}
2251 
2252 
// Marks element |index| as a hole by storing the hole-NaN bit pattern.
void FixedDoubleArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_DOUBLE_FIELD(this, offset, hole_nan_as_double());
}
2259 
2260 
// True iff element |index| holds the hole-NaN bit pattern.
bool FixedDoubleArray::is_the_hole(int index) {
  int offset = kHeaderSize + index * kDoubleSize;
  return is_the_hole_nan(READ_DOUBLE_FIELD(this, offset));
}
2265 
2266 
// Pointer to the first element's storage (for bulk copies).
double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}
2270 
2271 
FillWithHoles(int from,int to)2272 void FixedDoubleArray::FillWithHoles(int from, int to) {
2273   for (int i = from; i < to; i++) {
2274     set_the_hole(i);
2275   }
2276 }
2277 
2278 
// Bumps the stored count for entry |type|.
void ConstantPoolArray::NumberOfEntries::increment(Type type) {
  DCHECK(type < NUMBER_OF_TYPES);
  element_counts_[type]++;
}
2283 
2284 
equals(const ConstantPoolArray::NumberOfEntries & other)2285 int ConstantPoolArray::NumberOfEntries::equals(
2286     const ConstantPoolArray::NumberOfEntries& other) const {
2287   for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2288     if (element_counts_[i] != other.element_counts_[i]) return false;
2289   }
2290   return true;
2291 }
2292 
2293 
// True iff no entries of any type are recorded.
bool ConstantPoolArray::NumberOfEntries::is_empty() const {
  return total_count() == 0;
}
2297 
2298 
// Entry count recorded for |type|.
int ConstantPoolArray::NumberOfEntries::count_of(Type type) const {
  DCHECK(type < NUMBER_OF_TYPES);
  return element_counts_[type];
}
2303 
2304 
base_of(Type type)2305 int ConstantPoolArray::NumberOfEntries::base_of(Type type) const {
2306   int base = 0;
2307   DCHECK(type < NUMBER_OF_TYPES);
2308   for (int i = 0; i < type; i++) {
2309     base += element_counts_[i];
2310   }
2311   return base;
2312 }
2313 
2314 
total_count()2315 int ConstantPoolArray::NumberOfEntries::total_count() const {
2316   int count = 0;
2317   for (int i = 0; i < NUMBER_OF_TYPES; i++) {
2318     count += element_counts_[i];
2319   }
2320   return count;
2321 }
2322 
2323 
are_in_range(int min,int max)2324 int ConstantPoolArray::NumberOfEntries::are_in_range(int min, int max) const {
2325   for (int i = FIRST_TYPE; i < NUMBER_OF_TYPES; i++) {
2326     if (element_counts_[i] < min || element_counts_[i] > max) {
2327       return false;
2328     }
2329   }
2330   return true;
2331 }
2332 
2333 
// Returns the current index and advances, hopping from the small section
// into the extended section when the current type's small range is done.
int ConstantPoolArray::Iterator::next_index() {
  DCHECK(!is_finished());
  int ret = next_index_++;
  update_section();
  return ret;
}
2340 
2341 
// True once the iterator has passed the last entry of its type in the
// final section.
bool ConstantPoolArray::Iterator::is_finished() {
  return next_index_ > array_->last_index(type_, final_section_);
}
2345 
2346 
// If the small-section range for the current type is exhausted, jump to the
// first entry of that type in the extended section.
void ConstantPoolArray::Iterator::update_section() {
  if (next_index_ > array_->last_index(type_, current_section_) &&
      current_section_ != final_section_) {
    DCHECK(final_section_ == EXTENDED_SECTION);
    current_section_ = EXTENDED_SECTION;
    next_index_ = array_->first_index(type_, EXTENDED_SECTION);
  }
}
2355 
2356 
// Whether this pool has an extended section, per the layout bit in the
// first small-section layout word.
bool ConstantPoolArray::is_extended_layout() {
  uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
  return IsExtendedField::decode(small_layout_1);
}
2361 
2362 
// The last section present: EXTENDED_SECTION when the layout is extended,
// otherwise SMALL_SECTION.
ConstantPoolArray::LayoutSection ConstantPoolArray::final_section() {
  return is_extended_layout() ? EXTENDED_SECTION : SMALL_SECTION;
}
2366 
2367 
// Index of the first extended-section entry: the small section's total
// entry count, stored in the second layout word.
int ConstantPoolArray::first_extended_section_index() {
  DCHECK(is_extended_layout());
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  return TotalCountField::decode(small_layout_2);
}
2373 
2374 
// Byte offset of the extended-section header: the small section size,
// rounded up to int64 alignment.
int ConstantPoolArray::get_extended_section_header_offset() {
  return RoundUp(SizeFor(NumberOfEntries(this, SMALL_SECTION)), kInt64Size);
}
2378 
2379 
// Decodes the weak-object-state bits from the second layout word.
ConstantPoolArray::WeakObjectState ConstantPoolArray::get_weak_object_state() {
  uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
  return WeakObjectStateField::decode(small_layout_2);
}
2384 
2385 
set_weak_object_state(ConstantPoolArray::WeakObjectState state)2386 void ConstantPoolArray::set_weak_object_state(
2387       ConstantPoolArray::WeakObjectState state) {
2388   uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2389   small_layout_2 = WeakObjectStateField::update(small_layout_2, state);
2390   WRITE_INT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
2391 }
2392 
2393 
// Index of the first entry of |type| in |section|: the section's starting
// index plus the counts of all preceding types within that section.
int ConstantPoolArray::first_index(Type type, LayoutSection section) {
  int index = 0;
  if (section == EXTENDED_SECTION) {
    DCHECK(is_extended_layout());
    index += first_extended_section_index();
  }

  for (Type type_iter = FIRST_TYPE; type_iter < type;
       type_iter = next_type(type_iter)) {
    index += number_of_entries(type_iter, section);
  }

  return index;
}
2408 
2409 
// Index of the last entry of |type| in |section| (one less than the next
// type's first index; can be first_index - 1 when the count is zero).
int ConstantPoolArray::last_index(Type type, LayoutSection section) {
  return first_index(type, section) + number_of_entries(type, section) - 1;
}
2413 
2414 
// Per-type entry count: decoded from the packed small-section layout words,
// or read from the int fields of the extended-section header.
int ConstantPoolArray::number_of_entries(Type type, LayoutSection section) {
  if (section == SMALL_SECTION) {
    uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
    uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
    switch (type) {
      case INT64:
        return Int64CountField::decode(small_layout_1);
      case CODE_PTR:
        return CodePtrCountField::decode(small_layout_1);
      case HEAP_PTR:
        return HeapPtrCountField::decode(small_layout_1);
      case INT32:
        return Int32CountField::decode(small_layout_2);
      default:
        UNREACHABLE();
        return 0;
    }
  } else {
    DCHECK(section == EXTENDED_SECTION && is_extended_layout());
    int offset = get_extended_section_header_offset();
    switch (type) {
      case INT64:
        offset += kExtendedInt64CountOffset;
        break;
      case CODE_PTR:
        offset += kExtendedCodePtrCountOffset;
        break;
      case HEAP_PTR:
        offset += kExtendedHeapPtrCountOffset;
        break;
      case INT32:
        offset += kExtendedInt32CountOffset;
        break;
      default:
        UNREACHABLE();
    }
    return READ_INT_FIELD(this, offset);
  }
}
2454 
2455 
// True iff |offset| falls within the element range reserved for |type| in
// the small section, or (for extended layouts) in the extended section.
bool ConstantPoolArray::offset_is_type(int offset, Type type) {
  return (offset >= OffsetOfElementAt(first_index(type, SMALL_SECTION)) &&
          offset <= OffsetOfElementAt(last_index(type, SMALL_SECTION))) ||
         (is_extended_layout() &&
          offset >= OffsetOfElementAt(first_index(type, EXTENDED_SECTION)) &&
          offset <= OffsetOfElementAt(last_index(type, EXTENDED_SECTION)));
}
2463 
2464 
// Determines the entry type at |index| by first picking the section the
// index belongs to, then scanning types in order until the index falls at
// or before that type's last index.
ConstantPoolArray::Type ConstantPoolArray::get_type(int index) {
  LayoutSection section;
  if (is_extended_layout() && index >= first_extended_section_index()) {
    section = EXTENDED_SECTION;
  } else {
    section = SMALL_SECTION;
  }

  Type type = FIRST_TYPE;
  while (index > last_index(type, section)) {
    type = next_type(type);
  }
  DCHECK(type <= LAST_TYPE);
  return type;
}
2480 
2481 
// Reads an INT64 entry.
int64_t ConstantPoolArray::get_int64_entry(int index) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT64);
  return READ_INT64_FIELD(this, OffsetOfElementAt(index));
}
2487 
2488 
// Reads an INT64 entry reinterpreted as a double (same 8-byte slot).
double ConstantPoolArray::get_int64_entry_as_double(int index) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT64);
  return READ_DOUBLE_FIELD(this, OffsetOfElementAt(index));
}
2495 
2496 
// Reads a CODE_PTR entry as a raw code address.
Address ConstantPoolArray::get_code_ptr_entry(int index) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == CODE_PTR);
  return reinterpret_cast<Address>(READ_FIELD(this, OffsetOfElementAt(index)));
}
2502 
2503 
// Reads a HEAP_PTR entry.
Object* ConstantPoolArray::get_heap_ptr_entry(int index) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == HEAP_PTR);
  return READ_FIELD(this, OffsetOfElementAt(index));
}
2509 
2510 
// Reads an INT32 entry.
int32_t ConstantPoolArray::get_int32_entry(int index) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT32);
  return READ_INT32_FIELD(this, OffsetOfElementAt(index));
}
2516 
2517 
// Writes an INT64 entry.
void ConstantPoolArray::set(int index, int64_t value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT64);
  WRITE_INT64_FIELD(this, OffsetOfElementAt(index), value);
}
2523 
2524 
// Writes a double into an INT64 slot (same 8-byte storage).
void ConstantPoolArray::set(int index, double value) {
  STATIC_ASSERT(kDoubleSize == kInt64Size);
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT64);
  WRITE_DOUBLE_FIELD(this, OffsetOfElementAt(index), value);
}
2531 
2532 
// Writes a CODE_PTR entry as a raw address.
// NOTE(review): unlike set_at_offset(int, Address), no write barrier is
// emitted here — presumably callers guarantee the pointed-to code is not
// subject to barrier-relevant movement at this point; confirm.
void ConstantPoolArray::set(int index, Address value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == CODE_PTR);
  WRITE_FIELD(this, OffsetOfElementAt(index), reinterpret_cast<Object*>(value));
}
2538 
2539 
// Writes a HEAP_PTR entry with a write barrier; new-space values are
// disallowed (asserted).
void ConstantPoolArray::set(int index, Object* value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(!GetHeap()->InNewSpace(value));
  DCHECK(get_type(index) == HEAP_PTR);
  WRITE_FIELD(this, OffsetOfElementAt(index), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfElementAt(index), value);
}
2547 
2548 
// Writes an INT32 entry.
void ConstantPoolArray::set(int index, int32_t value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(get_type(index) == INT32);
  WRITE_INT32_FIELD(this, OffsetOfElementAt(index), value);
}
2554 
2555 
// Offset-addressed INT32 store (offset checked against the INT32 range).
void ConstantPoolArray::set_at_offset(int offset, int32_t value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(offset_is_type(offset, INT32));
  WRITE_INT32_FIELD(this, offset, value);
}
2561 
2562 
// Offset-addressed INT64 store.
void ConstantPoolArray::set_at_offset(int offset, int64_t value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(offset_is_type(offset, INT64));
  WRITE_INT64_FIELD(this, offset, value);
}
2568 
2569 
// Byte-offset variant: stores a double; doubles live in INT64-typed slots.
void ConstantPoolArray::set_at_offset(int offset, double value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(offset_is_type(offset, INT64));
  WRITE_DOUBLE_FIELD(this, offset, value);
}
2575 
2576 
// Byte-offset variant: stores a code entry address into a CODE_PTR slot and
// records a write barrier for it (the pool may already be GC-reachable).
void ConstantPoolArray::set_at_offset(int offset, Address value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(offset_is_type(offset, CODE_PTR));
  WRITE_FIELD(this, offset, reinterpret_cast<Object*>(value));
  WRITE_BARRIER(GetHeap(), this, offset, reinterpret_cast<Object*>(value));
}
2583 
2584 
// Byte-offset variant: stores a tagged heap pointer into a HEAP_PTR slot.
// New-space values are asserted away, so the barrier only serves marking.
void ConstantPoolArray::set_at_offset(int offset, Object* value) {
  DCHECK(map() == GetHeap()->constant_pool_array_map());
  DCHECK(!GetHeap()->InNewSpace(value));
  DCHECK(offset_is_type(offset, HEAP_PTR));
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}
2592 
2593 
// Initializes the two packed layout words describing the small section:
// per-type entry counts, the total count, the weak-object state, and the
// is-extended flag (cleared here; InitExtended() sets it).
void ConstantPoolArray::Init(const NumberOfEntries& small) {
  uint32_t small_layout_1 =
      Int64CountField::encode(small.count_of(INT64)) |
      CodePtrCountField::encode(small.count_of(CODE_PTR)) |
      HeapPtrCountField::encode(small.count_of(HEAP_PTR)) |
      IsExtendedField::encode(false);
  uint32_t small_layout_2 =
      Int32CountField::encode(small.count_of(INT32)) |
      TotalCountField::encode(small.total_count()) |
      WeakObjectStateField::encode(NO_WEAK_OBJECTS);
  WRITE_UINT32_FIELD(this, kSmallLayout1Offset, small_layout_1);
  WRITE_UINT32_FIELD(this, kSmallLayout2Offset, small_layout_2);
  if (kHeaderSize != kFirstEntryOffset) {
    DCHECK(kFirstEntryOffset - kHeaderSize == kInt32Size);
    WRITE_UINT32_FIELD(this, kHeaderSize, 0);  // Zero out header padding.
  }
}
2611 
2612 
// Initializes a pool with both a small and an extended section: lays out the
// small section via Init(), flips the is-extended bit, then writes the four
// per-type counts into the extended section header.
void ConstantPoolArray::InitExtended(const NumberOfEntries& small,
                                     const NumberOfEntries& extended) {
  // Initialize small layout fields first.
  Init(small);

  // Set is_extended_layout field.
  uint32_t small_layout_1 = READ_UINT32_FIELD(this, kSmallLayout1Offset);
  small_layout_1 = IsExtendedField::update(small_layout_1, true);
  WRITE_INT32_FIELD(this, kSmallLayout1Offset, small_layout_1);

  // Initialize the extended layout fields.
  int extended_header_offset = get_extended_section_header_offset();
  WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt64CountOffset,
      extended.count_of(INT64));
  WRITE_INT_FIELD(this, extended_header_offset + kExtendedCodePtrCountOffset,
      extended.count_of(CODE_PTR));
  WRITE_INT_FIELD(this, extended_header_offset + kExtendedHeapPtrCountOffset,
      extended.count_of(HEAP_PTR));
  WRITE_INT_FIELD(this, extended_header_offset + kExtendedInt32CountOffset,
      extended.count_of(INT32));
}
2634 
2635 
size()2636 int ConstantPoolArray::size() {
2637   NumberOfEntries small(this, SMALL_SECTION);
2638   if (!is_extended_layout()) {
2639     return SizeFor(small);
2640   } else {
2641     NumberOfEntries extended(this, EXTENDED_SECTION);
2642     return SizeForExtended(small, extended);
2643   }
2644 }
2645 
2646 
length()2647 int ConstantPoolArray::length() {
2648   uint32_t small_layout_2 = READ_UINT32_FIELD(this, kSmallLayout2Offset);
2649   int length = TotalCountField::decode(small_layout_2);
2650   if (is_extended_layout()) {
2651     length += number_of_entries(INT64, EXTENDED_SECTION) +
2652               number_of_entries(CODE_PTR, EXTENDED_SECTION) +
2653               number_of_entries(HEAP_PTR, EXTENDED_SECTION) +
2654               number_of_entries(INT32, EXTENDED_SECTION);
2655   }
2656   return length;
2657 }
2658 
2659 
// Decides whether stores into this object may skip the write barrier. The
// order of the checks matters: while incremental marking is active the
// barrier is always required, even for new-space objects; only otherwise can
// new-space objects skip it (they are scanned wholesale anyway). The
// DisallowHeapAllocation witness guarantees the answer stays valid (no GC
// can move the object or start marking while it is held).
WriteBarrierMode HeapObject::GetWriteBarrierMode(
    const DisallowHeapAllocation& promise) {
  Heap* heap = GetHeap();
  if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
  if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
  return UPDATE_WRITE_BARRIER;
}
2667 
2668 
// Stores value at index with a caller-chosen write-barrier mode. Callers
// passing SKIP_WRITE_BARRIER must have obtained it via GetWriteBarrierMode.
// COW arrays must never be mutated (asserted).
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}
2678 
2679 
// Stores value while skipping the incremental-marking barrier, but still
// updates the store buffer for old-to-new pointers. Only safe when the
// caller guarantees the marker cannot observe the array (e.g. it is white).
void FixedArray::NoIncrementalWriteBarrierSet(FixedArray* array,
                                              int index,
                                              Object* value) {
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(array, offset, value);
  Heap* heap = array->GetHeap();
  if (heap->InNewSpace(value)) {
    // Record the old-to-new-space pointer so the scavenger can find it.
    heap->RecordWrite(array->address(), offset);
  }
}
2692 
2693 
// Stores value with no barrier at all. Only valid for values that cannot be
// in new space (asserted), so no old-to-new pointer can be created.
void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  DCHECK(!array->GetHeap()->InNewSpace(value));
  WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}
2702 
2703 
// Stores the undefined sentinel. Undefined is an immortal immovable root in
// old space (asserted), so no write barrier is needed.
void FixedArray::set_undefined(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}
2712 
2713 
set_null(int index)2714 void FixedArray::set_null(int index) {
2715   DCHECK(index >= 0 && index < this->length());
2716   DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
2717   WRITE_FIELD(this,
2718               kHeaderSize + index * kPointerSize,
2719               GetHeap()->null_value());
2720 }
2721 
2722 
// Stores the hole sentinel (marks an elided element). The hole is an
// immortal immovable root in old space (asserted), so no barrier is needed.
void FixedArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}
2731 
2732 
FillWithHoles(int from,int to)2733 void FixedArray::FillWithHoles(int from, int to) {
2734   for (int i = from; i < to; i++) {
2735     set_the_hole(i);
2736   }
2737 }
2738 
2739 
// Returns a raw pointer to the first element slot (just past the header).
Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}
2743 
2744 
// An empty descriptor array is the canonical shared instance, which is
// shorter than kFirstIndex; real arrays always have at least kFirstIndex
// slots (asserted).
bool DescriptorArray::IsEmpty() {
  DCHECK(length() >= kFirstIndex ||
         this == GetHeap()->empty_descriptor_array());
  return length() < kFirstIndex;
}
2750 
2751 
// Updates the stored descriptor count. The count is a Smi, so no write
// barrier is required.
void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}
2756 
2757 
2758 // Perform a binary search in a fixed array. Low and high are entry indices. If
2759 // there are three entries in this array it should be called with low=0 and
2760 // high=2.
2761 template<SearchMode search_mode, typename T>
BinarySearch(T * array,Name * name,int low,int high,int valid_entries)2762 int BinarySearch(T* array, Name* name, int low, int high, int valid_entries) {
2763   uint32_t hash = name->Hash();
2764   int limit = high;
2765 
2766   DCHECK(low <= high);
2767 
2768   while (low != high) {
2769     int mid = (low + high) / 2;
2770     Name* mid_name = array->GetSortedKey(mid);
2771     uint32_t mid_hash = mid_name->Hash();
2772 
2773     if (mid_hash >= hash) {
2774       high = mid;
2775     } else {
2776       low = mid + 1;
2777     }
2778   }
2779 
2780   for (; low <= limit; ++low) {
2781     int sort_index = array->GetSortedKeyIndex(low);
2782     Name* entry = array->GetKey(sort_index);
2783     if (entry->Hash() != hash) break;
2784     if (entry->Equals(name)) {
2785       if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2786         return sort_index;
2787       }
2788       return T::kNotFound;
2789     }
2790   }
2791 
2792   return T::kNotFound;
2793 }
2794 
2795 
2796 // Perform a linear search in this fixed array. len is the number of entry
2797 // indices that are valid.
2798 template<SearchMode search_mode, typename T>
LinearSearch(T * array,Name * name,int len,int valid_entries)2799 int LinearSearch(T* array, Name* name, int len, int valid_entries) {
2800   uint32_t hash = name->Hash();
2801   if (search_mode == ALL_ENTRIES) {
2802     for (int number = 0; number < len; number++) {
2803       int sorted_index = array->GetSortedKeyIndex(number);
2804       Name* entry = array->GetKey(sorted_index);
2805       uint32_t current_hash = entry->Hash();
2806       if (current_hash > hash) break;
2807       if (current_hash == hash && entry->Equals(name)) return sorted_index;
2808     }
2809   } else {
2810     DCHECK(len >= valid_entries);
2811     for (int number = 0; number < valid_entries; number++) {
2812       Name* entry = array->GetKey(number);
2813       uint32_t current_hash = entry->Hash();
2814       if (current_hash == hash && entry->Equals(name)) return number;
2815     }
2816   }
2817   return T::kNotFound;
2818 }
2819 
2820 
2821 template<SearchMode search_mode, typename T>
Search(T * array,Name * name,int valid_entries)2822 int Search(T* array, Name* name, int valid_entries) {
2823   if (search_mode == VALID_ENTRIES) {
2824     SLOW_DCHECK(array->IsSortedNoDuplicates(valid_entries));
2825   } else {
2826     SLOW_DCHECK(array->IsSortedNoDuplicates());
2827   }
2828 
2829   int nof = array->number_of_entries();
2830   if (nof == 0) return T::kNotFound;
2831 
2832   // Fast case: do linear search for small arrays.
2833   const int kMaxElementsForLinearSearch = 8;
2834   if ((search_mode == ALL_ENTRIES &&
2835        nof <= kMaxElementsForLinearSearch) ||
2836       (search_mode == VALID_ENTRIES &&
2837        valid_entries <= (kMaxElementsForLinearSearch * 3))) {
2838     return LinearSearch<search_mode>(array, name, nof, valid_entries);
2839   }
2840 
2841   // Slow case: perform binary search.
2842   return BinarySearch<search_mode>(array, name, 0, nof - 1, valid_entries);
2843 }
2844 
2845 
// Finds the descriptor for name among the first valid_descriptors entries,
// or kNotFound.
int DescriptorArray::Search(Name* name, int valid_descriptors) {
  return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors);
}
2849 
2850 
SearchWithCache(Name * name,Map * map)2851 int DescriptorArray::SearchWithCache(Name* name, Map* map) {
2852   int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2853   if (number_of_own_descriptors == 0) return kNotFound;
2854 
2855   DescriptorLookupCache* cache = GetIsolate()->descriptor_lookup_cache();
2856   int number = cache->Lookup(map, name);
2857 
2858   if (number == DescriptorLookupCache::kAbsent) {
2859     number = Search(name, number_of_own_descriptors);
2860     cache->Update(map, name, number);
2861   }
2862 
2863   return number;
2864 }
2865 
2866 
// Returns the property details of the most recently added own descriptor.
PropertyDetails Map::GetLastDescriptorDetails() {
  return instance_descriptors()->GetDetails(LastAdded());
}
2870 
2871 
// Looks up name in this map's own descriptors (via the lookup cache) and
// fills in result; result is marked NotFound on a miss.
void Map::LookupDescriptor(JSObject* holder,
                           Name* name,
                           LookupResult* result) {
  DescriptorArray* descriptors = this->instance_descriptors();
  int number = descriptors->SearchWithCache(name, this);
  if (number == DescriptorArray::kNotFound) return result->NotFound();
  result->DescriptorResult(holder, descriptors->GetDetails(number), number);
}
2880 
2881 
// Looks up a map transition keyed by name and fills in result; result is
// marked NotFound when no such transition exists.
void Map::LookupTransition(JSObject* holder,
                           Name* name,
                           LookupResult* result) {
  int transition_index = this->SearchTransition(name);
  if (transition_index == TransitionArray::kNotFound) return result->NotFound();
  result->TransitionResult(holder, this->GetTransition(transition_index));
}
2889 
2890 
// Returns the canonical empty elements store matching this map's elements
// kind. All returned arrays are immortal old-space singletons (asserted),
// so callers may install them without a write barrier.
FixedArrayBase* Map::GetInitialElements() {
  if (has_fast_smi_or_object_elements() ||
      has_fast_double_elements()) {
    DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
    return GetHeap()->empty_fixed_array();
  } else if (has_external_array_elements()) {
    ExternalArray* empty_array = GetHeap()->EmptyExternalArrayForMap(this);
    DCHECK(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else if (has_fixed_typed_array_elements()) {
    FixedTypedArrayBase* empty_array =
      GetHeap()->EmptyFixedTypedArrayForMap(this);
    DCHECK(!GetHeap()->InNewSpace(empty_array));
    return empty_array;
  } else {
    // Any other elements kind has no canonical empty store.
    UNREACHABLE();
  }
  return NULL;
}
2910 
2911 
// Returns the raw slot holding the key of the given descriptor.
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
}
2916 
2917 
// First raw slot of a descriptor triple; the key slot comes first.
Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);
}
2921 
2922 
// One-past-the-end raw slot of the descriptor range ending at
// descriptor_number - 1 (i.e. just past that descriptor's value slot).
Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;
}
2926 
2927 
// Returns the property name (key) of the given descriptor.
Name* DescriptorArray::GetKey(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));
}
2932 
2933 
// Maps a position in hash-sorted order to the descriptor's storage index;
// the mapping is packed into the details word's pointer field.
int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();
}
2937 
2938 
// Returns the key at the given position in hash-sorted order.
Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
}
2942 
2943 
// Rewrites the sorted-order pointer stored inside the details word of the
// descriptor at descriptor_index.
void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
}
2948 
2949 
// Replaces the representation recorded in the descriptor's details word.
void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  DCHECK(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());
}
2957 
2958 
// Returns the raw slot holding the value of the given descriptor.
Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToValueIndex(descriptor_number));
}
2963 
2964 
// Byte offset of the descriptor's value slot within this array.
int DescriptorArray::GetValueOffset(int descriptor_number) {
  return OffsetOfElementAt(ToValueIndex(descriptor_number));
}
2968 
2969 
// Returns the descriptor's value (field type, constant, or callbacks
// object, depending on the descriptor's property type).
Object* DescriptorArray::GetValue(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}
2974 
2975 
// Replaces the descriptor's value (write barrier handled by set()).
void DescriptorArray::SetValue(int descriptor_index, Object* value) {
  set(ToValueIndex(descriptor_index), value);
}
2979 
2980 
// Decodes the Smi-encoded property details of the given descriptor.
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}
2986 
2987 
// Property type (FIELD, CONSTANT, CALLBACKS, ...) of the descriptor.
PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}
2991 
2992 
// In-object/backing-store field index; only valid for FIELD descriptors.
int DescriptorArray::GetFieldIndex(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).type() == FIELD);
  return GetDetails(descriptor_number).field_index();
}
2997 
2998 
// The tracked field type; only valid for FIELD descriptors, whose value
// slot holds a HeapType.
HeapType* DescriptorArray::GetFieldType(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).type() == FIELD);
  return HeapType::cast(GetValue(descriptor_number));
}
3003 
3004 
// For constant descriptors the value slot holds the constant itself.
Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);
}
3008 
3009 
// The raw callbacks value; only valid for CALLBACKS descriptors.
Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == CALLBACKS);
  return GetValue(descriptor_number);
}
3014 
3015 
// Unwraps the Foreign-wrapped AccessorDescriptor of a CALLBACKS descriptor.
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == CALLBACKS);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}
3021 
3022 
// Copies the descriptor triple (key, value, details) into desc, wrapping
// the heap pointers in handles so desc survives GC.
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
             handle(GetValue(descriptor_number), GetIsolate()),
             GetDetails(descriptor_number));
}
3028 
3029 
// Writes the descriptor triple using stores that skip the incremental-
// marking barrier. The unused WhitenessWitness parameter is proof the array
// is still white to the marker, which is what makes this safe.
void DescriptorArray::Set(int descriptor_number,
                          Descriptor* desc,
                          const WhitenessWitness&) {
  // Range check.
  DCHECK(descriptor_number < number_of_descriptors());

  NoIncrementalWriteBarrierSet(this,
                               ToKeyIndex(descriptor_number),
                               *desc->GetKey());
  NoIncrementalWriteBarrierSet(this,
                               ToValueIndex(descriptor_number),
                               *desc->GetValue());
  NoIncrementalWriteBarrierSet(this,
                               ToDetailsIndex(descriptor_number),
                               desc->GetDetails().AsSmi());
}
3046 
3047 
// Writes the descriptor triple with full write barriers (via set()).
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  DCHECK(descriptor_number < number_of_descriptors());

  set(ToKeyIndex(descriptor_number), *desc->GetKey());
  set(ToValueIndex(descriptor_number), *desc->GetValue());
  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}
3056 
3057 
// Appends desc as the last descriptor and splices its index into the
// hash-sorted order with one pass of insertion sort (only the sorted-order
// pointers move; the descriptors themselves stay in place).
void DescriptorArray::Append(Descriptor* desc) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  // Shift sorted slots with larger hashes one position to the right until
  // the insertion point for the new key's hash is found.
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}
3076 
3077 
SwapSortedKeys(int first,int second)3078 void DescriptorArray::SwapSortedKeys(int first, int second) {
3079   int first_key = GetSortedKeyIndex(first);
3080   SetSortedKey(first, GetSortedKeyIndex(second));
3081   SetSortedKey(second, first_key);
3082 }
3083 
3084 
// Scoped proof that the array is white to the incremental marker: enters a
// no-marking scope for its lifetime and asserts the array's current color.
DescriptorArray::WhitenessWitness::WhitenessWitness(DescriptorArray* array)
    : marking_(array->GetHeap()->incremental_marking()) {
  marking_->EnterNoMarkingScope();
  DCHECK(!marking_->IsMarking() ||
         Marking::Color(array) == Marking::WHITE_OBJECT);
}
3091 
3092 
// Re-enables incremental marking when the witness goes out of scope.
DescriptorArray::WhitenessWitness::~WhitenessWitness() {
  marking_->LeaveNoMarkingScope();
}
3096 
3097 
3098 template<typename Derived, typename Shape, typename Key>
ComputeCapacity(int at_least_space_for)3099 int HashTable<Derived, Shape, Key>::ComputeCapacity(int at_least_space_for) {
3100   const int kMinCapacity = 32;
3101   int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
3102   if (capacity < kMinCapacity) {
3103     capacity = kMinCapacity;  // Guarantee min capacity.
3104   }
3105   return capacity;
3106 }
3107 
3108 
// Convenience overload that looks up key using this table's own isolate.
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}
3113 
3114 
3115 // Find entry for key otherwise return kNotFound.
3116 template<typename Derived, typename Shape, typename Key>
FindEntry(Isolate * isolate,Key key)3117 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
3118   uint32_t capacity = Capacity();
3119   uint32_t entry = FirstProbe(HashTable::Hash(key), capacity);
3120   uint32_t count = 1;
3121   // EnsureCapacity will guarantee the hash table is never full.
3122   while (true) {
3123     Object* element = KeyAt(entry);
3124     // Empty entry. Uses raw unchecked accessors because it is called by the
3125     // string table during bootstrapping.
3126     if (element == isolate->heap()->raw_unchecked_undefined_value()) break;
3127     if (element != isolate->heap()->raw_unchecked_the_hole_value() &&
3128         Shape::IsMatch(key, element)) return entry;
3129     entry = NextProbe(entry, count++, capacity);
3130   }
3131   return kNotFound;
3132 }
3133 
3134 
requires_slow_elements()3135 bool SeededNumberDictionary::requires_slow_elements() {
3136   Object* max_index_object = get(kMaxNumberKeyIndex);
3137   if (!max_index_object->IsSmi()) return false;
3138   return 0 !=
3139       (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3140 }
3141 
max_number_key()3142 uint32_t SeededNumberDictionary::max_number_key() {
3143   DCHECK(!requires_slow_elements());
3144   Object* max_index_object = get(kMaxNumberKeyIndex);
3145   if (!max_index_object->IsSmi()) return 0;
3146   uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
3147   return value >> kRequiresSlowElementsTagSize;
3148 }
3149 
// Sets the slow-elements flag; this clobbers the recorded max number key.
void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
3153 
3154 
3155 // ------------------------------------
3156 // Cast operations
3157 
3158 
// Expands Foo::cast(Object*) for every heap class below; each expansion
// performs a checked (debug-asserted) downcast.
CAST_ACCESSOR(AccessorInfo)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(ConstantPoolArray)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(ExternalArray)
CAST_ACCESSOR(ExternalOneByteString)
CAST_ACCESSOR(ExternalFloat32Array)
CAST_ACCESSOR(ExternalFloat64Array)
CAST_ACCESSOR(ExternalInt16Array)
CAST_ACCESSOR(ExternalInt32Array)
CAST_ACCESSOR(ExternalInt8Array)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(ExternalUint16Array)
CAST_ACCESSOR(ExternalUint32Array)
CAST_ACCESSOR(ExternalUint8Array)
CAST_ACCESSOR(ExternalUint8ClampedArray)
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedArrayBase)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(FreeSpace)
CAST_ACCESSOR(GlobalObject)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSBuiltinsObject)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSDate)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(JSFunctionProxy)
CAST_ACCESSOR(JSFunctionResultCache)
CAST_ACCESSOR(JSGeneratorObject)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSMapIterator)
CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSModule)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSValue)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(MapCache)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(NameDictionary)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(Object)
CAST_ACCESSOR(ObjectHashTable)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(OrderedHashMap)
CAST_ACCESSOR(OrderedHashSet)
CAST_ACCESSOR(PolymorphicCodeCacheHashTable)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(SeededNumberDictionary)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(String)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(UnseededNumberDictionary)
CAST_ACCESSOR(WeakHashTable)
3244 
3245 
// Checked downcast to a concrete FixedTypedArray; asserts (in slow-DCHECK
// builds) that the instance type matches the Traits' element type.
template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
              Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}
3253 
3254 
// Const overload of the checked FixedTypedArray downcast.
template <class Traits>
const FixedTypedArray<Traits>*
FixedTypedArray<Traits>::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
              Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}
3263 
3264 
// Expand a cast accessor for every Struct subclass in STRUCT_LIST.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
  STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST
3268 
3269 
// Checked downcast of an Object to this hash table instantiation.
template <typename Derived, typename Shape, typename Key>
HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}
3276 
3277 
// Const overload of the checked hash table downcast.
template <typename Derived, typename Shape, typename Key>
const HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(const Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<const HashTable*>(obj);
}
3284 
3285 
// Smi-backed length/size accessors. The SYNCHRONIZED_/NOBARRIER_ variants
// additionally expand accessors with the corresponding atomic-access
// semantics for use from concurrent contexts.
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)
3294 
3295 
// Raw hash field (hash value plus flag bits) of this name.
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}
3299 
3300 
// Stores the raw hash field. On 64-bit hosts the adjacent 32 bits of the
// word are zeroed as well so the whole slot has a deterministic value.
void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  WRITE_UINT32_FIELD(this, kHashFieldOffset + kIntSize, 0);
#endif
}
3307 
3308 
Equals(Name * other)3309 bool Name::Equals(Name* other) {
3310   if (other == this) return true;
3311   if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
3312       this->IsSymbol() || other->IsSymbol()) {
3313     return false;
3314   }
3315   return String::cast(this)->SlowEquals(String::cast(other));
3316 }
3317 
3318 
Equals(Handle<Name> one,Handle<Name> two)3319 bool Name::Equals(Handle<Name> one, Handle<Name> two) {
3320   if (one.is_identical_to(two)) return true;
3321   if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
3322       one->IsSymbol() || two->IsSymbol()) {
3323     return false;
3324   }
3325   return String::SlowEquals(Handle<String>::cast(one),
3326                             Handle<String>::cast(two));
3327 }
3328 
3329 
// Symbol field accessors: the name (description) object, the flags Smi, and
// individual boolean bits packed into flags.
ACCESSORS(Symbol, name, Object, kNameOffset)
ACCESSORS(Symbol, flags, Smi, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
BOOL_ACCESSORS(Symbol, flags, is_own, kOwnBit)
3334 
3335 
3336 bool String::Equals(String* other) {
3337   if (other == this) return true;
3338   if (this->IsInternalizedString() && other->IsInternalizedString()) {
3339     return false;
3340   }
3341   return SlowEquals(other);
3342 }
3343 
3344 
Equals(Handle<String> one,Handle<String> two)3345 bool String::Equals(Handle<String> one, Handle<String> two) {
3346   if (one.is_identical_to(two)) return true;
3347   if (one->IsInternalizedString() && two->IsInternalizedString()) {
3348     return false;
3349   }
3350   return SlowEquals(one, two);
3351 }
3352 
3353 
// Returns a flat (non-cons) string with the same contents. Non-cons strings
// are returned as-is; an already-flattened cons returns its first part;
// otherwise the string is flattened (may allocate, honoring pretenure).
Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
  if (!string->IsConsString()) return string;
  Handle<ConsString> cons = Handle<ConsString>::cast(string);
  if (cons->IsFlat()) return handle(cons->first());
  return SlowFlatten(cons, pretenure);
}
3360 
3361 
// Returns the code unit at index, dispatching on the string's full
// representation tag (sequential/cons/external/sliced x one-byte/two-byte).
uint16_t String::Get(int index) {
  DCHECK(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}
3386 
3387 
Set(int index,uint16_t value)3388 void String::Set(int index, uint16_t value) {
3389   DCHECK(index >= 0 && index < length());
3390   DCHECK(StringShape(this).IsSequential());
3391 
3392   return this->IsOneByteRepresentation()
3393       ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
3394       : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
3395 }
3396 
3397 
IsFlat()3398 bool String::IsFlat() {
3399   if (!StringShape(this).IsCons()) return true;
3400   return ConsString::cast(this)->second()->length() == 0;
3401 }
3402 
3403 
// Returns the string this indirect (cons or sliced) string wraps.
String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  DCHECK(this->IsFlat());
  DCHECK(StringShape(this).IsIndirect());
  // Both indirect representations keep their payload at the same offset,
  // so one read works for either; the assert pins that invariant.
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}
3413 
3414 
// Walks |string| starting at |offset|, following sliced-string parents
// (accumulating their offsets), until it reaches flat character data or a
// cons string. Flat data is handed to |visitor| via VisitOneByteString /
// VisitTwoByteString and NULL is returned; a cons string is returned to the
// caller for further traversal instead of being visited.
template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
                              String* string,
                              const int offset) {
  int slice_offset = offset;
  const int length = string->length();
  DCHECK(offset <= length);
  while (true) {
    int32_t type = string->map()->instance_type();
    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSeqStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            ExternalOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        // Redirect to the parent, shifting the slice window accordingly.
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
        continue;
      }

      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        return ConsString::cast(string);

      default:
        UNREACHABLE();
        return NULL;
    }
  }
}
3467 
3468 
// Reads the character at |index| of a sequential one-byte string.
uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


// Writes |value| at |index|; the value must fit in one byte
// (<= kMaxOneByteCharCode).
void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


// Address of the first character; characters are stored in-object,
// immediately after the header.
Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


// Typed pointer to the character storage.
uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
}
3490 
3491 
// Address of the first character of a sequential two-byte string; the
// characters are stored in-object after the header.
Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


// Typed pointer to the two-byte character storage.
uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
}


// Reads the character at |index|.
uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_SHORT_FIELD(this, kHeaderSize + index * kShortSize);
}


// Writes |value| at |index|.
void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  WRITE_SHORT_FIELD(this, kHeaderSize + index * kShortSize, value);
}


// Object size in bytes; |instance_type| is unused here, the size depends
// only on the stored length.
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


// Object size in bytes; |instance_type| is unused here as well.
int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
3522 
3523 
// The backing string this slice is a window into.
String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
}


// Sets the backing string. Parents must themselves be flat (sequential or
// external) — a slice never points at another indirect string.
void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  DCHECK(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}


// Macro-generated Smi accessor for the slice's start offset.
SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3537 
3538 
// Left half of the cons pair.
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


// Raw read of the first field without the String cast/check.
Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


// Stores the left half, with a conditional GC write barrier.
void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
}


// Right half of the cons pair.
String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


// Raw read of the second field without the String cast/check.
Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


// Stores the right half, with a conditional GC write barrier.
void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
}
3569 
3570 
is_short()3571 bool ExternalString::is_short() {
3572   InstanceType type = map()->instance_type();
3573   return (type & kShortExternalStringMask) == kShortExternalStringTag;
3574 }
3575 
3576 
// The embedder-provided resource backing this external string.
const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Refreshes the in-object copy of the resource's data pointer. Short
// external strings have no cache field, so they are skipped.
void ExternalOneByteString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


// Installs the backing resource and (for non-NULL resources) primes the
// data-pointer cache.
void ExternalOneByteString::set_resource(
    const ExternalOneByteString::Resource* resource) {
  DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


// Character data, fetched from the resource.
const uint8_t* ExternalOneByteString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());
}


// Character at |index|.
uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}
3608 
3609 
// The embedder-provided resource backing this external string.
const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Refreshes the in-object copy of the resource's data pointer. Short
// external strings have no cache field, so they are skipped.
void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


// Installs the backing resource and (for non-NULL resources) primes the
// data-pointer cache. Note: unlike the one-byte variant, no alignment
// DCHECK is performed here.
void ExternalTwoByteString::set_resource(
    const ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


// Character data, fetched from the resource.
const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
}


// Character at |index|.
uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}


// Pointer into the character data, starting at |start|.
const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
      unsigned start) {
  return GetChars() + start;
}
3646 
3647 
// Maps a depth to its slot in the fixed-size circular frame buffer.
int ConsStringIteratorOp::OffsetForDepth(int depth) {
  return depth & kDepthMask;
}


// Pushes a cons string onto the traversal stack. The buffer is circular:
// depths beyond kDepthMask wrap around and overwrite older frames.
void ConsStringIteratorOp::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;
}


// Replaces the current top frame without changing the depth.
void ConsStringIteratorOp::PushRight(ConsString* string) {
  // Inplace update.
  frames_[(depth_-1) & kDepthMask] = string;
}


// Records the deepest stack depth reached so far.
void ConsStringIteratorOp::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
}


// Pops the top frame (depth bookkeeping only; the slot is left as-is).
void ConsStringIteratorOp::Pop() {
  DCHECK(depth_ > 0);
  DCHECK(depth_ <= maximum_depth_);
  depth_--;
}
3674 
3675 
// Returns the next character and advances. buffer8_/buffer16_ alias the
// same cursor; is_one_byte_ selects which view to read through.
uint16_t StringCharacterStream::GetNext() {
  DCHECK(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  if (buffer8_ == end_) HasMore();
  DCHECK(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
}


// Sets up a character stream over |string| starting at |offset|, using
// |op| to iterate cons-string segments.
StringCharacterStream::StringCharacterStream(String* string,
                                             ConsStringIteratorOp* op,
                                             int offset)
  : is_one_byte_(false),
    op_(op) {
  Reset(string, offset);
}


// Re-targets the stream. VisitFlat fills the buffer for flat content and
// returns a cons string otherwise, in which case the iterator op supplies
// the first segment.
void StringCharacterStream::Reset(String* string, int offset) {
  buffer8_ = NULL;
  end_ = NULL;
  ConsString* cons_string = String::VisitFlat(this, string, offset);
  op_->Reset(cons_string, offset);
  if (cons_string != NULL) {
    string = op_->Next(&offset);
    if (string != NULL) String::VisitFlat(this, string, offset);
  }
}


// True if characters remain; refills the buffer from the next cons
// segment when the current one is exhausted.
bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  int offset;
  String* string = op_->Next(&offset);
  DCHECK_EQ(offset, 0);  // Segments after the first always start at 0.
  if (string == NULL) return false;
  String::VisitFlat(this, string);
  DCHECK(buffer8_ != end_);
  return true;
}


// VisitFlat callback: point the cursor at one-byte data.
void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, int length) {
  is_one_byte_ = true;
  buffer8_ = chars;
  end_ = chars + length;
}


// VisitFlat callback: point the cursor at two-byte data. end_ is kept as
// a byte pointer, matching the aliased cursor representation.
void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, int length) {
  is_one_byte_ = false;
  buffer16_ = chars;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
}
3732 
3733 
// Resets the cache to empty: both size and finger point at the first
// entry slot.
void JSFunctionResultCache::MakeZeroSize() {
  set_finger_index(kEntriesIndex);
  set_size(kEntriesIndex);
}


// Clears all entries to the-hole and resets the bookkeeping.
void JSFunctionResultCache::Clear() {
  int cache_size = size();
  Object** entries_start = RawFieldOfElementAt(kEntriesIndex);
  MemsetPointer(entries_start,
                GetHeap()->the_hole_value(),
                cache_size - kEntriesIndex);
  MakeZeroSize();
}


// Current cache size, stored as a Smi element.
int JSFunctionResultCache::size() {
  return Smi::cast(get(kCacheSizeIndex))->value();
}


void JSFunctionResultCache::set_size(int size) {
  set(kCacheSizeIndex, Smi::FromInt(size));
}


// The "finger" (current probe position), stored as a Smi element.
int JSFunctionResultCache::finger_index() {
  return Smi::cast(get(kFingerIndex))->value();
}


void JSFunctionResultCache::set_finger_index(int finger_index) {
  set(kFingerIndex, Smi::FromInt(finger_index));
}
3768 
3769 
// Byte at |index|.
byte ByteArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


// Stores |value| at |index|.
void ByteArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


// Reads a whole int at int-granular |index| (i.e. byte offset
// index * kIntSize).
int ByteArray::get_int(int index) {
  DCHECK(index >= 0 && (index * kIntSize) < this->length());
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}


// Recovers the tagged ByteArray pointer from the address of its first
// data byte (inverse of GetDataStartAddress).
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


// Untagged address of the first data byte.
Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
3797 
3798 
// Backing store, viewed as clamped-uint8 data.
uint8_t* ExternalUint8ClampedArray::external_uint8_clamped_pointer() {
  return reinterpret_cast<uint8_t*>(external_pointer());
}


// Raw element read.
uint8_t ExternalUint8ClampedArray::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
  return ptr[index];
}


// Element read boxed as a Smi handle (uint8 always fits in a Smi).
Handle<Object> ExternalUint8ClampedArray::get(
    Handle<ExternalUint8ClampedArray> array,
    int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


// Raw element write; the value is assumed to be pre-clamped by callers.
void ExternalUint8ClampedArray::set(int index, uint8_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = external_uint8_clamped_pointer();
  ptr[index] = value;
}
3824 
3825 
// Off-heap backing-store pointer, stored as an intptr field.
void* ExternalArray::external_pointer() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


// Stores the backing-store pointer. |mode| is accepted for accessor-
// signature uniformity but unused: the value is not a heap object, so no
// write barrier is needed.
void ExternalArray::set_external_pointer(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}
3836 
3837 
// ExternalInt8Array element access: raw scalar read/write plus a boxed
// read (int8 always fits in a Smi).
int8_t ExternalInt8Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalInt8Array::get(Handle<ExternalInt8Array> array,
                                      int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


void ExternalInt8Array::set(int index, int8_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  int8_t* ptr = static_cast<int8_t*>(external_pointer());
  ptr[index] = value;
}
3857 
3858 
// ExternalUint8Array element access: raw scalar read/write plus a boxed
// read (uint8 always fits in a Smi).
uint8_t ExternalUint8Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalUint8Array::get(Handle<ExternalUint8Array> array,
                                       int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


void ExternalUint8Array::set(int index, uint8_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint8_t* ptr = static_cast<uint8_t*>(external_pointer());
  ptr[index] = value;
}
3878 
3879 
// ExternalInt16Array element access: raw scalar read/write plus a boxed
// read (int16 always fits in a Smi).
int16_t ExternalInt16Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalInt16Array::get(Handle<ExternalInt16Array> array,
                                       int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


void ExternalInt16Array::set(int index, int16_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  int16_t* ptr = static_cast<int16_t*>(external_pointer());
  ptr[index] = value;
}
3899 
3900 
// ExternalUint16Array element access: raw scalar read/write plus a boxed
// read (uint16 always fits in a Smi).
uint16_t ExternalUint16Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalUint16Array::get(Handle<ExternalUint16Array> array,
                                        int index) {
  return Handle<Smi>(Smi::FromInt(array->get_scalar(index)),
                     array->GetIsolate());
}


void ExternalUint16Array::set(int index, uint16_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint16_t* ptr = static_cast<uint16_t*>(external_pointer());
  ptr[index] = value;
}
3920 
3921 
// ExternalInt32Array element access. Boxed reads go through the factory:
// an int32 may exceed the Smi range, so a HeapNumber may be allocated.
int32_t ExternalInt32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalInt32Array::get(Handle<ExternalInt32Array> array,
                                       int index) {
  return array->GetIsolate()->factory()->
      NewNumberFromInt(array->get_scalar(index));
}


void ExternalInt32Array::set(int index, int32_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  int32_t* ptr = static_cast<int32_t*>(external_pointer());
  ptr[index] = value;
}
3941 
3942 
// ExternalUint32Array element access. Boxed reads go through the factory:
// a uint32 may exceed the Smi range, so a HeapNumber may be allocated.
uint32_t ExternalUint32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalUint32Array::get(Handle<ExternalUint32Array> array,
                                        int index) {
  return array->GetIsolate()->factory()->
      NewNumberFromUint(array->get_scalar(index));
}


void ExternalUint32Array::set(int index, uint32_t value) {
  DCHECK((index >= 0) && (index < this->length()));
  uint32_t* ptr = static_cast<uint32_t*>(external_pointer());
  ptr[index] = value;
}
3962 
3963 
// ExternalFloat32Array element access; boxed reads allocate a Number via
// the factory.
float ExternalFloat32Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalFloat32Array::get(Handle<ExternalFloat32Array> array,
                                         int index) {
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
}


void ExternalFloat32Array::set(int index, float value) {
  DCHECK((index >= 0) && (index < this->length()));
  float* ptr = static_cast<float*>(external_pointer());
  ptr[index] = value;
}
3982 
3983 
// ExternalFloat64Array element access; boxed reads allocate a Number via
// the factory.
double ExternalFloat64Array::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  return ptr[index];
}


Handle<Object> ExternalFloat64Array::get(Handle<ExternalFloat64Array> array,
                                         int index) {
  return array->GetIsolate()->factory()->NewNumber(array->get_scalar(index));
}


void ExternalFloat64Array::set(int index, double value) {
  DCHECK((index >= 0) && (index < this->length()));
  double* ptr = static_cast<double*>(external_pointer());
  ptr[index] = value;
}
4002 
4003 
// Address of the in-object element storage.
void* FixedTypedArrayBase::DataPtr() {
  return FIELD_ADDR(this, kDataOffset);
}


// Byte size of the element data for an array of the given instance type,
// using the per-type element size from the TYPED_ARRAYS list.
int FixedTypedArrayBase::DataSize(InstanceType type) {
  int element_size;
  switch (type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size)                       \
    case FIXED_##TYPE##_ARRAY_TYPE:                                           \
      element_size = size;                                                    \
      break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      UNREACHABLE();
      return 0;
  }
  return length() * element_size;
}


// Byte size of the element data, keyed off this object's own map.
int FixedTypedArrayBase::DataSize() {
  return DataSize(map()->instance_type());
}


// Total object size: header + data, pointer-aligned.
int FixedTypedArrayBase::size() {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}


// Total object size for an explicitly supplied instance type (used when
// the map may not be safely readable, e.g. during GC size computations --
// TODO confirm the exact callers).
int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}
4040 
4041 
// Default element values per typed-array kind: zero for integral types,
// NaN for the floating-point types.
uint8_t Uint8ArrayTraits::defaultValue() { return 0; }


uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }


int8_t Int8ArrayTraits::defaultValue() { return 0; }


uint16_t Uint16ArrayTraits::defaultValue() { return 0; }


int16_t Int16ArrayTraits::defaultValue() { return 0; }


uint32_t Uint32ArrayTraits::defaultValue() { return 0; }


int32_t Int32ArrayTraits::defaultValue() { return 0; }


float Float32ArrayTraits::defaultValue() {
  return static_cast<float>(base::OS::nan_value());
}


double Float64ArrayTraits::defaultValue() { return base::OS::nan_value(); }
4069 
4070 
// Raw element read from the in-object data area.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(
      FIELD_ADDR(this, kDataOffset));
  return ptr[index];
}


// Float64 specialization: goes through READ_DOUBLE_FIELD, which handles
// platforms where doubles need special (unaligned/split) access.
template<> inline
FixedTypedArray<Float64ArrayTraits>::ElementType
    FixedTypedArray<Float64ArrayTraits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  return READ_DOUBLE_FIELD(this, ElementOffset(index));
}


// Raw element write into the in-object data area.
template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(
      FIELD_ADDR(this, kDataOffset));
  ptr[index] = value;
}


// Float64 specialization: see get_scalar above.
template<> inline
void FixedTypedArray<Float64ArrayTraits>::set(
    int index, Float64ArrayTraits::ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  WRITE_DOUBLE_FIELD(this, ElementOffset(index), value);
}
4103 
4104 
4105 template <class Traits>
from_int(int value)4106 typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
4107   return static_cast<ElementType>(value);
4108 }
4109 
4110 
4111 template <> inline
from_int(int value)4112 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4113   if (value < 0) return 0;
4114   if (value > 0xFF) return 0xFF;
4115   return static_cast<uint8_t>(value);
4116 }
4117 
4118 
4119 template <class Traits>
from_double(double value)4120 typename Traits::ElementType FixedTypedArray<Traits>::from_double(
4121     double value) {
4122   return static_cast<ElementType>(DoubleToInt32(value));
4123 }
4124 
4125 
4126 template<> inline
from_double(double value)4127 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
4128   if (value < 0) return 0;
4129   if (value > 0xFF) return 0xFF;
4130   return static_cast<uint8_t>(lrint(value));
4131 }
4132 
4133 
4134 template<> inline
from_double(double value)4135 float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
4136   return static_cast<float>(value);
4137 }
4138 
4139 
4140 template<> inline
from_double(double value)4141 double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
4142   return value;
4143 }
4144 
4145 
// Boxed element read; Traits::ToHandle picks Smi vs heap-allocated Number
// per element type.
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(
    Handle<FixedTypedArray<Traits> > array,
    int index) {
  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
}


// Stores |value| at |index| after converting it to the element type.
// Out-of-bounds indices are silently ignored (no store). Returns the
// stored value, boxed -- or the boxed default value when the index was
// out of bounds or the input was undefined.
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::SetValue(
    Handle<FixedTypedArray<Traits> > array,
    uint32_t index,
    Handle<Object> value) {
  ElementType cast_value = Traits::defaultValue();
  if (index < static_cast<uint32_t>(array->length())) {
    if (value->IsSmi()) {
      int int_value = Handle<Smi>::cast(value)->value();
      cast_value = from_int(int_value);
    } else if (value->IsHeapNumber()) {
      double double_value = Handle<HeapNumber>::cast(value)->value();
      cast_value = from_double(double_value);
    } else {
      // Clamp undefined to the default value. All other types have been
      // converted to a number type further up in the call chain.
      DCHECK(value->IsUndefined());
    }
    array->set(index, cast_value);
  }
  return Traits::ToHandle(array->GetIsolate(), cast_value);
}
4176 
4177 
// Boxing helpers: element types that always fit in a Smi are wrapped
// directly; 32-bit and floating-point values go through the factory,
// which may allocate a HeapNumber.
Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}


Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}


Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}


Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}
4222 
4223 
// GC visitor id, stored as a single byte.
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  DCHECK(0 <= id && id < 256);  // Must fit in the byte field.
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}


// Instance size in bytes; stored compressed as a word count, so it is
// scaled back up by kPointerSizeLog2. Read without a barrier (also read
// concurrently -- see set_instance_size).
int Map::instance_size() {
  return NOBARRIER_READ_BYTE_FIELD(
      this, kInstanceSizeOffset) << kPointerSizeLog2;
}


// Number of in-object property slots.
int Map::inobject_properties() {
  return READ_BYTE_FIELD(this, kInObjectPropertiesOffset);
}


int Map::pre_allocated_property_fields() {
  return READ_BYTE_FIELD(this, kPreAllocatedPropertyFieldsOffset);
}


// Byte offset of in-object property |index|. In-object properties occupy
// the tail of the object, so the offset counts back from instance_size().
int Map::GetInObjectPropertyOffset(int index) {
  // Adjust for the number of properties stored in the object.
  index -= inobject_properties();
  DCHECK(index <= 0);
  return instance_size() + (index * kPointerSize);
}
4257 
4258 
// Computes this object's size in bytes from |map|. Fixed-size types store
// the size directly in the map; variable-sized types (flagged by the
// kVariableSizeSentinel) are dispatched on instance type, with the most
// frequent cases checked first.
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  InstanceType instance_type = map->instance_type();
  if (instance_type == FIXED_ARRAY_TYPE) {
    return FixedArray::BodyDescriptor::SizeOf(map, this);
  }
  if (instance_type == ONE_BYTE_STRING_TYPE ||
      instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
  }
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  if (instance_type == CONSTANT_POOL_ARRAY_TYPE) {
    return reinterpret_cast<ConstantPoolArray*>(this)->size();
  }
  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
      instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return reinterpret_cast<FixedTypedArrayBase*>(
        this)->TypedArraySize(instance_type);
  }
  // The only remaining variable-sized type.
  DCHECK(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
4298 
4299 
// Stores the instance size, compressed to a word count so it fits in one
// byte. The size must be pointer-aligned and at most 255 words.
void Map::set_instance_size(int value) {
  DCHECK_EQ(0, value & (kPointerSize - 1));
  value >>= kPointerSizeLog2;
  DCHECK(0 <= value && value < 256);
  NOBARRIER_WRITE_BYTE_FIELD(
      this, kInstanceSizeOffset, static_cast<byte>(value));
}
4307 
4308 
// Stores the in-object property count; must fit in a single byte.
void Map::set_inobject_properties(int value) {
  DCHECK(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOffset, static_cast<byte>(value));
}
4313 
4314 
// Stores the pre-allocated property field count; must fit in one byte.
void Map::set_pre_allocated_property_fields(int value) {
  DCHECK(0 <= value && value < 256);
  WRITE_BYTE_FIELD(this,
                   kPreAllocatedPropertyFieldsOffset,
                   static_cast<byte>(value));
}
4321 
4322 
// Instance type tag, stored as a single byte.
InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}
4326 
4327 
// Stores the instance type tag into its byte field.
void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}
4331 
4332 
// Number of unused (slack) property fields (byte field).
int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}
4336 
4337 
// Stores the unused property field count, clamped to the byte maximum.
void Map::set_unused_property_fields(int value) {
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}
4341 
4342 
// Raw accessor for the first bit field byte.
byte Map::bit_field() {
  return READ_BYTE_FIELD(this, kBitFieldOffset);
}
4346 
4347 
// Raw setter for the first bit field byte.
void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}
4351 
4352 
// Raw accessor for the second bit field byte.
byte Map::bit_field2() {
  return READ_BYTE_FIELD(this, kBitField2Offset);
}
4356 
4357 
// Raw setter for the second bit field byte.
void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
4361 
4362 
set_non_instance_prototype(bool value)4363 void Map::set_non_instance_prototype(bool value) {
4364   if (value) {
4365     set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4366   } else {
4367     set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4368   }
4369 }
4370 
4371 
// True if the "has non-instance prototype" bit is set in bit_field.
bool Map::has_non_instance_prototype() {
  return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}
4375 
4376 
// Updates the FunctionWithPrototype bit inside bit_field.
void Map::set_function_with_prototype(bool value) {
  set_bit_field(FunctionWithPrototype::update(bit_field(), value));
}
4380 
4381 
// Reads the FunctionWithPrototype bit from bit_field.
bool Map::function_with_prototype() {
  return FunctionWithPrototype::decode(bit_field());
}
4385 
4386 
set_is_access_check_needed(bool access_check_needed)4387 void Map::set_is_access_check_needed(bool access_check_needed) {
4388   if (access_check_needed) {
4389     set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4390   } else {
4391     set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4392   }
4393 }
4394 
4395 
// True if the "access check needed" bit is set in bit_field.
bool Map::is_access_check_needed() {
  return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}
4399 
4400 
set_is_extensible(bool value)4401 void Map::set_is_extensible(bool value) {
4402   if (value) {
4403     set_bit_field2(bit_field2() | (1 << kIsExtensible));
4404   } else {
4405     set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4406   }
4407 }
4408 
// True if the extensibility bit is set in bit_field2.
bool Map::is_extensible() {
  return ((1 << kIsExtensible) & bit_field2()) != 0;
}
4412 
4413 
// Updates the IsPrototypeMapBits bit inside bit_field2.
void Map::set_is_prototype_map(bool value) {
  set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
}
4417 
// Reads the IsPrototypeMapBits bit from bit_field2.
bool Map::is_prototype_map() {
  return IsPrototypeMapBits::decode(bit_field2());
}
4421 
4422 
// Marks the map as (non-)dictionary mode. The IsUnstable bit is updated
// in lock-step: dictionary maps are also flagged unstable.
void Map::set_dictionary_map(bool value) {
  uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
  new_bit_field3 = IsUnstable::update(new_bit_field3, value);
  set_bit_field3(new_bit_field3);
}
4428 
4429 
// Reads the DictionaryMap bit from bit_field3.
bool Map::is_dictionary_map() {
  return DictionaryMap::decode(bit_field3());
}
4433 
4434 
// Raw accessor for the packed code flags word.
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}
4438 
4439 
// Updates the OwnsDescriptors bit inside bit_field3.
void Map::set_owns_descriptors(bool owns_descriptors) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}
4443 
4444 
// Reads the OwnsDescriptors bit from bit_field3.
bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}
4448 
4449 
// Sets the HasInstanceCallHandler bit; there is no clearing counterpart.
void Map::set_has_instance_call_handler() {
  set_bit_field3(HasInstanceCallHandler::update(bit_field3(), true));
}
4453 
4454 
// Reads the HasInstanceCallHandler bit from bit_field3.
bool Map::has_instance_call_handler() {
  return HasInstanceCallHandler::decode(bit_field3());
}
4458 
4459 
// Marks this map as deprecated; the bit is never cleared here.
void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}
4463 
4464 
// Reads the Deprecated bit from bit_field3.
bool Map::is_deprecated() {
  return Deprecated::decode(bit_field3());
}
4468 
4469 
// Updates the IsMigrationTarget bit inside bit_field3.
void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}
4473 
4474 
// Reads the IsMigrationTarget bit from bit_field3.
bool Map::is_migration_target() {
  return IsMigrationTarget::decode(bit_field3());
}
4478 
4479 
// Updates the DoneInobjectSlackTracking bit inside bit_field3.
void Map::set_done_inobject_slack_tracking(bool value) {
  set_bit_field3(DoneInobjectSlackTracking::update(bit_field3(), value));
}
4483 
4484 
// Reads the DoneInobjectSlackTracking bit from bit_field3.
bool Map::done_inobject_slack_tracking() {
  return DoneInobjectSlackTracking::decode(bit_field3());
}
4488 
4489 
// Stores the construction count into its bit_field3 sub-field.
void Map::set_construction_count(int value) {
  set_bit_field3(ConstructionCount::update(bit_field3(), value));
}
4493 
4494 
// Reads the construction count from its bit_field3 sub-field.
int Map::construction_count() {
  return ConstructionCount::decode(bit_field3());
}
4498 
4499 
// Sets the IsFrozen bit; there is no thawing counterpart here.
void Map::freeze() {
  set_bit_field3(IsFrozen::update(bit_field3(), true));
}
4503 
4504 
// Reads the IsFrozen bit from bit_field3.
bool Map::is_frozen() {
  return IsFrozen::decode(bit_field3());
}
4508 
4509 
// Sets the IsUnstable bit; maps never transition back to stable here.
void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}
4513 
4514 
// Stability is stored inverted: the bit records "unstable".
bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}
4518 
4519 
// A map has a code cache iff the cache slot is not the canonical empty
// fixed array.
bool Map::has_code_cache() {
  return code_cache() != GetIsolate()->heap()->empty_fixed_array();
}
4523 
4524 
CanBeDeprecated()4525 bool Map::CanBeDeprecated() {
4526   int descriptor = LastAdded();
4527   for (int i = 0; i <= descriptor; i++) {
4528     PropertyDetails details = instance_descriptors()->GetDetails(i);
4529     if (details.representation().IsNone()) return true;
4530     if (details.representation().IsSmi()) return true;
4531     if (details.representation().IsDouble()) return true;
4532     if (details.representation().IsHeapObject()) return true;
4533     if (details.type() == CONSTANT) return true;
4534   }
4535   return false;
4536 }
4537 
4538 
// Called when the layout of a leaf map changes: marks the map unstable
// (once) and deoptimizes code that depended on its stability.
void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}
4547 
4548 
// Map checks may be omitted only for stable maps and only when the
// corresponding flag is enabled.
bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}
4552 
4553 
// Number of entries recorded for |group|; an empty array has none.
int DependentCode::number_of_entries(DependencyGroup group) {
  if (length() == 0) return 0;
  return Smi::cast(get(group))->value();
}
4558 
4559 
// Stores the entry count for |group| as a Smi in the group's slot.
void DependentCode::set_number_of_entries(DependencyGroup group, int value) {
  set(group, Smi::FromInt(value));
}
4563 
4564 
// True if entry |i| holds a Code object (as opposed to a Foreign
// wrapping a CompilationInfo).
bool DependentCode::is_code_at(int i) {
  return get(kCodesStartIndex + i)->IsCode();
}
4568 
// Entry |i| as a Code object; caller must know it is code.
Code* DependentCode::code_at(int i) {
  return Code::cast(get(kCodesStartIndex + i));
}
4572 
4573 
// Entry |i| interpreted as a CompilationInfo pointer stored via a
// Foreign's raw address.
CompilationInfo* DependentCode::compilation_info_at(int i) {
  return reinterpret_cast<CompilationInfo*>(
      Foreign::cast(get(kCodesStartIndex + i))->foreign_address());
}
4578 
4579 
// Stores |object| into entry |i| (offset past the group-count header).
void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}
4583 
4584 
// Raw object stored at entry |i|.
Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}
4588 
4589 
// Address of the slot holding entry |i|.
Object** DependentCode::slot_at(int i) {
  return RawFieldOfElementAt(kCodesStartIndex + i);
}
4593 
4594 
// Clears entry |i| by overwriting it with undefined.
void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}
4598 
4599 
// Copies entry |from| over entry |to|.
void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}
4603 
4604 
// Makes room for one more entry in |group| by shifting the first entry
// of each later group to that group's end, working backwards so no
// entry is overwritten before being moved.
void DependentCode::ExtendGroup(DependencyGroup group) {
  GroupStartIndexes starts(this);
  for (int g = kGroupCount - 1; g > group; g--) {
    if (starts.at(g) < starts.at(g + 1)) {
      copy(starts.at(g), starts.at(g + 1));
    }
  }
}
4613 
4614 
// Stores the packed flags word; the static assert guards that every
// code kind fits in the KindField bit range.
void Code::set_flags(Code::Flags flags) {
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}
4619 
4620 
// The code kind, decoded from the packed flags word.
Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}
4624 
4625 
IsCodeStubOrIC()4626 bool Code::IsCodeStubOrIC() {
4627   return kind() == STUB || kind() == HANDLER || kind() == LOAD_IC ||
4628          kind() == KEYED_LOAD_IC || kind() == CALL_IC || kind() == STORE_IC ||
4629          kind() == KEYED_STORE_IC || kind() == BINARY_OP_IC ||
4630          kind() == COMPARE_IC || kind() == COMPARE_NIL_IC ||
4631          kind() == TO_BOOLEAN_IC;
4632 }
4633 
4634 
// Inline-cache state, decoded from the packed flags word.
InlineCacheState Code::ic_state() {
  InlineCacheState result = ExtractICStateFromFlags(flags());
  // Only allow uninitialized or debugger states for non-IC code
  // objects. This is used in the debugger to determine whether or not
  // a call to code object has been replaced with a debug break call.
  DCHECK(is_inline_cache_stub() ||
         result == UNINITIALIZED ||
         result == DEBUG_STUB);
  return result;
}
4645 
4646 
// Extra IC state bits; only meaningful for IC stubs and debug stubs.
ExtraICState Code::extra_ic_state() {
  DCHECK(is_inline_cache_stub() || ic_state() == DEBUG_STUB);
  return ExtractExtraICStateFromFlags(flags());
}
4651 
4652 
// Stub type, decoded from the packed flags word.
Code::StubType Code::type() {
  return ExtractTypeFromFlags(flags());
}
4656 
4657 
4658 // For initialization.
// For initialization.
// Raw write of the first kind-specific flags word, no field decoding.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}
4662 
4663 
// Raw write of the second kind-specific flags word, no field decoding.
void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}
4667 
4668 
// Reads the IsCrankshafted bit from the second kind-specific flags word.
inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
4673 
4674 
// Crankshafted code that is not an optimized function is a hydrogen stub.
inline bool Code::is_hydrogen_stub() {
  return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
}
4678 
4679 
// Read-modify-write of the IsCrankshafted bit.
inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
4685 
4686 
// Reads the IsTurbofanned bit; valid only for optimized code and stubs.
inline bool Code::is_turbofanned() {
  DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
  return IsTurbofannedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
4692 
4693 
// Read-modify-write of the IsTurbofanned bit; optimized code/stubs only.
inline void Code::set_is_turbofanned(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION || kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsTurbofannedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
4700 
4701 
// Whether this full code may be optimized; byte is either 0 or 1.
bool Code::optimizable() {
  DCHECK_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kOptimizableOffset) == 1;
}
4706 
4707 
// Sets the optimizable byte (1/0); full code only.
void Code::set_optimizable(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  WRITE_BYTE_FIELD(this, kOptimizableOffset, value ? 1 : 0);
}
4712 
4713 
// Reads the deoptimization-support bit from the full-code flags byte.
bool Code::has_deoptimization_support() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}
4719 
4720 
// Read-modify-write of the deoptimization-support bit; full code only.
void Code::set_has_deoptimization_support(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}
4727 
4728 
// Reads the debug-break-slots bit from the full-code flags byte.
bool Code::has_debug_break_slots() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}
4734 
4735 
// Read-modify-write of the debug-break-slots bit; full code only.
void Code::set_has_debug_break_slots(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}
4742 
4743 
// Reads the compiled-optimizable bit from the full-code flags byte.
bool Code::is_compiled_optimizable() {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsIsCompiledOptimizable::decode(flags);
}
4749 
4750 
// Read-modify-write of the compiled-optimizable bit; full code only.
void Code::set_compiled_optimizable(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  byte flags = READ_BYTE_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsIsCompiledOptimizable::update(flags, value);
  WRITE_BYTE_FIELD(this, kFullCodeFlags, flags);
}
4757 
4758 
// Maximum loop nesting level at which OSR is permitted; full code only.
int Code::allow_osr_at_loop_nesting_level() {
  DCHECK_EQ(FUNCTION, kind());
  int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  return AllowOSRAtLoopNestingLevelField::decode(fields);
}
4764 
4765 
// Stores the maximum OSR loop nesting level; bounded by
// kMaxLoopNestingMarker. Full code only.
void Code::set_allow_osr_at_loop_nesting_level(int level) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(level >= 0 && level <= kMaxLoopNestingMarker);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
4773 
4774 
// Profiler tick count (byte field); full code only.
int Code::profiler_ticks() {
  DCHECK_EQ(FUNCTION, kind());
  return READ_BYTE_FIELD(this, kProfilerTicksOffset);
}
4779 
4780 
set_profiler_ticks(int ticks)4781 void Code::set_profiler_ticks(int ticks) {
4782   DCHECK(ticks < 256);
4783   if (kind() == FUNCTION) {
4784     WRITE_BYTE_FIELD(this, kProfilerTicksOffset, ticks);
4785   }
4786 }
4787 
4788 
// Builtins reuse the first kind-specific flags word to store their index.
int Code::builtin_index() {
  DCHECK_EQ(BUILTIN, kind());
  return READ_INT32_FIELD(this, kKindSpecificFlags1Offset);
}
4793 
4794 
// Stores the builtin index into the first kind-specific flags word.
void Code::set_builtin_index(int index) {
  DCHECK_EQ(BUILTIN, kind());
  WRITE_INT32_FIELD(this, kKindSpecificFlags1Offset, index);
}
4799 
4800 
// Number of stack slots; only valid for crankshafted code.
unsigned Code::stack_slots() {
  DCHECK(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
4806 
4807 
// Stores the stack slot count; crankshafted code only.
// NOTE(review): the CHECK uses <=, which admits slots == 1 <<
// kStackSlotsBitCount — one more than fits in a kStackSlotsBitCount-bit
// field. Confirm whether the bound should be strict.
void Code::set_stack_slots(unsigned slots) {
  CHECK(slots <= (1 << kStackSlotsBitCount));
  DCHECK(is_crankshafted());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = StackSlotsField::update(previous, slots);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
4815 
4816 
// Offset of the safepoint table; only valid for crankshafted code.
unsigned Code::safepoint_table_offset() {
  DCHECK(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
4822 
4823 
// Stores the (int-aligned) safepoint table offset; crankshafted only.
// NOTE(review): like set_stack_slots, the CHECK uses <= rather than a
// strict bound on the bit-field capacity — confirm intent.
void Code::set_safepoint_table_offset(unsigned offset) {
  CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
  DCHECK(is_crankshafted());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = SafepointTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
4832 
4833 
// Back edge table offset, stored compressed as a word count and scaled
// back to bytes here; full code only.
unsigned Code::back_edge_table_offset() {
  DCHECK_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
}
4839 
4840 
// Stores the (pointer-aligned) back edge table offset, compressed to a
// word count; full code only.
void Code::set_back_edge_table_offset(unsigned offset) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
  offset = offset >> kPointerSizeLog2;
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
4849 
4850 
// Back edges are considered patched once any OSR nesting level is allowed.
bool Code::back_edges_patched_for_osr() {
  DCHECK_EQ(FUNCTION, kind());
  return allow_osr_at_loop_nesting_level() > 0;
}
4855 
4856 
// ToBoolean ICs keep their state in the extra IC state bits.
byte Code::to_boolean_state() {
  return extra_ic_state();
}
4860 
4861 
// Reads the HasFunctionCache bit; stubs only.
bool Code::has_function_cache() {
  DCHECK(kind() == STUB);
  return HasFunctionCacheField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
4867 
4868 
// Read-modify-write of the HasFunctionCache bit; stubs only.
void Code::set_has_function_cache(bool flag) {
  DCHECK(kind() == STUB);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = HasFunctionCacheField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
4875 
4876 
// Reads the MarkedForDeoptimization bit; optimized code only.
bool Code::marked_for_deoptimization() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
4882 
4883 
// Read-modify-write of the MarkedForDeoptimization bit; optimized code
// only, and only while deoptimization is permitted for the isolate.
void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
4891 
4892 
// A stub is weak only if it is both eligible and has the bit set.
bool Code::is_weak_stub() {
  return CanBeWeakStub() && WeakStubField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
4897 
4898 
// Sets the WeakStub bit; caller must have verified eligibility.
void Code::mark_as_weak_stub() {
  DCHECK(CanBeWeakStub());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = WeakStubField::update(previous, true);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
4905 
4906 
// Invalidation only applies to stubs that are currently weak.
bool Code::is_invalidated_weak_stub() {
  return is_weak_stub() && InvalidatedWeakStubField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
4911 
4912 
// Sets the InvalidatedWeakStub bit.
// NOTE(review): asserts is_inline_cache_stub() while mark_as_weak_stub
// asserts CanBeWeakStub() — confirm the asymmetry is intentional.
void Code::mark_as_invalidated_weak_stub() {
  DCHECK(is_inline_cache_stub());
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = InvalidatedWeakStubField::update(previous, true);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
4919 
4920 
// True iff the code kind is one of the IC kinds enumerated by
// IC_KIND_LIST (expanded into the case labels below).
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}
4930 
4931 
// Keyed load or keyed store stubs.
bool Code::is_keyed_stub() {
  return is_keyed_load_stub() || is_keyed_store_stub();
}
4935 
4936 
// Debug stubs are identified purely by their IC state.
bool Code::is_debug_stub() {
  return ic_state() == DEBUG_STUB;
}
4940 
4941 
// The code object's constant pool array.
ConstantPoolArray* Code::constant_pool() {
  return ConstantPoolArray::cast(READ_FIELD(this, kConstantPoolOffset));
}
4945 
4946 
// Stores the constant pool array, with the GC write barrier.
void Code::set_constant_pool(Object* value) {
  DCHECK(value->IsConstantPoolArray());
  WRITE_FIELD(this, kConstantPoolOffset, value);
  WRITE_BARRIER(GetHeap(), this, kConstantPoolOffset, value);
}
4952 
4953 
ComputeFlags(Kind kind,InlineCacheState ic_state,ExtraICState extra_ic_state,StubType type,CacheHolderFlag holder)4954 Code::Flags Code::ComputeFlags(Kind kind, InlineCacheState ic_state,
4955                                ExtraICState extra_ic_state, StubType type,
4956                                CacheHolderFlag holder) {
4957   // Compute the bit mask.
4958   unsigned int bits = KindField::encode(kind)
4959       | ICStateField::encode(ic_state)
4960       | TypeField::encode(type)
4961       | ExtraICStateField::encode(extra_ic_state)
4962       | CacheHolderField::encode(holder);
4963   return static_cast<Flags>(bits);
4964 }
4965 
4966 
// Convenience wrapper: flags with the IC state fixed to MONOMORPHIC.
Code::Flags Code::ComputeMonomorphicFlags(Kind kind,
                                          ExtraICState extra_ic_state,
                                          CacheHolderFlag holder,
                                          StubType type) {
  return ComputeFlags(kind, MONOMORPHIC, extra_ic_state, type, holder);
}
4973 
4974 
// Handlers are HANDLER-kind code; the handler's own kind is carried in
// the extra-IC-state field of the flags.
Code::Flags Code::ComputeHandlerFlags(Kind handler_kind, StubType type,
                                      CacheHolderFlag holder) {
  return ComputeFlags(Code::HANDLER, MONOMORPHIC, handler_kind, type, holder);
}
4979 
4980 
// Decodes the kind sub-field from a packed flags word.
Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}
4984 
4985 
// Decodes the IC-state sub-field from a packed flags word.
InlineCacheState Code::ExtractICStateFromFlags(Flags flags) {
  return ICStateField::decode(flags);
}
4989 
4990 
// Decodes the extra-IC-state sub-field from a packed flags word.
ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}
4994 
4995 
// Decodes the stub-type sub-field from a packed flags word.
Code::StubType Code::ExtractTypeFromFlags(Flags flags) {
  return TypeField::decode(flags);
}
4999 
5000 
// Decodes the cache-holder sub-field from a packed flags word.
CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}
5004 
5005 
// Returns the flags with the stub-type sub-field cleared.
Code::Flags Code::RemoveTypeFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask;
  return static_cast<Flags>(bits);
}
5010 
5011 
// Returns the flags with both the stub-type and cache-holder fields
// cleared.
Code::Flags Code::RemoveTypeAndHolderFromFlags(Flags flags) {
  int bits = flags & ~TypeField::kMask & ~CacheHolderField::kMask;
  return static_cast<Flags>(bits);
}
5016 
5017 
// Maps an instruction-start address back to its enclosing Code object
// by subtracting the header size.
Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during mark
  // sweep. reinterpret_cast is therefore used instead of the more appropriate
  // Code::cast. Code::cast does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}
5027 
5028 
// Dereferences an entry slot and maps the stored entry address back to
// its enclosing heap object (entry address = object + header size).
Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return HeapObject::
      FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
}
5033 
5034 
IsWeakObjectInOptimizedCode(Object * object)5035 bool Code::IsWeakObjectInOptimizedCode(Object* object) {
5036   if (!FLAG_collect_maps) return false;
5037   if (object->IsMap()) {
5038     return Map::cast(object)->CanTransition() &&
5039            FLAG_weak_embedded_maps_in_optimized_code;
5040   }
5041   if (object->IsJSObject() ||
5042       (object->IsCell() && Cell::cast(object)->value()->IsJSObject())) {
5043     return FLAG_weak_embedded_objects_in_optimized_code;
5044   }
5045   return false;
5046 }
5047 
5048 
// A small fixed-capacity list of (map-to-find, replacement-object)
// pairs, consumed by Code (declared a friend) when patching embedded
// objects.
class Code::FindAndReplacePattern {
 public:
  FindAndReplacePattern() : count_(0) { }
  // Appends one find/replace pair; at most kMaxCount pairs may be added.
  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
    DCHECK(count_ < kMaxCount);
    find_[count_] = map_to_find;
    replace_[count_] = obj_to_replace;
    ++count_;
  }
 private:
  static const int kMaxCount = 4;
  int count_;                        // Number of valid pairs.
  Handle<Map> find_[kMaxCount];      // Maps to search for.
  Handle<Object> replace_[kMaxCount];  // Per-map replacement objects.
  friend class Code;
};
5065 
5066 
// Only transitioning maps may be embedded weakly in ICs, and only when
// both relevant flags are enabled.
bool Code::IsWeakObjectInIC(Object* object) {
  return object->IsMap() && Map::cast(object)->CanTransition() &&
         FLAG_collect_maps &&
         FLAG_weak_embedded_maps_in_ic;
}
5072 
5073 
// The map's prototype field.
Object* Map::prototype() const {
  return READ_FIELD(this, kPrototypeOffset);
}
5077 
5078 
// Stores the prototype (null or a JSReceiver) with a conditional write
// barrier controlled by |mode|.
void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  DCHECK(value->IsNull() || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}
5084 
5085 
5086 // If the descriptor is using the empty transition array, install a new empty
5087 // transition array that will have place for an element transition.
// Ensures |map| has a full transition array: allocates an empty one
// (preserving the back pointer) if none exists, upgrades a simple
// transition array to a full one, or returns immediately if a full
// array is already installed.
static void EnsureHasTransitionArray(Handle<Map> map) {
  Handle<TransitionArray> transitions;
  if (!map->HasTransitionArray()) {
    transitions = TransitionArray::Allocate(map->GetIsolate(), 0);
    transitions->set_back_pointer_storage(map->GetBackPointer());
  } else if (!map->transitions()->IsFullTransitionArray()) {
    transitions = TransitionArray::ExtendToFullTransitionArray(map);
  } else {
    return;
  }
  map->set_transitions(*transitions);
}
5100 
5101 
// Installs |descriptors| and claims all of them as own descriptors.
void Map::InitializeDescriptors(DescriptorArray* descriptors) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);
}
5107 
5108 
// Generates Map::instance_descriptors()/set_instance_descriptors()
// backed by the field at kDescriptorsOffset.
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
5110 
5111 
// Stores bit_field3. When the field's slot is pointer-sized (64-bit),
// the unused upper 32 bits are zeroed first so the whole word is
// well-defined.
void Map::set_bit_field3(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}
5118 
5119 
// Raw accessor for the 32-bit third bit field.
uint32_t Map::bit_field3() {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}
5123 
5124 
// Appends a descriptor to the instance descriptors and bumps the own
// descriptor count; only valid when this map owns all its descriptors.
void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);
}
5132 
5133 
// The back pointer shares a slot with the transitions: if the slot
// holds an array, the back pointer lives in the array's storage slot;
// otherwise the slot holds the back pointer directly (a Map, or
// undefined for root maps).
// NOTE(review): the check is IsDescriptorArray() but the cast is
// TransitionArray::cast — confirm this type pairing is intentional for
// this V8 version.
Object* Map::GetBackPointer() {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsDescriptorArray()) {
    return TransitionArray::cast(object)->back_pointer_storage();
  } else {
    DCHECK(object->IsMap() || object->IsUndefined());
    return object;
  }
}
5143 
5144 
// True if a transition array exists and records an elements transition.
bool Map::HasElementsTransition() {
  return HasTransitionArray() && transitions()->HasElementsTransition();
}
5148 
5149 
// True if the shared transitions/back-pointer slot currently holds a
// transition array.
bool Map::HasTransitionArray() const {
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return object->IsTransitionArray();
}
5154 
5155 
// Target map of the elements transition, looked up via the
// elements_transition_symbol key in the transition array.
Map* Map::elements_transition_map() {
  int index = transitions()->Search(GetHeap()->elements_transition_symbol());
  return transitions()->GetTarget(index);
}
5160 
5161 
CanHaveMoreTransitions()5162 bool Map::CanHaveMoreTransitions() {
5163   if (!HasTransitionArray()) return true;
5164   return FixedArray::SizeFor(transitions()->length() +
5165                              TransitionArray::kTransitionSize)
5166       <= Page::kMaxRegularHeapObjectSize;
5167 }
5168 
5169 
// Returns the target map of the transition at |transition_index|.
// Precondition: a transition array exists and the index is valid.
Map* Map::GetTransition(int transition_index) {
  return transitions()->GetTarget(transition_index);
}
5173 
5174 
SearchTransition(Name * name)5175 int Map::SearchTransition(Name* name) {
5176   if (HasTransitionArray()) return transitions()->Search(name);
5177   return TransitionArray::kNotFound;
5178 }
5179 
5180 
GetPrototypeTransitions()5181 FixedArray* Map::GetPrototypeTransitions() {
5182   if (!HasTransitionArray()) return GetHeap()->empty_fixed_array();
5183   if (!transitions()->HasPrototypeTransitions()) {
5184     return GetHeap()->empty_fixed_array();
5185   }
5186   return transitions()->GetPrototypeTransitions();
5187 }
5188 
5189 
// Installs |proto_transitions| as the map's prototype-transition cache,
// allocating a transition array first if needed.  The existing count of
// prototype transitions is preserved across the swap.  In debug builds the
// old cache is zapped so stale references crash loudly.
void Map::SetPrototypeTransitions(
    Handle<Map> map, Handle<FixedArray> proto_transitions) {
  EnsureHasTransitionArray(map);
  int old_number_of_transitions = map->NumberOfProtoTransitions();
#ifdef DEBUG
  if (map->HasPrototypeTransitions()) {
    DCHECK(map->GetPrototypeTransitions() != *proto_transitions);
    map->ZapPrototypeTransitions();
  }
#endif
  map->transitions()->SetPrototypeTransitions(*proto_transitions);
  map->SetNumberOfProtoTransitions(old_number_of_transitions);
}
5203 
5204 
// True if a transition array exists and it carries a prototype-transition
// cache.
bool Map::HasPrototypeTransitions() {
  return HasTransitionArray() && transitions()->HasPrototypeTransitions();
}
5208 
5209 
// Returns the transition array.  Callers must check HasTransitionArray()
// first; the slot may otherwise hold a back pointer or undefined.
TransitionArray* Map::transitions() const {
  DCHECK(HasTransitionArray());
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  return TransitionArray::cast(object);
}
5215 
5216 
// Replaces the transition array with |transition_array|, zapping the old one
// (transition arrays are never shared, so the old array must not keep its
// referents alive).  The conditional write barrier keeps incremental marking
// consistent.
void Map::set_transitions(TransitionArray* transition_array,
                          WriteBarrierMode mode) {
  // Transition arrays are not shared. When one is replaced, it should not
  // keep referenced objects alive, so we zap it.
  // When there is another reference to the array somewhere (e.g. a handle),
  // not zapping turns from a waste of memory into a source of crashes.
  if (HasTransitionArray()) {
#ifdef DEBUG
    // Every live target (one whose descriptors we still share) must survive
    // the swap: it has to be present in the new array under the same key.
    for (int i = 0; i < transitions()->number_of_transitions(); i++) {
      Map* target = transitions()->GetTarget(i);
      if (target->instance_descriptors() == instance_descriptors()) {
        Name* key = transitions()->GetKey(i);
        int new_target_index = transition_array->Search(key);
        DCHECK(new_target_index != TransitionArray::kNotFound);
        DCHECK(transition_array->GetTarget(new_target_index) == target);
      }
    }
#endif
    DCHECK(transitions() != transition_array);
    ZapTransitions();
  }

  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, transition_array);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, kTransitionsOrBackPointerOffset, transition_array, mode);
}
5243 
5244 
// Initializes the back-pointer slot to undefined.  No write barrier needed:
// undefined is an immortal immovable root.
void Map::init_back_pointer(Object* undefined) {
  DCHECK(undefined->IsUndefined());
  WRITE_FIELD(this, kTransitionsOrBackPointerOffset, undefined);
}
5249 
5250 
// Sets the back pointer, either inside the transition array (if one is
// installed) or directly in the overloaded slot.  Only map<->undefined
// flips are allowed (checked below); the direct store needs a write barrier,
// the transition-array path handles its own.
void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  DCHECK((value->IsUndefined() && GetBackPointer()->IsMap()) ||
         (value->IsMap() && GetBackPointer()->IsUndefined()));
  Object* object = READ_FIELD(this, kTransitionsOrBackPointerOffset);
  if (object->IsTransitionArray()) {
    TransitionArray::cast(object)->set_back_pointer_storage(value);
  } else {
    WRITE_FIELD(this, kTransitionsOrBackPointerOffset, value);
    CONDITIONAL_WRITE_BARRIER(
        GetHeap(), this, kTransitionsOrBackPointerOffset, value, mode);
  }
}
5264 
5265 
// Macro-generated field accessors.  ACCESSORS emits a getter plus a setter
// with a write barrier; ACCESSORS_TO_SMI stores Smi-encoded ints;
// BOOL_ACCESSORS packs a bit into an existing Smi field.
ACCESSORS(Map, code_cache, Object, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, constructor, Object, kConstructorOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals_or_bindings, FixedArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(GlobalObject, builtins, JSBuiltinsObject, kBuiltinsOffset)
ACCESSORS(GlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(GlobalObject, global_context, Context, kGlobalContextOffset)
ACCESSORS(GlobalObject, global_proxy, JSObject, kGlobalProxyOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
ACCESSORS_TO_SMI(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(DeclaredAccessorDescriptor, serialized_data, ByteArray,
          kSerializedDataOffset)

ACCESSORS(DeclaredAccessorInfo, descriptor, DeclaredAccessorDescriptor,
          kDescriptorOffset)

ACCESSORS(ExecutableAccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(ExecutableAccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(ExecutableAccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)

ACCESSORS(AccessCheckInfo, named_callback, Object, kNamedCallbackOffset)
ACCESSORS(AccessCheckInfo, indexed_callback, Object, kIndexedCallbackOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, serial_number, Object, kSerialNumberOffset)
ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS_TO_SMI(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, internal_field_count, Object,
          kInternalFieldCountOffset)

ACCESSORS(SignatureInfo, receiver, Object, kReceiverOffset)
ACCESSORS(SignatureInfo, args, Object, kArgsOffset)

ACCESSORS(TypeSwitchInfo, types, Object, kTypesOffset)

ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_data, kPretenureDataOffset)
ACCESSORS_TO_SMI(AllocationSite, pretenure_create_count,
                 kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
ACCESSORS(Script, id, Smi, kIdOffset)
ACCESSORS_TO_SMI(Script, line_offset, kLineOffsetOffset)
ACCESSORS_TO_SMI(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, Foreign, kWrapperOffset)
ACCESSORS_TO_SMI(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS(Script, eval_from_shared, Object, kEvalFromSharedOffset)
ACCESSORS_TO_SMI(Script, eval_from_instructions_offset,
                 kEvalFrominstructionsOffsetOffset)
ACCESSORS_TO_SMI(Script, flags, kFlagsOffset)
BOOL_ACCESSORS(Script, flags, is_shared_cross_origin, kIsSharedCrossOriginBit)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
5374 
5375 Script::CompilationType Script::compilation_type() {
5376   return BooleanBit::get(flags(), kCompilationTypeBit) ?
5377       COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
5378 }
set_compilation_type(CompilationType type)5379 void Script::set_compilation_type(CompilationType type) {
5380   set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
5381       type == COMPILATION_TYPE_EVAL));
5382 }
compilation_state()5383 Script::CompilationState Script::compilation_state() {
5384   return BooleanBit::get(flags(), kCompilationStateBit) ?
5385       COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
5386 }
set_compilation_state(CompilationState state)5387 void Script::set_compilation_state(CompilationState state) {
5388   set_flags(BooleanBit::set(flags(), kCompilationStateBit,
5389       state == COMPILATION_STATE_COMPILED));
5390 }
5391 
5392 
// Macro-generated accessors for DebugInfo, BreakPointInfo,
// SharedFunctionInfo, and the FunctionTemplateInfo flag bits.
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, original_code, Code, kOriginalCodeIndex)
ACCESSORS(DebugInfo, code, Code, kPatchedCodeIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

ACCESSORS_TO_SMI(BreakPointInfo, code_position, kCodePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS_TO_SMI(BreakPointInfo, statement_position, kStatementPositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, Object,
                 kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_vector, TypeFeedbackVector,
          kFeedbackVectorOffset)
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, inferred_name, String, kInferredNameOffset)


// Single-bit flags packed into FunctionTemplateInfo::flag and
// SharedFunctionInfo::start_position_and_type / compiler_hints.
SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_expression,
               kIsExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               allows_lazy_compilation_without_context,
               kAllowLazyCompilationWithoutContext)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
5451 
5452 
// On 32-bit hosts SharedFunctionInfo's int fields are plain Smis.  On 64-bit
// hosts two 32-bit ints share one pointer-sized word; the low half must look
// like a Smi to the GC, so PSEUDO_SMI_ACCESSORS_LO shifts values by one bit
// to keep the heap tag clear, and PSEUDO_SMI_ACCESSORS_HI uses the untagged
// upper half directly.
#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else

// Low half of a word: stored shifted left by one so the heap-object tag bit
// stays 0 and the GC treats the word as a Smi.
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == 0);              \
  int holder::name() const {                                      \
    int value = READ_INT_FIELD(this, offset);                     \
    DCHECK(kHeapObjectTag == 1);                                  \
    DCHECK((value & kHeapObjectTag) == 0);                        \
    return value >> 1;                                            \
  }                                                               \
  void holder::set_##name(int value) {                            \
    DCHECK(kHeapObjectTag == 1);                                  \
    DCHECK((value & 0xC0000000) == 0xC0000000 ||                  \
           (value & 0xC0000000) == 0x0);                          \
    WRITE_INT_FIELD(this,                                         \
                    offset,                                       \
                    (value << 1) & ~kHeapObjectTag);              \
  }

// High half of a word: untagged, so a plain int accessor suffices.
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset)             \
  STATIC_ASSERT(holder::offset % kPointerSize == kIntSize);       \
  INT_ACCESSORS(holder, name, offset)


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif
5532 
5533 
// Getter only — the setter below needs extra side effects on the code
// object, so BOOL_ACCESSORS cannot be used.
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)
5538 
5539 
// Sets the optimization-disabled hint and, when disabling, also clears the
// optimizable bit on the unoptimized code object so it isn't counted as an
// optimization candidate.
void SharedFunctionInfo::set_optimization_disabled(bool disable) {
  set_compiler_hints(BooleanBit::set(compiler_hints(),
                                     kOptimizationDisabled,
                                     disable));
  // If disabling optimizations we reflect that in the code object so
  // it will not be counted as optimizable code.
  if ((code()->kind() == Code::FUNCTION) && disable) {
    code()->set_optimizable(false);
  }
}
5550 
5551 
strict_mode()5552 StrictMode SharedFunctionInfo::strict_mode() {
5553   return BooleanBit::get(compiler_hints(), kStrictModeFunction)
5554       ? STRICT : SLOPPY;
5555 }
5556 
5557 
set_strict_mode(StrictMode strict_mode)5558 void SharedFunctionInfo::set_strict_mode(StrictMode strict_mode) {
5559   // We only allow mode transitions from sloppy to strict.
5560   DCHECK(this->strict_mode() == SLOPPY || this->strict_mode() == strict_mode);
5561   int hints = compiler_hints();
5562   hints = BooleanBit::set(hints, kStrictModeFunction, strict_mode == STRICT);
5563   set_compiler_hints(hints);
5564 }
5565 
5566 
// Decodes the function kind (normal/arrow/generator/...) from the hints.
FunctionKind SharedFunctionInfo::kind() {
  return FunctionKindBits::decode(compiler_hints());
}
5570 
5571 
set_kind(FunctionKind kind)5572 void SharedFunctionInfo::set_kind(FunctionKind kind) {
5573   DCHECK(IsValidFunctionKind(kind));
5574   int hints = compiler_hints();
5575   hints = FunctionKindBits::update(hints, kind);
5576   set_compiler_hints(hints);
5577 }
5578 
5579 
// More single-bit compiler hints, plus the code-cache field accessors.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, inline_builtin,
               kInlineBuiltin)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, bound, kBoundFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous, kIsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_cache, kDontCache)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_arrow, kIsArrow)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_generator, kIsGenerator)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_concise_method,
               kIsConciseMethod)

ACCESSORS(CodeCache, default_cache, FixedArray, kDefaultCacheOffset)
ACCESSORS(CodeCache, normal_type_cache, Object, kNormalTypeCacheOffset)

ACCESSORS(PolymorphicCodeCache, cache, Object, kCacheOffset)
5600 
5601 bool Script::HasValidSource() {
5602   Object* src = this->source();
5603   if (!src->IsString()) return true;
5604   String* src_str = String::cast(src);
5605   if (!StringShape(src_str).IsExternal()) return true;
5606   if (src_str->IsOneByteRepresentation()) {
5607     return ExternalOneByteString::cast(src)->resource() != NULL;
5608   } else if (src_str->IsTwoByteRepresentation()) {
5609     return ExternalTwoByteString::cast(src)->resource() != NULL;
5610   }
5611   return true;
5612 }
5613 
5614 
// Marks this (builtin-only) function as exempt from arguments adaption by
// storing the sentinel parameter count.
void SharedFunctionInfo::DontAdaptArguments() {
  DCHECK(code()->kind() == Code::BUILTIN);
  set_formal_parameter_count(kDontAdaptArgumentsSentinel);
}
5619 
5620 
// Extracts the source start position from the packed
// start_position_and_type field.
int SharedFunctionInfo::start_position() const {
  return start_position_and_type() >> kStartPositionShift;
}
5624 
5625 
set_start_position(int start_position)5626 void SharedFunctionInfo::set_start_position(int start_position) {
5627   set_start_position_and_type((start_position << kStartPositionShift)
5628     | (start_position_and_type() & ~kStartPositionMask));
5629 }
5630 
5631 
// Returns the unoptimized code object shared by all instances of this
// function.
Code* SharedFunctionInfo::code() const {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}
5635 
5636 
// Installs |value| as the shared code.  Optimized code never goes here — it
// is attached per-JSFunction (see JSFunction::set_code).
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}
5642 
5643 
// Replaces the shared code, first evicting this function from the code
// flusher's candidate list if it had been enqueued there (the flusher marks
// candidates via the code object's gc_metadata field).
void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);

  set_code(value);
}
5656 
5657 
// Returns the scope info describing this function's declarations.
ScopeInfo* SharedFunctionInfo::scope_info() const {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}
5661 
5662 
// Stores the scope info with a (conditional) write barrier.
void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}
5672 
5673 
// Compiled means the code field no longer holds the lazy-compile stub.
bool SharedFunctionInfo::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
}
5677 
5678 
// API functions keep their FunctionTemplateInfo in the function_data field.
bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}
5682 
5683 
// Returns the API function template; valid only when IsApiFunction().
FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  DCHECK(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}
5688 
5689 
// Builtin functions store their BuiltinFunctionId as a Smi in function_data.
bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_data()->IsSmi();
}
5693 
5694 
// Returns the builtin id; valid only when HasBuiltinFunctionId().
BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  DCHECK(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(Smi::cast(function_data())->value());
}
5699 
5700 
// IC age, packed into the counters field.
int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}
5704 
5705 
// Updates only the IC-age bits of the packed counters field.
void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}
5709 
5710 
// Deoptimization count, packed into the counters field.
int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}
5714 
5715 
// Updates only the deopt-count bits of the packed counters field.
void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}
5719 
5720 
increment_deopt_count()5721 void SharedFunctionInfo::increment_deopt_count() {
5722   int value = counters();
5723   int deopt_count = DeoptCountBits::decode(value);
5724   deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
5725   set_counters(DeoptCountBits::update(value, deopt_count));
5726 }
5727 
5728 
// Number of attempts made to re-enable optimization, packed into counters.
int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}
5732 
5733 
// Updates only the re-enable-tries bits of the packed counters field.
void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}
5737 
5738 
// Optimization count, packed into opt_count_and_bailout_reason.
int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}
5742 
5743 
// Updates only the opt-count bits, preserving the bailout reason.
void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}
5748 
5749 
DisableOptimizationReason()5750 BailoutReason SharedFunctionInfo::DisableOptimizationReason() {
5751   BailoutReason reason = static_cast<BailoutReason>(
5752       DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
5753   return reason;
5754 }
5755 
5756 
// True when the unoptimized code was compiled with deopt support baked in.
bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}
5761 
5762 
// Records another re-enable attempt and, at exponentially spaced try counts
// (powers of two >= 16), clears the disabled state and resets the opt/deopt
// counters.  Note the power-of-two test uses the value read *before* the
// increment.
void SharedFunctionInfo::TryReenableOptimization() {
  int tries = opt_reenable_tries();
  set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
  // We reenable optimization whenever the number of tries is a large
  // enough power of 2.
  if (tries >= 16 && (((tries - 1) & tries) == 0)) {
    set_optimization_disabled(false);
    set_opt_count(0);
    set_deopt_count(0);
    code()->set_optimizable(true);
  }
}
5775 
5776 
// A function is a builtin iff its context's global object is the builtins
// object.
bool JSFunction::IsBuiltin() {
  return context()->global_object()->IsJSBuiltinsObject();
}
5780 
5781 
// True when the function originates from a TYPE_NATIVE script.
bool JSFunction::IsFromNativeScript() {
  Object* script = shared()->script();
  bool native = script->IsScript() &&
                Script::cast(script)->type()->value() == Script::TYPE_NATIVE;
  DCHECK(!IsBuiltin() || native);  // All builtins are also native.
  return native;
}
5789 
5790 
IsFromExtensionScript()5791 bool JSFunction::IsFromExtensionScript() {
5792   Object* script = shared()->script();
5793   return script->IsScript() &&
5794          Script::cast(script)->type()->value() == Script::TYPE_EXTENSION;
5795 }
5796 
5797 
// Adaption is skipped only for functions flagged with the sentinel count
// (see SharedFunctionInfo::DontAdaptArguments).
bool JSFunction::NeedsArgumentsAdaption() {
  return shared()->formal_parameter_count() !=
      SharedFunctionInfo::kDontAdaptArgumentsSentinel;
}
5802 
5803 
// True when the currently installed code is optimized.
bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}
5807 
5808 
// True when the installed code is unoptimized and still eligible for
// optimization.
bool JSFunction::IsOptimizable() {
  return code()->kind() == Code::FUNCTION && code()->optimizable();
}
5812 
5813 
// True when the code field holds the synchronous compile-optimized stub.
bool JSFunction::IsMarkedForOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimized);
}
5818 
5819 
// True when the code field holds the concurrent compile-optimized stub.
bool JSFunction::IsMarkedForConcurrentOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimizedConcurrent);
}
5824 
5825 
// True while the function sits in the concurrent optimization queue.
bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}
5830 
5831 
// In-object slack tracking runs while the initial map's construction
// counter has not yet reached the no-tracking sentinel.
bool JSFunction::IsInobjectSlackTrackingInProgress() {
  return has_initial_map() &&
      initial_map()->construction_count() != JSFunction::kNoSlackTracking;
}
5836 
5837 
// The function stores a raw code *entry address*; recover the Code object
// from it.
Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}
5842 
5843 
// Installs |value| by writing its entry address, then notifies incremental
// marking (the entry is an interior pointer, so the normal write barrier
// does not apply).  New-space code is disallowed: the entry address would
// move.
void JSFunction::set_code(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}
5853 
5854 
// Like set_code but without the incremental-marking record; only safe when
// the caller guarantees the marker will still see the code object.
void JSFunction::set_code_no_write_barrier(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}
5860 
5861 
// Swaps in new code and keeps the native context's optimized-function list
// in sync with the optimized/unoptimized state change.  When replacing one
// optimized code object with another, the old one is evicted from the
// shared optimized-code map first.
void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}
5883 
5884 
// Returns the context the function was created in.
Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}
5888 
5889 
// Convenience: the global proxy of this function's context.
JSObject* JSFunction::global_proxy() {
  return context()->global_proxy();
}
5893 
5894 
// Stores the context (or undefined during bootstrapping) with a write
// barrier.
void JSFunction::set_context(Object* value) {
  DCHECK(value->IsUndefined() || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}
5900 
// Overloaded slot: holds the initial map once one exists, otherwise the
// prototype (or the hole).
ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)
5903 
5904 
5905 Map* JSFunction::initial_map() {
5906   return Map::cast(prototype_or_initial_map());
5907 }
5908 
5909 
has_initial_map()5910 bool JSFunction::has_initial_map() {
5911   return prototype_or_initial_map()->IsMap();
5912 }
5913 
5914 
has_instance_prototype()5915 bool JSFunction::has_instance_prototype() {
5916   return has_initial_map() || !prototype_or_initial_map()->IsTheHole();
5917 }
5918 
5919 
has_prototype()5920 bool JSFunction::has_prototype() {
5921   return map()->has_non_instance_prototype() || has_instance_prototype();
5922 }
5923 
5924 
// The prototype that instances created from this function receive.
Object* JSFunction::instance_prototype() {
  DCHECK(has_instance_prototype());
  if (has_initial_map()) return initial_map()->prototype();
  // When there is no initial map and the prototype is a JSObject, the
  // initial map field is used for the prototype field.
  return prototype_or_initial_map();
}


// The value of this function's 'prototype' property.
Object* JSFunction::prototype() {
  DCHECK(has_prototype());
  // If the function's prototype property has been set to a non-JSObject
  // value, that value is stored in the constructor field of the map.
  if (map()->has_non_instance_prototype()) return map()->constructor();
  return instance_prototype();
}


// Whether functions of this map kind carry a 'prototype' property at all.
bool JSFunction::should_have_prototype() {
  return map()->function_with_prototype();
}


// A function counts as compiled once its code is no longer the lazy
// compilation builtin.
bool JSFunction::is_compiled() {
  return code() != GetIsolate()->builtins()->builtin(Builtins::kCompileLazy);
}
5951 
5952 
// The literals_or_bindings slot is shared: ordinary functions store their
// literals array there, bound functions store their binding data.  The
// DCHECKs enforce that each accessor is only used on the matching kind.
FixedArray* JSFunction::literals() {
  DCHECK(!shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_literals(FixedArray* literals) {
  DCHECK(!shared()->bound());
  set_literals_or_bindings(literals);
}


FixedArray* JSFunction::function_bindings() {
  DCHECK(shared()->bound());
  return literals_or_bindings();
}


void JSFunction::set_function_bindings(FixedArray* bindings) {
  DCHECK(shared()->bound());
  // Bound function literal may be initialized to the empty fixed array
  // before the bindings are set.
  DCHECK(bindings == GetHeap()->empty_fixed_array() ||
         bindings->map() == GetHeap()->fixed_cow_array_map());
  set_literals_or_bindings(bindings);
}


// Length of the literals array; only meaningful for non-bound functions.
int JSFunction::NumberOfLiterals() {
  DCHECK(!shared()->bound());
  return literals()->length();
}
5985 
5986 
// Accessors for the per-context JavaScript builtin functions and their code
// objects, stored at fixed offsets computed from the builtin id.
Object* JSBuiltinsObject::javascript_builtin(Builtins::JavaScript id) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  return READ_FIELD(this, OffsetOfFunctionWithId(id));
}


void JSBuiltinsObject::set_javascript_builtin(Builtins::JavaScript id,
                                              Object* value) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfFunctionWithId(id), value);
  WRITE_BARRIER(GetHeap(), this, OffsetOfFunctionWithId(id), value);
}


Code* JSBuiltinsObject::javascript_builtin_code(Builtins::JavaScript id) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  return Code::cast(READ_FIELD(this, OffsetOfCodeWithId(id)));
}


// No write barrier here: the code object is asserted not to be in new
// space, so the store needs no scavenger bookkeeping.
void JSBuiltinsObject::set_javascript_builtin_code(Builtins::JavaScript id,
                                                   Code* value) {
  DCHECK(id < kJSBuiltinsCount);  // id is unsigned.
  WRITE_FIELD(this, OffsetOfCodeWithId(id), value);
  DCHECK(!GetHeap()->InNewSpace(value));
}
6013 
6014 
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)
ACCESSORS(JSFunctionProxy, call_trap, Object, kCallTrapOffset)
ACCESSORS(JSFunctionProxy, construct_trap, Object, kConstructTrapOffset)


// Fills every pointer-sized field of the proxy body with |value|.  Write
// barriers are skipped, which is safe because |value| is asserted to be
// either a non-heap-object or outside new space.
void JSProxy::InitializeBody(int object_size, Object* value) {
  DCHECK(!value->IsHeapObject() || !GetHeap()->InNewSpace(value));
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}
6027 
6028 
ACCESSORS(JSCollection, table, Object, kTableOffset)


// Generates a typed getter and a barrier-aware setter for each field of an
// OrderedHashTableIterator (backing table, current index, iteration kind).
#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
  template<class Derived, class TableType>                           \
  type* OrderedHashTableIterator<Derived, TableType>::name() const { \
    return type::cast(READ_FIELD(this, offset));                     \
  }                                                                  \
  template<class Derived, class TableType>                           \
  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
      type* value, WriteBarrierMode mode) {                          \
    WRITE_FIELD(this, offset, value);                                \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS


ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)
6053 
6054 
// The wrapped external address is stored untagged as an intptr field, so no
// read/write barriers are involved.
Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}
6063 
6064 
ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
SMI_ACCESSORS(JSGeneratorObject, stack_handler_index, kStackHandlerIndexOffset)

// The continuation value encodes the generator's state: per the DCHECKs,
// kGeneratorExecuting < kGeneratorClosed == 0, so any positive value marks
// a suspended generator (its resume point).
bool JSGeneratorObject::is_suspended() {
  DCHECK_LT(kGeneratorExecuting, kGeneratorClosed);
  DCHECK_EQ(kGeneratorClosed, 0);
  return continuation() > 0;
}

bool JSGeneratorObject::is_closed() {
  return continuation() == kGeneratorClosed;
}

bool JSGeneratorObject::is_executing() {
  return continuation() == kGeneratorExecuting;
}

ACCESSORS(JSModule, context, Object, kContextOffset)
ACCESSORS(JSModule, scope_info, ScopeInfo, kScopeInfoOffset)


ACCESSORS(JSValue, value, Object, kValueOffset)
6091 
6092 
// HeapNumber::cast also accepts MutableHeapNumber values, which share the
// same representation (see the SLOW_DCHECK).
HeapNumber* HeapNumber::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<HeapNumber*>(object);
}


const HeapNumber* HeapNumber::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<const HeapNumber*>(object);
}
6103 
6104 
// JSDate caches broken-down time components alongside the raw time value;
// cache_stamp tracks their validity.
ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


ACCESSORS(JSMessageObject, type, String, kTypeOffset)
ACCESSORS(JSMessageObject, arguments, JSArray, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


// Code header fields.  raw_type_feedback_info is multiplexed between
// feedback info (FUNCTION code) and stub keys (stubs/ICs); see below.
INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)
6131 
6132 
// Nulls out the pointer-valued header fields of this code object.  A Smi in
// the type-feedback slot is a stub/IC key (see stub_key()) rather than a
// heap pointer, so it is preserved.
void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  WRITE_FIELD(this, kConstantPoolOffset, NULL);
  // Do not wipe out major/minor keys on a code stub or IC
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
}
6143 
6144 
// The type-feedback slot is multiplexed: FUNCTION code stores a feedback
// info object there, while code stubs and ICs store their Smi-encoded key.
Object* Code::type_feedback_info() {
  DCHECK(kind() == FUNCTION);
  return raw_type_feedback_info();
}


void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  DCHECK(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


// Decodes the Smi stored by set_stub_key() back to the 32-bit stub key.
uint32_t Code::stub_key() {
  DCHECK(IsCodeStubOrIC());
  Smi* smi_key = Smi::cast(raw_type_feedback_info());
  return static_cast<uint32_t>(smi_key->value());
}
6164 
6165 
set_stub_key(uint32_t key)6166 void Code::set_stub_key(uint32_t key) {
6167   DCHECK(IsCodeStubOrIC());
6168   set_raw_type_feedback_info(Smi::FromInt(key));
6169 }
6170 
6171 
ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)


// The instruction stream starts immediately after the Code header.
byte* Code::instruction_start()  {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end()  {
  return instruction_start() + instruction_size();
}


// Instruction size rounded up to object alignment.
int Code::body_size() {
  return RoundUp(instruction_size(), kObjectAlignment);
}


// Reads the relocation info slot via reinterpret_cast, bypassing the
// checked ByteArray cast.
ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


// The entry point of a code object is its first instruction.
byte* Code::entry() {
  return instruction_start();
}
6209 
6210 
contains(byte * inner_pointer)6211 bool Code::contains(byte* inner_pointer) {
6212   return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6213 }
6214 
6215 
ACCESSORS(JSArray, length, Object, kLengthOffset)


// The backing store pointer is kept untagged in an intptr field.
void* JSArrayBuffer::backing_store() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


// |mode| is intentionally unused: the slot holds a raw (untagged) pointer,
// so no write barrier is ever required.
void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)
ACCESSORS_TO_SMI(JSArrayBuffer, flag, kFlagOffset)
6233 
6234 
// Boolean properties packed as bits into the Smi-valued flag field.
bool JSArrayBuffer::is_external() {
  return BooleanBit::get(flag(), kIsExternalBit);
}


void JSArrayBuffer::set_is_external(bool value) {
  set_flag(BooleanBit::set(flag(), kIsExternalBit, value));
}


bool JSArrayBuffer::should_be_freed() {
  return BooleanBit::get(flag(), kShouldBeFreed);
}


void JSArrayBuffer::set_should_be_freed(bool value) {
  set_flag(BooleanBit::set(flag(), kShouldBeFreed, value));
}
6253 
6254 
// weak_next / weak_first_view presumably thread buffers and their views
// into weak lists maintained by the heap -- confirm against heap code.
ACCESSORS(JSArrayBuffer, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSArrayBuffer, weak_first_view, Object, kWeakFirstViewOffset)


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
ACCESSORS(JSArrayBufferView, byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, byte_length, Object, kByteLengthOffset)
ACCESSORS(JSArrayBufferView, weak_next, Object, kWeakNextOffset)
ACCESSORS(JSTypedArray, length, Object, kLengthOffset)

ACCESSORS(JSRegExp, data, Object, kDataOffset)
6266 
6267 
// The data field is either undefined (regexp not yet compiled) or a
// FixedArray whose kTagIndex element encodes the implementation type.
JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined()) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}
6274 
6275 
CaptureCount()6276 int JSRegExp::CaptureCount() {
6277   switch (TypeTag()) {
6278     case ATOM:
6279       return 0;
6280     case IRREGEXP:
6281       return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
6282     default:
6283       UNREACHABLE();
6284       return -1;
6285   }
6286 }
6287 
6288 
// Decodes the regexp flags stored as a Smi at kFlagsIndex of the data array.
JSRegExp::Flags JSRegExp::GetFlags() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}
6295 
6296 
Pattern()6297 String* JSRegExp::Pattern() {
6298   DCHECK(this->data()->IsFixedArray());
6299   Object* data = this->data();
6300   String* pattern= String::cast(FixedArray::cast(data)->get(kSourceIndex));
6301   return pattern;
6302 }
6303 
6304 
// Reads an element of the regexp's data array; only valid once compiled.
Object* JSRegExp::DataAt(int index) {
  DCHECK(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


// Writes an element of the regexp's data array.  Indices below kDataIndex
// (tag, source, flags) must not be modified this way.
void JSRegExp::SetDataAt(int index, Object* value) {
  DCHECK(TypeTag() != NOT_COMPILED);
  DCHECK(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}
6316 
6317 
// The elements kind lives on the map.  In debug builds, cross-check that
// the elements backing store's actual type is consistent with the kind.
ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    DCHECK((IsFastSmiOrObjectElementsKind(kind) &&
            (map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map())) ||
           (IsFastDoubleElementsKind(kind) &&
            (fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array())) ||
           (kind == DICTIONARY_ELEMENTS &&
            fixed_array->IsFixedArray() &&
            fixed_array->IsDictionary()) ||
           (kind > DICTIONARY_ELEMENTS));
    DCHECK((kind != SLOPPY_ARGUMENTS_ELEMENTS) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}
6344 
6345 
// Dispatches to the ElementsAccessor matching this object's elements kind.
ElementsAccessor* JSObject::GetElementsAccessor() {
  return ElementsAccessor::ForKind(GetElementsKind());
}


// Predicates over the object's elements kind (see ElementsKind).
bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasSloppyArgumentsElements() {
  return GetElementsKind() == SLOPPY_ARGUMENTS_ELEMENTS;
}


// Checks the backing store's type directly rather than the elements kind.
bool JSObject::HasExternalArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsExternalArray();
}
6396 
6397 
// Generates JSObject::HasExternal<Type>Elements() for each typed-array
// type by testing the elements' instance type.
#define EXTERNAL_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)          \
bool JSObject::HasExternal##Type##Elements() {                          \
  HeapObject* array = elements();                                       \
  DCHECK(array != NULL);                                                \
  if (!array->IsHeapObject())                                           \
    return false;                                                       \
  return array->map()->instance_type() == EXTERNAL_##TYPE##_ARRAY_TYPE; \
}

TYPED_ARRAYS(EXTERNAL_ELEMENTS_CHECK)

#undef EXTERNAL_ELEMENTS_CHECK


bool JSObject::HasFixedTypedArrayElements() {
  HeapObject* array = elements();
  DCHECK(array != NULL);
  return array->IsFixedTypedArrayBase();
}


// Same pattern as above, but for on-heap fixed typed arrays.
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)         \
bool JSObject::HasFixed##Type##Elements() {                               \
  HeapObject* array = elements();                                         \
  DCHECK(array != NULL);                                                  \
  if (!array->IsHeapObject())                                             \
    return false;                                                         \
  return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE;      \
}

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK
6431 
6432 
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


// Only valid for objects in dictionary-properties mode.
NameDictionary* JSObject::property_dictionary() {
  DCHECK(!HasFastProperties());
  return NameDictionary::cast(properties());
}


// Only valid for objects with dictionary-mode elements.
SeededNumberDictionary* JSObject::element_dictionary() {
  DCHECK(HasDictionaryElements());
  return SeededNumberDictionary::cast(elements());
}
6453 
6454 
// A name's hash field holds a valid hash only when the
// kHashNotComputedMask bits are all clear.
bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


// Returns the hash, computing and caching it on first use.
uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}

// Only symbols can be "own"; the flag itself lives on the Symbol.
bool Name::IsOwn() {
  return this->IsSymbol() && Symbol::cast(this)->is_own();
}
6476 
6477 
// Begins incremental hashing of a string of |length| characters seeded with
// |seed|.  Strings whose length fits kMaxArrayIndexSize are simultaneously
// tracked as potential array indices (see UpdateIndex).
StringHasher::StringHasher(int length, uint32_t seed)
  : length_(length),
    raw_running_hash_(seed),
    array_index_(0),
    is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
    is_first_char_(true) {
  DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


// Over-long strings skip per-character hashing entirely.
bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}
6491 
6492 
// One mixing step of the Jenkins one-at-a-time hash.
uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
  running_hash += c;
  running_hash += (running_hash << 10);
  running_hash ^= (running_hash >> 6);
  return running_hash;
}


// Finalization (avalanche) step of the Jenkins one-at-a-time hash.  A
// result whose hash bits are all zero is replaced by kZeroHash --
// presumably because a zero hash is reserved to mean "not computed".
uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
  running_hash += (running_hash << 3);
  running_hash ^= (running_hash >> 11);
  running_hash += (running_hash << 15);
  if ((running_hash & String::kHashBitMask) == 0) {
    return kZeroHash;
  }
  return running_hash;
}
6510 
6511 
void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}


// Folds digit character |c| into the running array-index value.  Returns
// false (and permanently clears is_array_index_) once the string can no
// longer be an array index: a non-digit, a leading zero in a multi-digit
// string, or a value too large.
bool StringHasher::UpdateIndex(uint16_t c) {
  DCHECK(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  // 429496729 == 2^32 / 10; together with the digit-dependent correction
  // term this rejects exactly the values for which array_index_ * 10 + d
  // would overflow uint32.
  if (array_index_ > 429496729U - ((d + 2) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}
6540 
6541 
// Feeds |length| characters into the hash.  While the string still looks
// like an array index, the candidate index value is updated in lockstep;
// after the first disqualifying character only the hash is updated.
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    DCHECK(!is_array_index_);
    AddCharacter(chars[i]);
  }
}


// One-shot hash of a flat character sequence.
template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}
6570 
6571 
// Hashes |string| without flattening it: flat strings are visited directly,
// cons strings are walked segment by segment via ConsStringIteratorOp.
uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
  IteratingStringHasher hasher(string->length(), seed);
  // Nothing to do.
  if (hasher.has_trivial_hash()) return hasher.GetHashField();
  ConsString* cons_string = String::VisitFlat(&hasher, string);
  // The string was flat.
  if (cons_string == NULL) return hasher.GetHashField();
  // This is a ConsString, iterate across it.
  ConsStringIteratorOp op(cons_string);
  int offset;
  while (NULL != (string = op.Next(&offset))) {
    String::VisitFlat(&hasher, string, offset);
  }
  return hasher.GetHashField();
}


// Visitor callbacks invoked by String::VisitFlat for each flat segment.
void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}
6599 
6600 
// Only strings can be array indices; symbols never are.
bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


// Fast negative check: a computed hash field that carries the
// "not an array index" bit fails immediately; otherwise fall back to the
// slow parse in SlowAsArrayIndex.
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}
6613 
6614 
// Redirects this internalized string to its canonical duplicate by storing
// the canonical string's tagged pointer directly in the hash field slot.
void String::SetForwardedInternalizedString(String* canonical) {
  DCHECK(IsInternalizedString());
  DCHECK(HasHashCode());
  if (canonical == this) return;  // No need to forward.
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(canonical->HasHashCode());
  WRITE_FIELD(this, kHashFieldOffset, canonical);
  // Setting the hash field to a tagged value sets the LSB, causing the hash
  // code to be interpreted as uninitialized.  We use this fact to recognize
  // that we have a forwarded string.
  DCHECK(!HasHashCode());
}


// Returns the canonical string this one forwards to, or |this| itself when
// no forwarding is installed (signalled by a valid hash code).
String* String::GetForwardedInternalizedString() {
  DCHECK(IsInternalizedString());
  if (HasHashCode()) return this;
  String* canonical = String::cast(READ_FIELD(this, kHashFieldOffset));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->HasHashCode());
  return canonical;
}
6639 
6640 
Object* JSReceiver::GetConstructor() {
  return map()->constructor();
}


// Proxies delegate the property query to their handler; other receivers
// are answered via property attributes.  A valueless Maybe signals that the
// underlying lookup failed (e.g. threw).
Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
                                    Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  Maybe<PropertyAttributes> result = GetPropertyAttributes(object, name);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}


// Same as HasProperty, but only consults the object's own properties.
Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
                                       Handle<Name> name) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasPropertyWithHandler(proxy, name);
  }
  Maybe<PropertyAttributes> result = GetOwnPropertyAttributes(object, name);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}
6668 
6669 
// Looks up the attributes of |key|; names that parse as array indices on a
// JSObject are routed through the element path.
Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> key) {
  uint32_t index;
  if (object->IsJSObject() && key->AsArrayIndex(&index)) {
    return GetElementAttribute(object, index);
  }
  LookupIterator it(object, key);
  return GetPropertyAttributes(&it);
}


// Element attribute lookup, delegating to the proxy handler when needed.
Maybe<PropertyAttributes> JSReceiver::GetElementAttribute(
    Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true);
}
6690 
6691 
bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
}


// A global proxy is detached from |global| when its prototype chain no
// longer starts at that global object.
bool JSGlobalProxy::IsDetachedFrom(GlobalObject* global) const {
  const PrototypeIterator iter(this->GetIsolate(),
                               const_cast<JSGlobalProxy*>(this));
  return iter.GetCurrent() != global;
}


// Identity hash accessors, dispatched on the receiver's concrete kind.
Handle<Smi> JSReceiver::GetOrCreateIdentityHash(Handle<JSReceiver> object) {
  return object->IsJSProxy()
      ? JSProxy::GetOrCreateIdentityHash(Handle<JSProxy>::cast(object))
      : JSObject::GetOrCreateIdentityHash(Handle<JSObject>::cast(object));
}


Object* JSReceiver::GetIdentityHash() {
  return IsJSProxy()
      ? JSProxy::cast(this)->GetIdentityHash()
      : JSObject::cast(this)->GetIdentityHash();
}
6716 
6717 
// Element presence checks; the final bool argument of
// GetElementAttributeWithReceiver selects whether the prototype chain is
// consulted (true) or only own elements (false).  A valueless Maybe signals
// a failed lookup.
Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, true);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}


Maybe<bool> JSReceiver::HasOwnElement(Handle<JSReceiver> object,
                                      uint32_t index) {
  if (object->IsJSProxy()) {
    Handle<JSProxy> proxy = Handle<JSProxy>::cast(object);
    return JSProxy::HasElementWithHandler(proxy, index);
  }
  Maybe<PropertyAttributes> result = JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, false);
  if (!result.has_value) return Maybe<bool>();
  return maybe(result.value != ABSENT);
}


// Own-element attribute lookup, delegating to the proxy handler if needed.
Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttribute(
    Handle<JSReceiver> object, uint32_t index) {
  if (object->IsJSProxy()) {
    return JSProxy::GetElementAttributeWithHandler(
        Handle<JSProxy>::cast(object), object, index);
  }
  return JSObject::GetElementAttributeWithReceiver(
      Handle<JSObject>::cast(object), object, index, false);
}
6752 
6753 
// True if the kAllCanReadBit boolean flag is set in the packed flag() Smi.
bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}
6757 
6758 
// Sets or clears the kAllCanReadBit boolean flag in the packed flag() Smi.
void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}
6762 
6763 
// True if the kAllCanWriteBit boolean flag is set in the packed flag() Smi.
bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}
6767 
6768 
// Sets or clears the kAllCanWriteBit boolean flag in the packed flag() Smi.
void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}
6772 
6773 
property_attributes()6774 PropertyAttributes AccessorInfo::property_attributes() {
6775   return AttributesField::decode(static_cast<uint32_t>(flag()->value()));
6776 }
6777 
6778 
set_property_attributes(PropertyAttributes attributes)6779 void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
6780   set_flag(Smi::FromInt(AttributesField::update(flag()->value(), attributes)));
6781 }
6782 
6783 
IsCompatibleReceiver(Object * receiver)6784 bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
6785   if (!HasExpectedReceiverType()) return true;
6786   if (!receiver->IsJSObject()) return false;
6787   return FunctionTemplateInfo::cast(expected_receiver_type())
6788       ->IsTemplateFor(JSObject::cast(receiver)->map());
6789 }
6790 
6791 
clear_setter()6792 void ExecutableAccessorInfo::clear_setter() {
6793   set_setter(GetIsolate()->heap()->undefined_value(), SKIP_WRITE_BARRIER);
6794 }
6795 
6796 
6797 template<typename Derived, typename Shape, typename Key>
SetEntry(int entry,Handle<Object> key,Handle<Object> value)6798 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
6799                                                Handle<Object> key,
6800                                                Handle<Object> value) {
6801   SetEntry(entry, key, value, PropertyDetails(Smi::FromInt(0)));
6802 }
6803 
6804 
6805 template<typename Derived, typename Shape, typename Key>
SetEntry(int entry,Handle<Object> key,Handle<Object> value,PropertyDetails details)6806 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
6807                                                Handle<Object> key,
6808                                                Handle<Object> value,
6809                                                PropertyDetails details) {
6810   DCHECK(!key->IsName() ||
6811          details.IsDeleted() ||
6812          details.dictionary_index() > 0);
6813   int index = DerivedHashTable::EntryToIndex(entry);
6814   DisallowHeapAllocation no_gc;
6815   WriteBarrierMode mode = FixedArray::GetWriteBarrierMode(no_gc);
6816   FixedArray::set(index, *key, mode);
6817   FixedArray::set(index+1, *value, mode);
6818   FixedArray::set(index+2, details.AsSmi());
6819 }
6820 
6821 
IsMatch(uint32_t key,Object * other)6822 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
6823   DCHECK(other->IsNumber());
6824   return key == static_cast<uint32_t>(other->Number());
6825 }
6826 
6827 
// Hashes a numeric key with a fixed zero seed (unseeded table variant).
uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
  return ComputeIntegerHash(key, 0);
}
6831 
6832 
HashForObject(uint32_t key,Object * other)6833 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
6834                                                       Object* other) {
6835   DCHECK(other->IsNumber());
6836   return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
6837 }
6838 
6839 
// Hashes a numeric key mixed with the isolate-specific hash seed.
uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
  return ComputeIntegerHash(key, seed);
}
6843 
6844 
SeededHashForObject(uint32_t key,uint32_t seed,Object * other)6845 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
6846                                                           uint32_t seed,
6847                                                           Object* other) {
6848   DCHECK(other->IsNumber());
6849   return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
6850 }
6851 
6852 
// Boxes a uint32 key into a heap number (or Smi) handle for storage.
Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
  return isolate->factory()->NewNumberFromUint(key);
}
6856 
6857 
IsMatch(Handle<Name> key,Object * other)6858 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
6859   // We know that all entries in a hash table had their hash keys created.
6860   // Use that knowledge to have fast failure.
6861   if (key->Hash() != Name::cast(other)->Hash()) return false;
6862   return key->Equals(Name::cast(other));
6863 }
6864 
6865 
// Uses the name's own (cached) hash as the table hash.
uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
  return key->Hash();
}
6869 
6870 
HashForObject(Handle<Name> key,Object * other)6871 uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
6872   return Name::cast(other)->Hash();
6873 }
6874 
6875 
// Name keys are stored as-is; they must already be unique (internalized
// strings or symbols).
Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
                                             Handle<Name> key) {
  DCHECK(key->IsUniqueName());
  return key;
}
6881 
6882 
// Thin forwarder exposing the base-class enumeration-index regeneration.
void NameDictionary::DoGenerateNewEnumerationIndices(
    Handle<NameDictionary> dictionary) {
  DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
}
6887 
6888 
// Keys match under the SameValue relation (ES5 9.12).
bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}
6892 
6893 
Hash(Handle<Object> key)6894 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
6895   return Smi::cast(key->GetHash())->value();
6896 }
6897 
6898 
HashForObject(Handle<Object> key,Object * other)6899 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
6900                                              Object* other) {
6901   return Smi::cast(other->GetHash())->value();
6902 }
6903 
6904 
// Arbitrary object keys are stored as-is.
Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
                                              Handle<Object> key) {
  return key;
}
6909 
6910 
// Thin forwarder to the base hash table's capacity-shrinking logic.
Handle<ObjectHashTable> ObjectHashTable::Shrink(
    Handle<ObjectHashTable> table, Handle<Object> key) {
  return DerivedHashTable::Shrink(table, key);
}
6915 
6916 
// Keys match under the SameValue relation (ES5 9.12).
template <int entrysize>
bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
  return key->SameValue(other);
}
6921 
6922 
6923 template <int entrysize>
Hash(Handle<Object> key)6924 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
6925   intptr_t hash = reinterpret_cast<intptr_t>(*key);
6926   return (uint32_t)(hash & 0xFFFFFFFF);
6927 }
6928 
6929 
6930 template <int entrysize>
HashForObject(Handle<Object> key,Object * other)6931 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
6932                                                       Object* other) {
6933   intptr_t hash = reinterpret_cast<intptr_t>(other);
6934   return (uint32_t)(hash & 0xFFFFFFFF);
6935 }
6936 
6937 
// Arbitrary object keys are stored as-is.
template <int entrysize>
Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
                                                       Handle<Object> key) {
  return key;
}
6943 
6944 
ClearCodeCache(Heap * heap)6945 void Map::ClearCodeCache(Heap* heap) {
6946   // No write barrier is needed since empty_fixed_array is not in new space.
6947   // Please note this function is used during marking:
6948   //  - MarkCompactCollector::MarkUnmarkedObject
6949   //  - IncrementalMarking::Step
6950   DCHECK(!heap->InNewSpace(heap->empty_fixed_array()));
6951   WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
6952 }
6953 
6954 
EnsureSize(Handle<JSArray> array,int required_size)6955 void JSArray::EnsureSize(Handle<JSArray> array, int required_size) {
6956   DCHECK(array->HasFastSmiOrObjectElements());
6957   Handle<FixedArray> elts = handle(FixedArray::cast(array->elements()));
6958   const int kArraySizeThatFitsComfortablyInNewSpace = 128;
6959   if (elts->length() < required_size) {
6960     // Doubling in size would be overkill, but leave some slack to avoid
6961     // constantly growing.
6962     Expand(array, required_size + (required_size >> 3));
6963     // It's a performance benefit to keep a frequently used array in new-space.
6964   } else if (!array->GetHeap()->new_space()->Contains(*elts) &&
6965              required_size < kArraySizeThatFitsComfortablyInNewSpace) {
6966     // Expand will allocate a new backing store in new space even if the size
6967     // we asked for isn't larger than what we had before.
6968     Expand(array, required_size);
6969   }
6970 }
6971 
6972 
set_length(Smi * length)6973 void JSArray::set_length(Smi* length) {
6974   // Don't need a write barrier for a Smi.
6975   set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
6976 }
6977 
6978 
AllowsSetElementsLength()6979 bool JSArray::AllowsSetElementsLength() {
6980   bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
6981   DCHECK(result == !HasExternalArrayElements());
6982   return result;
6983 }
6984 
6985 
// Installs |storage| as the array's backing store and syncs the length.
// The DCHECK verifies the storage's map agrees with the array's elements
// kind: a fixed-double-array map requires a fast-double kind; any other map
// requires a fast-object kind, or a fast-smi kind provided the storage holds
// only Smis and holes.
void JSArray::SetContent(Handle<JSArray> array,
                         Handle<FixedArrayBase> storage) {
  EnsureCanContainElements(array, storage, storage->length(),
                           ALLOW_COPIED_DOUBLE_ELEMENTS);

  DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
          IsFastDoubleElementsKind(array->GetElementsKind())) ||
         ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
          (IsFastObjectElementsKind(array->GetElementsKind()) ||
           (IsFastSmiElementsKind(array->GetElementsKind()) &&
            Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
  array->set_elements(*storage);
  array->set_length(Smi::FromInt(storage->length()));
}
7000 
7001 
ic_total_count()7002 int TypeFeedbackInfo::ic_total_count() {
7003   int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7004   return ICTotalCountField::decode(current);
7005 }
7006 
7007 
// Stores |count| in the ICTotalCountField bits of storage slot 1.
// NOTE(review): |count| is passed through ICTotalCountField::decode before
// the update, which appears to mask it to the field's width (assuming the
// field starts at bit 0) — values wider than the field are truncated.
// Confirm this masking is intentional.
void TypeFeedbackInfo::set_ic_total_count(int count) {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  value = ICTotalCountField::update(value,
                                    ICTotalCountField::decode(count));
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
7014 
7015 
ic_with_type_info_count()7016 int TypeFeedbackInfo::ic_with_type_info_count() {
7017   int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7018   return ICsWithTypeInfoCountField::decode(current);
7019 }
7020 
7021 
change_ic_with_type_info_count(int delta)7022 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
7023   if (delta == 0) return;
7024   int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7025   int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
7026   // We can get negative count here when the type-feedback info is
7027   // shared between two code objects. The can only happen when
7028   // the debugger made a shallow copy of code object (see Heap::CopyCode).
7029   // Since we do not optimize when the debugger is active, we can skip
7030   // this counter update.
7031   if (new_count >= 0) {
7032     new_count &= ICsWithTypeInfoCountField::kMask;
7033     value = ICsWithTypeInfoCountField::update(value, new_count);
7034     WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
7035   }
7036 }
7037 
7038 
ic_generic_count()7039 int TypeFeedbackInfo::ic_generic_count() {
7040   return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
7041 }
7042 
7043 
change_ic_generic_count(int delta)7044 void TypeFeedbackInfo::change_ic_generic_count(int delta) {
7045   if (delta == 0) return;
7046   int new_count = ic_generic_count() + delta;
7047   if (new_count >= 0) {
7048     new_count &= ~Smi::kMinValue;
7049     WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
7050   }
7051 }
7052 
7053 
initialize_storage()7054 void TypeFeedbackInfo::initialize_storage() {
7055   WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(0));
7056   WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(0));
7057   WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(0));
7058 }
7059 
7060 
// Increments the own-type-change checksum (modulo its bit width) packed into
// storage slot 1.
void TypeFeedbackInfo::change_own_type_change_checksum() {
  int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
  int checksum = OwnTypeChangeChecksum::decode(value);
  // Wrap the checksum at the field width.
  checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
  value = OwnTypeChangeChecksum::update(value, checksum);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
}
7071 
7072 
// Stores the low kTypeChangeChecksumBits bits of |checksum| into the
// inlined-type-change field packed in storage slot 2.
void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
  int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
  int mask = (1 << kTypeChangeChecksumBits) - 1;
  value = InlinedTypeChangeChecksum::update(value, checksum & mask);
  // Ensure packed bit field is in Smi range.
  if (value > Smi::kMaxValue) value |= Smi::kMinValue;
  if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
  WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
}
7082 
7083 
own_type_change_checksum()7084 int TypeFeedbackInfo::own_type_change_checksum() {
7085   int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
7086   return OwnTypeChangeChecksum::decode(value);
7087 }
7088 
7089 
matches_inlined_type_change_checksum(int checksum)7090 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
7091   int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
7092   int mask = (1 << kTypeChangeChecksumBits) - 1;
7093   return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
7094 }
7095 
7096 
// Generates aliased_context_slot() / set_aliased_context_slot(), backed by
// the Smi field at kAliasedContextSlot.
SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
7098 
7099 
7100 Relocatable::Relocatable(Isolate* isolate) {
7101   isolate_ = isolate;
7102   prev_ = isolate->relocatable_top();
7103   isolate->set_relocatable_top(this);
7104 }
7105 
7106 
// Pops this object from the isolate's relocatable stack; relocatables must
// be destroyed in strict LIFO order.
Relocatable::~Relocatable() {
  DCHECK_EQ(isolate_->relocatable_top(), this);
  isolate_->set_relocatable_top(prev_);
}
7111 
7112 
// A JSObject's body size is fully determined by its map's instance size.
int JSObject::BodyDescriptor::SizeOf(Map* map, HeapObject* object) {
  return map->instance_size();
}
7116 
7117 
ForeignIterateBody(ObjectVisitor * v)7118 void Foreign::ForeignIterateBody(ObjectVisitor* v) {
7119   v->VisitExternalReference(
7120       reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7121 }
7122 
7123 
7124 template<typename StaticVisitor>
ForeignIterateBody()7125 void Foreign::ForeignIterateBody() {
7126   StaticVisitor::VisitExternalReference(
7127       reinterpret_cast<Address*>(FIELD_ADDR(this, kForeignAddressOffset)));
7128 }
7129 
7130 
ExternalOneByteStringIterateBody(ObjectVisitor * v)7131 void ExternalOneByteString::ExternalOneByteStringIterateBody(ObjectVisitor* v) {
7132   typedef v8::String::ExternalOneByteStringResource Resource;
7133   v->VisitExternalOneByteString(
7134       reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7135 }
7136 
7137 
7138 template <typename StaticVisitor>
ExternalOneByteStringIterateBody()7139 void ExternalOneByteString::ExternalOneByteStringIterateBody() {
7140   typedef v8::String::ExternalOneByteStringResource Resource;
7141   StaticVisitor::VisitExternalOneByteString(
7142       reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7143 }
7144 
7145 
ExternalTwoByteStringIterateBody(ObjectVisitor * v)7146 void ExternalTwoByteString::ExternalTwoByteStringIterateBody(ObjectVisitor* v) {
7147   typedef v8::String::ExternalStringResource Resource;
7148   v->VisitExternalTwoByteString(
7149       reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7150 }
7151 
7152 
7153 template<typename StaticVisitor>
ExternalTwoByteStringIterateBody()7154 void ExternalTwoByteString::ExternalTwoByteStringIterateBody() {
7155   typedef v8::String::ExternalStringResource Resource;
7156   StaticVisitor::VisitExternalTwoByteString(
7157       reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset)));
7158 }
7159 
7160 
7161 template<int start_offset, int end_offset, int size>
IterateBody(HeapObject * obj,ObjectVisitor * v)7162 void FixedBodyDescriptor<start_offset, end_offset, size>::IterateBody(
7163     HeapObject* obj,
7164     ObjectVisitor* v) {
7165     v->VisitPointers(HeapObject::RawField(obj, start_offset),
7166                      HeapObject::RawField(obj, end_offset));
7167 }
7168 
7169 
7170 template<int start_offset>
IterateBody(HeapObject * obj,int object_size,ObjectVisitor * v)7171 void FlexibleBodyDescriptor<start_offset>::IterateBody(HeapObject* obj,
7172                                                        int object_size,
7173                                                        ObjectVisitor* v) {
7174   v->VisitPointers(HeapObject::RawField(obj, start_offset),
7175                    HeapObject::RawField(obj, object_size));
7176 }
7177 
7178 
7179 template<class Derived, class TableType>
CurrentKey()7180 Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
7181   TableType* table(TableType::cast(this->table()));
7182   int index = Smi::cast(this->index())->value();
7183   Object* key = table->KeyAt(index);
7184   DCHECK(!key->IsTheHole());
7185   return key;
7186 }
7187 
7188 
// Set iteration yields only the key; store it in slot 0.
void JSSetIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
}
7192 
7193 
// Map iteration yields key/value pairs: key in slot 0, value in slot 1.
void JSMapIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
  array->set(1, CurrentValue());
}
7198 
7199 
CurrentValue()7200 Object* JSMapIterator::CurrentValue() {
7201   OrderedHashMap* table(OrderedHashMap::cast(this->table()));
7202   int index = Smi::cast(this->index())->value();
7203   Object* value = table->ValueAt(index);
7204   DCHECK(!value->IsTheHole());
7205   return value;
7206 }
7207 
7208 
7209 #undef TYPE_CHECKER
7210 #undef CAST_ACCESSOR
7211 #undef INT_ACCESSORS
7212 #undef ACCESSORS
7213 #undef ACCESSORS_TO_SMI
7214 #undef SMI_ACCESSORS
7215 #undef SYNCHRONIZED_SMI_ACCESSORS
7216 #undef NOBARRIER_SMI_ACCESSORS
7217 #undef BOOL_GETTER
7218 #undef BOOL_ACCESSORS
7219 #undef FIELD_ADDR
7220 #undef FIELD_ADDR_CONST
7221 #undef READ_FIELD
7222 #undef NOBARRIER_READ_FIELD
7223 #undef WRITE_FIELD
7224 #undef NOBARRIER_WRITE_FIELD
7225 #undef WRITE_BARRIER
7226 #undef CONDITIONAL_WRITE_BARRIER
7227 #undef READ_DOUBLE_FIELD
7228 #undef WRITE_DOUBLE_FIELD
7229 #undef READ_INT_FIELD
7230 #undef WRITE_INT_FIELD
7231 #undef READ_INTPTR_FIELD
7232 #undef WRITE_INTPTR_FIELD
7233 #undef READ_UINT32_FIELD
7234 #undef WRITE_UINT32_FIELD
7235 #undef READ_SHORT_FIELD
7236 #undef WRITE_SHORT_FIELD
7237 #undef READ_BYTE_FIELD
7238 #undef WRITE_BYTE_FIELD
7239 #undef NOBARRIER_READ_BYTE_FIELD
7240 #undef NOBARRIER_WRITE_BYTE_FIELD
7241 
7242 } }  // namespace v8::internal
7243 
7244 #endif  // V8_OBJECTS_INL_H_
7245