1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 //
5 // Review notes:
6 //
7 // - The use of macros in these inline functions may seem superfluous
8 // but it is absolutely needed to make sure gcc generates optimal
9 // code. gcc is not happy when attempting to inline too deep.
10 //
11
12 #ifndef V8_OBJECTS_INL_H_
13 #define V8_OBJECTS_INL_H_
14
15 #include "src/base/atomicops.h"
16 #include "src/base/bits.h"
17 #include "src/builtins/builtins.h"
18 #include "src/contexts-inl.h"
19 #include "src/conversions-inl.h"
20 #include "src/factory.h"
21 #include "src/field-index-inl.h"
22 #include "src/field-type.h"
23 #include "src/handles-inl.h"
24 #include "src/heap/heap-inl.h"
25 #include "src/heap/heap.h"
26 #include "src/isolate-inl.h"
27 #include "src/isolate.h"
28 #include "src/keys.h"
29 #include "src/layout-descriptor-inl.h"
30 #include "src/lookup-cache-inl.h"
31 #include "src/lookup.h"
32 #include "src/objects.h"
33 #include "src/property.h"
34 #include "src/prototype.h"
35 #include "src/transitions-inl.h"
36 #include "src/type-feedback-vector-inl.h"
37 #include "src/v8memory.h"
38
39 namespace v8 {
40 namespace internal {
41
// Reconstructs PropertyDetails from its Smi-encoded form (the inverse of
// AsSmi() below).
PropertyDetails::PropertyDetails(Smi* smi) {
  value_ = smi->value();
}
45
46
// Packs the details bits into a Smi so they can be stored in the heap.
Smi* PropertyDetails::AsSmi() const {
  // Ensure the upper 2 bits have the same value by sign extending it. This is
  // necessary to be able to use the 31st bit of the property details.
  int value = value_ << 1;
  return Smi::FromInt(value >> 1);
}
53
54
field_width_in_words()55 int PropertyDetails::field_width_in_words() const {
56 DCHECK(location() == kField);
57 if (!FLAG_unbox_double_fields) return 1;
58 if (kDoubleSize == kPointerSize) return 1;
59 return representation().IsDouble() ? kDoubleSize / kPointerSize : 1;
60 }
61
// Defines HeapObject::Is##type() as an exact instance-type comparison.
#define TYPE_CHECKER(type, instancetype)           \
  bool HeapObject::Is##type() const {              \
    return map()->instance_type() == instancetype; \
  }
66
// Defines type::cast() (const and non-const), which is a checked
// reinterpret_cast: the Is##type() check runs only in slow-DCHECK builds.
#define CAST_ACCESSOR(type)                       \
  type* type::cast(Object* object) {              \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<type*>(object);       \
  }                                               \
  const type* type::cast(const Object* object) {  \
    SLOW_DCHECK(object->Is##type());              \
    return reinterpret_cast<const type*>(object); \
  }
76
77
// Defines a raw-int getter/setter pair for a field at a fixed byte offset.
#define INT_ACCESSORS(holder, name, offset)                                   \
  int holder::name() const { return READ_INT_FIELD(this, offset); }           \
  void holder::set_##name(int value) { WRITE_INT_FIELD(this, offset, value); }
81
// Defines a tagged-pointer getter/setter pair guarded by `condition`.  The
// setter emits a write barrier as required by `mode`.
#define ACCESSORS_CHECKED(holder, name, type, offset, condition)    \
  type* holder::name() const {                                      \
    DCHECK(condition);                                              \
    return type::cast(READ_FIELD(this, offset));                    \
  }                                                                 \
  void holder::set_##name(type* value, WriteBarrierMode mode) {     \
    DCHECK(condition);                                              \
    WRITE_FIELD(this, offset, value);                               \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }
92
// Unconditional variant of ACCESSORS_CHECKED.
#define ACCESSORS(holder, name, type, offset) \
  ACCESSORS_CHECKED(holder, name, type, offset, true)
95
// Getter that returns a Smi as an int and writes an int as a Smi.
// Guarded by `condition` in DCHECK builds.
#define SMI_ACCESSORS_CHECKED(holder, name, offset, condition) \
  int holder::name() const {                                   \
    DCHECK(condition);                                         \
    Object* value = READ_FIELD(this, offset);                  \
    return Smi::cast(value)->value();                          \
  }                                                            \
  void holder::set_##name(int value) {                         \
    DCHECK(condition);                                         \
    WRITE_FIELD(this, offset, Smi::FromInt(value));            \
  }
107
// Unconditional variant of SMI_ACCESSORS_CHECKED.
#define SMI_ACCESSORS(holder, name, offset) \
  SMI_ACCESSORS_CHECKED(holder, name, offset, true)
110
// Smi accessors using acquire/release atomics, for fields read concurrently
// by other threads (e.g. the concurrent marker).
#define SYNCHRONIZED_SMI_ACCESSORS(holder, name, offset)    \
  int holder::synchronized_##name() const {                 \
    Object* value = ACQUIRE_READ_FIELD(this, offset);       \
    return Smi::cast(value)->value();                       \
  }                                                         \
  void holder::synchronized_set_##name(int value) {         \
    RELEASE_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
119
// Smi accessors using relaxed atomics: no ordering guarantees, only
// tear-free reads/writes.
#define NOBARRIER_SMI_ACCESSORS(holder, name, offset)        \
  int holder::nobarrier_##name() const {                     \
    Object* value = NOBARRIER_READ_FIELD(this, offset);      \
    return Smi::cast(value)->value();                        \
  }                                                          \
  void holder::nobarrier_set_##name(int value) {             \
    NOBARRIER_WRITE_FIELD(this, offset, Smi::FromInt(value)); \
  }
128
// Defines a read-only getter for a single boolean bit at `offset` within the
// bitfield returned by `field()`.
// NOTE: the original definition ended with a stray `\` line-continuation
// after the closing brace, which silently pulled the following source line
// into the macro body; the trailing continuation is removed here.
#define BOOL_GETTER(holder, field, name, offset) \
  bool holder::name() const {                    \
    return BooleanBit::get(field(), offset);     \
  }
134
// Defines getter/setter for a single boolean bit within the bitfield
// accessed via `field()` / set_##field().
#define BOOL_ACCESSORS(holder, field, name, offset) \
  bool holder::name() const {                       \
    return BooleanBit::get(field(), offset);        \
  }                                                 \
  void holder::set_##name(bool value) {             \
    set_##field(BooleanBit::set(field(), offset, value)); \
  }
142
// True for any of the fixed-array-like backing stores.
bool HeapObject::IsFixedArrayBase() const {
  return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
}

bool HeapObject::IsFixedArray() const {
  InstanceType instance_type = map()->instance_type();
  // TransitionArray shares the FixedArray layout, so it counts as one here.
  return instance_type == FIXED_ARRAY_TYPE ||
         instance_type == TRANSITION_ARRAY_TYPE;
}
152
153
// External objects are not extensible, so the map check is enough.
bool HeapObject::IsExternal() const {
  return map() == GetHeap()->external_map();
}
158
159
// Exact instance-type checks for number-like and SIMD heap objects.
TYPE_CHECKER(HeapNumber, HEAP_NUMBER_TYPE)
TYPE_CHECKER(MutableHeapNumber, MUTABLE_HEAP_NUMBER_TYPE)
TYPE_CHECKER(Symbol, SYMBOL_TYPE)
TYPE_CHECKER(Simd128Value, SIMD128_VALUE_TYPE)

// SIMD values all share SIMD128_VALUE_TYPE; the concrete lane type is
// distinguished by map identity instead.
#define SIMD128_TYPE_CHECKER(TYPE, Type, type, lane_count, lane_type) \
  bool HeapObject::Is##Type() const { return map() == GetHeap()->type##_map(); }
SIMD128_TYPES(SIMD128_TYPE_CHECKER)
#undef SIMD128_TYPE_CHECKER
169
// Object::Is##type() forwards to the HeapObject check after a Smi guard.
#define IS_TYPE_FUNCTION_DEF(type_)                               \
  bool Object::Is##type_() const {                                \
    return IsHeapObject() && HeapObject::cast(this)->Is##type_(); \
  }
HEAP_OBJECT_TYPE_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF

// Oddball checks compare against the singleton stored on the heap roots.
#define IS_TYPE_FUNCTION_DEF(Type, Value)            \
  bool Object::Is##Type(Isolate* isolate) const {    \
    return this == isolate->heap()->Value();         \
  }                                                  \
  bool HeapObject::Is##Type(Isolate* isolate) const { \
    return this == isolate->heap()->Value();         \
  }
ODDBALL_LIST(IS_TYPE_FUNCTION_DEF)
#undef IS_TYPE_FUNCTION_DEF
186
// String instance types are allocated below FIRST_NONSTRING_TYPE, so a
// single comparison suffices.
bool HeapObject::IsString() const {
  return map()->instance_type() < FIRST_NONSTRING_TYPE;
}

// Names are strings and symbols; their types come first in the enum.
bool HeapObject::IsName() const {
  return map()->instance_type() <= LAST_NAME_TYPE;
}

bool HeapObject::IsUniqueName() const {
  return IsInternalizedString() || IsSymbol();
}

// Unique means: not a (non-internalized) string, i.e. internalized string or
// symbol.  Checked via the tag bits rather than two separate predicates.
bool Name::IsUniqueName() const {
  uint32_t type = map()->instance_type();
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) !=
         (kStringTag | kNotInternalizedTag);
}
204
// Function types occupy the top of the instance-type enum.
bool HeapObject::IsFunction() const {
  STATIC_ASSERT(LAST_FUNCTION_TYPE == LAST_TYPE);
  return map()->instance_type() >= FIRST_FUNCTION_TYPE;
}

// Callable/constructor-ness is a map bit, not an instance type.
bool HeapObject::IsCallable() const { return map()->is_callable(); }

bool HeapObject::IsConstructor() const { return map()->is_constructor(); }

bool HeapObject::IsTemplateInfo() const {
  return IsObjectTemplateInfo() || IsFunctionTemplateInfo();
}
217
// An internalized string has the string tag and the internalized tag set in
// its instance type.
bool HeapObject::IsInternalizedString() const {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}
224
// The predicates below first guard on IsString(), then consult the
// StringShape (representation/encoding bits of the instance type).

bool HeapObject::IsConsString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsCons();
}

bool HeapObject::IsSlicedString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSliced();
}

bool HeapObject::IsSeqString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential();
}

bool HeapObject::IsSeqOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsOneByteRepresentation();
}

bool HeapObject::IsSeqTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsSequential() &&
         String::cast(this)->IsTwoByteRepresentation();
}

bool HeapObject::IsExternalString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal();
}

bool HeapObject::IsExternalOneByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsOneByteRepresentation();
}

bool HeapObject::IsExternalTwoByteString() const {
  if (!IsString()) return false;
  return StringShape(String::cast(this)).IsExternal() &&
         String::cast(this)->IsTwoByteRepresentation();
}
268
// True if this object can serve as an elements backing store.
bool Object::HasValidElements() {
  // Dictionary is covered under FixedArray.
  return IsFixedArray() || IsFixedDoubleArray() || IsFixedTypedArrayBase();
}
273
274
KeyEquals(Object * second)275 bool Object::KeyEquals(Object* second) {
276 Object* first = this;
277 if (second->IsNumber()) {
278 if (first->IsNumber()) return first->Number() == second->Number();
279 Object* temp = first;
280 first = second;
281 second = temp;
282 }
283 if (first->IsNumber()) {
284 DCHECK_LE(0, first->Number());
285 uint32_t expected = static_cast<uint32_t>(first->Number());
286 uint32_t index;
287 return Name::cast(second)->AsArrayIndex(&index) && index == expected;
288 }
289 return Name::cast(first)->Equals(Name::cast(second));
290 }
291
292
FilterKey(PropertyFilter filter)293 bool Object::FilterKey(PropertyFilter filter) {
294 if (IsSymbol()) {
295 if (filter & SKIP_SYMBOLS) return true;
296 if (Symbol::cast(this)->is_private()) return true;
297 } else {
298 if (filter & SKIP_STRINGS) return true;
299 }
300 return false;
301 }
302
303
// Allocates (or reuses) a storage cell suited to `representation`:
// Smi 0 for uninitialized Smi fields, a MUTABLE HeapNumber for double
// fields, and the object itself otherwise.
Handle<Object> Object::NewStorageFor(Isolate* isolate,
                                     Handle<Object> object,
                                     Representation representation) {
  if (representation.IsSmi() && object->IsUninitialized(isolate)) {
    return handle(Smi::kZero, isolate);
  }
  if (!representation.IsDouble()) return object;
  double value;
  if (object->IsUninitialized(isolate)) {
    value = 0;
  } else if (object->IsMutableHeapNumber()) {
    value = HeapNumber::cast(*object)->value();
  } else {
    value = object->Number();
  }
  // MUTABLE: the box may be updated in place on later field stores.
  return isolate->factory()->NewHeapNumber(value, MUTABLE);
}
321
322
// Wraps a field value for reading: double fields are copied into a fresh
// (immutable) HeapNumber so the caller never observes in-place mutation.
Handle<Object> Object::WrapForRead(Isolate* isolate,
                                   Handle<Object> object,
                                   Representation representation) {
  DCHECK(!object->IsUninitialized(isolate));
  if (!representation.IsDouble()) {
    DCHECK(object->FitsRepresentation(representation));
    return object;
  }
  return isolate->factory()->NewHeapNumber(HeapNumber::cast(*object)->value());
}
333
334
// StringShape snapshots a string's instance-type bits; all three
// constructors assert the value really is a string type.

StringShape::StringShape(const String* str)
  : type_(str->map()->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(Map* map)
  : type_(map->instance_type()) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}


StringShape::StringShape(InstanceType t)
  : type_(static_cast<uint32_t>(t)) {
  set_valid();
  DCHECK((type_ & kIsNotStringMask) == kStringTag);
}
354
355
// Same tag test as HeapObject::IsInternalizedString, on the cached bits.
bool StringShape::IsInternalized() {
  DCHECK(valid());
  STATIC_ASSERT(kNotInternalizedTag != 0);
  return (type_ & (kIsNotStringMask | kIsNotInternalizedMask)) ==
         (kStringTag | kInternalizedTag);
}
362
363
// Encoding of this node only; cons/sliced strings may differ underneath
// (see IsOneByteRepresentationUnderneath).
bool String::IsOneByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kOneByteStringTag;
}


bool String::IsTwoByteRepresentation() const {
  uint32_t type = map()->instance_type();
  return (type & kStringEncodingMask) == kTwoByteStringTag;
}
374
375
// Encoding of the actual character storage: for indirect (cons/sliced)
// strings the underlying string is consulted.  Requires a flat string.
bool String::IsOneByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return true;
    case kTwoByteStringTag:
      return false;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsOneByteRepresentation();
  }
}
390
391
// Mirror of IsOneByteRepresentationUnderneath for the two-byte encoding.
bool String::IsTwoByteRepresentationUnderneath() {
  uint32_t type = map()->instance_type();
  STATIC_ASSERT(kIsIndirectStringTag != 0);
  STATIC_ASSERT((kIsIndirectStringMask & kStringEncodingMask) == 0);
  DCHECK(IsFlat());
  switch (type & (kIsIndirectStringMask | kStringEncodingMask)) {
    case kOneByteStringTag:
      return false;
    case kTwoByteStringTag:
      return true;
    default:  // Cons or sliced string.  Need to go deeper.
      return GetUnderlying()->IsTwoByteRepresentation();
  }
}
406
407
// True when the character data is known to fit in one byte, either by
// representation or via the one-byte-data hint bit.
bool String::HasOnlyOneByteChars() {
  uint32_t type = map()->instance_type();
  return (type & kOneByteDataHintMask) == kOneByteDataHintTag ||
         IsOneByteRepresentation();
}
413
414
// Representation-tag predicates on the cached instance-type bits.

bool StringShape::IsCons() {
  return (type_ & kStringRepresentationMask) == kConsStringTag;
}


bool StringShape::IsSliced() {
  return (type_ & kStringRepresentationMask) == kSlicedStringTag;
}


// Indirect = cons or sliced (the string points at other string storage).
bool StringShape::IsIndirect() {
  return (type_ & kIsIndirectStringMask) == kIsIndirectStringTag;
}


bool StringShape::IsExternal() {
  return (type_ & kStringRepresentationMask) == kExternalStringTag;
}


bool StringShape::IsSequential() {
  return (type_ & kStringRepresentationMask) == kSeqStringTag;
}
438
439
// Raw tag extraction helpers.

StringRepresentationTag StringShape::representation_tag() {
  uint32_t tag = (type_ & kStringRepresentationMask);
  return static_cast<StringRepresentationTag>(tag);
}


uint32_t StringShape::encoding_tag() {
  return type_ & kStringEncodingMask;
}


// Representation and encoding bits combined; compared against composed tag
// values below.
uint32_t StringShape::full_representation_tag() {
  return (type_ & (kStringRepresentationMask | kStringEncodingMask));
}


// Keep the internal masks in sync with the ones exported through the API.
STATIC_ASSERT((kStringRepresentationMask | kStringEncodingMask) ==
              Internals::kFullStringRepresentationMask);

STATIC_ASSERT(static_cast<uint32_t>(kStringEncodingMask) ==
              Internals::kStringEncodingMask);
461
462
// Combined representation+encoding predicates, with asserts tying the
// composed tag values to the constants exported via the public API.

bool StringShape::IsSequentialOneByte() {
  return full_representation_tag() == (kSeqStringTag | kOneByteStringTag);
}


bool StringShape::IsSequentialTwoByte() {
  return full_representation_tag() == (kSeqStringTag | kTwoByteStringTag);
}


bool StringShape::IsExternalOneByte() {
  return full_representation_tag() == (kExternalStringTag | kOneByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kOneByteStringTag) ==
              Internals::kExternalOneByteRepresentationTag);

STATIC_ASSERT(v8::String::ONE_BYTE_ENCODING == kOneByteStringTag);


bool StringShape::IsExternalTwoByte() {
  return full_representation_tag() == (kExternalStringTag | kTwoByteStringTag);
}


STATIC_ASSERT((kExternalStringTag | kTwoByteStringTag) ==
              Internals::kExternalTwoByteRepresentationTag);

STATIC_ASSERT(v8::String::TWO_BYTE_ENCODING == kTwoByteStringTag);
493
494
Get(int index)495 uc32 FlatStringReader::Get(int index) {
496 if (is_one_byte_) {
497 return Get<uint8_t>(index);
498 } else {
499 return Get<uc16>(index);
500 }
501 }
502
503
// Typed read from the flat character buffer.  Char must match the reader's
// encoding (checked in DCHECK builds).
template <typename Char>
Char FlatStringReader::Get(int index) {
  DCHECK_EQ(is_one_byte_, sizeof(Char) == 1);
  // NOTE(review): the bound is `index <= length_` (inclusive) — presumably
  // deliberate to allow reading one past the last character; confirm against
  // callers before tightening.
  DCHECK(0 <= index && index <= length_);
  if (sizeof(Char) == 1) {
    return static_cast<Char>(static_cast<const uint8_t*>(start_)[index]);
  } else {
    return static_cast<Char>(static_cast<const uc16*>(start_)[index]);
  }
}
514
515
// These shape classes simply delegate handle creation to the key object.

Handle<Object> StringTableShape::AsHandle(Isolate* isolate, HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CompilationCacheShape::AsHandle(Isolate* isolate,
                                               HashTableKey* key) {
  return key->AsHandle(isolate);
}


Handle<Object> CodeCacheHashTableShape::AsHandle(Isolate* isolate,
                                                 HashTableKey* key) {
  return key->AsHandle(isolate);
}
531
// Hash-table key over a flat character vector (one- or two-byte), used for
// string-table lookups without allocating a String first.
template <typename Char>
class SequentialStringKey : public HashTableKey {
 public:
  explicit SequentialStringKey(Vector<const Char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  uint32_t Hash() override {
    hash_field_ = StringHasher::HashSequentialString<Char>(string_.start(),
                                                           string_.length(),
                                                           seed_);

    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }


  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Vector<const Char> string_;
  uint32_t hash_field_;  // Full hash field; cached for AsHandle().
  uint32_t seed_;
};
557
558
// One-byte specialization: matches against existing strings byte-wise.
class OneByteStringKey : public SequentialStringKey<uint8_t> {
 public:
  OneByteStringKey(Vector<const uint8_t> str, uint32_t seed)
      : SequentialStringKey<uint8_t>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsOneByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};
570
571
572 class SeqOneByteSubStringKey : public HashTableKey {
573 public:
SeqOneByteSubStringKey(Handle<SeqOneByteString> string,int from,int length)574 SeqOneByteSubStringKey(Handle<SeqOneByteString> string, int from, int length)
575 : string_(string), from_(from), length_(length) {
576 DCHECK(string_->IsSeqOneByteString());
577 }
578
Hash()579 uint32_t Hash() override {
580 DCHECK(length_ >= 0);
581 DCHECK(from_ + length_ <= string_->length());
582 const uint8_t* chars = string_->GetChars() + from_;
583 hash_field_ = StringHasher::HashSequentialString(
584 chars, length_, string_->GetHeap()->HashSeed());
585 uint32_t result = hash_field_ >> String::kHashShift;
586 DCHECK(result != 0); // Ensure that the hash value of 0 is never computed.
587 return result;
588 }
589
HashForObject(Object * other)590 uint32_t HashForObject(Object* other) override {
591 return String::cast(other)->Hash();
592 }
593
594 bool IsMatch(Object* string) override;
595 Handle<Object> AsHandle(Isolate* isolate) override;
596
597 private:
598 Handle<SeqOneByteString> string_;
599 int from_;
600 int length_;
601 uint32_t hash_field_;
602 };
603
604
// Two-byte specialization of SequentialStringKey.
class TwoByteStringKey : public SequentialStringKey<uc16> {
 public:
  explicit TwoByteStringKey(Vector<const uc16> str, uint32_t seed)
      : SequentialStringKey<uc16>(str, seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsTwoByteEqualTo(string_);
  }

  Handle<Object> AsHandle(Isolate* isolate) override;
};
616
617
// Utf8StringKey carries a vector of chars as key.
class Utf8StringKey : public HashTableKey {
 public:
  explicit Utf8StringKey(Vector<const char> string, uint32_t seed)
      : string_(string), hash_field_(0), seed_(seed) { }

  bool IsMatch(Object* string) override {
    return String::cast(string)->IsUtf8EqualTo(string_);
  }

  uint32_t Hash() override {
    // hash_field_ doubles as a "hash computed" flag; 0 means not yet done.
    if (hash_field_ != 0) return hash_field_ >> String::kHashShift;
    hash_field_ = StringHasher::ComputeUtf8Hash(string_, seed_, &chars_);
    uint32_t result = hash_field_ >> String::kHashShift;
    DCHECK(result != 0);  // Ensure that the hash value of 0 is never computed.
    return result;
  }

  uint32_t HashForObject(Object* other) override {
    return String::cast(other)->Hash();
  }

  Handle<Object> AsHandle(Isolate* isolate) override {
    // Ensure hash_field_ and chars_ are populated before allocating.
    if (hash_field_ == 0) Hash();
    return isolate->factory()->NewInternalizedStringFromUtf8(
        string_, chars_, hash_field_);
  }

  Vector<const char> string_;
  uint32_t hash_field_;
  int chars_;  // Caches the number of characters when computing the hash code.
  uint32_t seed_;
};
651
652
// A JS number is either a Smi or a boxed HeapNumber.
bool Object::IsNumber() const {
  return IsSmi() || IsHeapNumber();
}


TYPE_CHECKER(ByteArray, BYTE_ARRAY_TYPE)
TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
TYPE_CHECKER(FreeSpace, FREE_SPACE_TYPE)

// Filler covers both one/two-word fillers and free-space blocks.
bool HeapObject::IsFiller() const {
  InstanceType instance_type = map()->instance_type();
  return instance_type == FREE_SPACE_TYPE || instance_type == FILLER_TYPE;
}
666
667
668
// Per-element-type checkers for the fixed typed arrays.
#define TYPED_ARRAY_TYPE_CHECKER(Type, type, TYPE, ctype, size) \
  TYPE_CHECKER(Fixed##Type##Array, FIXED_##TYPE##_ARRAY_TYPE)

TYPED_ARRAYS(TYPED_ARRAY_TYPE_CHECKER)
#undef TYPED_ARRAY_TYPE_CHECKER

// All fixed typed array types form one contiguous instance-type range.
bool HeapObject::IsFixedTypedArrayBase() const {
  InstanceType instance_type = map()->instance_type();
  return (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
          instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE);
}
680
// JSReceiver types occupy the top of the instance-type enum.
bool HeapObject::IsJSReceiver() const {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return map()->instance_type() >= FIRST_JS_RECEIVER_TYPE;
}

bool HeapObject::IsJSObject() const {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return map()->IsJSObjectMap();
}

bool HeapObject::IsJSProxy() const { return map()->IsJSProxyMap(); }

// Array iterators have one instance type per elements kind, forming a range.
bool HeapObject::IsJSArrayIterator() const {
  InstanceType instance_type = map()->instance_type();
  return (instance_type >= FIRST_ARRAY_ITERATOR_TYPE &&
          instance_type <= LAST_ARRAY_ITERATOR_TYPE);
}
698
// Exact instance-type checks for collections and assorted heap objects.
// Note WeakFixedArray deliberately reuses FIXED_ARRAY_TYPE.
TYPE_CHECKER(JSSet, JS_SET_TYPE)
TYPE_CHECKER(JSMap, JS_MAP_TYPE)
TYPE_CHECKER(JSSetIterator, JS_SET_ITERATOR_TYPE)
TYPE_CHECKER(JSMapIterator, JS_MAP_ITERATOR_TYPE)
TYPE_CHECKER(JSWeakMap, JS_WEAK_MAP_TYPE)
TYPE_CHECKER(JSWeakSet, JS_WEAK_SET_TYPE)
TYPE_CHECKER(JSContextExtensionObject, JS_CONTEXT_EXTENSION_OBJECT_TYPE)
TYPE_CHECKER(Map, MAP_TYPE)
TYPE_CHECKER(FixedDoubleArray, FIXED_DOUBLE_ARRAY_TYPE)
TYPE_CHECKER(WeakFixedArray, FIXED_ARRAY_TYPE)
TYPE_CHECKER(TransitionArray, TRANSITION_ARRAY_TYPE)
TYPE_CHECKER(JSStringIterator, JS_STRING_ITERATOR_TYPE)
TYPE_CHECKER(JSFixedArrayIterator, JS_FIXED_ARRAY_ITERATOR_TYPE)
712
bool HeapObject::IsJSWeakCollection() const {
  return IsJSWeakMap() || IsJSWeakSet();
}

bool HeapObject::IsJSCollection() const { return IsJSMap() || IsJSSet(); }

// The following "checkers" cannot distinguish their type from a plain
// FixedArray — these layouts share FIXED_ARRAY_TYPE, so the checks are
// necessarily approximate and exist mainly for DCHECKs.
bool HeapObject::IsDescriptorArray() const { return IsFixedArray(); }

bool HeapObject::IsFrameArray() const { return IsFixedArray(); }

bool HeapObject::IsArrayList() const { return IsFixedArray(); }

bool HeapObject::IsRegExpMatchInfo() const { return IsFixedArray(); }

// A layout descriptor is either a Smi (fast path) or a typed-array bitmap.
bool Object::IsLayoutDescriptor() const {
  return IsSmi() || IsFixedTypedArrayBase();
}

bool HeapObject::IsTypeFeedbackVector() const { return IsFixedArray(); }

bool HeapObject::IsTypeFeedbackMetadata() const { return IsFixedArray(); }

bool HeapObject::IsLiteralsArray() const { return IsFixedArray(); }
736
bool HeapObject::IsDeoptimizationInputData() const {
  // Must be a fixed array.
  if (!IsFixedArray()) return false;

  // There's no sure way to detect the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can
  // check that the length is zero or else the fixed size plus a multiple of
  // the entry size.
  int length = FixedArray::cast(this)->length();
  if (length == 0) return true;

  length -= DeoptimizationInputData::kFirstDeoptEntryIndex;
  return length >= 0 && length % DeoptimizationInputData::kDeoptEntrySize == 0;
}
751
// The checks below are heuristics for DCHECK purposes: none of these layouts
// are distinguishable from a plain FixedArray at runtime.

bool HeapObject::IsDeoptimizationOutputData() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a deoptimization data array. Since this is used for asserts we can check
  // that the length is plausible though.
  if (FixedArray::cast(this)->length() % 2 != 0) return false;
  return true;
}

bool HeapObject::IsHandlerTable() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a handler table array.
  return true;
}

bool HeapObject::IsTemplateList() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a template list.
  if (FixedArray::cast(this)->length() < 1) return false;
  return true;
}

bool HeapObject::IsDependentCode() const {
  if (!IsFixedArray()) return false;
  // There's actually no way to see the difference between a fixed array and
  // a dependent codes array.
  return true;
}
782
// Contexts share the FixedArray layout; the concrete context kind is encoded
// in the map, so identity against the known context maps is checked.
bool HeapObject::IsContext() const {
  Map* map = this->map();
  Heap* heap = GetHeap();
  return (
      map == heap->function_context_map() || map == heap->catch_context_map() ||
      map == heap->with_context_map() || map == heap->native_context_map() ||
      map == heap->block_context_map() || map == heap->module_context_map() ||
      map == heap->script_context_map() ||
      map == heap->debug_evaluate_context_map());
}

bool HeapObject::IsNativeContext() const {
  return map() == GetHeap()->native_context_map();
}

bool HeapObject::IsScriptContextTable() const {
  return map() == GetHeap()->script_context_table_map();
}

bool HeapObject::IsScopeInfo() const {
  return map() == GetHeap()->scope_info_map();
}

bool HeapObject::IsModuleInfo() const {
  return map() == GetHeap()->module_info_map();
}
809
TYPE_CHECKER(JSBoundFunction, JS_BOUND_FUNCTION_TYPE)
TYPE_CHECKER(JSFunction, JS_FUNCTION_TYPE)


// Specialization used by the templated Is<T>() dispatch helper.
template <> inline bool Is<JSFunction>(Object* obj) {
  return obj->IsJSFunction();
}
817
818
// Exact instance-type checks for code, cells and wrapper objects.
TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(Oddball, ODDBALL_TYPE)
TYPE_CHECKER(Cell, CELL_TYPE)
TYPE_CHECKER(PropertyCell, PROPERTY_CELL_TYPE)
TYPE_CHECKER(WeakCell, WEAK_CELL_TYPE)
TYPE_CHECKER(SharedFunctionInfo, SHARED_FUNCTION_INFO_TYPE)
TYPE_CHECKER(JSDate, JS_DATE_TYPE)
TYPE_CHECKER(JSError, JS_ERROR_TYPE)
TYPE_CHECKER(JSGeneratorObject, JS_GENERATOR_OBJECT_TYPE)
TYPE_CHECKER(JSMessageObject, JS_MESSAGE_OBJECT_TYPE)
TYPE_CHECKER(JSPromise, JS_PROMISE_TYPE)
TYPE_CHECKER(JSValue, JS_VALUE_TYPE)
831
// AbstractCode unifies compiled Code and interpreter BytecodeArray.
bool HeapObject::IsAbstractCode() const {
  return IsBytecodeArray() || IsCode();
}

bool HeapObject::IsStringWrapper() const {
  return IsJSValue() && JSValue::cast(this)->value()->IsString();
}


TYPE_CHECKER(Foreign, FOREIGN_TYPE)

// true/false are the only oddballs whose kind has no kNotBooleanMask bits.
bool HeapObject::IsBoolean() const {
  return IsOddball() &&
         ((Oddball::cast(this)->kind() & Oddball::kNotBooleanMask) == 0);
}
847
848
TYPE_CHECKER(JSArray, JS_ARRAY_TYPE)
TYPE_CHECKER(JSArrayBuffer, JS_ARRAY_BUFFER_TYPE)
TYPE_CHECKER(JSTypedArray, JS_TYPED_ARRAY_TYPE)
TYPE_CHECKER(JSDataView, JS_DATA_VIEW_TYPE)

// Views over an ArrayBuffer: typed arrays and DataViews.
bool HeapObject::IsJSArrayBufferView() const {
  return IsJSDataView() || IsJSTypedArray();
}


TYPE_CHECKER(JSRegExp, JS_REGEXP_TYPE)


// Specialization used by the templated Is<T>() dispatch helper.
template <> inline bool Is<JSArray>(Object* obj) {
  return obj->IsJSArray();
}
865
// All generic hash tables share one map; specializations below can only be
// told apart by context (or, for the string table, root identity).
bool HeapObject::IsHashTable() const {
  return map() == GetHeap()->hash_table_map();
}

bool HeapObject::IsWeakHashTable() const { return IsHashTable(); }

// Any hash table except the canonical string table counts as a dictionary.
bool HeapObject::IsDictionary() const {
  return IsHashTable() && this != GetHeap()->string_table();
}


bool Object::IsNameDictionary() const {
  return IsDictionary();
}


bool Object::IsGlobalDictionary() const { return IsDictionary(); }


bool Object::IsSeededNumberDictionary() const {
  return IsDictionary();
}

// Unseeded number dictionaries do have their own dedicated map.
bool HeapObject::IsUnseededNumberDictionary() const {
  return map() == GetHeap()->unseeded_number_dictionary_map();
}

bool HeapObject::IsStringTable() const { return IsHashTable(); }

bool HeapObject::IsStringSet() const { return IsHashTable(); }

bool HeapObject::IsObjectHashSet() const { return IsHashTable(); }
898
bool HeapObject::IsNormalizedMapCache() const {
  return NormalizedMapCache::IsNormalizedMapCache(this);
}


// Cache slot index for a map, derived from the map's hash.
int NormalizedMapCache::GetIndex(Handle<Map> map) {
  return map->Hash() % NormalizedMapCache::kEntries;
}

// Identified structurally: a FixedArray of exactly kEntries elements.
bool NormalizedMapCache::IsNormalizedMapCache(const HeapObject* obj) {
  if (!obj->IsFixedArray()) return false;
  if (FixedArray::cast(obj)->length() != NormalizedMapCache::kEntries) {
    return false;
  }
#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    reinterpret_cast<NormalizedMapCache*>(const_cast<HeapObject*>(obj))
        ->NormalizedMapCacheVerify();
  }
#endif
  return true;
}
921
IsCompilationCacheTable()922 bool HeapObject::IsCompilationCacheTable() const { return IsHashTable(); }
923
IsCodeCacheHashTable()924 bool HeapObject::IsCodeCacheHashTable() const { return IsHashTable(); }
925
IsMapCache()926 bool HeapObject::IsMapCache() const { return IsHashTable(); }
927
IsObjectHashTable()928 bool HeapObject::IsObjectHashTable() const { return IsHashTable(); }
929
// Ordered hash tables have their own dedicated map, so this is exact.
bool HeapObject::IsOrderedHashTable() const {
  return map() == GetHeap()->ordered_hash_table_map();
}
933
934
// Sets and maps share the ordered hash table map; only the generic check.
bool Object::IsOrderedHashSet() const {
  return IsOrderedHashTable();
}
938
939
// Sets and maps share the ordered hash table map; only the generic check.
bool Object::IsOrderedHashMap() const {
  return IsOrderedHashTable();
}
943
944
// A primitive is a Smi or a heap object whose map is marked primitive.
bool Object::IsPrimitive() const {
  return IsSmi() || HeapObject::cast(this)->map()->IsPrimitiveMap();
}
948
// Instance-type check; also asserts the invariant that every global proxy
// map has the access-check bit set.
bool HeapObject::IsJSGlobalProxy() const {
  bool result = map()->instance_type() == JS_GLOBAL_PROXY_TYPE;
  DCHECK(!result || map()->is_access_check_needed());
  return result;
}
954
955
// Expands to HeapObject::IsJSGlobalObject() via the TYPE_CHECKER macro
// defined near the top of this file.
TYPE_CHECKER(JSGlobalObject, JS_GLOBAL_OBJECT_TYPE)

// True when the object's map carries the undetectable bit.
bool HeapObject::IsUndetectable() const { return map()->is_undetectable(); }
959
// Access checks are needed when the map says so, except for global proxies:
// a proxy only needs checks while it is detached from the current global.
bool HeapObject::IsAccessCheckNeeded() const {
  if (IsJSGlobalProxy()) {
    const JSGlobalProxy* proxy = JSGlobalProxy::cast(this);
    JSGlobalObject* global = proxy->GetIsolate()->context()->global_object();
    return proxy->IsDetachedFrom(global);
  }
  return map()->is_access_check_needed();
}
968
// True for any instance type listed in STRUCT_LIST; the macro expands to
// one case label per struct type.
bool HeapObject::IsStruct() const {
  switch (map()->instance_type()) {
#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE: return true;
    STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
    default: return false;
  }
}
977
// Generates Object::Is##Name() / HeapObject::Is##Name() predicates for
// every struct type in STRUCT_LIST. The Object variant first checks that
// the receiver is a heap object before delegating.
#define MAKE_STRUCT_PREDICATE(NAME, Name, name)                  \
  bool Object::Is##Name() const {                                \
    return IsHeapObject() && HeapObject::cast(this)->Is##Name(); \
  }                                                              \
  bool HeapObject::Is##Name() const {                            \
    return map()->instance_type() == NAME##_TYPE;                \
  }
STRUCT_LIST(MAKE_STRUCT_PREDICATE)
#undef MAKE_STRUCT_PREDICATE
987
// Numeric value of a Smi or HeapNumber receiver; the DCHECK guards the
// precondition that the receiver actually is a number.
double Object::Number() const {
  DCHECK(IsNumber());
  return IsSmi()
             ? static_cast<double>(reinterpret_cast<const Smi*>(this)->value())
             : reinterpret_cast<const HeapNumber*>(this)->value();
}
994
995
// Only HeapNumbers can hold NaN (Smis are integers).
bool Object::IsNaN() const {
  return this->IsHeapNumber() && std::isnan(HeapNumber::cast(this)->value());
}
999
1000
// Only HeapNumbers can hold -0.0 (the Smi 0 is +0).
bool Object::IsMinusZero() const {
  return this->IsHeapNumber() &&
         i::IsMinusZero(HeapNumber::cast(this)->value());
}
1005
1006
// Picks the narrowest field representation this value fits in, gated on the
// field-tracking flags. Check order matters: Smi before Double before
// None before HeapObject, falling back to Tagged.
Representation Object::OptimalRepresentation() {
  if (!FLAG_track_fields) return Representation::Tagged();
  if (IsSmi()) {
    return Representation::Smi();
  } else if (FLAG_track_double_fields && IsHeapNumber()) {
    return Representation::Double();
  } else if (FLAG_track_computed_fields &&
             IsUninitialized(HeapObject::cast(this)->GetIsolate())) {
    // Uninitialized sentinel: no representation committed yet.
    return Representation::None();
  } else if (FLAG_track_heap_object_fields) {
    DCHECK(IsHeapObject());
    return Representation::HeapObject();
  } else {
    return Representation::Tagged();
  }
}
1023
1024
OptimalElementsKind()1025 ElementsKind Object::OptimalElementsKind() {
1026 if (IsSmi()) return FAST_SMI_ELEMENTS;
1027 if (IsNumber()) return FAST_DOUBLE_ELEMENTS;
1028 return FAST_ELEMENTS;
1029 }
1030
1031
// Whether this value can be stored in a field of the given representation.
// Anything fits Tagged (the fall-through `return true`); nothing fits None.
bool Object::FitsRepresentation(Representation representation) {
  if (FLAG_track_fields && representation.IsSmi()) {
    return IsSmi();
  } else if (FLAG_track_double_fields && representation.IsDouble()) {
    // Mutable heap numbers are the in-object box used for double fields.
    return IsMutableHeapNumber() || IsNumber();
  } else if (FLAG_track_heap_object_fields && representation.IsHeapObject()) {
    return IsHeapObject();
  } else if (FLAG_track_fields && representation.IsNone()) {
    return false;
  }
  return true;
}
1044
// Converts the receiver to uint32 without rounding or truncation.
// Returns false for negatives, non-numbers, and doubles that do not
// round-trip exactly through uint32 (fractional or out-of-range values).
bool Object::ToUint32(uint32_t* value) {
  if (IsSmi()) {
    int num = Smi::cast(this)->value();
    if (num < 0) return false;
    *value = static_cast<uint32_t>(num);
    return true;
  }
  if (IsHeapNumber()) {
    double num = HeapNumber::cast(this)->value();
    if (num < 0) return false;
    uint32_t uint_value = FastD2UI(num);
    // Accept only if the conversion is lossless.
    if (FastUI2D(uint_value) == num) {
      *value = uint_value;
      return true;
    }
  }
  return false;
}
1063
1064 // static
// static
// Fast path for ToObject: receivers pass through unchanged; everything else
// is wrapped via the native-context overload.
MaybeHandle<JSReceiver> Object::ToObject(Isolate* isolate,
                                         Handle<Object> object) {
  if (object->IsJSReceiver()) return Handle<JSReceiver>::cast(object);
  return ToObject(isolate, object, isolate->native_context());
}
1070
1071
1072 // static
// static
// Fast path for ToName: Names pass through; other values are converted.
MaybeHandle<Name> Object::ToName(Isolate* isolate, Handle<Object> input) {
  if (input->IsName()) return Handle<Name>::cast(input);
  return ConvertToName(isolate, input);
}
1077
1078 // static
// static
// Fast path for ToPrimitive: primitives pass through; receivers go through
// the full JSReceiver::ToPrimitive protocol with the given hint.
MaybeHandle<Object> Object::ToPrimitive(Handle<Object> input,
                                        ToPrimitiveHint hint) {
  if (input->IsPrimitive()) return input;
  return JSReceiver::ToPrimitive(Handle<JSReceiver>::cast(input), hint);
}
1084
1085
// True when the receiver is a JSObject whose class name is exactly `name`
// (pointer comparison; class names are internalized strings).
bool Object::HasSpecificClassOf(String* name) {
  return this->IsJSObject() && (JSObject::cast(this)->class_name() == name);
}
1089
// Looks up `name` on `object`; returns undefined when not found rather
// than an empty MaybeHandle.
MaybeHandle<Object> Object::GetProperty(Handle<Object> object,
                                        Handle<Name> name) {
  LookupIterator it(object, name);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}
1096
// As Object::GetProperty, but with the receiver also serving as the lookup
// start object; returns undefined when the property is absent.
MaybeHandle<Object> JSReceiver::GetProperty(Handle<JSReceiver> receiver,
                                            Handle<Name> name) {
  LookupIterator it(receiver, name, receiver);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return Object::GetProperty(&it);
}
1103
// Indexed-property variant of GetProperty; undefined when absent.
MaybeHandle<Object> Object::GetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index) {
  LookupIterator it(isolate, object, index);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetProperty(&it);
}
1110
// Indexed-property variant for receivers; undefined when absent.
MaybeHandle<Object> JSReceiver::GetElement(Isolate* isolate,
                                           Handle<JSReceiver> receiver,
                                           uint32_t index) {
  LookupIterator it(isolate, receiver, index, receiver);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return Object::GetProperty(&it);
}
1118
// Data-property lookup that walks the prototype chain but skips
// interceptors; undefined when absent. Cannot fail, hence plain Handle.
Handle<Object> JSReceiver::GetDataProperty(Handle<JSReceiver> object,
                                           Handle<Name> name) {
  LookupIterator it(object, name, object,
                    LookupIterator::PROTOTYPE_CHAIN_SKIP_INTERCEPTOR);
  if (!it.IsFound()) return it.factory()->undefined_value();
  return GetDataProperty(&it);
}
1126
// Stores `value` at `index`; propagates failure via MAYBE_RETURN_NULL and
// otherwise echoes the stored value back to the caller.
MaybeHandle<Object> Object::SetElement(Isolate* isolate, Handle<Object> object,
                                       uint32_t index, Handle<Object> value,
                                       LanguageMode language_mode) {
  LookupIterator it(isolate, object, index);
  MAYBE_RETURN_NULL(
      SetProperty(&it, value, language_mode, MAY_BE_STORE_FROM_KEYED));
  return value;
}
1135
// [[GetPrototypeOf]]: walks past hidden prototypes, following proxies;
// returns an empty MaybeHandle if a proxy trap throws.
MaybeHandle<Object> JSReceiver::GetPrototype(Isolate* isolate,
                                             Handle<JSReceiver> receiver) {
  // We don't expect access checks to be needed on JSProxy objects.
  DCHECK(!receiver->IsAccessCheckNeeded() || receiver->IsJSObject());
  PrototypeIterator iter(isolate, receiver, kStartAtReceiver,
                         PrototypeIterator::END_AT_NON_HIDDEN);
  do {
    if (!iter.AdvanceFollowingProxies()) return MaybeHandle<Object>();
  } while (!iter.IsAtEnd());
  return PrototypeIterator::GetCurrent(iter);
}
1147
// Convenience overload: internalizes a C string key, then delegates.
MaybeHandle<Object> JSReceiver::GetProperty(Isolate* isolate,
                                            Handle<JSReceiver> receiver,
                                            const char* name) {
  Handle<String> str = isolate->factory()->InternalizeUtf8String(name);
  return GetProperty(receiver, str);
}
1154
1155 // static
// static
// Own keys only (no prototype walk), all property kinds, converted to
// strings — mirrors the [[OwnPropertyKeys]] result shape.
MUST_USE_RESULT MaybeHandle<FixedArray> JSReceiver::OwnPropertyKeys(
    Handle<JSReceiver> object) {
  return KeyAccumulator::GetKeys(object, KeyCollectionMode::kOwnOnly,
                                 ALL_PROPERTIES,
                                 GetKeysConversion::kConvertToString);
}
1162
// True if every object on the prototype chain has empty elements and no
// custom element handling, i.e. element loads cannot be observed by the
// chain. Raw-pointer walk, so allocation is disallowed for the duration.
bool JSObject::PrototypeHasNoElements(Isolate* isolate, JSObject* object) {
  DisallowHeapAllocation no_gc;
  HeapObject* prototype = HeapObject::cast(object->map()->prototype());
  HeapObject* null = isolate->heap()->null_value();
  HeapObject* empty = isolate->heap()->empty_fixed_array();
  while (prototype != null) {
    Map* map = prototype->map();
    // Types at or below this bound intercept element access (e.g. proxies).
    if (map->instance_type() <= LAST_CUSTOM_ELEMENTS_RECEIVER) return false;
    if (JSObject::cast(prototype)->elements() != empty) return false;
    prototype = HeapObject::cast(map->prototype());
  }
  return true;
}
1176
// ---------------------------------------------------------------------------
// Raw field access. FIELD_ADDR subtracts kHeapObjectTag because tagged heap
// object pointers point one byte past the object's real start.
#define FIELD_ADDR(p, offset) \
  (reinterpret_cast<byte*>(p) + offset - kHeapObjectTag)

#define FIELD_ADDR_CONST(p, offset) \
  (reinterpret_cast<const byte*>(p) + offset - kHeapObjectTag)

#define READ_FIELD(p, offset) \
  (*reinterpret_cast<Object* const*>(FIELD_ADDR_CONST(p, offset)))

// Atomic variants: ACQUIRE/RELEASE pair for synchronized accesses,
// NOBARRIER for accesses that tolerate relaxed ordering.
#define ACQUIRE_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::Acquire_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define NOBARRIER_READ_FIELD(p, offset)           \
  reinterpret_cast<Object*>(base::NoBarrier_Load( \
      reinterpret_cast<const base::AtomicWord*>(FIELD_ADDR_CONST(p, offset))))

#define WRITE_FIELD(p, offset, value) \
  (*reinterpret_cast<Object**>(FIELD_ADDR(p, offset)) = value)

#define RELEASE_WRITE_FIELD(p, offset, value)                     \
  base::Release_Store(                                            \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

#define NOBARRIER_WRITE_FIELD(p, offset, value)                   \
  base::NoBarrier_Store(                                          \
      reinterpret_cast<base::AtomicWord*>(FIELD_ADDR(p, offset)), \
      reinterpret_cast<base::AtomicWord>(value));

// GC write barriers: notify incremental marking and the remembered set
// about a pointer store.
#define WRITE_BARRIER(heap, object, offset, value)          \
  heap->incremental_marking()->RecordWrite(                 \
      object, HeapObject::RawField(object, offset), value); \
  heap->RecordWrite(object, offset, value);

#define FIXED_ARRAY_ELEMENTS_WRITE_BARRIER(heap, array, start, length) \
  do {                                                                 \
    heap->RecordFixedArrayElements(array, start, length);              \
    heap->incremental_marking()->IterateBlackObject(array);            \
  } while (false)

// Conditional barrier: SKIP_WRITE_BARRIER elides everything,
// UPDATE_WRITE_BARRIER additionally records for incremental marking.
#define CONDITIONAL_WRITE_BARRIER(heap, object, offset, value, mode) \
  if (mode != SKIP_WRITE_BARRIER) {                                  \
    if (mode == UPDATE_WRITE_BARRIER) {                              \
      heap->incremental_marking()->RecordWrite(                      \
          object, HeapObject::RawField(object, offset), value);      \
    }                                                                \
    heap->RecordWrite(object, offset, value);                        \
  }
1226
// ---------------------------------------------------------------------------
// Typed field accessors: one READ_/WRITE_ pair per primitive C type.
// Doubles go through Read/WriteDoubleValue helpers (alignment-safe).
#define READ_DOUBLE_FIELD(p, offset) \
  ReadDoubleValue(FIELD_ADDR_CONST(p, offset))

#define WRITE_DOUBLE_FIELD(p, offset, value) \
  WriteDoubleValue(FIELD_ADDR(p, offset), value)

#define READ_INT_FIELD(p, offset) \
  (*reinterpret_cast<const int*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT_FIELD(p, offset, value) \
  (*reinterpret_cast<int*>(FIELD_ADDR(p, offset)) = value)

#define READ_INTPTR_FIELD(p, offset) \
  (*reinterpret_cast<const intptr_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INTPTR_FIELD(p, offset, value) \
  (*reinterpret_cast<intptr_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT8_FIELD(p, offset) \
  (*reinterpret_cast<const uint8_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT8_FIELD(p, offset, value) \
  (*reinterpret_cast<uint8_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT8_FIELD(p, offset) \
  (*reinterpret_cast<const int8_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT8_FIELD(p, offset, value) \
  (*reinterpret_cast<int8_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT16_FIELD(p, offset) \
  (*reinterpret_cast<const uint16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT16_FIELD(p, offset, value) \
  (*reinterpret_cast<uint16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT16_FIELD(p, offset) \
  (*reinterpret_cast<const int16_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT16_FIELD(p, offset, value) \
  (*reinterpret_cast<int16_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT32_FIELD(p, offset) \
  (*reinterpret_cast<const uint32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT32_FIELD(p, offset, value) \
  (*reinterpret_cast<uint32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT32_FIELD(p, offset) \
  (*reinterpret_cast<const int32_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT32_FIELD(p, offset, value) \
  (*reinterpret_cast<int32_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_FLOAT_FIELD(p, offset) \
  (*reinterpret_cast<const float*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_FLOAT_FIELD(p, offset, value) \
  (*reinterpret_cast<float*>(FIELD_ADDR(p, offset)) = value)

#define READ_UINT64_FIELD(p, offset) \
  (*reinterpret_cast<const uint64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_UINT64_FIELD(p, offset, value) \
  (*reinterpret_cast<uint64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_INT64_FIELD(p, offset) \
  (*reinterpret_cast<const int64_t*>(FIELD_ADDR_CONST(p, offset)))

#define WRITE_INT64_FIELD(p, offset, value) \
  (*reinterpret_cast<int64_t*>(FIELD_ADDR(p, offset)) = value)

#define READ_BYTE_FIELD(p, offset) \
  (*reinterpret_cast<const byte*>(FIELD_ADDR_CONST(p, offset)))

// Relaxed-atomic byte accessors for fields touched by concurrent readers.
#define NOBARRIER_READ_BYTE_FIELD(p, offset) \
  static_cast<byte>(base::NoBarrier_Load(    \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))

#define WRITE_BYTE_FIELD(p, offset, value) \
  (*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)

#define NOBARRIER_WRITE_BYTE_FIELD(p, offset, value)           \
  base::NoBarrier_Store(                                       \
      reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset)), \
      static_cast<base::Atomic8>(value));
1313
// Untagged address of the field at byte_offset, viewed as an Object* slot.
Object** HeapObject::RawField(HeapObject* obj, int byte_offset) {
  return reinterpret_cast<Object**>(FIELD_ADDR(obj, byte_offset));
}
1317
1318
// Encodes a map pointer as the raw map word.
MapWord MapWord::FromMap(const Map* map) {
  return MapWord(reinterpret_cast<uintptr_t>(map));
}
1322
1323
// Decodes the map word as a map pointer (only valid when it is not a
// forwarding address).
Map* MapWord::ToMap() {
  return reinterpret_cast<Map*>(value_);
}
1327
// A forwarding address is stored Smi-tagged, distinguishing it from a
// (heap-object-tagged) map pointer.
bool MapWord::IsForwardingAddress() const {
  return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
}
1331
1332
FromForwardingAddress(HeapObject * object)1333 MapWord MapWord::FromForwardingAddress(HeapObject* object) {
1334 Address raw = reinterpret_cast<Address>(object) - kHeapObjectTag;
1335 return MapWord(reinterpret_cast<uintptr_t>(raw));
1336 }
1337
1338
// Decodes the forwarding target; FromAddress re-applies the heap object tag.
HeapObject* MapWord::ToForwardingAddress() {
  DCHECK(IsForwardingAddress());
  return HeapObject::FromAddress(reinterpret_cast<Address>(value_));
}
1343
1344
#ifdef VERIFY_HEAP
// Debug-build helpers: validate that a field holds a sane pointer / a Smi.
void HeapObject::VerifyObjectField(int offset) {
  VerifyPointer(READ_FIELD(this, offset));
}

void HeapObject::VerifySmiField(int offset) {
  CHECK(READ_FIELD(this, offset)->IsSmi());
}
#endif
1354
1355
// Recovers the owning Heap from the object's address via its MemoryChunk
// header — no per-object back-pointer is needed.
Heap* HeapObject::GetHeap() const {
  Heap* heap = MemoryChunk::FromAddress(
                   reinterpret_cast<Address>(const_cast<HeapObject*>(this)))
                   ->heap();
  SLOW_DCHECK(heap != NULL);
  return heap;
}
1363
1364
// Isolate owning this object, via its heap.
Isolate* HeapObject::GetIsolate() const {
  return GetHeap()->isolate();
}
1368
1369
// The object's map, read without memory barriers. Debug builds strip the
// PathTracer mark bit before decoding.
Map* HeapObject::map() const {
#ifdef DEBUG
  // Clear mark potentially added by PathTracer.
  uintptr_t raw_value =
      map_word().ToRawValue() & ~static_cast<uintptr_t>(PathTracer::kMarkTag);
  return MapWord::FromRawValue(raw_value).ToMap();
#else
  return map_word().ToMap();
#endif
}
1380
1381
// Stores the map and records the write for incremental marking.
void HeapObject::set_map(Map* value) {
  set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}
1390
1391
// Map read with acquire semantics (pairs with synchronized_set_map).
Map* HeapObject::synchronized_map() {
  return synchronized_map_word().ToMap();
}
1395
1396
// Map store with release semantics plus the incremental-marking record.
void HeapObject::synchronized_set_map(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
  if (value != NULL) {
    // TODO(1600) We are passing NULL as a slot because maps can never be on
    // evacuation candidate.
    value->GetHeap()->incremental_marking()->RecordWrite(this, NULL, value);
  }
}
1405
1406
// Release-ordered map store that deliberately skips the write barrier.
void HeapObject::synchronized_set_map_no_write_barrier(Map* value) {
  synchronized_set_map_word(MapWord::FromMap(value));
}
1410
1411
// Unsafe accessor omitting write barrier.
void HeapObject::set_map_no_write_barrier(Map* value) {
  set_map_word(MapWord::FromMap(value));
}
1416
1417
// Raw map word, relaxed-atomic read.
MapWord HeapObject::map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(NOBARRIER_READ_FIELD(this, kMapOffset)));
}
1422
1423
// Raw map word store, relaxed-atomic.
void HeapObject::set_map_word(MapWord map_word) {
  NOBARRIER_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}
1428
1429
// Raw map word with acquire ordering.
MapWord HeapObject::synchronized_map_word() const {
  return MapWord(
      reinterpret_cast<uintptr_t>(ACQUIRE_READ_FIELD(this, kMapOffset)));
}
1434
1435
// Raw map word store with release ordering.
void HeapObject::synchronized_set_map_word(MapWord map_word) {
  RELEASE_WRITE_FIELD(
      this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}
1440
1441
// Object size in bytes, derived from the map.
int HeapObject::Size() {
  return SizeFromMap(map());
}
1445
1446
// The boxed double payload.
double HeapNumber::value() const {
  return READ_DOUBLE_FIELD(this, kValueOffset);
}
1450
1451
// Overwrites the boxed double payload (no barrier needed: not a pointer).
void HeapNumber::set_value(double value) {
  WRITE_DOUBLE_FIELD(this, kValueOffset, value);
}
1455
1456
// Unbiased IEEE-754 exponent, extracted from the high word of the double.
int HeapNumber::get_exponent() {
  return ((READ_INT_FIELD(this, kExponentOffset) & kExponentMask) >>
          kExponentShift) - kExponentBias;
}
1461
1462
// Sign bit of the double (nonzero when negative), from the high word.
int HeapNumber::get_sign() {
  return READ_INT_FIELD(this, kExponentOffset) & kSignMask;
}
1466
1467
// Lane-wise equality after an identity fast path; values of different SIMD
// types are never equal. The macro dispatches on the concrete type.
bool Simd128Value::Equals(Simd128Value* that) {
  // TODO(bmeurer): This doesn't match the SIMD.js specification, but it seems
  // to be consistent with what the CompareICStub does, and what is tested in
  // the current SIMD.js testsuite.
  if (this == that) return true;
#define SIMD128_VALUE(TYPE, Type, type, lane_count, lane_type) \
  if (this->Is##Type()) {                                      \
    if (!that->Is##Type()) return false;                       \
    return Type::cast(this)->Equals(Type::cast(that));         \
  }
  SIMD128_TYPES(SIMD128_VALUE)
#undef SIMD128_VALUE
  return false;
}
1482
1483
1484 // static
Equals(Handle<Simd128Value> one,Handle<Simd128Value> two)1485 bool Simd128Value::Equals(Handle<Simd128Value> one, Handle<Simd128Value> two) {
1486 return one->Equals(*two);
1487 }
1488
1489
// Generates Type::Equals for each SIMD type: all lanes must compare equal.
#define SIMD128_VALUE_EQUALS(TYPE, Type, type, lane_count, lane_type) \
  bool Type::Equals(Type* that) {                                     \
    for (int lane = 0; lane < lane_count; ++lane) {                   \
      if (this->get_lane(lane) != that->get_lane(lane)) return false; \
    }                                                                 \
    return true;                                                      \
  }
  SIMD128_TYPES(SIMD128_VALUE_EQUALS)
#undef SIMD128_VALUE_EQUALS
1499
1500
// Lane accessors must address lanes in memory order: on big-endian targets
// lane i lives at the mirrored offset (lane_count - lane - 1).
#if defined(V8_TARGET_LITTLE_ENDIAN)
#define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
  lane_type value =                                                      \
      READ_##field_type##_FIELD(this, kValueOffset + lane * field_size);
#elif defined(V8_TARGET_BIG_ENDIAN)
#define SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size) \
  lane_type value = READ_##field_type##_FIELD(                           \
      this, kValueOffset + (lane_count - lane - 1) * field_size);
#else
#error Unknown byte ordering
#endif

#if defined(V8_TARGET_LITTLE_ENDIAN)
#define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
  WRITE_##field_type##_FIELD(this, kValueOffset + lane * field_size, value);
#elif defined(V8_TARGET_BIG_ENDIAN)
#define SIMD128_WRITE_LANE(lane_count, field_type, field_size, value) \
  WRITE_##field_type##_FIELD(                                         \
      this, kValueOffset + (lane_count - lane - 1) * field_size, value);
#else
#error Unknown byte ordering
#endif
1523
// Generates get_lane/set_lane for the numeric SIMD types; lane index is
// bounds-checked in debug builds only.
#define SIMD128_NUMERIC_LANE_FNS(type, lane_type, lane_count, field_type, \
                                 field_size)                              \
  lane_type type::get_lane(int lane) const {                              \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size)      \
    return value;                                                         \
  }                                                                       \
                                                                          \
  void type::set_lane(int lane, lane_type value) {                        \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_WRITE_LANE(lane_count, field_type, field_size, value)         \
  }

SIMD128_NUMERIC_LANE_FNS(Float32x4, float, 4, FLOAT, kFloatSize)
SIMD128_NUMERIC_LANE_FNS(Int32x4, int32_t, 4, INT32, kInt32Size)
SIMD128_NUMERIC_LANE_FNS(Uint32x4, uint32_t, 4, UINT32, kInt32Size)
SIMD128_NUMERIC_LANE_FNS(Int16x8, int16_t, 8, INT16, kShortSize)
SIMD128_NUMERIC_LANE_FNS(Uint16x8, uint16_t, 8, UINT16, kShortSize)
SIMD128_NUMERIC_LANE_FNS(Int8x16, int8_t, 16, INT8, kCharSize)
SIMD128_NUMERIC_LANE_FNS(Uint8x16, uint8_t, 16, UINT8, kCharSize)
#undef SIMD128_NUMERIC_LANE_FNS
1545
1546
// Generates get_lane/set_lane for the boolean SIMD types. Boolean lanes are
// stored as all-ones (-1) for true and 0 for false, as the DCHECK asserts.
#define SIMD128_BOOLEAN_LANE_FNS(type, lane_type, lane_count, field_type, \
                                 field_size)                              \
  bool type::get_lane(int lane) const {                                   \
    DCHECK(lane < lane_count && lane >= 0);                               \
    SIMD128_READ_LANE(lane_type, lane_count, field_type, field_size)      \
    DCHECK(value == 0 || value == -1);                                    \
    return value != 0;                                                    \
  }                                                                       \
                                                                          \
  void type::set_lane(int lane, bool value) {                             \
    DCHECK(lane < lane_count && lane >= 0);                               \
    int32_t int_val = value ? -1 : 0;                                     \
    SIMD128_WRITE_LANE(lane_count, field_type, field_size, int_val)       \
  }

SIMD128_BOOLEAN_LANE_FNS(Bool32x4, int32_t, 4, INT32, kInt32Size)
SIMD128_BOOLEAN_LANE_FNS(Bool16x8, int16_t, 8, INT16, kShortSize)
SIMD128_BOOLEAN_LANE_FNS(Bool8x16, int8_t, 16, INT8, kCharSize)
#undef SIMD128_BOOLEAN_LANE_FNS

#undef SIMD128_READ_LANE
#undef SIMD128_WRITE_LANE
1569
1570
// Defines JSReceiver::properties()/set_properties() via the ACCESSORS macro.
ACCESSORS(JSReceiver, properties, FixedArray, kPropertiesOffset)

// Address of slot 0 in this array's backing store.
Object** FixedArray::GetFirstElementAddress() {
  return reinterpret_cast<Object**>(FIELD_ADDR(this, OffsetOfElementAt(0)));
}
1577
1578
ContainsOnlySmisOrHoles()1579 bool FixedArray::ContainsOnlySmisOrHoles() {
1580 Object* the_hole = GetHeap()->the_hole_value();
1581 Object** current = GetFirstElementAddress();
1582 for (int i = 0; i < length(); ++i) {
1583 Object* candidate = *current++;
1584 if (!candidate->IsSmi() && candidate != the_hole) return false;
1585 }
1586 return true;
1587 }
1588
1589
// The elements backing store, read as a raw field and downcast.
FixedArrayBase* JSObject::elements() const {
  Object* array = READ_FIELD(this, kElementsOffset);
  return static_cast<FixedArrayBase*>(array);
}
1594
1595
// Resets the site to its pristine state: zeroed counters, initial fast
// elements kind, and an empty dependent-code list.
void AllocationSite::Initialize() {
  set_transition_info(Smi::kZero);
  SetElementsKind(GetInitialFastElementsKind());
  set_nested_site(Smi::kZero);
  set_pretenure_data(0);
  set_pretenure_create_count(0);
  // The empty fixed array doubles as the empty DependentCode list.
  set_dependent_code(DependentCode::cast(GetHeap()->empty_fixed_array()),
                     SKIP_WRITE_BARRIER);
}
1605
1606
IsZombie()1607 bool AllocationSite::IsZombie() { return pretenure_decision() == kZombie; }
1608
1609
// Site is in the intermediate maybe-tenure state.
bool AllocationSite::IsMaybeTenure() {
  return pretenure_decision() == kMaybeTenure;
}
1613
1614
// Any state other than kUndecided counts as a made decision.
bool AllocationSite::PretenuringDecisionMade() {
  return pretenure_decision() != kUndecided;
}
1618
1619
// Retires the site: resets all state, then pins the decision at kZombie.
void AllocationSite::MarkZombie() {
  DCHECK(!IsZombie());
  Initialize();
  set_pretenure_decision(kZombie);
}
1625
1626
// Elements kind encoded in transition_info; only valid when the site does
// not point to a literal boilerplate (then transition_info is a Smi).
ElementsKind AllocationSite::GetElementsKind() {
  DCHECK(!SitePointsToLiteral());
  int value = Smi::cast(transition_info())->value();
  return ElementsKindBits::decode(value);
}
1632
1633
// Rewrites the elements-kind bits of transition_info; Smi store, so the
// write barrier can be skipped.
void AllocationSite::SetElementsKind(ElementsKind kind) {
  int value = Smi::cast(transition_info())->value();
  set_transition_info(Smi::FromInt(ElementsKindBits::update(value, kind)),
                      SKIP_WRITE_BARRIER);
}
1639
1640
// Inlining is allowed unless the do-not-inline bit has been set.
bool AllocationSite::CanInlineCall() {
  int value = Smi::cast(transition_info())->value();
  return DoNotInlineBit::decode(value) == 0;
}
1645
1646
// Sets the do-not-inline bit in transition_info (Smi store, no barrier).
void AllocationSite::SetDoNotInlineCall() {
  int value = Smi::cast(transition_info())->value();
  set_transition_info(Smi::FromInt(DoNotInlineBit::update(value, true)),
                      SKIP_WRITE_BARRIER);
}
1652
1653
bool AllocationSite::SitePointsToLiteral() {
  // If transition_info is a smi, then it represents an ElementsKind
  // for a constructed array. Otherwise, it must be a boilerplate
  // for an object or array literal.
  return transition_info()->IsJSArray() || transition_info()->IsJSObject();
}
1660
1661
// Heuristic: We only need to create allocation site info if the boilerplate
// elements kind is the initial elements kind.
AllocationSiteMode AllocationSite::GetMode(
    ElementsKind boilerplate_elements_kind) {
  if (IsFastSmiElementsKind(boilerplate_elements_kind)) {
    return TRACK_ALLOCATION_SITE;
  }

  return DONT_TRACK_ALLOCATION_SITE;
}
1672
CanTrack(InstanceType type)1673 inline bool AllocationSite::CanTrack(InstanceType type) {
1674 if (FLAG_allocation_site_pretenuring) {
1675 return type == JS_ARRAY_TYPE ||
1676 type == JS_OBJECT_TYPE ||
1677 type < FIRST_NONSTRING_TYPE;
1678 }
1679 return type == JS_ARRAY_TYPE;
1680 }
1681
1682
// Decision bits decoded from the packed pretenure_data field.
AllocationSite::PretenureDecision AllocationSite::pretenure_decision() {
  int value = pretenure_data();
  return PretenureDecisionBits::decode(value);
}
1687
1688
// Read-modify-write of the decision bits within pretenure_data.
void AllocationSite::set_pretenure_decision(PretenureDecision decision) {
  int value = pretenure_data();
  set_pretenure_data(PretenureDecisionBits::update(value, decision));
}
1693
1694
// Flag bit within pretenure_data marking dependent code for deopt.
bool AllocationSite::deopt_dependent_code() {
  int value = pretenure_data();
  return DeoptDependentCodeBit::decode(value);
}
1699
1700
// Read-modify-write of the deopt flag bit within pretenure_data.
void AllocationSite::set_deopt_dependent_code(bool deopt) {
  int value = pretenure_data();
  set_pretenure_data(DeoptDependentCodeBit::update(value, deopt));
}
1705
1706
// Found-count bits decoded from the packed pretenure_data field.
int AllocationSite::memento_found_count() {
  int value = pretenure_data();
  return MementoFoundCountBits::decode(value);
}
1711
1712
// Stores the found count into its bitfield within pretenure_data.
inline void AllocationSite::set_memento_found_count(int count) {
  int value = pretenure_data();
  // Verify that we can count more mementos than we can possibly find in one
  // new space collection.
  DCHECK((GetHeap()->MaxSemiSpaceSize() /
          (Heap::kMinObjectSizeInWords * kPointerSize +
           AllocationMemento::kSize)) < MementoFoundCountBits::kMax);
  DCHECK(count < MementoFoundCountBits::kMax);
  set_pretenure_data(MementoFoundCountBits::update(value, count));
}
1723
1724
memento_create_count()1725 int AllocationSite::memento_create_count() { return pretenure_create_count(); }
1726
1727
// Direct store to the dedicated create-count field.
void AllocationSite::set_memento_create_count(int count) {
  set_pretenure_create_count(count);
}
1731
1732
// Bumps the found count (no-op for zombie sites) and reports whether the
// site has reached the minimum threshold for a pretenuring decision.
bool AllocationSite::IncrementMementoFoundCount(int increment) {
  if (IsZombie()) return false;

  int value = memento_found_count();
  set_memento_found_count(value + increment);
  return memento_found_count() >= kPretenureMinimumCreated;
}
1740
1741
// Bumps the create count; only meaningful when pretenuring is enabled.
inline void AllocationSite::IncrementMementoCreateCount() {
  DCHECK(FLAG_allocation_site_pretenuring);
  int value = memento_create_count();
  set_memento_create_count(value + 1);
}
1747
1748
// Pretenuring state machine step. Returns true only when the site
// transitions to kTenure, which requires deoptimizing dependent code.
inline bool AllocationSite::MakePretenureDecision(
    PretenureDecision current_decision,
    double ratio,
    bool maximum_size_scavenge) {
  // Here we just allow state transitions from undecided or maybe tenure
  // to don't tenure, maybe tenure, or tenure.
  if ((current_decision == kUndecided || current_decision == kMaybeTenure)) {
    if (ratio >= kPretenureRatio) {
      // We just transition into tenure state when the semi-space was at
      // maximum capacity.
      if (maximum_size_scavenge) {
        set_deopt_dependent_code(true);
        set_pretenure_decision(kTenure);
        // Currently we just need to deopt when we make a state transition to
        // tenure.
        return true;
      }
      set_pretenure_decision(kMaybeTenure);
    } else {
      set_pretenure_decision(kDontTenure);
    }
  }
  return false;
}
1773
1774
// Consumes the found/created memento counters gathered since the last GC,
// possibly updating the pretenure decision. Returns whether dependent code
// must be deoptimized. Counters are reset on exit.
inline bool AllocationSite::DigestPretenuringFeedback(
    bool maximum_size_scavenge) {
  bool deopt = false;
  int create_count = memento_create_count();
  int found_count = memento_found_count();
  bool minimum_mementos_created = create_count >= kPretenureMinimumCreated;
  // The ratio is also computed for tracing even below the minimum, but the
  // decision itself is only made once enough mementos were created.
  double ratio =
      minimum_mementos_created || FLAG_trace_pretenuring_statistics ?
          static_cast<double>(found_count) / create_count : 0.0;
  PretenureDecision current_decision = pretenure_decision();

  if (minimum_mementos_created) {
    deopt = MakePretenureDecision(
        current_decision, ratio, maximum_size_scavenge);
  }

  if (FLAG_trace_pretenuring_statistics) {
    PrintIsolate(GetIsolate(),
                 "pretenuring: AllocationSite(%p): (created, found, ratio) "
                 "(%d, %d, %f) %s => %s\n",
                 static_cast<void*>(this), create_count, found_count, ratio,
                 PretenureDecisionName(current_decision),
                 PretenureDecisionName(pretenure_decision()));
  }

  // Clear feedback calculation fields until the next gc.
  set_memento_found_count(0);
  set_memento_create_count(0);
  return deopt;
}
1805
1806
// A memento is valid when it points at a live (non-zombie) allocation site.
bool AllocationMemento::IsValid() {
  return allocation_site()->IsAllocationSite() &&
         !AllocationSite::cast(allocation_site())->IsZombie();
}
1811
1812
GetAllocationSite()1813 AllocationSite* AllocationMemento::GetAllocationSite() {
1814 DCHECK(IsValid());
1815 return AllocationSite::cast(allocation_site());
1816 }
1817
GetAllocationSiteUnchecked()1818 Address AllocationMemento::GetAllocationSiteUnchecked() {
1819 return reinterpret_cast<Address>(allocation_site());
1820 }
1821
EnsureCanContainHeapObjectElements(Handle<JSObject> object)1822 void JSObject::EnsureCanContainHeapObjectElements(Handle<JSObject> object) {
1823 JSObject::ValidateElements(object);
1824 ElementsKind elements_kind = object->map()->elements_kind();
1825 if (!IsFastObjectElementsKind(elements_kind)) {
1826 if (IsFastHoleyElementsKind(elements_kind)) {
1827 TransitionElementsKind(object, FAST_HOLEY_ELEMENTS);
1828 } else {
1829 TransitionElementsKind(object, FAST_ELEMENTS);
1830 }
1831 }
1832 }
1833
1834
// Scans `objects` (length `count`) and transitions the receiver's elements
// kind just enough to hold every scanned value: holes force a holey kind,
// non-Smi numbers may force a double kind (when allowed by `mode`), and any
// other heap object forces an object kind.
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Object** objects,
                                        uint32_t count,
                                        EnsureElementsMode mode) {
  ElementsKind current_kind = object->GetElementsKind();
  ElementsKind target_kind = current_kind;
  {
    DisallowHeapAllocation no_allocation;
    DCHECK(mode != ALLOW_COPIED_DOUBLE_ELEMENTS);
    bool is_holey = IsFastHoleyElementsKind(current_kind);
    // FAST_HOLEY_ELEMENTS is already the most general fast kind considered
    // here, so no scan is needed.
    if (current_kind == FAST_HOLEY_ELEMENTS) return;
    Object* the_hole = object->GetHeap()->the_hole_value();
    for (uint32_t i = 0; i < count; ++i) {
      Object* current = *objects++;
      if (current == the_hole) {
        is_holey = true;
        target_kind = GetHoleyElementsKind(target_kind);
      } else if (!current->IsSmi()) {
        if (mode == ALLOW_CONVERTED_DOUBLE_ELEMENTS && current->IsNumber()) {
          // A heap number seen while still in a Smi kind upgrades to the
          // corresponding double kind; holeyness already observed is kept.
          if (IsFastSmiElementsKind(target_kind)) {
            if (is_holey) {
              target_kind = FAST_HOLEY_DOUBLE_ELEMENTS;
            } else {
              target_kind = FAST_DOUBLE_ELEMENTS;
            }
          }
        } else if (is_holey) {
          // Holey object elements is as general as this scan can get;
          // no later value can widen further, so stop early.
          target_kind = FAST_HOLEY_ELEMENTS;
          break;
        } else {
          target_kind = FAST_ELEMENTS;
        }
      }
    }
  }
  if (target_kind != current_kind) {
    TransitionElementsKind(object, target_kind);
  }
}
1874
1875
// Variant taking a backing store: non-double stores are delegated to the
// Object** scan above; double stores transition the receiver to a double
// kind, holey when the receiver is already holey or a hole is present.
void JSObject::EnsureCanContainElements(Handle<JSObject> object,
                                        Handle<FixedArrayBase> elements,
                                        uint32_t length,
                                        EnsureElementsMode mode) {
  Heap* heap = object->GetHeap();
  if (elements->map() != heap->fixed_double_array_map()) {
    DCHECK(elements->map() == heap->fixed_array_map() ||
           elements->map() == heap->fixed_cow_array_map());
    // Copied doubles only apply to FixedDoubleArray stores; downgrade the
    // mode before scanning a tagged backing store.
    if (mode == ALLOW_COPIED_DOUBLE_ELEMENTS) {
      mode = DONT_ALLOW_DOUBLE_ELEMENTS;
    }
    Object** objects =
        Handle<FixedArray>::cast(elements)->GetFirstElementAddress();
    EnsureCanContainElements(object, objects, length, mode);
    return;
  }

  DCHECK(mode == ALLOW_COPIED_DOUBLE_ELEMENTS);
  if (object->GetElementsKind() == FAST_HOLEY_SMI_ELEMENTS) {
    TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
  } else if (object->GetElementsKind() == FAST_SMI_ELEMENTS) {
    Handle<FixedDoubleArray> double_array =
        Handle<FixedDoubleArray>::cast(elements);
    // Any hole in the source forces the holey double kind.
    for (uint32_t i = 0; i < length; ++i) {
      if (double_array->is_the_hole(i)) {
        TransitionElementsKind(object, FAST_HOLEY_DOUBLE_ELEMENTS);
        return;
      }
    }
    TransitionElementsKind(object, FAST_DOUBLE_ELEMENTS);
  }
}
1908
1909
// Migrates the object to `new_map` and installs `value` as its elements
// backing store, asserting that the store's type agrees with the map's
// elements kind.
void JSObject::SetMapAndElements(Handle<JSObject> object,
                                 Handle<Map> new_map,
                                 Handle<FixedArrayBase> value) {
  JSObject::MigrateToMap(object, new_map);
  // Tagged backing stores (fixed or COW arrays) must pair with smi/object
  // or string-wrapper element kinds; the empty fixed array is always fine.
  DCHECK((object->map()->has_fast_smi_or_object_elements() ||
          (*value == object->GetHeap()->empty_fixed_array()) ||
          object->map()->has_fast_string_wrapper_elements()) ==
         (value->map() == object->GetHeap()->fixed_array_map() ||
          value->map() == object->GetHeap()->fixed_cow_array_map()));
  DCHECK((*value == object->GetHeap()->empty_fixed_array()) ||
         (object->map()->has_fast_double_elements() ==
          value->IsFixedDoubleArray()));
  object->set_elements(*value);
}


// Writes the elements slot with a conditional write barrier.
void JSObject::set_elements(FixedArrayBase* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kElementsOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kElementsOffset, value, mode);
}


// Installs the map's canonical initial elements store; no write barrier is
// emitted here — the initial elements come from the map.
void JSObject::initialize_elements() {
  FixedArrayBase* elements = map()->GetInitialElements();
  WRITE_FIELD(this, kElementsOffset, elements);
}


// Convenience forwarders to the map's interceptor accessors.
InterceptorInfo* JSObject::GetIndexedInterceptor() {
  return map()->GetIndexedInterceptor();
}

InterceptorInfo* JSObject::GetNamedInterceptor() {
  return map()->GetNamedInterceptor();
}
1945
GetNamedInterceptor()1946 InterceptorInfo* Map::GetNamedInterceptor() {
1947 DCHECK(has_named_interceptor());
1948 JSFunction* constructor = JSFunction::cast(GetConstructor());
1949 DCHECK(constructor->shared()->IsApiFunction());
1950 return InterceptorInfo::cast(
1951 constructor->shared()->get_api_func_data()->named_property_handler());
1952 }
1953
GetIndexedInterceptor()1954 InterceptorInfo* Map::GetIndexedInterceptor() {
1955 DCHECK(has_indexed_interceptor());
1956 JSFunction* constructor = JSFunction::cast(GetConstructor());
1957 DCHECK(constructor->shared()->IsApiFunction());
1958 return InterceptorInfo::cast(
1959 constructor->shared()->get_api_func_data()->indexed_property_handler());
1960 }
1961
// Raw double view of the oddball's ToNumber value.
double Oddball::to_number_raw() const {
  return READ_DOUBLE_FIELD(this, kToNumberRawOffset);
}

void Oddball::set_to_number_raw(double value) {
  WRITE_DOUBLE_FIELD(this, kToNumberRawOffset, value);
}

// Tagged accessors for the oddball's string/number/typeof representations.
ACCESSORS(Oddball, to_string, String, kToStringOffset)
ACCESSORS(Oddball, to_number, Object, kToNumberOffset)
ACCESSORS(Oddball, type_of, String, kTypeOfOffset)


// The oddball kind is stored as a Smi in the kind slot.
byte Oddball::kind() const {
  return Smi::cast(READ_FIELD(this, kKindOffset))->value();
}


void Oddball::set_kind(byte value) {
  WRITE_FIELD(this, kKindOffset, Smi::FromInt(value));
}


// static
// ToNumber of an oddball is precomputed; just hand out the stored value.
Handle<Object> Oddball::ToNumber(Handle<Oddball> input) {
  return handle(input->to_number(), input->GetIsolate());
}


ACCESSORS(Cell, value, Object, kValueOffset)
ACCESSORS(PropertyCell, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(PropertyCell, property_details_raw, Object, kDetailsOffset)
ACCESSORS(PropertyCell, value, Object, kValueOffset)


// Property details are stored Smi-encoded; decode on read, encode on write.
PropertyDetails PropertyCell::property_details() {
  return PropertyDetails(Smi::cast(property_details_raw()));
}


void PropertyCell::set_property_details(PropertyDetails details) {
  set_property_details_raw(details.AsSmi());
}
2005
2006
Object* WeakCell::value() const { return READ_FIELD(this, kValueOffset); }


// Clears the weak reference by storing Smi zero in the value slot.
void WeakCell::clear() {
  // Either the garbage collector is clearing the cell or we are simply
  // initializing the root empty weak cell.
  DCHECK(GetHeap()->gc_state() == Heap::MARK_COMPACT ||
         this == GetHeap()->empty_weak_cell());
  WRITE_FIELD(this, kValueOffset, Smi::kZero);
}


void WeakCell::initialize(HeapObject* val) {
  WRITE_FIELD(this, kValueOffset, val);
  // We just have to execute the generational barrier here because we never
  // mark through a weak cell and collect evacuation candidates when we process
  // all weak cells.
  WriteBarrierMode mode = Marking::IsBlack(ObjectMarking::MarkBitFrom(this))
                              ? UPDATE_WRITE_BARRIER
                              : UPDATE_WEAK_WRITE_BARRIER;
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kValueOffset, val, mode);
}

// Smi zero in the value slot is the "cleared" sentinel (see clear()).
bool WeakCell::cleared() const { return value() == Smi::kZero; }

Object* WeakCell::next() const { return READ_FIELD(this, kNextOffset); }


void WeakCell::set_next(Object* val, WriteBarrierMode mode) {
  WRITE_FIELD(this, kNextOffset, val);
  if (mode == UPDATE_WRITE_BARRIER) {
    WRITE_BARRIER(GetHeap(), this, kNextOffset, val);
  }
}


// The hole marks a detached next pointer; passing it in avoids a heap
// lookup at each call site while the DCHECK keeps callers honest.
void WeakCell::clear_next(Object* the_hole_value) {
  DCHECK_EQ(GetHeap()->the_hole_value(), the_hole_value);
  set_next(the_hole_value, SKIP_WRITE_BARRIER);
}

bool WeakCell::next_cleared() { return next()->IsTheHole(GetIsolate()); }

// Header size of this object, derived from its instance type.
int JSObject::GetHeaderSize() { return GetHeaderSize(map()->instance_type()); }
2051
2052
GetHeaderSize(InstanceType type)2053 int JSObject::GetHeaderSize(InstanceType type) {
2054 // Check for the most common kind of JavaScript object before
2055 // falling into the generic switch. This speeds up the internal
2056 // field operations considerably on average.
2057 if (type == JS_OBJECT_TYPE) return JSObject::kHeaderSize;
2058 switch (type) {
2059 case JS_API_OBJECT_TYPE:
2060 case JS_SPECIAL_API_OBJECT_TYPE:
2061 return JSObject::kHeaderSize;
2062 case JS_GENERATOR_OBJECT_TYPE:
2063 return JSGeneratorObject::kSize;
2064 case JS_GLOBAL_PROXY_TYPE:
2065 return JSGlobalProxy::kSize;
2066 case JS_GLOBAL_OBJECT_TYPE:
2067 return JSGlobalObject::kSize;
2068 case JS_BOUND_FUNCTION_TYPE:
2069 return JSBoundFunction::kSize;
2070 case JS_FUNCTION_TYPE:
2071 return JSFunction::kSize;
2072 case JS_VALUE_TYPE:
2073 return JSValue::kSize;
2074 case JS_DATE_TYPE:
2075 return JSDate::kSize;
2076 case JS_ARRAY_TYPE:
2077 return JSArray::kSize;
2078 case JS_ARRAY_BUFFER_TYPE:
2079 return JSArrayBuffer::kSize;
2080 case JS_TYPED_ARRAY_TYPE:
2081 return JSTypedArray::kSize;
2082 case JS_DATA_VIEW_TYPE:
2083 return JSDataView::kSize;
2084 case JS_SET_TYPE:
2085 return JSSet::kSize;
2086 case JS_MAP_TYPE:
2087 return JSMap::kSize;
2088 case JS_SET_ITERATOR_TYPE:
2089 return JSSetIterator::kSize;
2090 case JS_MAP_ITERATOR_TYPE:
2091 return JSMapIterator::kSize;
2092 case JS_WEAK_MAP_TYPE:
2093 return JSWeakMap::kSize;
2094 case JS_WEAK_SET_TYPE:
2095 return JSWeakSet::kSize;
2096 case JS_PROMISE_TYPE:
2097 return JSObject::kHeaderSize;
2098 case JS_REGEXP_TYPE:
2099 return JSRegExp::kSize;
2100 case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
2101 return JSObject::kHeaderSize;
2102 case JS_MESSAGE_OBJECT_TYPE:
2103 return JSMessageObject::kSize;
2104 case JS_ARGUMENTS_TYPE:
2105 return JSArgumentsObject::kHeaderSize;
2106 case JS_ERROR_TYPE:
2107 return JSObject::kHeaderSize;
2108 case JS_STRING_ITERATOR_TYPE:
2109 return JSStringIterator::kSize;
2110 case JS_FIXED_ARRAY_ITERATOR_TYPE:
2111 return JSFixedArrayIterator::kHeaderSize;
2112 default:
2113 UNREACHABLE();
2114 return 0;
2115 }
2116 }
2117
2118
GetInternalFieldCount(Map * map)2119 int JSObject::GetInternalFieldCount(Map* map) {
2120 int instance_size = map->instance_size();
2121 if (instance_size == kVariableSizeSentinel) return 0;
2122 InstanceType instance_type = map->instance_type();
2123 return ((instance_size - GetHeaderSize(instance_type)) >> kPointerSizeLog2) -
2124 map->GetInObjectProperties();
2125 }
2126
2127
int JSObject::GetInternalFieldCount() { return GetInternalFieldCount(map()); }


// Byte offset of internal field `index`; fields live right after the header.
int JSObject::GetInternalFieldOffset(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  return GetHeaderSize() + (kPointerSize * index);
}


Object* JSObject::GetInternalField(int index) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  return READ_FIELD(this, GetHeaderSize() + (kPointerSize * index));
}


// Stores an arbitrary object into an internal field, with write barrier.
void JSObject::SetInternalField(int index, Object* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


// Smi overload: Smis are not heap pointers, so no write barrier is needed.
void JSObject::SetInternalField(int index, Smi* value) {
  DCHECK(index < GetInternalFieldCount() && index >= 0);
  // Internal objects do follow immediately after the header, whereas in-object
  // properties are at the end of the object. Therefore there is no need
  // to adjust the index here.
  int offset = GetHeaderSize() + (kPointerSize * index);
  WRITE_FIELD(this, offset, value);
}
2165
2166
// True when the field at `index` stores a raw double (no heap number box).
// Only possible when double-field unboxing is enabled.
bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  return map()->IsUnboxedDoubleField(index);
}


bool Map::IsUnboxedDoubleField(FieldIndex index) {
  if (!FLAG_unbox_double_fields) return false;
  // Only regular in-object fields can be unboxed; out-of-object properties
  // and hidden fields are always tagged.
  if (index.is_hidden_field() || !index.is_inobject()) return false;
  return !layout_descriptor()->IsTagged(index.property_index());
}
2178
2179
// Access fast-case object properties at index. The use of these routines
// is needed to correctly distinguish between properties stored in-object and
// properties stored in the properties array.
Object* JSObject::RawFastPropertyAt(FieldIndex index) {
  DCHECK(!IsUnboxedDoubleField(index));
  if (index.is_inobject()) {
    return READ_FIELD(this, index.offset());
  } else {
    return properties()->get(index.outobject_array_index());
  }
}


// Reads an unboxed double stored directly in the object body.
double JSObject::RawFastDoublePropertyAt(FieldIndex index) {
  DCHECK(IsUnboxedDoubleField(index));
  return READ_DOUBLE_FIELD(this, index.offset());
}


// Stores a tagged value either into the object body (with write barrier)
// or into the out-of-object properties array.
void JSObject::RawFastPropertyAtPut(FieldIndex index, Object* value) {
  if (index.is_inobject()) {
    int offset = index.offset();
    WRITE_FIELD(this, offset, value);
    WRITE_BARRIER(GetHeap(), this, offset, value);
  } else {
    properties()->set(index.outobject_array_index(), value);
  }
}


// Raw-double store; no write barrier since no pointer is written.
void JSObject::RawFastDoublePropertyAtPut(FieldIndex index, double value) {
  WRITE_DOUBLE_FIELD(this, index.offset(), value);
}
2213
2214
FastPropertyAtPut(FieldIndex index,Object * value)2215 void JSObject::FastPropertyAtPut(FieldIndex index, Object* value) {
2216 if (IsUnboxedDoubleField(index)) {
2217 DCHECK(value->IsMutableHeapNumber());
2218 RawFastDoublePropertyAtPut(index, HeapNumber::cast(value)->value());
2219 } else {
2220 RawFastPropertyAtPut(index, value);
2221 }
2222 }
2223
// Writes `value` into the data field described by `descriptor`/`details`.
// Double-representation fields either store the raw bits (unboxed) or
// mutate the existing heap-number box in place.
void JSObject::WriteToField(int descriptor, PropertyDetails details,
                            Object* value) {
  DCHECK(details.type() == DATA);
  DisallowHeapAllocation no_gc;
  FieldIndex index = FieldIndex::ForDescriptor(map(), descriptor);
  if (details.representation().IsDouble()) {
    // Nothing more to be done.
    if (value->IsUninitialized(this->GetIsolate())) {
      return;
    }
    if (IsUnboxedDoubleField(index)) {
      RawFastDoublePropertyAtPut(index, value->Number());
    } else {
      // Mutating the box keeps the field pointer stable, so no barrier or
      // allocation is needed.
      HeapNumber* box = HeapNumber::cast(RawFastPropertyAt(index));
      DCHECK(box->IsMutableHeapNumber());
      box->set_value(value->Number());
    }
  } else {
    RawFastPropertyAtPut(index, value);
  }
}

// Convenience overload that looks the details up in the instance descriptors.
void JSObject::WriteToField(int descriptor, Object* value) {
  DescriptorArray* desc = map()->instance_descriptors();
  PropertyDetails details = desc->GetDetails(descriptor);
  WriteToField(descriptor, details, value);
}

int JSObject::GetInObjectPropertyOffset(int index) {
  return map()->GetInObjectPropertyOffset(index);
}


Object* JSObject::InObjectPropertyAt(int index) {
  int offset = GetInObjectPropertyOffset(index);
  return READ_FIELD(this, offset);
}


// Stores an in-object property and returns the stored value for chaining.
Object* JSObject::InObjectPropertyAtPut(int index,
                                        Object* value,
                                        WriteBarrierMode mode) {
  // Adjust for the number of properties stored in the object.
  int offset = GetInObjectPropertyOffset(index);
  WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
  return value;
}


// Fills the object body from `start_offset` to the end: pre-allocated
// property slots get `pre_allocated_value`, the unused tail (slack) gets
// `filler_value`. Both fillers must be old-space/immortal so the stores
// can skip the write barrier.
void JSObject::InitializeBody(Map* map, int start_offset,
                              Object* pre_allocated_value,
                              Object* filler_value) {
  DCHECK(!filler_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(filler_value));
  DCHECK(!pre_allocated_value->IsHeapObject() ||
         !GetHeap()->InNewSpace(pre_allocated_value));
  int size = map->instance_size();
  int offset = start_offset;
  if (filler_value != pre_allocated_value) {
    int end_of_pre_allocated_offset =
        size - (map->unused_property_fields() * kPointerSize);
    DCHECK_LE(kHeaderSize, end_of_pre_allocated_offset);
    while (offset < end_of_pre_allocated_offset) {
      WRITE_FIELD(this, offset, pre_allocated_value);
      offset += kPointerSize;
    }
  }
  while (offset < size) {
    WRITE_FIELD(this, offset, filler_value);
    offset += kPointerSize;
  }
}
2297
2298
TooManyFastProperties(StoreFromKeyed store_mode)2299 bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
2300 if (unused_property_fields() != 0) return false;
2301 if (is_prototype_map()) return false;
2302 int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
2303 int limit = Max(minimum, GetInObjectProperties());
2304 int external = NumberOfFields() - GetInObjectProperties();
2305 return external > limit;
2306 }
2307
2308
// Fills every pointer slot of a Struct body with undefined. Undefined is
// immortal, so WRITE_FIELD without a barrier is safe here.
void Struct::InitializeBody(int object_size) {
  Object* value = GetHeap()->undefined_value();
  for (int offset = kHeaderSize; offset < object_size; offset += kPointerSize) {
    WRITE_FIELD(this, offset, value);
  }
}

// An array length is any uint32; an array index additionally excludes
// kMaxUInt32 (which is reserved for the length itself).
bool Object::ToArrayLength(uint32_t* index) { return Object::ToUint32(index); }


bool Object::ToArrayIndex(uint32_t* index) {
  return Object::ToUint32(index) && *index != kMaxUInt32;
}


// Debug-only check that values returned from embedder API callbacks belong
// to the set of types the API is allowed to produce.
void Object::VerifyApiCallResultType() {
#if DEBUG
  if (IsSmi()) return;
  DCHECK(IsHeapObject());
  Isolate* isolate = HeapObject::cast(this)->GetIsolate();
  if (!(IsString() || IsSymbol() || IsJSReceiver() || IsHeapNumber() ||
        IsSimd128Value() || IsUndefined(isolate) || IsTrue(isolate) ||
        IsFalse(isolate) || IsNull(isolate))) {
    FATAL("API call returned invalid object");
  }
#endif  // DEBUG
}
2336
2337
Object* FixedArray::get(int index) const {
  SLOW_DCHECK(index >= 0 && index < this->length());
  return READ_FIELD(this, kHeaderSize + index * kPointerSize);
}

// Handlified element read.
Handle<Object> FixedArray::get(FixedArray* array, int index, Isolate* isolate) {
  return handle(array->get(index), isolate);
}

// Typed read treating undefined as "absent".
template <class T>
MaybeHandle<T> FixedArray::GetValue(Isolate* isolate, int index) const {
  Object* obj = get(index);
  if (obj->IsUndefined(isolate)) return MaybeHandle<T>();
  return Handle<T>(T::cast(obj), isolate);
}

// Typed read that asserts the slot is populated.
template <class T>
Handle<T> FixedArray::GetValueChecked(Isolate* isolate, int index) const {
  Object* obj = get(index);
  CHECK(!obj->IsUndefined(isolate));
  return Handle<T>(T::cast(obj), isolate);
}
bool FixedArray::is_the_hole(Isolate* isolate, int index) {
  return get(index)->IsTheHole(isolate);
}

// Smi store: no write barrier needed since Smis are not heap pointers.
// COW arrays must never be mutated, hence the map check.
void FixedArray::set(int index, Smi* value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(reinterpret_cast<Object*>(value)->IsSmi());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
}


// General store with write barrier.
void FixedArray::set(int index, Object* value) {
  DCHECK_NE(GetHeap()->fixed_cow_array_map(), map());
  DCHECK(IsFixedArray());
  DCHECK_GE(index, 0);
  DCHECK_LT(index, this->length());
  int offset = kHeaderSize + index * kPointerSize;
  WRITE_FIELD(this, offset, value);
  WRITE_BARRIER(GetHeap(), this, offset, value);
}


// Reads element `index` as a double; must not be a hole (see
// get_representation for raw access).
double FixedDoubleArray::get_scalar(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!is_the_hole(index));
  return READ_DOUBLE_FIELD(this, kHeaderSize + index * kDoubleSize);
}


// Raw bit pattern of element `index`; used to distinguish the hole NaN
// from ordinary NaNs.
uint64_t FixedDoubleArray::get_representation(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kDoubleSize;
  return READ_UINT64_FIELD(this, offset);
}
2400
get(FixedDoubleArray * array,int index,Isolate * isolate)2401 Handle<Object> FixedDoubleArray::get(FixedDoubleArray* array, int index,
2402 Isolate* isolate) {
2403 if (array->is_the_hole(index)) {
2404 return isolate->factory()->the_hole_value();
2405 } else {
2406 return isolate->factory()->NewNumber(array->get_scalar(index));
2407 }
2408 }
2409
2410
// Stores a double, canonicalizing every NaN to the quiet NaN so that no
// stored bit pattern can collide with the hole NaN (kHoleNanInt64).
void FixedDoubleArray::set(int index, double value) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  if (std::isnan(value)) {
    WRITE_DOUBLE_FIELD(this, offset, std::numeric_limits<double>::quiet_NaN());
  } else {
    WRITE_DOUBLE_FIELD(this, offset, value);
  }
  DCHECK(!is_the_hole(index));
}


// Marks element `index` as a hole by writing the hole NaN bit pattern.
void FixedDoubleArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map() &&
         map() != GetHeap()->fixed_array_map());
  int offset = kHeaderSize + index * kDoubleSize;
  WRITE_UINT64_FIELD(this, offset, kHoleNanInt64);
}

// Isolate-taking overload for interface parity with FixedArray; the
// isolate is not needed for double arrays.
bool FixedDoubleArray::is_the_hole(Isolate* isolate, int index) {
  return is_the_hole(index);
}

bool FixedDoubleArray::is_the_hole(int index) {
  return get_representation(index) == kHoleNanInt64;
}


double* FixedDoubleArray::data_start() {
  return reinterpret_cast<double*>(FIELD_ADDR(this, kHeaderSize));
}


// Fills [from, to) with holes.
void FixedDoubleArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}
2450
2451
// Reads slot `index` (past the bookkeeping prefix). Cleared slots hold a
// Smi; live slots hold a WeakCell whose value is returned.
Object* WeakFixedArray::Get(int index) const {
  Object* raw = FixedArray::cast(this)->get(index + kFirstIndex);
  if (raw->IsSmi()) return raw;
  DCHECK(raw->IsWeakCell());
  return WeakCell::cast(raw)->value();
}


// A slot is empty when it holds a Smi (either never used or cleared).
bool WeakFixedArray::IsEmptySlot(int index) const {
  DCHECK(index < Length());
  return Get(index)->IsSmi();
}


// Clears slot `index` by storing Smi zero.
void WeakFixedArray::Clear(int index) {
  FixedArray::cast(this)->set(index + kFirstIndex, Smi::kZero);
}


// Usable length, excluding the bookkeeping prefix.
int WeakFixedArray::Length() const {
  return FixedArray::cast(this)->length() - kFirstIndex;
}


// Highest slot index in use, stored Smi-encoded in the prefix.
int WeakFixedArray::last_used_index() const {
  return Smi::cast(FixedArray::cast(this)->get(kLastUsedIndexIndex))->value();
}


void WeakFixedArray::set_last_used_index(int index) {
  FixedArray::cast(this)->set(kLastUsedIndexIndex, Smi::FromInt(index));
}
2484
2485
2486 template <class T>
Next()2487 T* WeakFixedArray::Iterator::Next() {
2488 if (list_ != NULL) {
2489 // Assert that list did not change during iteration.
2490 DCHECK_EQ(last_used_index_, list_->last_used_index());
2491 while (index_ < list_->Length()) {
2492 Object* item = list_->Get(index_++);
2493 if (item != Empty()) return T::cast(item);
2494 }
2495 list_ = NULL;
2496 }
2497 return NULL;
2498 }
2499
2500
// Logical length, stored Smi-encoded at kLengthIndex; a zero-capacity
// backing store means an empty list.
int ArrayList::Length() {
  if (FixedArray::cast(this)->length() == 0) return 0;
  return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
}


void ArrayList::SetLength(int length) {
  return FixedArray::cast(this)->set(kLengthIndex, Smi::FromInt(length));
}


// Element access; user elements start at kFirstIndex.
Object* ArrayList::Get(int index) {
  return FixedArray::cast(this)->get(kFirstIndex + index);
}


// Raw slot address of element `index`.
Object** ArrayList::Slot(int index) {
  return data_start() + kFirstIndex + index;
}

void ArrayList::Set(int index, Object* obj, WriteBarrierMode mode) {
  FixedArray::cast(this)->set(kFirstIndex + index, obj, mode);
}


// Clears a slot to undefined. Undefined is immortal, so the barrier can
// be skipped; callers pass it in to avoid a heap lookup.
void ArrayList::Clear(int index, Object* undefined) {
  DCHECK(undefined->IsUndefined(GetIsolate()));
  FixedArray::cast(this)
      ->set(kFirstIndex + index, undefined, SKIP_WRITE_BARRIER);
}
2531
// Accessors for the regexp last-match-info array. Layout: a fixed overhead
// of bookkeeping slots followed by the capture registers.
int RegExpMatchInfo::NumberOfCaptureRegisters() {
  DCHECK_GE(length(), kLastMatchOverhead);
  Object* obj = get(kNumberOfCapturesIndex);
  return Smi::cast(obj)->value();
}

void RegExpMatchInfo::SetNumberOfCaptureRegisters(int value) {
  DCHECK_GE(length(), kLastMatchOverhead);
  set(kNumberOfCapturesIndex, Smi::FromInt(value));
}

// The subject string of the last successful match.
String* RegExpMatchInfo::LastSubject() {
  DCHECK_GE(length(), kLastMatchOverhead);
  Object* obj = get(kLastSubjectIndex);
  return String::cast(obj);
}

void RegExpMatchInfo::SetLastSubject(String* value) {
  DCHECK_GE(length(), kLastMatchOverhead);
  set(kLastSubjectIndex, value);
}

// The input value of the last match; kept as Object, not String.
Object* RegExpMatchInfo::LastInput() {
  DCHECK_GE(length(), kLastMatchOverhead);
  return get(kLastInputIndex);
}

void RegExpMatchInfo::SetLastInput(Object* value) {
  DCHECK_GE(length(), kLastMatchOverhead);
  set(kLastInputIndex, value);
}

// Capture register `i` (Smi-encoded).
int RegExpMatchInfo::Capture(int i) {
  DCHECK_LT(i, NumberOfCaptureRegisters());
  Object* obj = get(kFirstCaptureIndex + i);
  return Smi::cast(obj)->value();
}

void RegExpMatchInfo::SetCapture(int i, int value) {
  DCHECK_LT(i, NumberOfCaptureRegisters());
  set(kFirstCaptureIndex + i, Smi::FromInt(value));
}
2574
GetWriteBarrierMode(const DisallowHeapAllocation & promise)2575 WriteBarrierMode HeapObject::GetWriteBarrierMode(
2576 const DisallowHeapAllocation& promise) {
2577 Heap* heap = GetHeap();
2578 if (heap->incremental_marking()->IsMarking()) return UPDATE_WRITE_BARRIER;
2579 if (heap->InNewSpace(this)) return SKIP_WRITE_BARRIER;
2580 return UPDATE_WRITE_BARRIER;
2581 }
2582
2583
// Alignment required when allocating/moving this object. Only relevant on
// 32-bit hosts, where doubles need 8-byte alignment of their payload.
AllocationAlignment HeapObject::RequiredAlignment() {
#ifdef V8_HOST_ARCH_32_BIT
  if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
      FixedArrayBase::cast(this)->length() != 0) {
    return kDoubleAligned;
  }
  if (IsHeapNumber()) return kDoubleUnaligned;
  if (IsSimd128Value()) return kSimd128Unaligned;
#endif  // V8_HOST_ARCH_32_BIT
  return kWordAligned;
}
2595
2596
// Store with caller-chosen write-barrier mode (see GetWriteBarrierMode).
void FixedArray::set(int index,
                     Object* value,
                     WriteBarrierMode mode) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  int offset = kHeaderSize + index * kPointerSize;
  NOBARRIER_WRITE_FIELD(this, offset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode);
}


// Barrier-free store; only valid when `value` is not in new space, which
// the DCHECK enforces.
void FixedArray::NoWriteBarrierSet(FixedArray* array,
                                   int index,
                                   Object* value) {
  DCHECK(array->map() != array->GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < array->length());
  DCHECK(!array->GetHeap()->InNewSpace(value));
  NOBARRIER_WRITE_FIELD(array, kHeaderSize + index * kPointerSize, value);
}


// The following setters store immortal roots (undefined/null/the-hole),
// which never live in new space, so the write barrier is skipped.
void FixedArray::set_undefined(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->undefined_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->undefined_value());
}


void FixedArray::set_null(int index) {
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->null_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->null_value());
}


void FixedArray::set_the_hole(int index) {
  DCHECK(map() != GetHeap()->fixed_cow_array_map());
  DCHECK(index >= 0 && index < this->length());
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->the_hole_value()));
  WRITE_FIELD(this,
              kHeaderSize + index * kPointerSize,
              GetHeap()->the_hole_value());
}


// Fills [from, to) with the hole.
void FixedArray::FillWithHoles(int from, int to) {
  for (int i = from; i < to; i++) {
    set_the_hole(i);
  }
}


Object** FixedArray::data_start() {
  return HeapObject::RawField(this, kHeaderSize);
}


// Raw slot address of element `index`.
Object** FixedArray::RawFieldOfElementAt(int index) {
  return HeapObject::RawField(this, OffsetOfElementAt(index));
}
2662
// Defines a typed getter/setter pair for each per-frame field of a
// FrameArray. Frames are stored flattened after kFirstIndex, occupying
// kElementsPerFrame slots each.
#define DEFINE_FRAME_ARRAY_ACCESSORS(name, type)                              \
  type* FrameArray::name(int frame_ix) const {                                \
    Object* obj =                                                             \
        get(kFirstIndex + frame_ix * kElementsPerFrame + k##name##Offset);    \
    return type::cast(obj);                                                   \
  }                                                                           \
                                                                              \
  void FrameArray::Set##name(int frame_ix, type* value) {                     \
    set(kFirstIndex + frame_ix * kElementsPerFrame + k##name##Offset, value); \
  }
FRAME_ARRAY_FIELD_LIST(DEFINE_FRAME_ARRAY_ACCESSORS)
#undef DEFINE_FRAME_ARRAY_ACCESSORS
2675
// Returns true if the frame at |frame_ix| is a wasm frame.
bool FrameArray::IsWasmFrame(int frame_ix) const {
  const int flags = Flags(frame_ix)->value();
  return (flags & kIsWasmFrame) != 0;
}

// Returns true if the frame at |frame_ix| is a wasm frame translated from
// asm.js code.
bool FrameArray::IsAsmJsWasmFrame(int frame_ix) const {
  const int flags = Flags(frame_ix)->value();
  return (flags & kIsAsmJsWasmFrame) != 0;
}

// Number of frames currently stored (kept in a dedicated slot; never
// negative).
int FrameArray::FrameCount() const {
  const int frame_count = Smi::cast(get(kFrameCountIndex))->value();
  DCHECK_LE(0, frame_count);
  return frame_count;
}
2691
// True for the canonical empty descriptor array, which is too short to even
// hold the header slots.
bool DescriptorArray::IsEmpty() {
  DCHECK(length() >= kFirstIndex ||
         this == GetHeap()->empty_descriptor_array());
  return length() < kFirstIndex;
}


// Number of descriptors in use. Stored in a dedicated header slot rather
// than derived from length(), because the array may contain slack.
int DescriptorArray::number_of_descriptors() {
  DCHECK(length() >= kFirstIndex || IsEmpty());
  int len = length();
  return len == 0 ? 0 : Smi::cast(get(kDescriptorLengthIndex))->value();
}


// Number of descriptors this array has storage for (used plus slack).
int DescriptorArray::number_of_descriptors_storage() {
  int len = length();
  return len == 0 ? 0 : (len - kFirstIndex) / kDescriptorSize;
}


// Unused descriptor capacity remaining in this array.
int DescriptorArray::NumberOfSlackDescriptors() {
  return number_of_descriptors_storage() - number_of_descriptors();
}


void DescriptorArray::SetNumberOfDescriptors(int number_of_descriptors) {
  WRITE_FIELD(
      this, kDescriptorLengthOffset, Smi::FromInt(number_of_descriptors));
}


// Uniform entry-count interface shared with the Search() templates below.
inline int DescriptorArray::number_of_entries() {
  return number_of_descriptors();
}
2726
2727
// The enum-cache slot holds a Smi until a cache is installed.
bool DescriptorArray::HasEnumCache() {
  return !IsEmpty() && !get(kEnumCacheIndex)->IsSmi();
}


void DescriptorArray::CopyEnumCacheFrom(DescriptorArray* array) {
  set(kEnumCacheIndex, array->get(kEnumCacheIndex));
}


// Returns the cached enumeration keys. The slot holds a "bridge" FixedArray
// whose kEnumCacheBridgeCacheIndex element is the actual cache.
FixedArray* DescriptorArray::GetEnumCache() {
  DCHECK(HasEnumCache());
  FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
  return FixedArray::cast(bridge->get(kEnumCacheBridgeCacheIndex));
}


// True if the bridge additionally carries a cache of enumeration indices.
bool DescriptorArray::HasEnumIndicesCache() {
  if (IsEmpty()) return false;
  Object* object = get(kEnumCacheIndex);
  if (object->IsSmi()) return false;  // No enum cache installed at all.
  FixedArray* bridge = FixedArray::cast(object);
  return !bridge->get(kEnumCacheBridgeIndicesCacheIndex)->IsSmi();
}


FixedArray* DescriptorArray::GetEnumIndicesCache() {
  DCHECK(HasEnumIndicesCache());
  FixedArray* bridge = FixedArray::cast(get(kEnumCacheIndex));
  return FixedArray::cast(bridge->get(kEnumCacheBridgeIndicesCacheIndex));
}


// Raw address of the enum-cache slot.
Object** DescriptorArray::GetEnumCacheSlot() {
  DCHECK(HasEnumCache());
  return HeapObject::RawField(reinterpret_cast<HeapObject*>(this),
                              kEnumCacheOffset);
}
2766
2767 // Perform a binary search in a fixed array.
2768 template <SearchMode search_mode, typename T>
BinarySearch(T * array,Name * name,int valid_entries,int * out_insertion_index)2769 int BinarySearch(T* array, Name* name, int valid_entries,
2770 int* out_insertion_index) {
2771 DCHECK(search_mode == ALL_ENTRIES || out_insertion_index == NULL);
2772 int low = 0;
2773 int high = array->number_of_entries() - 1;
2774 uint32_t hash = name->hash_field();
2775 int limit = high;
2776
2777 DCHECK(low <= high);
2778
2779 while (low != high) {
2780 int mid = low + (high - low) / 2;
2781 Name* mid_name = array->GetSortedKey(mid);
2782 uint32_t mid_hash = mid_name->hash_field();
2783
2784 if (mid_hash >= hash) {
2785 high = mid;
2786 } else {
2787 low = mid + 1;
2788 }
2789 }
2790
2791 for (; low <= limit; ++low) {
2792 int sort_index = array->GetSortedKeyIndex(low);
2793 Name* entry = array->GetKey(sort_index);
2794 uint32_t current_hash = entry->hash_field();
2795 if (current_hash != hash) {
2796 if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
2797 *out_insertion_index = sort_index + (current_hash > hash ? 0 : 1);
2798 }
2799 return T::kNotFound;
2800 }
2801 if (entry == name) {
2802 if (search_mode == ALL_ENTRIES || sort_index < valid_entries) {
2803 return sort_index;
2804 }
2805 return T::kNotFound;
2806 }
2807 }
2808
2809 if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
2810 *out_insertion_index = limit + 1;
2811 }
2812 return T::kNotFound;
2813 }
2814
2815
2816 // Perform a linear search in this fixed array. len is the number of entry
2817 // indices that are valid.
2818 template <SearchMode search_mode, typename T>
LinearSearch(T * array,Name * name,int valid_entries,int * out_insertion_index)2819 int LinearSearch(T* array, Name* name, int valid_entries,
2820 int* out_insertion_index) {
2821 if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
2822 uint32_t hash = name->hash_field();
2823 int len = array->number_of_entries();
2824 for (int number = 0; number < len; number++) {
2825 int sorted_index = array->GetSortedKeyIndex(number);
2826 Name* entry = array->GetKey(sorted_index);
2827 uint32_t current_hash = entry->hash_field();
2828 if (current_hash > hash) {
2829 *out_insertion_index = sorted_index;
2830 return T::kNotFound;
2831 }
2832 if (entry == name) return sorted_index;
2833 }
2834 *out_insertion_index = len;
2835 return T::kNotFound;
2836 } else {
2837 DCHECK_LE(valid_entries, array->number_of_entries());
2838 DCHECK_NULL(out_insertion_index); // Not supported here.
2839 for (int number = 0; number < valid_entries; number++) {
2840 if (array->GetKey(number) == name) return number;
2841 }
2842 return T::kNotFound;
2843 }
2844 }
2845
2846
2847 template <SearchMode search_mode, typename T>
Search(T * array,Name * name,int valid_entries,int * out_insertion_index)2848 int Search(T* array, Name* name, int valid_entries, int* out_insertion_index) {
2849 SLOW_DCHECK(array->IsSortedNoDuplicates());
2850
2851 if (valid_entries == 0) {
2852 if (search_mode == ALL_ENTRIES && out_insertion_index != nullptr) {
2853 *out_insertion_index = 0;
2854 }
2855 return T::kNotFound;
2856 }
2857
2858 // Fast case: do linear search for small arrays.
2859 const int kMaxElementsForLinearSearch = 8;
2860 if (valid_entries <= kMaxElementsForLinearSearch) {
2861 return LinearSearch<search_mode>(array, name, valid_entries,
2862 out_insertion_index);
2863 }
2864
2865 // Slow case: perform binary search.
2866 return BinarySearch<search_mode>(array, name, valid_entries,
2867 out_insertion_index);
2868 }
2869
2870
Search(Name * name,int valid_descriptors)2871 int DescriptorArray::Search(Name* name, int valid_descriptors) {
2872 DCHECK(name->IsUniqueName());
2873 return internal::Search<VALID_ENTRIES>(this, name, valid_descriptors, NULL);
2874 }
2875
SearchWithCache(Isolate * isolate,Name * name,Map * map)2876 int DescriptorArray::SearchWithCache(Isolate* isolate, Name* name, Map* map) {
2877 DCHECK(name->IsUniqueName());
2878 int number_of_own_descriptors = map->NumberOfOwnDescriptors();
2879 if (number_of_own_descriptors == 0) return kNotFound;
2880
2881 DescriptorLookupCache* cache = isolate->descriptor_lookup_cache();
2882 int number = cache->Lookup(map, name);
2883
2884 if (number == DescriptorLookupCache::kAbsent) {
2885 number = Search(name, number_of_own_descriptors);
2886 cache->Update(map, name, number);
2887 }
2888
2889 return number;
2890 }
2891
PropertyDetails Map::GetLastDescriptorDetails() {
  return instance_descriptors()->GetDetails(LastAdded());
}


// Index of the most recently added descriptor; requires at least one.
int Map::LastAdded() {
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(number_of_own_descriptors > 0);
  return number_of_own_descriptors - 1;
}


int Map::NumberOfOwnDescriptors() {
  return NumberOfOwnDescriptorsBits::decode(bit_field3());
}


void Map::SetNumberOfOwnDescriptors(int number) {
  // A map can never claim more descriptors than its descriptor array holds.
  DCHECK(number <= instance_descriptors()->number_of_descriptors());
  set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}


// Enum length stored in bit_field3; may be kInvalidEnumCacheSentinel.
int Map::EnumLength() { return EnumLengthBits::decode(bit_field3()); }


void Map::SetEnumLength(int length) {
  if (length != kInvalidEnumCacheSentinel) {
    DCHECK(length >= 0);
    // A non-zero enum length implies an enum cache has been installed.
    DCHECK(length == 0 || instance_descriptors()->HasEnumCache());
    DCHECK(length <= NumberOfOwnDescriptors());
  }
  set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}
2926
2927
// Returns the canonical empty backing store for this map's elements kind.
// The result always lives in old space (asserted), so callers may store it
// without a write barrier.
FixedArrayBase* Map::GetInitialElements() {
  FixedArrayBase* result = nullptr;
  if (has_fast_elements() || has_fast_string_wrapper_elements()) {
    result = GetHeap()->empty_fixed_array();
  } else if (has_fast_sloppy_arguments_elements()) {
    result = GetHeap()->empty_sloppy_arguments_elements();
  } else if (has_fixed_typed_array_elements()) {
    result = GetHeap()->EmptyFixedTypedArrayForMap(this);
  } else {
    UNREACHABLE();
  }
  DCHECK(!GetHeap()->InNewSpace(result));
  return result;
}

// static
// Reconfigures the property at |modify_index|, keeping the elements kind.
Handle<Map> Map::ReconfigureProperty(Handle<Map> map, int modify_index,
                                     PropertyKind new_kind,
                                     PropertyAttributes new_attributes,
                                     Representation new_representation,
                                     Handle<FieldType> new_field_type,
                                     StoreMode store_mode) {
  return Reconfigure(map, map->elements_kind(), modify_index, new_kind,
                     new_attributes, new_representation, new_field_type,
                     store_mode);
}

// static
// Changes only the elements kind; modify_index -1 means no property changes.
Handle<Map> Map::ReconfigureElementsKind(Handle<Map> map,
                                         ElementsKind new_elements_kind) {
  return Reconfigure(map, new_elements_kind, -1, kData, NONE,
                     Representation::None(), FieldType::None(map->GetIsolate()),
                     ALLOW_IN_DESCRIPTOR);
}
2962
Object** DescriptorArray::GetKeySlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToKeyIndex(descriptor_number));
}


// A descriptor's slot range starts at its key slot...
Object** DescriptorArray::GetDescriptorStartSlot(int descriptor_number) {
  return GetKeySlot(descriptor_number);
}


// ...and ends one past the value slot of the preceding descriptor number.
Object** DescriptorArray::GetDescriptorEndSlot(int descriptor_number) {
  return GetValueSlot(descriptor_number - 1) + 1;
}


Name* DescriptorArray::GetKey(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return Name::cast(get(ToKeyIndex(descriptor_number)));
}


// The hash-sorted order is threaded through the details' pointer field.
int DescriptorArray::GetSortedKeyIndex(int descriptor_number) {
  return GetDetails(descriptor_number).pointer();
}


Name* DescriptorArray::GetSortedKey(int descriptor_number) {
  return GetKey(GetSortedKeyIndex(descriptor_number));
}


void DescriptorArray::SetSortedKey(int descriptor_index, int pointer) {
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index), details.set_pointer(pointer).AsSmi());
}


void DescriptorArray::SetRepresentation(int descriptor_index,
                                        Representation representation) {
  DCHECK(!representation.IsNone());
  PropertyDetails details = GetDetails(descriptor_index);
  set(ToDetailsIndex(descriptor_index),
      details.CopyWithRepresentation(representation).AsSmi());
}
3008
3009
Object** DescriptorArray::GetValueSlot(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return RawFieldOfElementAt(ToValueIndex(descriptor_number));
}


int DescriptorArray::GetValueOffset(int descriptor_number) {
  return OffsetOfElementAt(ToValueIndex(descriptor_number));
}


Object* DescriptorArray::GetValue(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  return get(ToValueIndex(descriptor_number));
}


void DescriptorArray::SetValue(int descriptor_index, Object* value) {
  set(ToValueIndex(descriptor_index), value);
}


// Details are stored as a Smi alongside each key/value pair.
PropertyDetails DescriptorArray::GetDetails(int descriptor_number) {
  DCHECK(descriptor_number < number_of_descriptors());
  Object* details = get(ToDetailsIndex(descriptor_number));
  return PropertyDetails(Smi::cast(details));
}


PropertyType DescriptorArray::GetType(int descriptor_number) {
  return GetDetails(descriptor_number).type();
}


// Only valid for field descriptors (location() == kField).
int DescriptorArray::GetFieldIndex(int descriptor_number) {
  DCHECK(GetDetails(descriptor_number).location() == kField);
  return GetDetails(descriptor_number).field_index();
}

Object* DescriptorArray::GetConstant(int descriptor_number) {
  return GetValue(descriptor_number);
}


Object* DescriptorArray::GetCallbacksObject(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
  return GetValue(descriptor_number);
}


// Accessor descriptors store a Foreign wrapping an AccessorDescriptor*.
AccessorDescriptor* DescriptorArray::GetCallbacks(int descriptor_number) {
  DCHECK(GetType(descriptor_number) == ACCESSOR_CONSTANT);
  Foreign* p = Foreign::cast(GetCallbacksObject(descriptor_number));
  return reinterpret_cast<AccessorDescriptor*>(p->foreign_address());
}


// Copies descriptor |descriptor_number| out into |desc| (handlified).
void DescriptorArray::Get(int descriptor_number, Descriptor* desc) {
  desc->Init(handle(GetKey(descriptor_number), GetIsolate()),
             handle(GetValue(descriptor_number), GetIsolate()),
             GetDetails(descriptor_number));
}
3072
3073
SetDescriptor(int descriptor_number,Descriptor * desc)3074 void DescriptorArray::SetDescriptor(int descriptor_number, Descriptor* desc) {
3075 // Range check.
3076 DCHECK(descriptor_number < number_of_descriptors());
3077 set(ToKeyIndex(descriptor_number), *desc->GetKey());
3078 set(ToValueIndex(descriptor_number), *desc->GetValue());
3079 set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
3080 }
3081
3082
// Writes |desc|'s key, value and details into descriptor |descriptor_number|.
void DescriptorArray::Set(int descriptor_number, Descriptor* desc) {
  // Range check.
  DCHECK(descriptor_number < number_of_descriptors());

  set(ToKeyIndex(descriptor_number), *desc->GetKey());
  set(ToValueIndex(descriptor_number), *desc->GetValue());
  set(ToDetailsIndex(descriptor_number), desc->GetDetails().AsSmi());
}
3091
3092
// Appends |desc| as the last descriptor and restores the hash-sorted order
// with an insertion-sort pass over the sorted-key pointers. Must not
// allocate: the caller has already grown the array.
void DescriptorArray::Append(Descriptor* desc) {
  DisallowHeapAllocation no_gc;
  int descriptor_number = number_of_descriptors();
  SetNumberOfDescriptors(descriptor_number + 1);
  Set(descriptor_number, desc);

  uint32_t hash = desc->GetKey()->Hash();

  int insertion;

  // Shift sorted keys with a larger hash one position up to make room.
  for (insertion = descriptor_number; insertion > 0; --insertion) {
    Name* key = GetSortedKey(insertion - 1);
    if (key->Hash() <= hash) break;
    SetSortedKey(insertion, GetSortedKeyIndex(insertion - 1));
  }

  SetSortedKey(insertion, descriptor_number);
}
3111
3112
// Swaps two positions of the hash-sorted key order (not the entries
// themselves).
void DescriptorArray::SwapSortedKeys(int first, int second) {
  int first_key = GetSortedKeyIndex(first);
  SetSortedKey(first, GetSortedKeyIndex(second));
  SetSortedKey(second, first_key);
}


PropertyType DescriptorArray::Entry::type() { return descs_->GetType(index_); }


Object* DescriptorArray::Entry::GetCallbackObject() {
  return descs_->GetValue(index_);
}
3126
3127
// Live element count, stored in a header slot.
int HashTableBase::NumberOfElements() {
  return Smi::cast(get(kNumberOfElementsIndex))->value();
}


// Count of deleted (hole) entries still occupying slots.
int HashTableBase::NumberOfDeletedElements() {
  return Smi::cast(get(kNumberOfDeletedElementsIndex))->value();
}


// Total number of entry slots in the table.
int HashTableBase::Capacity() {
  return Smi::cast(get(kCapacityIndex))->value();
}


void HashTableBase::ElementAdded() {
  SetNumberOfElements(NumberOfElements() + 1);
}


// Removal leaves a hole, so the deleted count goes up as the live count
// goes down.
void HashTableBase::ElementRemoved() {
  SetNumberOfElements(NumberOfElements() - 1);
  SetNumberOfDeletedElements(NumberOfDeletedElements() + 1);
}


void HashTableBase::ElementsRemoved(int n) {
  SetNumberOfElements(NumberOfElements() - n);
  SetNumberOfDeletedElements(NumberOfDeletedElements() + n);
}


// static
// Capacity is doubled up front to keep the load factor at or below 50%.
// NOTE(review): at_least_space_for * 2 can overflow int for huge requests;
// callers appear to keep sizes small -- confirm before relying on this.
int HashTableBase::ComputeCapacity(int at_least_space_for) {
  int capacity = base::bits::RoundUpToPowerOfTwo32(at_least_space_for * 2);
  return Max(capacity, kMinCapacity);
}
3165
IsKey(Isolate * isolate,Object * k)3166 bool HashTableBase::IsKey(Isolate* isolate, Object* k) {
3167 Heap* heap = isolate->heap();
3168 return k != heap->the_hole_value() && k != heap->undefined_value();
3169 }
3170
IsKey(Object * k)3171 bool HashTableBase::IsKey(Object* k) {
3172 Isolate* isolate = this->GetIsolate();
3173 return !k->IsTheHole(isolate) && !k->IsUndefined(isolate);
3174 }
3175
3176
void HashTableBase::SetNumberOfElements(int nof) {
  set(kNumberOfElementsIndex, Smi::FromInt(nof));
}


void HashTableBase::SetNumberOfDeletedElements(int nod) {
  set(kNumberOfDeletedElementsIndex, Smi::FromInt(nod));
}

// Default map for hash tables; individual shapes may override.
template <typename Key>
Map* BaseShape<Key>::GetMap(Isolate* isolate) {
  return isolate->heap()->hash_table_map();
}

template <typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Key key) {
  return FindEntry(GetIsolate(), key);
}


// Convenience overload that computes the hash itself.
template<typename Derived, typename Shape, typename Key>
int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key) {
  return FindEntry(isolate, key, HashTable::Hash(key));
}
3201
3202 // Find entry for key otherwise return kNotFound.
3203 template <typename Derived, typename Shape, typename Key>
FindEntry(Isolate * isolate,Key key,int32_t hash)3204 int HashTable<Derived, Shape, Key>::FindEntry(Isolate* isolate, Key key,
3205 int32_t hash) {
3206 uint32_t capacity = Capacity();
3207 uint32_t entry = FirstProbe(hash, capacity);
3208 uint32_t count = 1;
3209 // EnsureCapacity will guarantee the hash table is never full.
3210 Object* undefined = isolate->heap()->undefined_value();
3211 Object* the_hole = isolate->heap()->the_hole_value();
3212 while (true) {
3213 Object* element = KeyAt(entry);
3214 // Empty entry. Uses raw unchecked accessors because it is called by the
3215 // string table during bootstrapping.
3216 if (element == undefined) break;
3217 if (element != the_hole && Shape::IsMatch(key, element)) return entry;
3218 entry = NextProbe(entry, count++, capacity);
3219 }
3220 return kNotFound;
3221 }
3222
template <typename Derived, typename Shape, typename Key>
bool HashTable<Derived, Shape, Key>::Has(Key key) {
  return FindEntry(key) != kNotFound;
}

template <typename Derived, typename Shape, typename Key>
bool HashTable<Derived, Shape, Key>::Has(Isolate* isolate, Key key) {
  return FindEntry(isolate, key) != kNotFound;
}

// Membership test with a caller-supplied hash.
bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key, int32_t hash) {
  return FindEntry(isolate, key, hash) != kNotFound;
}
3236
Has(Isolate * isolate,Handle<Object> key)3237 bool ObjectHashSet::Has(Isolate* isolate, Handle<Object> key) {
3238 Object* hash = key->GetHash();
3239 if (!hash->IsSmi()) return false;
3240 return FindEntry(isolate, key, Smi::cast(hash)->value()) != kNotFound;
3241 }
3242
// A string key matches any string value with equal contents.
bool StringSetShape::IsMatch(String* key, Object* value) {
  return value->IsString() && key->Equals(String::cast(value));
}

uint32_t StringSetShape::Hash(String* key) { return key->Hash(); }
3248
HashForObject(String * key,Object * object)3249 uint32_t StringSetShape::HashForObject(String* key, Object* object) {
3250 return object->IsString() ? String::cast(object)->Hash() : 0;
3251 }
3252
requires_slow_elements()3253 bool SeededNumberDictionary::requires_slow_elements() {
3254 Object* max_index_object = get(kMaxNumberKeyIndex);
3255 if (!max_index_object->IsSmi()) return false;
3256 return 0 !=
3257 (Smi::cast(max_index_object)->value() & kRequiresSlowElementsMask);
3258 }
3259
3260
// Largest number key stored, with the slow-elements tag bits stripped.
uint32_t SeededNumberDictionary::max_number_key() {
  DCHECK(!requires_slow_elements());
  Object* max_index_object = get(kMaxNumberKeyIndex);
  if (!max_index_object->IsSmi()) return 0;
  uint32_t value = static_cast<uint32_t>(Smi::cast(max_index_object)->value());
  return value >> kRequiresSlowElementsTagSize;
}


// Marks this dictionary as requiring slow (dictionary-mode) elements.
void SeededNumberDictionary::set_requires_slow_elements() {
  set(kMaxNumberKeyIndex, Smi::FromInt(kRequiresSlowElementsMask));
}
3273
3274
3275 // ------------------------------------
3276 // Cast operations
3277
// Checked-cast definitions (Type::cast(Object*)) for all heap object types.
CAST_ACCESSOR(AbstractCode)
CAST_ACCESSOR(ArrayList)
CAST_ACCESSOR(Bool16x8)
CAST_ACCESSOR(Bool32x4)
CAST_ACCESSOR(Bool8x16)
CAST_ACCESSOR(ByteArray)
CAST_ACCESSOR(BytecodeArray)
CAST_ACCESSOR(Cell)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeCacheHashTable)
CAST_ACCESSOR(CompilationCacheTable)
CAST_ACCESSOR(ConsString)
CAST_ACCESSOR(DeoptimizationInputData)
CAST_ACCESSOR(DeoptimizationOutputData)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DescriptorArray)
CAST_ACCESSOR(ExternalOneByteString)
CAST_ACCESSOR(ExternalString)
CAST_ACCESSOR(ExternalTwoByteString)
CAST_ACCESSOR(FixedArray)
CAST_ACCESSOR(FixedArrayBase)
CAST_ACCESSOR(FixedDoubleArray)
CAST_ACCESSOR(FixedTypedArrayBase)
CAST_ACCESSOR(Float32x4)
CAST_ACCESSOR(Foreign)
CAST_ACCESSOR(FrameArray)
CAST_ACCESSOR(GlobalDictionary)
CAST_ACCESSOR(HandlerTable)
CAST_ACCESSOR(HeapObject)
CAST_ACCESSOR(Int16x8)
CAST_ACCESSOR(Int32x4)
CAST_ACCESSOR(Int8x16)
CAST_ACCESSOR(JSArray)
CAST_ACCESSOR(JSArrayBuffer)
CAST_ACCESSOR(JSArrayBufferView)
CAST_ACCESSOR(JSBoundFunction)
CAST_ACCESSOR(JSDataView)
CAST_ACCESSOR(JSDate)
CAST_ACCESSOR(JSFunction)
CAST_ACCESSOR(JSGeneratorObject)
CAST_ACCESSOR(JSGlobalObject)
CAST_ACCESSOR(JSGlobalProxy)
CAST_ACCESSOR(JSMap)
CAST_ACCESSOR(JSMapIterator)
CAST_ACCESSOR(JSMessageObject)
CAST_ACCESSOR(JSModuleNamespace)
CAST_ACCESSOR(JSFixedArrayIterator)
CAST_ACCESSOR(JSObject)
CAST_ACCESSOR(JSProxy)
CAST_ACCESSOR(JSReceiver)
CAST_ACCESSOR(JSRegExp)
CAST_ACCESSOR(JSSet)
CAST_ACCESSOR(JSSetIterator)
CAST_ACCESSOR(JSStringIterator)
CAST_ACCESSOR(JSArrayIterator)
CAST_ACCESSOR(JSTypedArray)
CAST_ACCESSOR(JSValue)
CAST_ACCESSOR(JSWeakCollection)
CAST_ACCESSOR(JSWeakMap)
CAST_ACCESSOR(JSWeakSet)
CAST_ACCESSOR(LayoutDescriptor)
CAST_ACCESSOR(Map)
CAST_ACCESSOR(ModuleInfo)
CAST_ACCESSOR(Name)
CAST_ACCESSOR(NameDictionary)
CAST_ACCESSOR(NormalizedMapCache)
CAST_ACCESSOR(Object)
CAST_ACCESSOR(ObjectHashTable)
CAST_ACCESSOR(ObjectHashSet)
CAST_ACCESSOR(Oddball)
CAST_ACCESSOR(OrderedHashMap)
CAST_ACCESSOR(OrderedHashSet)
CAST_ACCESSOR(PropertyCell)
CAST_ACCESSOR(TemplateList)
CAST_ACCESSOR(RegExpMatchInfo)
CAST_ACCESSOR(ScopeInfo)
CAST_ACCESSOR(SeededNumberDictionary)
CAST_ACCESSOR(SeqOneByteString)
CAST_ACCESSOR(SeqString)
CAST_ACCESSOR(SeqTwoByteString)
CAST_ACCESSOR(SharedFunctionInfo)
CAST_ACCESSOR(Simd128Value)
CAST_ACCESSOR(SlicedString)
CAST_ACCESSOR(Smi)
CAST_ACCESSOR(String)
CAST_ACCESSOR(StringSet)
CAST_ACCESSOR(StringTable)
CAST_ACCESSOR(Struct)
CAST_ACCESSOR(Symbol)
CAST_ACCESSOR(TemplateInfo)
CAST_ACCESSOR(Uint16x8)
CAST_ACCESSOR(Uint32x4)
CAST_ACCESSOR(Uint8x16)
CAST_ACCESSOR(UnseededNumberDictionary)
CAST_ACCESSOR(WeakCell)
CAST_ACCESSOR(WeakFixedArray)
CAST_ACCESSOR(WeakHashTable)
3375
// PodArray is a ByteArray reinterpreted as a flat array of T, so the casts
// only verify the underlying ByteArray type.
template <class T>
PodArray<T>* PodArray<T>::cast(Object* object) {
  SLOW_DCHECK(object->IsByteArray());
  return reinterpret_cast<PodArray<T>*>(object);
}
template <class T>
const PodArray<T>* PodArray<T>::cast(const Object* object) {
  SLOW_DCHECK(object->IsByteArray());
  return reinterpret_cast<const PodArray<T>*>(object);
}

// static
// Allocates a PodArray with room for |length| elements of T.
template <class T>
Handle<PodArray<T>> PodArray<T>::New(Isolate* isolate, int length,
                                     PretenureFlag pretenure) {
  return Handle<PodArray<T>>::cast(
      isolate->factory()->NewByteArray(length * sizeof(T), pretenure));
}
3394
// static
// Out-of-class definition for the in-class static constant.
template <class Traits>
STATIC_CONST_MEMBER_DEFINITION const InstanceType
    FixedTypedArray<Traits>::kInstanceType;


// Checked cast: the instance type must match the Traits' element type.
template <class Traits>
FixedTypedArray<Traits>* FixedTypedArray<Traits>::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
                  Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}


template <class Traits>
const FixedTypedArray<Traits>*
FixedTypedArray<Traits>::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapObject() &&
              HeapObject::cast(object)->map()->instance_type() ==
                  Traits::kInstanceType);
  return reinterpret_cast<FixedTypedArray<Traits>*>(object);
}
3418
3419
// Accessors for the fixed header elements of DeoptimizationInputData.
#define DEFINE_DEOPT_ELEMENT_ACCESSORS(name, type) \
  type* DeoptimizationInputData::name() {          \
    return type::cast(get(k##name##Index));        \
  }                                                \
  void DeoptimizationInputData::Set##name(type* value) { \
    set(k##name##Index, value);                    \
  }

DEFINE_DEOPT_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrAstId, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(OptimizationId, Smi)
DEFINE_DEOPT_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
DEFINE_DEOPT_ELEMENT_ACCESSORS(WeakCellCache, Object)
DEFINE_DEOPT_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)

#undef DEFINE_DEOPT_ELEMENT_ACCESSORS


// Accessors for the per-deopt-entry fields, indexed by entry number.
#define DEFINE_DEOPT_ENTRY_ACCESSORS(name, type)                \
  type* DeoptimizationInputData::name(int i) {                  \
    return type::cast(get(IndexForEntry(i) + k##name##Offset)); \
  }                                                             \
  void DeoptimizationInputData::Set##name(int i, type* value) { \
    set(IndexForEntry(i) + k##name##Offset, value);             \
  }

DEFINE_DEOPT_ENTRY_ACCESSORS(AstIdRaw, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(TranslationIndex, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(ArgumentsStackHeight, Smi)
DEFINE_DEOPT_ENTRY_ACCESSORS(Pc, Smi)

#undef DEFINE_DEOPT_ENTRY_ACCESSORS
3455
3456
// Typed wrapper over the raw Smi ast id of deopt entry |i|.
BailoutId DeoptimizationInputData::AstId(int i) {
  return BailoutId(AstIdRaw(i)->value());
}


void DeoptimizationInputData::SetAstId(int i, BailoutId value) {
  SetAstIdRaw(i, Smi::FromInt(value.ToInt()));
}


// Number of deopt entries stored after the fixed-size header.
int DeoptimizationInputData::DeoptCount() {
  return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
}


// Output data is a flat array of (ast id, pc-and-state) pairs.
int DeoptimizationOutputData::DeoptPoints() { return length() / 2; }


BailoutId DeoptimizationOutputData::AstId(int index) {
  return BailoutId(Smi::cast(get(index * 2))->value());
}


void DeoptimizationOutputData::SetAstId(int index, BailoutId id) {
  set(index * 2, Smi::FromInt(id.ToInt()));
}


Smi* DeoptimizationOutputData::PcAndState(int index) {
  return Smi::cast(get(1 + index * 2));
}


void DeoptimizationOutputData::SetPcAndState(int index, Smi* offset) {
  set(1 + index * 2, offset);
}
3493
3494
Object* LiteralsArray::get(int index) const { return FixedArray::get(index); }


void LiteralsArray::set(int index, Object* value) {
  FixedArray::set(index, value);
}


void LiteralsArray::set(int index, Smi* value) {
  FixedArray::set(index, value);
}


void LiteralsArray::set(int index, Object* value, WriteBarrierMode mode) {
  FixedArray::set(index, value, mode);
}


LiteralsArray* LiteralsArray::cast(Object* object) {
  SLOW_DCHECK(object->IsLiteralsArray());
  return reinterpret_cast<LiteralsArray*>(object);
}


// An empty LiteralsArray doubles as an empty TypeFeedbackVector; otherwise
// the vector is stored in the dedicated kVectorIndex slot.
TypeFeedbackVector* LiteralsArray::feedback_vector() const {
  if (length() == 0) {
    return TypeFeedbackVector::cast(
        const_cast<FixedArray*>(FixedArray::cast(this)));
  }
  return TypeFeedbackVector::cast(get(kVectorIndex));
}


void LiteralsArray::set_feedback_vector(TypeFeedbackVector* vector) {
  if (length() <= kVectorIndex) {
    // An empty literals array can only carry an empty vector; nothing to do.
    DCHECK(vector->length() == 0);
    return;
  }
  set(kVectorIndex, vector);
}


// Literals are stored after the vector slot, starting at kFirstLiteralIndex.
Object* LiteralsArray::literal(int literal_index) const {
  return get(kFirstLiteralIndex + literal_index);
}


void LiteralsArray::set_literal(int literal_index, Object* literal) {
  set(kFirstLiteralIndex + literal_index, literal);
}

void LiteralsArray::set_literal_undefined(int literal_index) {
  set_undefined(kFirstLiteralIndex + literal_index);
}

int LiteralsArray::literals_count() const {
  return length() - kFirstLiteralIndex;
}
3553
// HandlerTable stores exception handler info as Smi fields. Range-based
// entries occupy kRangeEntrySize slots (start, end, encoded handler,
// data); return-based entries occupy kReturnEntrySize slots. The handler
// slot packs the code offset and catch prediction via bit fields.

int HandlerTable::GetRangeStart(int index) const {
  return Smi::cast(get(index * kRangeEntrySize + kRangeStartIndex))->value();
}

int HandlerTable::GetRangeEnd(int index) const {
  return Smi::cast(get(index * kRangeEntrySize + kRangeEndIndex))->value();
}

// Returns only the handler offset; the prediction bits are masked off by
// the decode.
int HandlerTable::GetRangeHandler(int index) const {
  return HandlerOffsetField::decode(
      Smi::cast(get(index * kRangeEntrySize + kRangeHandlerIndex))->value());
}

int HandlerTable::GetRangeData(int index) const {
  return Smi::cast(get(index * kRangeEntrySize + kRangeDataIndex))->value();
}

void HandlerTable::SetRangeStart(int index, int value) {
  set(index * kRangeEntrySize + kRangeStartIndex, Smi::FromInt(value));
}


void HandlerTable::SetRangeEnd(int index, int value) {
  set(index * kRangeEntrySize + kRangeEndIndex, Smi::FromInt(value));
}


// Packs |offset| and |prediction| into a single Smi handler slot.
void HandlerTable::SetRangeHandler(int index, int offset,
                                   CatchPrediction prediction) {
  int value = HandlerOffsetField::encode(offset) |
              HandlerPredictionField::encode(prediction);
  set(index * kRangeEntrySize + kRangeHandlerIndex, Smi::FromInt(value));
}

void HandlerTable::SetRangeData(int index, int value) {
  set(index * kRangeEntrySize + kRangeDataIndex, Smi::FromInt(value));
}


void HandlerTable::SetReturnOffset(int index, int value) {
  set(index * kReturnEntrySize + kReturnOffsetIndex, Smi::FromInt(value));
}

void HandlerTable::SetReturnHandler(int index, int offset) {
  int value = HandlerOffsetField::encode(offset);
  set(index * kReturnEntrySize + kReturnHandlerIndex, Smi::FromInt(value));
}

int HandlerTable::NumberOfRangeEntries() const {
  return length() / kRangeEntrySize;
}
3605
// Expands a CAST_ACCESSOR (Name::cast) for every struct in STRUCT_LIST.
#define MAKE_STRUCT_CAST(NAME, Name, name) CAST_ACCESSOR(Name)
STRUCT_LIST(MAKE_STRUCT_CAST)
#undef MAKE_STRUCT_CAST
3609
3610
// Unchecked downcasts to HashTable (mutable and const variants); the type
// is verified only in slow-DCHECK builds.
template <typename Derived, typename Shape, typename Key>
HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<HashTable*>(obj);
}


template <typename Derived, typename Shape, typename Key>
const HashTable<Derived, Shape, Key>*
HashTable<Derived, Shape, Key>::cast(const Object* obj) {
  SLOW_DCHECK(obj->IsHashTable());
  return reinterpret_cast<const HashTable*>(obj);
}
3625
3626
// Smi-encoded length/size accessors. The SYNCHRONIZED_ variants add
// acquire/release semantics (used by the concurrent marker/sweeper); the
// NOBARRIER_ variant is a relaxed atomic access for FreeSpace.
SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(FixedArrayBase, length, kLengthOffset)

SMI_ACCESSORS(FreeSpace, size, kSizeOffset)
NOBARRIER_SMI_ACCESSORS(FreeSpace, size, kSizeOffset)

SMI_ACCESSORS(String, length, kLengthOffset)
SYNCHRONIZED_SMI_ACCESSORS(String, length, kLengthOffset)


// Object size of a FreeSpace filler is simply its recorded size field.
int FreeSpace::Size() { return size(); }
3638
3639
// FreeSpace objects form the free list; the "next" pointer is stored as a
// raw word at kNextOffset (no write barrier — free-list memory is not a
// real heap object graph). A NULL map is tolerated during deserialization.
FreeSpace* FreeSpace::next() {
  DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  return reinterpret_cast<FreeSpace*>(
      Memory::Address_at(address() + kNextOffset));
}


// Relaxed-atomic store of the next pointer (sweeper may read concurrently).
void FreeSpace::set_next(FreeSpace* next) {
  DCHECK(map() == GetHeap()->root(Heap::kFreeSpaceMapRootIndex) ||
         (!GetHeap()->deserialization_complete() && map() == NULL));
  DCHECK_LE(kNextOffset + kPointerSize, nobarrier_size());
  base::NoBarrier_Store(
      reinterpret_cast<base::AtomicWord*>(address() + kNextOffset),
      reinterpret_cast<base::AtomicWord>(next));
}


// Unchecked downcast; the IsFreeSpace check is skipped before the heap is
// fully deserialized because maps may not be set up yet.
FreeSpace* FreeSpace::cast(HeapObject* o) {
  SLOW_DCHECK(!o->GetHeap()->deserialization_complete() || o->IsFreeSpace());
  return reinterpret_cast<FreeSpace*>(o);
}
3663
3664
// Raw access to the 32-bit hash field of a Name (string or symbol).
uint32_t Name::hash_field() {
  return READ_UINT32_FIELD(this, kHashFieldOffset);
}


void Name::set_hash_field(uint32_t value) {
  WRITE_UINT32_FIELD(this, kHashFieldOffset, value);
#if V8_HOST_ARCH_64_BIT
  // On 64-bit hosts the hash occupies only half of its pointer-sized slot;
  // zero the other half so the whole word is deterministic.
#if V8_TARGET_LITTLE_ENDIAN
  WRITE_UINT32_FIELD(this, kHashFieldSlot + kIntSize, 0);
#else
  WRITE_UINT32_FIELD(this, kHashFieldSlot, 0);
#endif
#endif
}


// Name equality: symbols and pairs of internalized strings are equal only
// by identity (handled by the == check); everything else falls back to a
// character-wise string comparison.
bool Name::Equals(Name* other) {
  if (other == this) return true;
  if ((this->IsInternalizedString() && other->IsInternalizedString()) ||
      this->IsSymbol() || other->IsSymbol()) {
    return false;
  }
  return String::cast(this)->SlowEquals(String::cast(other));
}


// Handle-based variant of the above.
bool Name::Equals(Handle<Name> one, Handle<Name> two) {
  if (one.is_identical_to(two)) return true;
  if ((one->IsInternalizedString() && two->IsInternalizedString()) ||
      one->IsSymbol() || two->IsSymbol()) {
    return false;
  }
  return String::SlowEquals(Handle<String>::cast(one),
                            Handle<String>::cast(two));
}
3701
3702
// Symbol field accessors: description object, flags word, and two boolean
// flag bits decoded from the flags.
ACCESSORS(Symbol, name, Object, kNameOffset)
SMI_ACCESSORS(Symbol, flags, kFlagsOffset)
BOOL_ACCESSORS(Symbol, flags, is_private, kPrivateBit)
BOOL_ACCESSORS(Symbol, flags, is_well_known_symbol, kWellKnownSymbolBit)
3707
3708
// String equality fast path: identical pointers are equal; two distinct
// internalized strings are known unequal (internalization dedupes);
// otherwise compare characters.
bool String::Equals(String* other) {
  if (other == this) return true;
  if (this->IsInternalizedString() && other->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(other);
}


// Handle-based variant of the above.
bool String::Equals(Handle<String> one, Handle<String> two) {
  if (one.is_identical_to(two)) return true;
  if (one->IsInternalizedString() && two->IsInternalizedString()) {
    return false;
  }
  return SlowEquals(one, two);
}


// Returns a flat (non-cons) version of |string|, flattening in place if
// needed. An already-flat cons degenerates to its first component.
Handle<String> String::Flatten(Handle<String> string, PretenureFlag pretenure) {
  if (!string->IsConsString()) return string;
  Handle<ConsString> cons = Handle<ConsString>::cast(string);
  if (cons->IsFlat()) return handle(cons->first());
  return SlowFlatten(cons, pretenure);
}
3733
3734
// Reads the character at |index|, dispatching on the string's full
// representation tag (sequential/cons/external/sliced × one/two byte).
uint16_t String::Get(int index) {
  DCHECK(index >= 0 && index < length());
  switch (StringShape(this).full_representation_tag()) {
    case kSeqStringTag | kOneByteStringTag:
      return SeqOneByteString::cast(this)->SeqOneByteStringGet(index);
    case kSeqStringTag | kTwoByteStringTag:
      return SeqTwoByteString::cast(this)->SeqTwoByteStringGet(index);
    case kConsStringTag | kOneByteStringTag:
    case kConsStringTag | kTwoByteStringTag:
      return ConsString::cast(this)->ConsStringGet(index);
    case kExternalStringTag | kOneByteStringTag:
      return ExternalOneByteString::cast(this)->ExternalOneByteStringGet(index);
    case kExternalStringTag | kTwoByteStringTag:
      return ExternalTwoByteString::cast(this)->ExternalTwoByteStringGet(index);
    case kSlicedStringTag | kOneByteStringTag:
    case kSlicedStringTag | kTwoByteStringTag:
      return SlicedString::cast(this)->SlicedStringGet(index);
    default:
      break;
  }

  UNREACHABLE();
  return 0;
}


// Writes a character in place; only valid for sequential strings.
void String::Set(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  DCHECK(StringShape(this).IsSequential());

  return this->IsOneByteRepresentation()
      ? SeqOneByteString::cast(this)->SeqOneByteStringSet(index, value)
      : SeqTwoByteString::cast(this)->SeqTwoByteStringSet(index, value);
}


// A string is flat unless it is a cons with a non-empty second component.
bool String::IsFlat() {
  if (!StringShape(this).IsCons()) return true;
  return ConsString::cast(this)->second()->length() == 0;
}


// Returns the backing string of a flat indirect (cons or sliced) string.
// Relies on the cons first-slot and the sliced parent-slot sharing the
// same offset, asserted below.
String* String::GetUnderlying() {
  // Giving direct access to underlying string only makes sense if the
  // wrapping string is already flattened.
  DCHECK(this->IsFlat());
  DCHECK(StringShape(this).IsIndirect());
  STATIC_ASSERT(ConsString::kFirstOffset == SlicedString::kParentOffset);
  const int kUnderlyingOffset = SlicedString::kParentOffset;
  return String::cast(READ_FIELD(this, kUnderlyingOffset));
}
3786
3787
// Walks through sliced-string wrappers (accumulating their offsets) until a
// flat chunk is found, then hands its character data to |visitor| and
// returns NULL. If a cons string is hit instead, it is returned so the
// caller can iterate it (e.g. via ConsStringIterator).
template<class Visitor>
ConsString* String::VisitFlat(Visitor* visitor,
                              String* string,
                              const int offset) {
  int slice_offset = offset;
  const int length = string->length();
  DCHECK(offset <= length);
  while (true) {
    int32_t type = string->map()->instance_type();
    switch (type & (kStringRepresentationMask | kStringEncodingMask)) {
      case kSeqStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            SeqOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSeqStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            SeqTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kOneByteStringTag:
        visitor->VisitOneByteString(
            ExternalOneByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kExternalStringTag | kTwoByteStringTag:
        visitor->VisitTwoByteString(
            ExternalTwoByteString::cast(string)->GetChars() + slice_offset,
            length - offset);
        return NULL;

      case kSlicedStringTag | kOneByteStringTag:
      case kSlicedStringTag | kTwoByteStringTag: {
        // Unwrap the slice and keep looking at its parent.
        SlicedString* slicedString = SlicedString::cast(string);
        slice_offset += slicedString->offset();
        string = slicedString->parent();
        continue;
      }

      case kConsStringTag | kOneByteStringTag:
      case kConsStringTag | kTwoByteStringTag:
        return ConsString::cast(string);

      default:
        UNREACHABLE();
        return NULL;
    }
  }
}
3840
3841
// Typed views over an (already flat) string's characters; the caller must
// know the encoding, which is DCHECKed here.
template <>
inline Vector<const uint8_t> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsOneByte());
  return flat.ToOneByteVector();
}


template <>
inline Vector<const uc16> String::GetCharVector() {
  String::FlatContent flat = GetFlatContent();
  DCHECK(flat.IsTwoByte());
  return flat.ToUC16Vector();
}
3856
3857
// Sequential-string accessors: characters are stored inline right after
// the header, one byte or one uc16 per character.

uint16_t SeqOneByteString::SeqOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void SeqOneByteString::SeqOneByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length() && value <= kMaxOneByteCharCode);
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize,
                   static_cast<byte>(value));
}


Address SeqOneByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uint8_t* SeqOneByteString::GetChars() {
  return reinterpret_cast<uint8_t*>(GetCharsAddress());
}


Address SeqTwoByteString::GetCharsAddress() {
  return FIELD_ADDR(this, kHeaderSize);
}


uc16* SeqTwoByteString::GetChars() {
  return reinterpret_cast<uc16*>(FIELD_ADDR(this, kHeaderSize));
}


uint16_t SeqTwoByteString::SeqTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return READ_UINT16_FIELD(this, kHeaderSize + index * kShortSize);
}


void SeqTwoByteString::SeqTwoByteStringSet(int index, uint16_t value) {
  DCHECK(index >= 0 && index < length());
  WRITE_UINT16_FIELD(this, kHeaderSize + index * kShortSize, value);
}


// Total object sizes, derived from the character count.
int SeqTwoByteString::SeqTwoByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}


int SeqOneByteString::SeqOneByteStringSize(InstanceType instance_type) {
  return SizeFor(length());
}
3911
3912
// A SlicedString is a (parent, offset) view; the parent must itself be a
// flat backing store (sequential or external), never another indirect
// string — DCHECKed in the setter.
String* SlicedString::parent() {
  return String::cast(READ_FIELD(this, kParentOffset));
}


void SlicedString::set_parent(String* parent, WriteBarrierMode mode) {
  DCHECK(parent->IsSeqString() || parent->IsExternalString());
  WRITE_FIELD(this, kParentOffset, parent);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kParentOffset, parent, mode);
}


SMI_ACCESSORS(SlicedString, offset, kOffsetOffset)
3926
3927
// ConsString component accessors. The unchecked_* variants skip the
// String cast for use during GC, when the fields may be forwarding state.
String* ConsString::first() {
  return String::cast(READ_FIELD(this, kFirstOffset));
}


Object* ConsString::unchecked_first() {
  return READ_FIELD(this, kFirstOffset);
}


void ConsString::set_first(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kFirstOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kFirstOffset, value, mode);
}


String* ConsString::second() {
  return String::cast(READ_FIELD(this, kSecondOffset));
}


Object* ConsString::unchecked_second() {
  return READ_FIELD(this, kSecondOffset);
}


void ConsString::set_second(String* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kSecondOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kSecondOffset, value, mode);
}
3958
3959
// "Short" external strings omit the cached data pointer; the property is
// encoded in the instance type.
bool ExternalString::is_short() {
  InstanceType type = map()->instance_type();
  return (type & kShortExternalStringMask) == kShortExternalStringTag;
}
3964
3965
// External one-byte strings keep a pointer to an embedder-owned Resource
// plus (for non-short strings) a cached copy of resource()->data() so hot
// paths avoid the virtual data() call.

const ExternalOneByteString::Resource* ExternalOneByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


// Refreshes the cached data pointer; short strings have no cache slot.
void ExternalOneByteString::update_data_cache() {
  if (is_short()) return;
  const char** data_field =
      reinterpret_cast<const char**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


void ExternalOneByteString::set_resource(
    const ExternalOneByteString::Resource* resource) {
  DCHECK(IsAligned(reinterpret_cast<intptr_t>(resource), kPointerSize));
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint8_t* ExternalOneByteString::GetChars() {
  return reinterpret_cast<const uint8_t*>(resource()->data());
}


uint16_t ExternalOneByteString::ExternalOneByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}
3997
3998
// Two-byte counterpart of ExternalOneByteString above: embedder-owned
// Resource pointer plus a cached data pointer for non-short strings.

const ExternalTwoByteString::Resource* ExternalTwoByteString::resource() {
  return *reinterpret_cast<Resource**>(FIELD_ADDR(this, kResourceOffset));
}


void ExternalTwoByteString::update_data_cache() {
  if (is_short()) return;
  const uint16_t** data_field =
      reinterpret_cast<const uint16_t**>(FIELD_ADDR(this, kResourceDataOffset));
  *data_field = resource()->data();
}


void ExternalTwoByteString::set_resource(
    const ExternalTwoByteString::Resource* resource) {
  *reinterpret_cast<const Resource**>(
      FIELD_ADDR(this, kResourceOffset)) = resource;
  if (resource != NULL) update_data_cache();
}


const uint16_t* ExternalTwoByteString::GetChars() {
  return resource()->data();
}


uint16_t ExternalTwoByteString::ExternalTwoByteStringGet(int index) {
  DCHECK(index >= 0 && index < length());
  return GetChars()[index];
}


// Character data starting at |start| (no bounds check here).
const uint16_t* ExternalTwoByteString::ExternalTwoByteStringGetData(
    unsigned start) {
  return GetChars() + start;
}
4035
4036
// ConsStringIterator keeps an explicit traversal stack in frames_, a
// fixed-size ring indexed by depth_ masked with kDepthMask.

int ConsStringIterator::OffsetForDepth(int depth) { return depth & kDepthMask; }


// Pushes a new frame (descending into a left child).
void ConsStringIterator::PushLeft(ConsString* string) {
  frames_[depth_++ & kDepthMask] = string;
}


// Replaces the current top frame (moving to a right sibling).
void ConsStringIterator::PushRight(ConsString* string) {
  // Inplace update.
  frames_[(depth_-1) & kDepthMask] = string;
}


// Tracks the deepest point reached, used to detect blown-out stacks.
void ConsStringIterator::AdjustMaximumDepth() {
  if (depth_ > maximum_depth_) maximum_depth_ = depth_;
}


void ConsStringIterator::Pop() {
  DCHECK(depth_ > 0);
  DCHECK(depth_ <= maximum_depth_);
  depth_--;
}
4061
4062
// StringCharacterStream walks a string's characters across cons/slice
// structure. buffer8_/buffer16_ alias the same cursor; end_ is always in
// byte units, so the one-byte and two-byte advance both terminate at it.

uint16_t StringCharacterStream::GetNext() {
  DCHECK(buffer8_ != NULL && end_ != NULL);
  // Advance cursor if needed.
  if (buffer8_ == end_) HasMore();
  DCHECK(buffer8_ < end_);
  return is_one_byte_ ? *buffer8_++ : *buffer16_++;
}


StringCharacterStream::StringCharacterStream(String* string, int offset)
    : is_one_byte_(false) {
  Reset(string, offset);
}


// Re-targets the stream at |string| from |offset|; primes the buffer with
// the first flat segment (via the Visit* callbacks below).
void StringCharacterStream::Reset(String* string, int offset) {
  buffer8_ = NULL;
  end_ = NULL;
  ConsString* cons_string = String::VisitFlat(this, string, offset);
  iter_.Reset(cons_string, offset);
  if (cons_string != NULL) {
    string = iter_.Next(&offset);
    if (string != NULL) String::VisitFlat(this, string, offset);
  }
}


// Refills the buffer from the next leaf segment; false at end of string.
bool StringCharacterStream::HasMore() {
  if (buffer8_ != end_) return true;
  int offset;
  String* string = iter_.Next(&offset);
  DCHECK_EQ(offset, 0);
  if (string == NULL) return false;
  String::VisitFlat(this, string);
  DCHECK(buffer8_ != end_);
  return true;
}


// VisitFlat callbacks: record the segment's character range and encoding.
void StringCharacterStream::VisitOneByteString(
    const uint8_t* chars, int length) {
  is_one_byte_ = true;
  buffer8_ = chars;
  end_ = chars + length;
}


void StringCharacterStream::VisitTwoByteString(
    const uint16_t* chars, int length) {
  is_one_byte_ = false;
  buffer16_ = chars;
  end_ = reinterpret_cast<const uint8_t*>(chars + length);
}
4116
4117
// ByteArray: raw bytes stored inline after the header, rounded up to a
// whole number of pointers for the object size.

int ByteArray::Size() { return RoundUp(length() + kHeaderSize, kPointerSize); }

byte ByteArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}

void ByteArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}

// Bulk copy into the array; bounds (including overflow) are DCHECKed.
void ByteArray::copy_in(int index, const byte* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
         index + length <= this->length());
  byte* dst_addr = FIELD_ADDR(this, kHeaderSize + index * kCharSize);
  memcpy(dst_addr, buffer, length);
}

// Bulk copy out of the array into |buffer|.
void ByteArray::copy_out(int index, byte* buffer, int length) {
  DCHECK(index >= 0 && length >= 0 && length <= kMaxInt - index &&
         index + length <= this->length());
  const byte* src_addr = FIELD_ADDR(this, kHeaderSize + index * kCharSize);
  memcpy(buffer, src_addr, length);
}

// Whole-int access; |index| counts ints, not bytes.
int ByteArray::get_int(int index) {
  DCHECK(index >= 0 && index < this->length() / kIntSize);
  return READ_INT_FIELD(this, kHeaderSize + index * kIntSize);
}

void ByteArray::set_int(int index, int value) {
  DCHECK(index >= 0 && index < this->length() / kIntSize);
  WRITE_INT_FIELD(this, kHeaderSize + index * kIntSize, value);
}

// Recovers the tagged ByteArray pointer from the address of its data.
ByteArray* ByteArray::FromDataStartAddress(Address address) {
  DCHECK_TAG_ALIGNED(address);
  return reinterpret_cast<ByteArray*>(address - kHeaderSize + kHeapObjectTag);
}


int ByteArray::ByteArraySize() { return SizeFor(this->length()); }


// Untagged address of the first data byte.
Address ByteArray::GetDataStartAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}
4166
4167
// BytecodeArray: interpreter bytecodes stored inline, plus header fields
// for frame size, parameter size, interrupt budget and OSR nesting level.

byte BytecodeArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}


void BytecodeArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}


// Frame size is in bytes and must be pointer-aligned.
void BytecodeArray::set_frame_size(int frame_size) {
  DCHECK_GE(frame_size, 0);
  DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
  WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
}


int BytecodeArray::frame_size() const {
  return READ_INT_FIELD(this, kFrameSizeOffset);
}


int BytecodeArray::register_count() const {
  return frame_size() / kPointerSize;
}


void BytecodeArray::set_parameter_count(int number_of_parameters) {
  DCHECK_GE(number_of_parameters, 0);
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  WRITE_INT_FIELD(this, kParameterSizeOffset,
                  (number_of_parameters << kPointerSizeLog2));
}

int BytecodeArray::interrupt_budget() const {
  return READ_INT_FIELD(this, kInterruptBudgetOffset);
}

void BytecodeArray::set_interrupt_budget(int interrupt_budget) {
  DCHECK_GE(interrupt_budget, 0);
  WRITE_INT_FIELD(this, kInterruptBudgetOffset, interrupt_budget);
}

int BytecodeArray::osr_loop_nesting_level() const {
  return READ_INT8_FIELD(this, kOSRNestingLevelOffset);
}

// The nesting level fits in one byte; asserted against the marker range.
void BytecodeArray::set_osr_loop_nesting_level(int depth) {
  DCHECK(0 <= depth && depth <= AbstractCode::kMaxLoopNestingMarker);
  STATIC_ASSERT(AbstractCode::kMaxLoopNestingMarker < kMaxInt8);
  WRITE_INT8_FIELD(this, kOSRNestingLevelOffset, depth);
}

int BytecodeArray::parameter_count() const {
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  return READ_INT_FIELD(this, kParameterSizeOffset) >> kPointerSizeLog2;
}


ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
ACCESSORS(BytecodeArray, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(BytecodeArray, source_position_table, ByteArray,
          kSourcePositionTableOffset)

// Untagged address of the first bytecode.
Address BytecodeArray::GetFirstBytecodeAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}


int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }

// Size including the owned metadata arrays (constant pool, handler table,
// source positions).
int BytecodeArray::SizeIncludingMetadata() {
  int size = BytecodeArraySize();
  size += constant_pool()->Size();
  size += handler_table()->Size();
  size += source_position_table()->Size();
  return size;
}
4250
// FixedTypedArrayBase addresses its data as base_pointer + external_pointer:
// for on-heap data the base is the array itself and the external part is a
// small offset; for off-heap data the base is the zero Smi and the external
// part is the absolute address. DataPtr() sums the two.
ACCESSORS(FixedTypedArrayBase, base_pointer, Object, kBasePointerOffset)


void* FixedTypedArrayBase::external_pointer() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kExternalPointerOffset);
  return reinterpret_cast<void*>(ptr);
}


void FixedTypedArrayBase::set_external_pointer(void* value,
                                               WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kExternalPointerOffset, ptr);
}


void* FixedTypedArrayBase::DataPtr() {
  return reinterpret_cast<void*>(
      reinterpret_cast<intptr_t>(base_pointer()) +
      reinterpret_cast<intptr_t>(external_pointer()));
}


// Bytes per element for the given fixed-typed-array instance type.
int FixedTypedArrayBase::ElementSize(InstanceType type) {
  int element_size;
  switch (type) {
#define TYPED_ARRAY_CASE(Type, type, TYPE, ctype, size) \
    case FIXED_##TYPE##_ARRAY_TYPE:                     \
      element_size = size;                              \
      break;

    TYPED_ARRAYS(TYPED_ARRAY_CASE)
#undef TYPED_ARRAY_CASE
    default:
      UNREACHABLE();
      return 0;
  }
  return element_size;
}


// Data payload size; zero when the data lives off-heap (base is Smi 0).
int FixedTypedArrayBase::DataSize(InstanceType type) {
  if (base_pointer() == Smi::kZero) return 0;
  return length() * ElementSize(type);
}


int FixedTypedArrayBase::DataSize() {
  return DataSize(map()->instance_type());
}


int FixedTypedArrayBase::size() {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}


int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
  return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}


// Size a would-be array of |length| elements of |type| would occupy.
int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
  return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
}
4316
4317
// Default element values used when storing undefined: zero for integer
// element types, NaN for the floating-point ones (per the TypedArray spec).

uint8_t Uint8ArrayTraits::defaultValue() { return 0; }


uint8_t Uint8ClampedArrayTraits::defaultValue() { return 0; }


int8_t Int8ArrayTraits::defaultValue() { return 0; }


uint16_t Uint16ArrayTraits::defaultValue() { return 0; }


int16_t Int16ArrayTraits::defaultValue() { return 0; }


uint32_t Uint32ArrayTraits::defaultValue() { return 0; }


int32_t Int32ArrayTraits::defaultValue() { return 0; }


float Float32ArrayTraits::defaultValue() {
  return std::numeric_limits<float>::quiet_NaN();
}


double Float64ArrayTraits::defaultValue() {
  return std::numeric_limits<double>::quiet_NaN();
}
4347
4348
// Direct element access; the data is a plain C array of ElementType at
// DataPtr().
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::get_scalar(int index) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
  return ptr[index];
}


template <class Traits>
void FixedTypedArray<Traits>::set(int index, ElementType value) {
  DCHECK((index >= 0) && (index < this->length()));
  ElementType* ptr = reinterpret_cast<ElementType*>(DataPtr());
  ptr[index] = value;
}


// Generic int-to-element conversion: plain static_cast (truncating);
// specialized below for clamped arrays.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_int(int value) {
  return static_cast<ElementType>(value);
}
4369
4370
4371 template <> inline
from_int(int value)4372 uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_int(int value) {
4373 if (value < 0) return 0;
4374 if (value > 0xFF) return 0xFF;
4375 return static_cast<uint8_t>(value);
4376 }
4377
4378
// Generic double-to-element conversion: JS ToInt32 semantics, then cast.
template <class Traits>
typename Traits::ElementType FixedTypedArray<Traits>::from_double(
    double value) {
  return static_cast<ElementType>(DoubleToInt32(value));
}


// Clamped specialization: NaN and negatives map to 0 (note the inverted
// comparison `!(value > 0)` is what catches NaN), values above 255 map to
// 255, and everything else rounds to nearest via lrint.
template<> inline
uint8_t FixedTypedArray<Uint8ClampedArrayTraits>::from_double(double value) {
  // Handle NaNs and less than zero values which clamp to zero.
  if (!(value > 0)) return 0;
  if (value > 0xFF) return 0xFF;
  return static_cast<uint8_t>(lrint(value));
}


// Float arrays store the (possibly precision-losing) cast of the double.
template<> inline
float FixedTypedArray<Float32ArrayTraits>::from_double(double value) {
  return static_cast<float>(value);
}


template<> inline
double FixedTypedArray<Float64ArrayTraits>::from_double(double value) {
  return value;
}
4405
// Boxes the element at |index| as a heap Object via the traits' ToHandle.
template <class Traits>
Handle<Object> FixedTypedArray<Traits>::get(FixedTypedArray<Traits>* array,
                                            int index) {
  return Traits::ToHandle(array->GetIsolate(), array->get_scalar(index));
}


// Stores a Smi, HeapNumber or undefined at |index|, converting through the
// traits' element type; undefined stores the traits' default value.
template <class Traits>
void FixedTypedArray<Traits>::SetValue(uint32_t index, Object* value) {
  ElementType cast_value = Traits::defaultValue();
  if (value->IsSmi()) {
    int int_value = Smi::cast(value)->value();
    cast_value = from_int(int_value);
  } else if (value->IsHeapNumber()) {
    double double_value = HeapNumber::cast(value)->value();
    cast_value = from_double(double_value);
  } else {
    // Clamp undefined to the default value. All other types have been
    // converted to a number type further up in the call chain.
    DCHECK(value->IsUndefined(GetIsolate()));
  }
  set(index, cast_value);
}
4429
4430
// Boxing helpers: element types that always fit in a Smi use Smi::FromInt;
// 32-bit and floating types go through the factory, which may allocate a
// HeapNumber.

Handle<Object> Uint8ArrayTraits::ToHandle(Isolate* isolate, uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint8ClampedArrayTraits::ToHandle(Isolate* isolate,
                                                 uint8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int8ArrayTraits::ToHandle(Isolate* isolate, int8_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint16ArrayTraits::ToHandle(Isolate* isolate, uint16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Int16ArrayTraits::ToHandle(Isolate* isolate, int16_t scalar) {
  return handle(Smi::FromInt(scalar), isolate);
}


Handle<Object> Uint32ArrayTraits::ToHandle(Isolate* isolate, uint32_t scalar) {
  return isolate->factory()->NewNumberFromUint(scalar);
}


Handle<Object> Int32ArrayTraits::ToHandle(Isolate* isolate, int32_t scalar) {
  return isolate->factory()->NewNumberFromInt(scalar);
}


Handle<Object> Float32ArrayTraits::ToHandle(Isolate* isolate, float scalar) {
  return isolate->factory()->NewNumber(scalar);
}


Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
  return isolate->factory()->NewNumber(scalar);
}
4475
4476
// Heap-visitor dispatch id for objects with this map; stored in one byte.
int Map::visitor_id() {
  return READ_BYTE_FIELD(this, kVisitorIdOffset);
}


void Map::set_visitor_id(int id) {
  DCHECK(0 <= id && id < 256);  // must fit the single-byte field
  WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}
4486
4487
// Instance size in bytes. Stored as a word count in one byte, so scale by
// kPointerSize on read; kVariableSizeSentinel marks variable-size objects.
int Map::instance_size() {
  return NOBARRIER_READ_BYTE_FIELD(
      this, kInstanceSizeOffset) << kPointerSizeLog2;
}
4492
4493
// This byte field is overloaded: for JSObject maps it holds the number of
// in-object properties, for primitive maps the constructor-function index.
// Use the typed wrappers below instead of these raw accessors.
int Map::inobject_properties_or_constructor_function_index() {
  return READ_BYTE_FIELD(this,
                         kInObjectPropertiesOrConstructorFunctionIndexOffset);
}


void Map::set_inobject_properties_or_constructor_function_index(int value) {
  DCHECK(0 <= value && value < 256);  // must fit the single-byte field
  WRITE_BYTE_FIELD(this, kInObjectPropertiesOrConstructorFunctionIndexOffset,
                   static_cast<byte>(value));
}
4505
4506
// Typed views over the overloaded byte field above. The DCHECKs enforce that
// the JSObject interpretation and the primitive interpretation are never
// mixed on the same map.
int Map::GetInObjectProperties() {
  DCHECK(IsJSObjectMap());
  return inobject_properties_or_constructor_function_index();
}


void Map::SetInObjectProperties(int value) {
  DCHECK(IsJSObjectMap());
  set_inobject_properties_or_constructor_function_index(value);
}


int Map::GetConstructorFunctionIndex() {
  DCHECK(IsPrimitiveMap());
  return inobject_properties_or_constructor_function_index();
}


void Map::SetConstructorFunctionIndex(int value) {
  DCHECK(IsPrimitiveMap());
  set_inobject_properties_or_constructor_function_index(value);
}
4529
4530
GetInObjectPropertyOffset(int index)4531 int Map::GetInObjectPropertyOffset(int index) {
4532 // Adjust for the number of properties stored in the object.
4533 index -= GetInObjectProperties();
4534 DCHECK(index <= 0);
4535 return instance_size() + (index * kPointerSize);
4536 }
4537
4538
// Test-only public shim over the private AddMissingTransitions().
Handle<Map> Map::AddMissingTransitionsForTesting(
    Handle<Map> split_map, Handle<DescriptorArray> descriptors,
    Handle<LayoutDescriptor> full_layout_descriptor) {
  return AddMissingTransitions(split_map, descriptors, full_layout_descriptor);
}
4544
4545
// Computes the allocated size of this object given its map. Fixed-size
// instances answer directly from the map; variable-size instances dispatch on
// instance type. The if-chain is ordered by expected frequency on purpose
// (see the comment below), so do not reorder it casually. reinterpret_cast is
// used instead of the checked casts because this runs during GC when maps may
// be marked.
int HeapObject::SizeFromMap(Map* map) {
  int instance_size = map->instance_size();
  if (instance_size != kVariableSizeSentinel) return instance_size;
  // Only inline the most frequent cases.
  InstanceType instance_type = map->instance_type();
  if (instance_type == FIXED_ARRAY_TYPE ||
      instance_type == TRANSITION_ARRAY_TYPE) {
    return FixedArray::SizeFor(
        reinterpret_cast<FixedArray*>(this)->synchronized_length());
  }
  if (instance_type == ONE_BYTE_STRING_TYPE ||
      instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to access its
    // length synchronized.
    return SeqOneByteString::SizeFor(
        reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
  }
  if (instance_type == BYTE_ARRAY_TYPE) {
    return reinterpret_cast<ByteArray*>(this)->ByteArraySize();
  }
  if (instance_type == BYTECODE_ARRAY_TYPE) {
    return reinterpret_cast<BytecodeArray*>(this)->BytecodeArraySize();
  }
  if (instance_type == FREE_SPACE_TYPE) {
    return reinterpret_cast<FreeSpace*>(this)->nobarrier_size();
  }
  if (instance_type == STRING_TYPE ||
      instance_type == INTERNALIZED_STRING_TYPE) {
    // Strings may get concurrently truncated, hence we have to access its
    // length synchronized.
    return SeqTwoByteString::SizeFor(
        reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
  }
  if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
    return FixedDoubleArray::SizeFor(
        reinterpret_cast<FixedDoubleArray*>(this)->length());
  }
  if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
      instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
    return reinterpret_cast<FixedTypedArrayBase*>(
        this)->TypedArraySize(instance_type);
  }
  // The only remaining variable-size type is code.
  DCHECK(instance_type == CODE_TYPE);
  return reinterpret_cast<Code*>(this)->CodeSize();
}
4591
4592
// Stores the instance size (bytes) as a word count in a single byte; the
// size must be pointer-aligned and at most 255 words.
void Map::set_instance_size(int value) {
  DCHECK_EQ(0, value & (kPointerSize - 1));  // must be pointer-aligned
  value >>= kPointerSizeLog2;
  DCHECK(0 <= value && value < 256);
  NOBARRIER_WRITE_BYTE_FIELD(
      this, kInstanceSizeOffset, static_cast<byte>(value));
}
4600
4601
// Zeroes the padding byte so map comparisons/snapshots are deterministic.
void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }
4603
4604
// The instance type tag of objects with this map (one byte).
InstanceType Map::instance_type() {
  return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}


void Map::set_instance_type(InstanceType value) {
  WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}
4613
4614
// Number of unused (pre-allocated) property slots left in instances.
int Map::unused_property_fields() {
  return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}


void Map::set_unused_property_fields(int value) {
  // Saturate at 255 rather than DCHECK: the count is advisory and the field
  // is only one byte wide.
  WRITE_BYTE_FIELD(this, kUnusedPropertyFieldsOffset, Min(value, 255));
}
4623
4624
// Raw accessors for the two one-byte flag fields; the individual flag
// getters/setters below decode and encode these bytes.
byte Map::bit_field() const { return READ_BYTE_FIELD(this, kBitFieldOffset); }


void Map::set_bit_field(byte value) {
  WRITE_BYTE_FIELD(this, kBitFieldOffset, value);
}


byte Map::bit_field2() const { return READ_BYTE_FIELD(this, kBitField2Offset); }


void Map::set_bit_field2(byte value) {
  WRITE_BYTE_FIELD(this, kBitField2Offset, value);
}
4639
4640
set_non_instance_prototype(bool value)4641 void Map::set_non_instance_prototype(bool value) {
4642 if (value) {
4643 set_bit_field(bit_field() | (1 << kHasNonInstancePrototype));
4644 } else {
4645 set_bit_field(bit_field() & ~(1 << kHasNonInstancePrototype));
4646 }
4647 }
4648
4649
has_non_instance_prototype()4650 bool Map::has_non_instance_prototype() {
4651 return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
4652 }
4653
4654
set_is_constructor(bool value)4655 void Map::set_is_constructor(bool value) {
4656 if (value) {
4657 set_bit_field(bit_field() | (1 << kIsConstructor));
4658 } else {
4659 set_bit_field(bit_field() & ~(1 << kIsConstructor));
4660 }
4661 }
4662
4663
is_constructor()4664 bool Map::is_constructor() const {
4665 return ((1 << kIsConstructor) & bit_field()) != 0;
4666 }
4667
// Hidden-prototype flag, stored in the bit_field3 word via a BitField.
void Map::set_has_hidden_prototype(bool value) {
  set_bit_field3(HasHiddenPrototype::update(bit_field3(), value));
}

bool Map::has_hidden_prototype() const {
  return HasHiddenPrototype::decode(bit_field3());
}
4675
4676
// One-way setters for bit_field flags that are never cleared once set:
// indexed/named interceptors and undetectability.
void Map::set_has_indexed_interceptor() {
  set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
}


bool Map::has_indexed_interceptor() {
  return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
}


// Undetectable objects behave like undefined in typeof/equality checks
// (e.g. document.all).
void Map::set_is_undetectable() {
  set_bit_field(bit_field() | (1 << kIsUndetectable));
}


bool Map::is_undetectable() {
  return ((1 << kIsUndetectable) & bit_field()) != 0;
}


void Map::set_has_named_interceptor() {
  set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
}


bool Map::has_named_interceptor() {
  return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
}
4705
4706
set_is_access_check_needed(bool access_check_needed)4707 void Map::set_is_access_check_needed(bool access_check_needed) {
4708 if (access_check_needed) {
4709 set_bit_field(bit_field() | (1 << kIsAccessCheckNeeded));
4710 } else {
4711 set_bit_field(bit_field() & ~(1 << kIsAccessCheckNeeded));
4712 }
4713 }
4714
4715
is_access_check_needed()4716 bool Map::is_access_check_needed() {
4717 return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
4718 }
4719
4720
set_is_extensible(bool value)4721 void Map::set_is_extensible(bool value) {
4722 if (value) {
4723 set_bit_field2(bit_field2() | (1 << kIsExtensible));
4724 } else {
4725 set_bit_field2(bit_field2() & ~(1 << kIsExtensible));
4726 }
4727 }
4728
is_extensible()4729 bool Map::is_extensible() {
4730 return ((1 << kIsExtensible) & bit_field2()) != 0;
4731 }
4732
4733
// Marks maps that belong to objects used as prototypes; such maps get
// special treatment (fast-map heuristics below, prototype info).
void Map::set_is_prototype_map(bool value) {
  set_bit_field2(IsPrototypeMapBits::update(bit_field2(), value));
}

bool Map::is_prototype_map() const {
  return IsPrototypeMapBits::decode(bit_field2());
}

// True when the prototype object should be kept in fast mode; false when no
// PrototypeInfo has been attached yet.
bool Map::should_be_fast_prototype_map() const {
  if (!prototype_info()->IsPrototypeInfo()) return false;
  return PrototypeInfo::cast(prototype_info())->should_be_fast_map();
}
4746
// Elements kind of instances, encoded in a bit range of bit_field2.
void Map::set_elements_kind(ElementsKind elements_kind) {
  DCHECK(static_cast<int>(elements_kind) < kElementsKindCount);
  DCHECK(kElementsKindCount <= (1 << Map::ElementsKindBits::kSize));
  set_bit_field2(Map::ElementsKindBits::update(bit_field2(), elements_kind));
  DCHECK(this->elements_kind() == elements_kind);  // round-trips losslessly
}


ElementsKind Map::elements_kind() {
  return Map::ElementsKindBits::decode(bit_field2());
}
4758
4759
// Convenience predicates over elements_kind(); each delegates to the
// corresponding ElementsKind classifier.
bool Map::has_fast_smi_elements() {
  return IsFastSmiElementsKind(elements_kind());
}

bool Map::has_fast_object_elements() {
  return IsFastObjectElementsKind(elements_kind());
}

bool Map::has_fast_smi_or_object_elements() {
  return IsFastSmiOrObjectElementsKind(elements_kind());
}

bool Map::has_fast_double_elements() {
  return IsFastDoubleElementsKind(elements_kind());
}

bool Map::has_fast_elements() { return IsFastElementsKind(elements_kind()); }

bool Map::has_sloppy_arguments_elements() {
  return IsSloppyArgumentsElements(elements_kind());
}

bool Map::has_fast_sloppy_arguments_elements() {
  return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}

bool Map::has_fast_string_wrapper_elements() {
  return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool Map::has_fixed_typed_array_elements() {
  return IsFixedTypedArrayElementsKind(elements_kind());
}

bool Map::has_dictionary_elements() {
  return IsDictionaryElementsKind(elements_kind());
}
4797
4798
set_dictionary_map(bool value)4799 void Map::set_dictionary_map(bool value) {
4800 uint32_t new_bit_field3 = DictionaryMap::update(bit_field3(), value);
4801 new_bit_field3 = IsUnstable::update(new_bit_field3, value);
4802 set_bit_field3(new_bit_field3);
4803 }
4804
4805
is_dictionary_map()4806 bool Map::is_dictionary_map() {
4807 return DictionaryMap::decode(bit_field3());
4808 }
4809
4810
// Raw flags word of a Code object (kind, extra IC state, cache holder).
Code::Flags Code::flags() {
  return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}
4814
4815
// Whether this map owns its descriptor array (vs. sharing it with a
// transition target). Stored in bit_field3.
void Map::set_owns_descriptors(bool owns_descriptors) {
  set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}


bool Map::owns_descriptors() {
  return OwnsDescriptors::decode(bit_field3());
}


// One-way setter: callability is never revoked on a map.
void Map::set_is_callable() { set_bit_field(bit_field() | (1 << kIsCallable)); }


bool Map::is_callable() const {
  return ((1 << kIsCallable) & bit_field()) != 0;
}
4832
4833
// Deprecation: a deprecated map has been superseded by a more general one;
// instances are migrated lazily. The flag is one-way.
void Map::deprecate() {
  set_bit_field3(Deprecated::update(bit_field3(), true));
}


bool Map::is_deprecated() {
  return Deprecated::decode(bit_field3());
}


// Migration targets are the maps deprecated instances migrate to.
void Map::set_migration_target(bool value) {
  set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}


bool Map::is_migration_target() {
  return IsMigrationTarget::decode(bit_field3());
}
4852
// __proto__ immutability flag (Object.setPrototypeOf throws when set).
void Map::set_immutable_proto(bool value) {
  set_bit_field3(ImmutablePrototype::update(bit_field3(), value));
}

bool Map::is_immutable_proto() {
  return ImmutablePrototype::decode(bit_field3());
}

// Records whether new.target equals the constructor itself (base, not
// derived-class construction); used by the slack-tracking machinery.
void Map::set_new_target_is_base(bool value) {
  set_bit_field3(NewTargetIsBase::update(bit_field3(), value));
}


bool Map::new_target_is_base() { return NewTargetIsBase::decode(bit_field3()); }


// Countdown used for in-object slack tracking of new instances.
void Map::set_construction_counter(int value) {
  set_bit_field3(ConstructionCounter::update(bit_field3(), value));
}


int Map::construction_counter() {
  return ConstructionCounter::decode(bit_field3());
}
4877
4878
// Stability: a stable map will not be transitioned away from, so optimized
// code may embed direct map checks. Marking unstable is one-way.
void Map::mark_unstable() {
  set_bit_field3(IsUnstable::update(bit_field3(), true));
}


bool Map::is_stable() {
  return !IsUnstable::decode(bit_field3());
}


bool Map::has_code_cache() {
  // Code caches are always fixed arrays. The empty fixed array is used as a
  // sentinel for an absent code cache.
  return code_cache()->length() != 0;
}
4894
4895
CanBeDeprecated()4896 bool Map::CanBeDeprecated() {
4897 int descriptor = LastAdded();
4898 for (int i = 0; i <= descriptor; i++) {
4899 PropertyDetails details = instance_descriptors()->GetDetails(i);
4900 if (details.representation().IsNone()) return true;
4901 if (details.representation().IsSmi()) return true;
4902 if (details.representation().IsDouble()) return true;
4903 if (details.representation().IsHeapObject()) return true;
4904 if (details.type() == DATA_CONSTANT) return true;
4905 }
4906 return false;
4907 }
4908
4909
// Called when the layout of a leaf map changes: marks the map unstable and
// deoptimizes code that embedded prototype checks against it.
void Map::NotifyLeafMapLayoutChange() {
  if (is_stable()) {
    mark_unstable();
    dependent_code()->DeoptimizeDependentCodeGroup(
        GetIsolate(),
        DependentCode::kPrototypeCheckGroup);
  }
}
4918
4919
bool Map::CanTransition() {
  // Only JSObject and subtypes have map transitions and back pointers.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
  return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
4925
4926
// Instance-type classification predicates. The STATIC_ASSERTs pin the
// enum-range layout each range comparison relies on.
bool Map::IsBooleanMap() { return this == GetHeap()->boolean_map(); }
bool Map::IsPrimitiveMap() {
  STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
  return instance_type() <= LAST_PRIMITIVE_TYPE;
}
bool Map::IsJSReceiverMap() {
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  return instance_type() >= FIRST_JS_RECEIVER_TYPE;
}
bool Map::IsJSObjectMap() {
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
bool Map::IsJSArrayMap() { return instance_type() == JS_ARRAY_TYPE; }
bool Map::IsJSFunctionMap() { return instance_type() == JS_FUNCTION_TYPE; }
bool Map::IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
bool Map::IsJSProxyMap() { return instance_type() == JS_PROXY_TYPE; }
bool Map::IsJSGlobalProxyMap() {
  return instance_type() == JS_GLOBAL_PROXY_TYPE;
}
bool Map::IsJSGlobalObjectMap() {
  return instance_type() == JS_GLOBAL_OBJECT_TYPE;
}
bool Map::IsJSTypedArrayMap() { return instance_type() == JS_TYPED_ARRAY_TYPE; }
bool Map::IsJSDataViewMap() { return instance_type() == JS_DATA_VIEW_TYPE; }
4952
4953
// Map checks in optimized code may be omitted only for stable (leaf) maps,
// and only when the corresponding flag allows it.
bool Map::CanOmitMapChecks() {
  return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}
4957
4958
// DependentCode is a FixedArray-backed list of code objects grouped by
// dependency kind. Layout: [next link][flags (count+group)][code objects...].
DependentCode* DependentCode::next_link() {
  return DependentCode::cast(get(kNextLinkIndex));
}


void DependentCode::set_next_link(DependentCode* next) {
  set(kNextLinkIndex, next);
}


// Packed Smi holding the entry count and the dependency group.
int DependentCode::flags() { return Smi::cast(get(kFlagsIndex))->value(); }


void DependentCode::set_flags(int flags) {
  set(kFlagsIndex, Smi::FromInt(flags));
}


int DependentCode::count() { return CountField::decode(flags()); }

void DependentCode::set_count(int value) {
  set_flags(CountField::update(flags(), value));
}


DependentCode::DependencyGroup DependentCode::group() {
  return static_cast<DependencyGroup>(GroupField::decode(flags()));
}


void DependentCode::set_group(DependentCode::DependencyGroup group) {
  set_flags(GroupField::update(flags(), static_cast<int>(group)));
}


// Element accessors, offset past the header slots above.
void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}


Object* DependentCode::object_at(int i) {
  return get(kCodesStartIndex + i);
}


void DependentCode::clear_at(int i) {
  set_undefined(kCodesStartIndex + i);
}


void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}
5012
5013
void Code::set_flags(Code::Flags flags) {
  // Every kind must be representable in the KindField bit range.
  STATIC_ASSERT(Code::NUMBER_OF_KINDS <= KindField::kMax + 1);
  WRITE_INT_FIELD(this, kFlagsOffset, flags);
}


// The code kind, decoded from the flags word.
Code::Kind Code::kind() {
  return ExtractKindFromFlags(flags());
}
5023
// True for stubs, handlers, and every IC kind (expanded from IC_KIND_LIST).
bool Code::IsCodeStubOrIC() {
  switch (kind()) {
    case STUB:
    case HANDLER:
#define CASE_KIND(kind) case kind:
      IC_KIND_LIST(CASE_KIND)
#undef CASE_KIND
      return true;
    default:
      return false;
  }
}

// Extra IC state bits; only meaningful for IC and debug stubs.
ExtraICState Code::extra_ic_state() {
  DCHECK(is_inline_cache_stub() || is_debug_stub());
  return ExtractExtraICStateFromFlags(flags());
}
5041
5042
5043 // For initialization.
// For initialization: raw writes to the kind-specific flag words, bypassing
// the typed per-bit setters below.
void Code::set_raw_kind_specific_flags1(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags1Offset, value);
}


void Code::set_raw_kind_specific_flags2(int value) {
  WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}
5052
5053
// True for code produced by Crankshaft (including Hydrogen-built stubs).
inline bool Code::is_crankshafted() {
  return IsCrankshaftedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}


// Hydrogen stubs are crankshafted code that is not an optimized function.
inline bool Code::is_hydrogen_stub() {
  return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
}

// Identity-compares against the three interpreter entry builtins.
inline bool Code::is_interpreter_trampoline_builtin() {
  Builtins* builtins = GetIsolate()->builtins();
  return this == *builtins->InterpreterEntryTrampoline() ||
         this == *builtins->InterpreterEnterBytecodeAdvance() ||
         this == *builtins->InterpreterEnterBytecodeDispatch();
}
5070
// Whether this code object carries a trailing unwinding-info section; the
// bit lives in the main flags word.
inline bool Code::has_unwinding_info() const {
  return HasUnwindingInfoField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

inline void Code::set_has_unwinding_info(bool state) {
  uint32_t previous = READ_UINT32_FIELD(this, kFlagsOffset);
  uint32_t updated_value = HasUnwindingInfoField::update(previous, state);
  WRITE_UINT32_FIELD(this, kFlagsOffset, updated_value);
}

inline void Code::set_is_crankshafted(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = IsCrankshaftedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
5086
5087
// True for code produced by TurboFan; stored in kind-specific flags word 1.
inline bool Code::is_turbofanned() {
  return IsTurbofannedField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_is_turbofanned(bool value) {
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsTurbofannedField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}


// Whether embedded objects may be held weakly; only meaningful for
// optimized function code (enforced by the DCHECKs).
inline bool Code::can_have_weak_objects() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return CanHaveWeakObjectsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = CanHaveWeakObjectsField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
5114
// Construct-stub marker; only valid on builtin code objects.
inline bool Code::is_construct_stub() {
  DCHECK(kind() == BUILTIN);
  return IsConstructStubField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}

inline void Code::set_is_construct_stub(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = IsConstructStubField::update(previous, value);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
5127
// Flags stored in the kFullCodeFlags word; all are only valid on full-code
// (FUNCTION-kind) objects, enforced by the DCHECKs.
bool Code::has_deoptimization_support() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDeoptimizationSupportField::decode(flags);
}


void Code::set_has_deoptimization_support(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDeoptimizationSupportField::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}


// Debug break slots allow the debugger to patch in breakpoints.
bool Code::has_debug_break_slots() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
}


void Code::set_has_debug_break_slots(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasDebugBreakSlotsField::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}


// Whether relocation info was retained for snapshot serialization.
bool Code::has_reloc_info_for_serialization() {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
}


void Code::set_has_reloc_info_for_serialization(bool value) {
  DCHECK_EQ(FUNCTION, kind());
  unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
  flags = FullCodeFlagsHasRelocInfoForSerialization::update(flags, value);
  WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}
5171
5172
// OSR loop-nesting level at which on-stack replacement is allowed; FUNCTION
// code only.
int Code::allow_osr_at_loop_nesting_level() {
  DCHECK_EQ(FUNCTION, kind());
  int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  return AllowOSRAtLoopNestingLevelField::decode(fields);
}


void Code::set_allow_osr_at_loop_nesting_level(int level) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(level >= 0 && level <= AbstractCode::kMaxLoopNestingMarker);
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = AllowOSRAtLoopNestingLevelField::update(previous, level);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


// Profiler tick counter used by the optimization heuristics.
int Code::profiler_ticks() {
  DCHECK_EQ(FUNCTION, kind());
  return ProfilerTicksField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_profiler_ticks(int ticks) {
  // Silently ignored for non-FUNCTION code rather than DCHECKed, since the
  // profiler may tick any code object.
  if (kind() == FUNCTION) {
    unsigned previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
    unsigned updated = ProfilerTicksField::update(previous, ticks);
    WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
  }
}
5203
// Index into the builtins table, or a negative value for non-builtin code.
int Code::builtin_index() { return READ_INT_FIELD(this, kBuiltinIndexOffset); }

void Code::set_builtin_index(int index) {
  WRITE_INT_FIELD(this, kBuiltinIndexOffset, index);
}


// Number of stack slots reserved by the frame; crankshafted code only.
unsigned Code::stack_slots() {
  DCHECK(is_crankshafted());
  return StackSlotsField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
5216
5217
set_stack_slots(unsigned slots)5218 void Code::set_stack_slots(unsigned slots) {
5219 CHECK(slots <= (1 << kStackSlotsBitCount));
5220 DCHECK(is_crankshafted());
5221 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
5222 int updated = StackSlotsField::update(previous, slots);
5223 WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
5224 }
5225
5226
// Byte offset of the safepoint table within the instruction stream;
// crankshafted code only.
unsigned Code::safepoint_table_offset() {
  DCHECK(is_crankshafted());
  return SafepointTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
5232
5233
set_safepoint_table_offset(unsigned offset)5234 void Code::set_safepoint_table_offset(unsigned offset) {
5235 CHECK(offset <= (1 << kSafepointTableOffsetBitCount));
5236 DCHECK(is_crankshafted());
5237 DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
5238 int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
5239 int updated = SafepointTableOffsetField::update(previous, offset);
5240 WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
5241 }
5242
5243
// Back-edge table offset, stored word-scaled (the setter shifts right, the
// getter shifts back); FUNCTION code only.
unsigned Code::back_edge_table_offset() {
  DCHECK_EQ(FUNCTION, kind());
  return BackEdgeTableOffsetField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
}


void Code::set_back_edge_table_offset(unsigned offset) {
  DCHECK_EQ(FUNCTION, kind());
  DCHECK(IsAligned(offset, static_cast<unsigned>(kPointerSize)));
  offset = offset >> kPointerSizeLog2;  // store as word count
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
  int updated = BackEdgeTableOffsetField::update(previous, offset);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}


// A nonzero allowed OSR nesting level implies back edges were patched.
bool Code::back_edges_patched_for_osr() {
  DCHECK_EQ(FUNCTION, kind());
  return allow_osr_at_loop_nesting_level() > 0;
}
5265
5266
// ToBoolean ICs encode their hint state in the extra IC state bits.
uint16_t Code::to_boolean_state() { return extra_ic_state(); }
5268
5269
// Lazy-deoptimization mark; only meaningful on optimized function code.
bool Code::marked_for_deoptimization() {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  return MarkedForDeoptimizationField::decode(
      READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}


void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK(!flag || AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = READ_UINT32_FIELD(this, kKindSpecificFlags1Offset);
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
5284
5285
// True for every IC kind (cases expanded from IC_KIND_LIST).
bool Code::is_inline_cache_stub() {
  Kind kind = this->kind();
  switch (kind) {
#define CASE(name) case name: return true;
    IC_KIND_LIST(CASE)
#undef CASE
    default: return false;
  }
}
5295
is_debug_stub()5296 bool Code::is_debug_stub() {
5297 if (kind() != BUILTIN) return false;
5298 switch (builtin_index()) {
5299 #define CASE_DEBUG_BUILTIN(name) case Builtins::k##name:
5300 BUILTIN_LIST_DBG(CASE_DEBUG_BUILTIN)
5301 #undef CASE_DEBUG_BUILTIN
5302 return true;
5303 default:
5304 return false;
5305 }
5306 return false;
5307 }
// One-line kind predicates.
bool Code::is_handler() { return kind() == HANDLER; }
bool Code::is_call_stub() { return kind() == CALL_IC; }
bool Code::is_binary_op_stub() { return kind() == BINARY_OP_IC; }
bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
bool Code::is_to_boolean_ic_stub() { return kind() == TO_BOOLEAN_IC; }
bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
bool Code::is_wasm_code() { return kind() == WASM_FUNCTION; }
5315
constant_pool()5316 Address Code::constant_pool() {
5317 Address constant_pool = NULL;
5318 if (FLAG_enable_embedded_constant_pool) {
5319 int offset = constant_pool_offset();
5320 if (offset < instruction_size()) {
5321 constant_pool = FIELD_ADDR(this, kHeaderSize + offset);
5322 }
5323 }
5324 return constant_pool;
5325 }
5326
// Packs kind, extra IC state, and cache-holder into a Flags word.
Code::Flags Code::ComputeFlags(Kind kind, ExtraICState extra_ic_state,
                               CacheHolderFlag holder) {
  // Compute the bit mask.
  unsigned int bits = KindField::encode(kind) |
                      ExtraICStateField::encode(extra_ic_state) |
                      CacheHolderField::encode(holder);
  return static_cast<Flags>(bits);
}

// Handlers encode the kind they handle for in the extra-IC-state bits; the
// Flags kind itself is always HANDLER.
Code::Flags Code::ComputeHandlerFlags(Kind handler_kind,
                                      CacheHolderFlag holder) {
  return ComputeFlags(Code::HANDLER, handler_kind, holder);
}
5340
5341
// Decoders for the packed Flags word (inverse of ComputeFlags above).
Code::Kind Code::ExtractKindFromFlags(Flags flags) {
  return KindField::decode(flags);
}


ExtraICState Code::ExtractExtraICStateFromFlags(Flags flags) {
  return ExtraICStateField::decode(flags);
}


CacheHolderFlag Code::ExtractCacheHolderFromFlags(Flags flags) {
  return CacheHolderField::decode(flags);
}

// Clears the cache-holder bits, leaving kind and extra IC state intact.
Code::Flags Code::RemoveHolderFromFlags(Flags flags) {
  int bits = flags & ~CacheHolderField::kMask;
  return static_cast<Flags>(bits);
}
5360
5361
GetCodeFromTargetAddress(Address address)5362 Code* Code::GetCodeFromTargetAddress(Address address) {
5363 HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
5364 // GetCodeFromTargetAddress might be called when marking objects during mark
5365 // sweep. reinterpret_cast is therefore used instead of the more appropriate
5366 // Code::cast. Code::cast does not work when the object's map is
5367 // marked.
5368 Code* result = reinterpret_cast<Code*>(code);
5369 return result;
5370 }
5371
5372
GetObjectFromEntryAddress(Address location_of_address)5373 Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
5374 return HeapObject::
5375 FromAddress(Memory::Address_at(location_of_address) - Code::kHeaderSize);
5376 }
5377
5378
// Only optimized code that was compiled with weak-object support may embed
// weakly-held objects.
bool Code::CanContainWeakObjects() {
  return is_optimized_code() && can_have_weak_objects();
}


// True when |object| is embedded weakly by this code object.
bool Code::IsWeakObject(Object* object) {
  return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
}
5387
5388
// Decides whether |object| may be referenced weakly from optimized code,
// gated on the corresponding runtime flags. Note the order: maps are decided
// first; cells/property cells are unwrapped to their values before the
// JSReceiver/Context checks.
bool Code::IsWeakObjectInOptimizedCode(Object* object) {
  if (object->IsMap()) {
    // Only transitionable maps are embedded weakly.
    return Map::cast(object)->CanTransition() &&
           FLAG_weak_embedded_maps_in_optimized_code;
  }
  if (object->IsCell()) {
    object = Cell::cast(object)->value();
  } else if (object->IsPropertyCell()) {
    object = PropertyCell::cast(object)->value();
  }
  if (object->IsJSReceiver()) {
    return FLAG_weak_embedded_objects_in_optimized_code;
  }
  if (object->IsContext()) {
    // Contexts of inlined functions are embedded in optimized code.
    return FLAG_weak_embedded_objects_in_optimized_code;
  }
  return false;
}
5408
5409
// A small fixed-capacity list of (map-to-find, object-to-replace) pairs used
// by Code's find-and-replace machinery. Code reads the private arrays
// directly via the friend declaration, so the member names are part of its
// de-facto interface.
class Code::FindAndReplacePattern {
 public:
  FindAndReplacePattern() : count_(0) { }
  // Registers one replacement pair; at most kMaxCount pairs may be added.
  void Add(Handle<Map> map_to_find, Handle<Object> obj_to_replace) {
    DCHECK(count_ < kMaxCount);
    find_[count_] = map_to_find;
    replace_[count_] = obj_to_replace;
    ++count_;
  }
 private:
  static const int kMaxCount = 4;
  int count_;
  Handle<Map> find_[kMaxCount];
  Handle<Object> replace_[kMaxCount];
  friend class Code;
};
5426
instruction_size()5427 int AbstractCode::instruction_size() {
5428 if (IsCode()) {
5429 return GetCode()->instruction_size();
5430 } else {
5431 return GetBytecodeArray()->length();
5432 }
5433 }
5434
source_position_table()5435 ByteArray* AbstractCode::source_position_table() {
5436 if (IsCode()) {
5437 return GetCode()->source_position_table();
5438 } else {
5439 return GetBytecodeArray()->source_position_table();
5440 }
5441 }
5442
set_source_position_table(ByteArray * source_position_table)5443 void AbstractCode::set_source_position_table(ByteArray* source_position_table) {
5444 if (IsCode()) {
5445 GetCode()->set_source_position_table(source_position_table);
5446 } else {
5447 GetBytecodeArray()->set_source_position_table(source_position_table);
5448 }
5449 }
5450
LookupRangeInHandlerTable(int code_offset,int * data,HandlerTable::CatchPrediction * prediction)5451 int AbstractCode::LookupRangeInHandlerTable(
5452 int code_offset, int* data, HandlerTable::CatchPrediction* prediction) {
5453 if (IsCode()) {
5454 return GetCode()->LookupRangeInHandlerTable(code_offset, data, prediction);
5455 } else {
5456 return GetBytecodeArray()->LookupRangeInHandlerTable(code_offset, data,
5457 prediction);
5458 }
5459 }
5460
SizeIncludingMetadata()5461 int AbstractCode::SizeIncludingMetadata() {
5462 if (IsCode()) {
5463 return GetCode()->SizeIncludingMetadata();
5464 } else {
5465 return GetBytecodeArray()->SizeIncludingMetadata();
5466 }
5467 }
ExecutableSize()5468 int AbstractCode::ExecutableSize() {
5469 if (IsCode()) {
5470 return GetCode()->ExecutableSize();
5471 } else {
5472 return GetBytecodeArray()->BytecodeArraySize();
5473 }
5474 }
5475
instruction_start()5476 Address AbstractCode::instruction_start() {
5477 if (IsCode()) {
5478 return GetCode()->instruction_start();
5479 } else {
5480 return GetBytecodeArray()->GetFirstBytecodeAddress();
5481 }
5482 }
5483
instruction_end()5484 Address AbstractCode::instruction_end() {
5485 if (IsCode()) {
5486 return GetCode()->instruction_end();
5487 } else {
5488 return GetBytecodeArray()->GetFirstBytecodeAddress() +
5489 GetBytecodeArray()->length();
5490 }
5491 }
5492
// True when |inner_pointer| points into this object. The upper bound is
// inclusive, so a pointer one past the end (e.g. a return address after the
// last instruction) still counts as contained.
bool AbstractCode::contains(byte* inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}
5496
// Kind of this abstract code object. Code kinds are reused verbatim (the
// static assert pins the enum correspondence); any BytecodeArray is reported
// as INTERPRETED_FUNCTION.
AbstractCode::Kind AbstractCode::kind() {
  if (IsCode()) {
    STATIC_ASSERT(AbstractCode::FUNCTION ==
                  static_cast<AbstractCode::Kind>(Code::FUNCTION));
    return static_cast<AbstractCode::Kind>(GetCode()->kind());
  } else {
    return INTERPRETED_FUNCTION;
  }
}
5506
// Checked downcasts to the concrete backing representation. Callers must
// have established IsCode() / !IsCode() first.
Code* AbstractCode::GetCode() { return Code::cast(this); }

BytecodeArray* AbstractCode::GetBytecodeArray() {
  return BytecodeArray::cast(this);
}
5512
// The map's prototype slot. A prototype is either null or a JSReceiver
// (enforced by the DCHECK in the setter).
Object* Map::prototype() const {
  return READ_FIELD(this, kPrototypeOffset);
}


void Map::set_prototype(Object* value, WriteBarrierMode mode) {
  DCHECK(value->IsNull(GetIsolate()) || value->IsJSReceiver());
  WRITE_FIELD(this, kPrototypeOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kPrototypeOffset, value, mode);
}
5523
5524
// Reads the layout descriptor through the GC-safe cast, usable while the
// heap is being collected.
LayoutDescriptor* Map::layout_descriptor_gc_safe() {
  Object* layout_desc = READ_FIELD(this, kLayoutDescriptorOffset);
  return LayoutDescriptor::cast_gc_safe(layout_desc);
}


// True when this map's layout descriptor is the shared all-pointer ("fast")
// layout.
bool Map::HasFastPointerLayout() const {
  Object* layout_desc = READ_FIELD(this, kLayoutDescriptorOffset);
  return LayoutDescriptor::IsFastPointerLayout(layout_desc);
}
5535
5536
// Replaces this map's descriptor array. With unboxed double fields enabled,
// the layout descriptor is also replaced — but only when the current one is
// in slow (out-of-line) mode; fast layouts are left untouched here.
void Map::UpdateDescriptors(DescriptorArray* descriptors,
                            LayoutDescriptor* layout_desc) {
  set_instance_descriptors(descriptors);
  if (FLAG_unbox_double_fields) {
    if (layout_descriptor()->IsSlowLayout()) {
      set_layout_descriptor(layout_desc);
    }
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
      CHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
    DCHECK(visitor_id() == Heap::GetStaticVisitorIdForMap(this));
#endif
  }
}
5556
5557
// Installs a fresh descriptor array on this map, marking all of its
// descriptors as own, and (with unboxed double fields) installs the matching
// layout descriptor plus the visitor id derived from the new layout.
void Map::InitializeDescriptors(DescriptorArray* descriptors,
                                LayoutDescriptor* layout_desc) {
  int len = descriptors->number_of_descriptors();
  set_instance_descriptors(descriptors);
  SetNumberOfOwnDescriptors(len);

  if (FLAG_unbox_double_fields) {
    set_layout_descriptor(layout_desc);
#ifdef VERIFY_HEAP
    // TODO(ishell): remove these checks from VERIFY_HEAP mode.
    if (FLAG_verify_heap) {
      CHECK(layout_descriptor()->IsConsistentWithMap(this));
    }
#else
    SLOW_DCHECK(layout_descriptor()->IsConsistentWithMap(this));
#endif
    set_visitor_id(Heap::GetStaticVisitorIdForMap(this));
  }
}
5577
5578
// Generated getter/setter pairs for the descriptor-array and
// layout-descriptor fields of Map.
ACCESSORS(Map, instance_descriptors, DescriptorArray, kDescriptorsOffset)
ACCESSORS(Map, layout_descriptor, LayoutDescriptor, kLayoutDescriptorOffset)
5581
// bit_field3 is a raw 32-bit field. On 64-bit targets (where pointer size
// exceeds int32), the setter first zeroes the adjacent upper 32 bits so the
// whole pointer-sized slot has a well-defined value.
void Map::set_bit_field3(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
    WRITE_UINT32_FIELD(this, kBitField3Offset + kInt32Size, 0);
  }
  WRITE_UINT32_FIELD(this, kBitField3Offset, bits);
}


uint32_t Map::bit_field3() const {
  return READ_UINT32_FIELD(this, kBitField3Offset);
}
5593
5594
// The effective layout descriptor: the stored one when double-field
// unboxing is on, otherwise the shared fast-pointer layout singleton.
LayoutDescriptor* Map::GetLayoutDescriptor() {
  return FLAG_unbox_double_fields ? layout_descriptor()
                                  : LayoutDescriptor::FastPointerLayout();
}
5599
5600
// Appends |desc| to this map's descriptor array and bumps the own-descriptor
// count. Requires that the map currently owns all of its descriptors.
void Map::AppendDescriptor(Descriptor* desc) {
  DescriptorArray* descriptors = instance_descriptors();
  int number_of_own_descriptors = NumberOfOwnDescriptors();
  DCHECK(descriptors->number_of_descriptors() == number_of_own_descriptors);
  descriptors->Append(desc);
  SetNumberOfOwnDescriptors(number_of_own_descriptors + 1);

// This function does not support appending double field descriptors and
// it should never try to (otherwise, layout descriptor must be updated too).
#ifdef DEBUG
  PropertyDetails details = desc->GetDetails();
  CHECK(details.type() != DATA || !details.representation().IsDouble());
#endif
}
5615
5616
GetBackPointer()5617 Object* Map::GetBackPointer() {
5618 Object* object = constructor_or_backpointer();
5619 if (object->IsMap()) {
5620 return object;
5621 }
5622 return GetIsolate()->heap()->undefined_value();
5623 }
5624
5625
// Looks up the special elements-kind transition recorded under the
// elements_transition_symbol, if any.
Map* Map::ElementsTransitionMap() {
  return TransitionArray::SearchSpecial(
      this, GetHeap()->elements_transition_symbol());
}
5630
5631
// Raw accessor for the slot shared between transitions and prototype info
// (see prototype_info below for the prototype-map interpretation).
ACCESSORS(Map, raw_transitions, Object, kTransitionsOrPrototypeInfoOffset)
5634
// On prototype maps the transitions slot is reinterpreted as prototype info;
// both accessors assert that this map is in fact a prototype map.
Object* Map::prototype_info() const {
  DCHECK(is_prototype_map());
  return READ_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset);
}


void Map::set_prototype_info(Object* value, WriteBarrierMode mode) {
  DCHECK(is_prototype_map());
  WRITE_FIELD(this, Map::kTransitionsOrPrototypeInfoOffset, value);
  CONDITIONAL_WRITE_BARRIER(
      GetHeap(), this, Map::kTransitionsOrPrototypeInfoOffset, value, mode);
}
5647
5648
// Installs a transition back pointer. Allowed only once (the slot must still
// read as "no back pointer"), only on JSReceiver maps, and only when the
// target map shares this slot's current constructor.
void Map::SetBackPointer(Object* value, WriteBarrierMode mode) {
  DCHECK(instance_type() >= FIRST_JS_RECEIVER_TYPE);
  DCHECK(value->IsMap());
  DCHECK(GetBackPointer()->IsUndefined(GetIsolate()));
  DCHECK(!value->IsMap() ||
         Map::cast(value)->GetConstructor() == constructor_or_backpointer());
  set_constructor_or_backpointer(value, mode);
}
5657
// Generated accessors for the remaining Map fields.
ACCESSORS(Map, code_cache, FixedArray, kCodeCacheOffset)
ACCESSORS(Map, dependent_code, DependentCode, kDependentCodeOffset)
ACCESSORS(Map, weak_cell_cache, Object, kWeakCellCacheOffset)
ACCESSORS(Map, constructor_or_backpointer, Object,
          kConstructorOrBackPointerOffset)
5663
5664
5665 Object* Map::GetConstructor() const {
5666 Object* maybe_constructor = constructor_or_backpointer();
5667 // Follow any back pointers.
5668 while (maybe_constructor->IsMap()) {
5669 maybe_constructor =
5670 Map::cast(maybe_constructor)->constructor_or_backpointer();
5671 }
5672 return maybe_constructor;
5673 }
5674
5675
// Stores the constructor. The slot must not currently serve as a back
// pointer (back pointers are maps; constructors never are here).
void Map::SetConstructor(Object* constructor, WriteBarrierMode mode) {
  // Never overwrite a back pointer with a constructor.
  DCHECK(!constructor_or_backpointer()->IsMap());
  set_constructor_or_backpointer(constructor, mode);
}
5681
5682
// Convenience overload: copies an initial map preserving its size and
// property bookkeeping.
Handle<Map> Map::CopyInitialMap(Handle<Map> map) {
  return CopyInitialMap(map, map->instance_size(), map->GetInObjectProperties(),
                        map->unused_property_fields());
}
5687
5688
// Generated field accessors for JSBoundFunction, JSFunction, the global
// object/proxy, AccessorInfo, Box and the promise job info structs. Each
// ACCESSORS / SMI_ACCESSORS invocation expands to an inline getter/setter
// pair for the named field at the given offset.
ACCESSORS(JSBoundFunction, bound_target_function, JSReceiver,
          kBoundTargetFunctionOffset)
ACCESSORS(JSBoundFunction, bound_this, Object, kBoundThisOffset)
ACCESSORS(JSBoundFunction, bound_arguments, FixedArray, kBoundArgumentsOffset)

ACCESSORS(JSFunction, shared, SharedFunctionInfo, kSharedFunctionInfoOffset)
ACCESSORS(JSFunction, literals, LiteralsArray, kLiteralsOffset)
ACCESSORS(JSFunction, next_function_link, Object, kNextFunctionLinkOffset)

ACCESSORS(JSGlobalObject, native_context, Context, kNativeContextOffset)
ACCESSORS(JSGlobalObject, global_proxy, JSObject, kGlobalProxyOffset)

ACCESSORS(JSGlobalProxy, native_context, Object, kNativeContextOffset)
ACCESSORS(JSGlobalProxy, hash, Object, kHashOffset)

ACCESSORS(AccessorInfo, name, Object, kNameOffset)
SMI_ACCESSORS(AccessorInfo, flag, kFlagOffset)
ACCESSORS(AccessorInfo, expected_receiver_type, Object,
          kExpectedReceiverTypeOffset)

ACCESSORS(AccessorInfo, getter, Object, kGetterOffset)
ACCESSORS(AccessorInfo, setter, Object, kSetterOffset)
ACCESSORS(AccessorInfo, js_getter, Object, kJsGetterOffset)
ACCESSORS(AccessorInfo, data, Object, kDataOffset)

ACCESSORS(Box, value, Object, kValueOffset)

ACCESSORS(PromiseResolveThenableJobInfo, thenable, JSReceiver, kThenableOffset)
ACCESSORS(PromiseResolveThenableJobInfo, then, JSReceiver, kThenOffset)
ACCESSORS(PromiseResolveThenableJobInfo, resolve, JSFunction, kResolveOffset)
ACCESSORS(PromiseResolveThenableJobInfo, reject, JSFunction, kRejectOffset)
ACCESSORS(PromiseResolveThenableJobInfo, debug_id, Object, kDebugIdOffset)
ACCESSORS(PromiseResolveThenableJobInfo, debug_name, Object, kDebugNameOffset)
ACCESSORS(PromiseResolveThenableJobInfo, context, Context, kContextOffset);

ACCESSORS(PromiseReactionJobInfo, value, Object, kValueOffset);
ACCESSORS(PromiseReactionJobInfo, tasks, Object, kTasksOffset);
ACCESSORS(PromiseReactionJobInfo, deferred, Object, kDeferredOffset);
ACCESSORS(PromiseReactionJobInfo, debug_id, Object, kDebugIdOffset);
ACCESSORS(PromiseReactionJobInfo, debug_name, Object, kDebugNameOffset);
ACCESSORS(PromiseReactionJobInfo, context, Context, kContextOffset);
5730
// The Object.create map is cached behind a WeakCell; this reader assumes the
// cell exists and is not cleared (check HasObjectCreateMap first).
Map* PrototypeInfo::ObjectCreateMap() {
  return Map::cast(WeakCell::cast(object_create_map())->value());
}

// static
void PrototypeInfo::SetObjectCreateMap(Handle<PrototypeInfo> info,
                                       Handle<Map> map) {
  // Store the map weakly so it can still be collected.
  Handle<WeakCell> cell = Map::WeakCellForMap(map);
  info->set_object_create_map(*cell);
}

// True when a cached Object.create map exists and has not been collected.
bool PrototypeInfo::HasObjectCreateMap() {
  Object* cache = object_create_map();
  return cache->IsWeakCell() && !WeakCell::cast(cache)->cleared();
}
5746
// A template counts as instantiated once its shared-function-info slot holds
// an actual SharedFunctionInfo.
bool FunctionTemplateInfo::instantiated() {
  return shared_function_info()->IsSharedFunctionInfo();
}

// Parent template in the inheritance chain, or nullptr at the root.
FunctionTemplateInfo* FunctionTemplateInfo::GetParent(Isolate* isolate) {
  Object* parent = parent_template();
  return parent->IsUndefined(isolate) ? nullptr
                                      : FunctionTemplateInfo::cast(parent);
}
5756
GetParent(Isolate * isolate)5757 ObjectTemplateInfo* ObjectTemplateInfo::GetParent(Isolate* isolate) {
5758 Object* maybe_ctor = constructor();
5759 if (maybe_ctor->IsUndefined(isolate)) return nullptr;
5760 FunctionTemplateInfo* constructor = FunctionTemplateInfo::cast(maybe_ctor);
5761 while (true) {
5762 constructor = constructor->GetParent(isolate);
5763 if (constructor == nullptr) return nullptr;
5764 Object* maybe_obj = constructor->instance_template();
5765 if (!maybe_obj->IsUndefined(isolate)) {
5766 return ObjectTemplateInfo::cast(maybe_obj);
5767 }
5768 }
5769 return nullptr;
5770 }
5771
// Generated field accessors for PrototypeInfo, Tuple3, ContextExtension,
// module objects and the fixed-array iterator.
ACCESSORS(PrototypeInfo, weak_cell, Object, kWeakCellOffset)
ACCESSORS(PrototypeInfo, prototype_users, Object, kPrototypeUsersOffset)
ACCESSORS(PrototypeInfo, object_create_map, Object, kObjectCreateMap)
SMI_ACCESSORS(PrototypeInfo, registry_slot, kRegistrySlotOffset)
ACCESSORS(PrototypeInfo, validity_cell, Object, kValidityCellOffset)
SMI_ACCESSORS(PrototypeInfo, bit_field, kBitFieldOffset)
BOOL_ACCESSORS(PrototypeInfo, bit_field, should_be_fast_map, kShouldBeFastBit)

ACCESSORS(Tuple3, value1, Object, kValue1Offset)
ACCESSORS(Tuple3, value2, Object, kValue2Offset)
ACCESSORS(Tuple3, value3, Object, kValue3Offset)

ACCESSORS(ContextExtension, scope_info, ScopeInfo, kScopeInfoOffset)
ACCESSORS(ContextExtension, extension, Object, kExtensionOffset)

ACCESSORS(JSModuleNamespace, module, Module, kModuleOffset)

ACCESSORS(JSFixedArrayIterator, array, FixedArray, kArrayOffset)
SMI_ACCESSORS(JSFixedArrayIterator, index, kIndexOffset)
ACCESSORS(JSFixedArrayIterator, initial_next, JSFunction, kNextOffset)

ACCESSORS(Module, code, Object, kCodeOffset)
ACCESSORS(Module, exports, ObjectHashTable, kExportsOffset)
ACCESSORS(Module, regular_exports, FixedArray, kRegularExportsOffset)
ACCESSORS(Module, regular_imports, FixedArray, kRegularImportsOffset)
ACCESSORS(Module, module_namespace, HeapObject, kModuleNamespaceOffset)
ACCESSORS(Module, requested_modules, FixedArray, kRequestedModulesOffset)
SMI_ACCESSORS(Module, hash, kHashOffset)
5800
// A module's lifecycle state is encoded by the dynamic type of its code
// slot: SharedFunctionInfo (not yet instantiated) -> JSFunction
// (instantiated) -> ModuleInfo (evaluated).
bool Module::evaluated() const { return code()->IsModuleInfo(); }

// Marks this module evaluated by replacing the JSFunction in the code slot
// with its ModuleInfo (the `return` forwards a void expression).
void Module::set_evaluated() {
  DCHECK(instantiated());
  DCHECK(!evaluated());
  return set_code(
      JSFunction::cast(code())->shared()->scope_info()->ModuleDescriptorInfo());
}

bool Module::instantiated() const { return !code()->IsSharedFunctionInfo(); }

// Retrieves the ModuleInfo regardless of lifecycle state, unwrapping the
// code slot as dictated by the state encoding above.
ModuleInfo* Module::info() const {
  if (evaluated()) return ModuleInfo::cast(code());
  ScopeInfo* scope_info = instantiated()
                              ? JSFunction::cast(code())->shared()->scope_info()
                              : SharedFunctionInfo::cast(code())->scope_info();
  return scope_info->ModuleDescriptorInfo();
}
5819
// Generated field accessors for AccessorPair, access-check/interceptor/call
// handler infos and the template-info hierarchy.
ACCESSORS(AccessorPair, getter, Object, kGetterOffset)
ACCESSORS(AccessorPair, setter, Object, kSetterOffset)

ACCESSORS(AccessCheckInfo, callback, Object, kCallbackOffset)
ACCESSORS(AccessCheckInfo, named_interceptor, Object, kNamedInterceptorOffset)
ACCESSORS(AccessCheckInfo, indexed_interceptor, Object,
          kIndexedInterceptorOffset)
ACCESSORS(AccessCheckInfo, data, Object, kDataOffset)

ACCESSORS(InterceptorInfo, getter, Object, kGetterOffset)
ACCESSORS(InterceptorInfo, setter, Object, kSetterOffset)
ACCESSORS(InterceptorInfo, query, Object, kQueryOffset)
ACCESSORS(InterceptorInfo, descriptor, Object, kDescriptorOffset)
ACCESSORS(InterceptorInfo, deleter, Object, kDeleterOffset)
ACCESSORS(InterceptorInfo, enumerator, Object, kEnumeratorOffset)
ACCESSORS(InterceptorInfo, definer, Object, kDefinerOffset)
ACCESSORS(InterceptorInfo, data, Object, kDataOffset)
SMI_ACCESSORS(InterceptorInfo, flags, kFlagsOffset)
BOOL_ACCESSORS(InterceptorInfo, flags, can_intercept_symbols,
               kCanInterceptSymbolsBit)
BOOL_ACCESSORS(InterceptorInfo, flags, all_can_read, kAllCanReadBit)
BOOL_ACCESSORS(InterceptorInfo, flags, non_masking, kNonMasking)

ACCESSORS(CallHandlerInfo, callback, Object, kCallbackOffset)
ACCESSORS(CallHandlerInfo, data, Object, kDataOffset)
ACCESSORS(CallHandlerInfo, fast_handler, Object, kFastHandlerOffset)

ACCESSORS(TemplateInfo, tag, Object, kTagOffset)
ACCESSORS(TemplateInfo, serial_number, Object, kSerialNumberOffset)
SMI_ACCESSORS(TemplateInfo, number_of_properties, kNumberOfProperties)
ACCESSORS(TemplateInfo, property_list, Object, kPropertyListOffset)
ACCESSORS(TemplateInfo, property_accessors, Object, kPropertyAccessorsOffset)

ACCESSORS(FunctionTemplateInfo, call_code, Object, kCallCodeOffset)
ACCESSORS(FunctionTemplateInfo, prototype_template, Object,
          kPrototypeTemplateOffset)
ACCESSORS(FunctionTemplateInfo, parent_template, Object, kParentTemplateOffset)
ACCESSORS(FunctionTemplateInfo, named_property_handler, Object,
          kNamedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, indexed_property_handler, Object,
          kIndexedPropertyHandlerOffset)
ACCESSORS(FunctionTemplateInfo, instance_template, Object,
          kInstanceTemplateOffset)
ACCESSORS(FunctionTemplateInfo, class_name, Object, kClassNameOffset)
ACCESSORS(FunctionTemplateInfo, signature, Object, kSignatureOffset)
ACCESSORS(FunctionTemplateInfo, instance_call_handler, Object,
          kInstanceCallHandlerOffset)
ACCESSORS(FunctionTemplateInfo, access_check_info, Object,
          kAccessCheckInfoOffset)
ACCESSORS(FunctionTemplateInfo, shared_function_info, Object,
          kSharedFunctionInfoOffset)
ACCESSORS(FunctionTemplateInfo, cached_property_name, Object,
          kCachedPropertyNameOffset)

SMI_ACCESSORS(FunctionTemplateInfo, flag, kFlagOffset)

ACCESSORS(ObjectTemplateInfo, constructor, Object, kConstructorOffset)
ACCESSORS(ObjectTemplateInfo, data, Object, kDataOffset)
5878
// ObjectTemplateInfo packs both the internal-field count and the
// immutable-prototype bit into the Smi stored in its data slot, decoded via
// the InternalFieldCount / IsImmutablePrototype bit fields.
int ObjectTemplateInfo::internal_field_count() const {
  Object* value = data();
  DCHECK(value->IsSmi());
  return InternalFieldCount::decode(Smi::cast(value)->value());
}

// Re-encodes the data Smi with an updated field count (the `return`
// forwards a void expression).
void ObjectTemplateInfo::set_internal_field_count(int count) {
  return set_data(Smi::FromInt(
      InternalFieldCount::update(Smi::cast(data())->value(), count)));
}

bool ObjectTemplateInfo::immutable_proto() const {
  Object* value = data();
  DCHECK(value->IsSmi());
  return IsImmutablePrototype::decode(Smi::cast(value)->value());
}

void ObjectTemplateInfo::set_immutable_proto(bool immutable) {
  return set_data(Smi::FromInt(
      IsImmutablePrototype::update(Smi::cast(data())->value(), immutable)));
}
5900
// TemplateList is a FixedArray in disguise: slot kLengthIndex holds the
// logical length as a Smi and elements start at kFirstElementIndex.
int TemplateList::length() const {
  return Smi::cast(FixedArray::cast(this)->get(kLengthIndex))->value();
}

Object* TemplateList::get(int index) const {
  return FixedArray::cast(this)->get(kFirstElementIndex + index);
}

void TemplateList::set(int index, Object* value) {
  FixedArray::cast(this)->set(kFirstElementIndex + index, value);
}
5912
// Generated field accessors for AllocationSite, AllocationMemento and
// Script. Note that eval_from_shared and wasm_compiled_module deliberately
// share kEvalFromSharedOffset — the _CHECKED conditions on the script type
// keep the two interpretations mutually exclusive.
ACCESSORS(AllocationSite, transition_info, Object, kTransitionInfoOffset)
ACCESSORS(AllocationSite, nested_site, Object, kNestedSiteOffset)
SMI_ACCESSORS(AllocationSite, pretenure_data, kPretenureDataOffset)
SMI_ACCESSORS(AllocationSite, pretenure_create_count,
              kPretenureCreateCountOffset)
ACCESSORS(AllocationSite, dependent_code, DependentCode,
          kDependentCodeOffset)
ACCESSORS(AllocationSite, weak_next, Object, kWeakNextOffset)
ACCESSORS(AllocationMemento, allocation_site, Object, kAllocationSiteOffset)

ACCESSORS(Script, source, Object, kSourceOffset)
ACCESSORS(Script, name, Object, kNameOffset)
SMI_ACCESSORS(Script, id, kIdOffset)
SMI_ACCESSORS(Script, line_offset, kLineOffsetOffset)
SMI_ACCESSORS(Script, column_offset, kColumnOffsetOffset)
ACCESSORS(Script, context_data, Object, kContextOffset)
ACCESSORS(Script, wrapper, HeapObject, kWrapperOffset)
SMI_ACCESSORS(Script, type, kTypeOffset)
ACCESSORS(Script, line_ends, Object, kLineEndsOffset)
ACCESSORS_CHECKED(Script, eval_from_shared, Object, kEvalFromSharedOffset,
                  this->type() != TYPE_WASM)
SMI_ACCESSORS_CHECKED(Script, eval_from_position, kEvalFromPositionOffset,
                      this->type() != TYPE_WASM)
ACCESSORS(Script, shared_function_infos, Object, kSharedFunctionInfosOffset)
SMI_ACCESSORS(Script, flags, kFlagsOffset)
ACCESSORS(Script, source_url, Object, kSourceUrlOffset)
ACCESSORS(Script, source_mapping_url, Object, kSourceMappingUrlOffset)
ACCESSORS_CHECKED(Script, wasm_compiled_module, Object, kEvalFromSharedOffset,
                  this->type() == TYPE_WASM)
5942
// The Script flags Smi packs several booleans plus the origin options; the
// helpers below encode/decode the individual pieces.
Script::CompilationType Script::compilation_type() {
  return BooleanBit::get(flags(), kCompilationTypeBit) ?
      COMPILATION_TYPE_EVAL : COMPILATION_TYPE_HOST;
}
void Script::set_compilation_type(CompilationType type) {
  set_flags(BooleanBit::set(flags(), kCompilationTypeBit,
      type == COMPILATION_TYPE_EVAL));
}
bool Script::hide_source() { return BooleanBit::get(flags(), kHideSourceBit); }
void Script::set_hide_source(bool value) {
  set_flags(BooleanBit::set(flags(), kHideSourceBit, value));
}
Script::CompilationState Script::compilation_state() {
  return BooleanBit::get(flags(), kCompilationStateBit) ?
      COMPILATION_STATE_COMPILED : COMPILATION_STATE_INITIAL;
}
void Script::set_compilation_state(CompilationState state) {
  set_flags(BooleanBit::set(flags(), kCompilationStateBit,
      state == COMPILATION_STATE_COMPILED));
}
// Origin options occupy a multi-bit range within the flags word.
ScriptOriginOptions Script::origin_options() {
  return ScriptOriginOptions((flags() & kOriginOptionsMask) >>
                             kOriginOptionsShift);
}
void Script::set_origin_options(ScriptOriginOptions origin_options) {
  // The options must fit in their reserved bit range.
  DCHECK(!(origin_options.Flags() & ~((1 << kOriginOptionsSize) - 1)));
  set_flags((flags() & ~kOriginOptionsMask) |
            (origin_options.Flags() << kOriginOptionsShift));
}
5972
5973
// DebugInfo fields plus helpers distinguishing the two debugging modes: a
// patched bytecode array for interpreted functions, or a full-codegen Code
// object with debug break slots.
ACCESSORS(DebugInfo, shared, SharedFunctionInfo, kSharedFunctionInfoIndex)
ACCESSORS(DebugInfo, debug_bytecode_array, Object, kDebugBytecodeArrayIndex)
ACCESSORS(DebugInfo, break_points, FixedArray, kBreakPointsStateIndex)

bool DebugInfo::HasDebugBytecodeArray() {
  return debug_bytecode_array()->IsBytecodeArray();
}

bool DebugInfo::HasDebugCode() {
  Code* code = shared()->code();
  bool has = code->kind() == Code::FUNCTION;
  // Debuggable full-codegen code always carries debug break slots.
  DCHECK(!has || code->has_debug_break_slots());
  return has;
}

// The unpatched bytecode still stored on the SharedFunctionInfo.
BytecodeArray* DebugInfo::OriginalBytecodeArray() {
  DCHECK(HasDebugBytecodeArray());
  return shared()->bytecode_array();
}

// The debugger's patched copy of the bytecode.
BytecodeArray* DebugInfo::DebugBytecodeArray() {
  DCHECK(HasDebugBytecodeArray());
  return BytecodeArray::cast(debug_bytecode_array());
}

Code* DebugInfo::DebugCode() {
  DCHECK(HasDebugCode());
  return shared()->code();
}
6003
// Generated accessors for BreakPointInfo and SharedFunctionInfo, followed by
// boolean views into the packed flag / start_position_and_type /
// compiler_hints words.
SMI_ACCESSORS(BreakPointInfo, source_position, kSourcePositionIndex)
ACCESSORS(BreakPointInfo, break_point_objects, Object, kBreakPointObjectsIndex)

ACCESSORS(SharedFunctionInfo, name, Object, kNameOffset)
ACCESSORS(SharedFunctionInfo, optimized_code_map, FixedArray,
          kOptimizedCodeMapOffset)
ACCESSORS(SharedFunctionInfo, construct_stub, Code, kConstructStubOffset)
ACCESSORS(SharedFunctionInfo, feedback_metadata, TypeFeedbackMetadata,
          kFeedbackMetadataOffset)
#if TRACE_MAPS
SMI_ACCESSORS(SharedFunctionInfo, unique_id, kUniqueIdOffset)
#endif
ACCESSORS(SharedFunctionInfo, instance_class_name, Object,
          kInstanceClassNameOffset)
ACCESSORS(SharedFunctionInfo, function_data, Object, kFunctionDataOffset)
ACCESSORS(SharedFunctionInfo, script, Object, kScriptOffset)
ACCESSORS(SharedFunctionInfo, debug_info, Object, kDebugInfoOffset)
ACCESSORS(SharedFunctionInfo, function_identifier, Object,
          kFunctionIdentifierOffset)

SMI_ACCESSORS(FunctionTemplateInfo, length, kLengthOffset)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, hidden_prototype,
               kHiddenPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, undetectable, kUndetectableBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, needs_access_check,
               kNeedsAccessCheckBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, read_only_prototype,
               kReadOnlyPrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, remove_prototype,
               kRemovePrototypeBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, do_not_cache,
               kDoNotCacheBit)
BOOL_ACCESSORS(FunctionTemplateInfo, flag, accept_any_receiver,
               kAcceptAnyReceiver)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_named_expression,
               kIsNamedExpressionBit)
BOOL_ACCESSORS(SharedFunctionInfo, start_position_and_type, is_toplevel,
               kIsTopLevelBit)

BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, allows_lazy_compilation,
               kAllowLazyCompilation)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               uses_arguments,
               kUsesArguments)
BOOL_ACCESSORS(SharedFunctionInfo,
               compiler_hints,
               has_duplicate_parameters,
               kHasDuplicateParameters)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, asm_function, kIsAsmFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, deserialized, kDeserialized)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, never_compiled,
               kNeverCompiled)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_declaration,
               kIsDeclaration)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, marked_for_tier_up,
               kMarkedForTierUp)
// SharedFunctionInfo integer fields. On 32-bit hosts they are ordinary Smi
// fields. On 64-bit hosts, two raw 32-bit ints are packed into each
// pointer-sized slot ("pseudo Smis"): the LO half is stored shifted left by
// one with the heap-object tag bit kept clear, so a heap scan never mistakes
// the raw data for a tagged pointer; the HI half is a plain int at the
// pointer-aligned offset. The LO/HI alignment constants below account for
// endianness.
#if V8_HOST_ARCH_32_BIT
SMI_ACCESSORS(SharedFunctionInfo, length, kLengthOffset)
SMI_ACCESSORS(SharedFunctionInfo, internal_formal_parameter_count,
              kFormalParameterCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, expected_nof_properties,
              kExpectedNofPropertiesOffset)
SMI_ACCESSORS(SharedFunctionInfo, num_literals, kNumLiteralsOffset)
SMI_ACCESSORS(SharedFunctionInfo, start_position_and_type,
              kStartPositionAndTypeOffset)
SMI_ACCESSORS(SharedFunctionInfo, end_position, kEndPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, function_token_position,
              kFunctionTokenPositionOffset)
SMI_ACCESSORS(SharedFunctionInfo, compiler_hints,
              kCompilerHintsOffset)
SMI_ACCESSORS(SharedFunctionInfo, opt_count_and_bailout_reason,
              kOptCountAndBailoutReasonOffset)
SMI_ACCESSORS(SharedFunctionInfo, counters, kCountersOffset)
SMI_ACCESSORS(SharedFunctionInfo, ast_node_count, kAstNodeCountOffset)
SMI_ACCESSORS(SharedFunctionInfo, profiler_ticks, kProfilerTicksOffset)

#else

#if V8_TARGET_LITTLE_ENDIAN
#define PSEUDO_SMI_LO_ALIGN 0
#define PSEUDO_SMI_HI_ALIGN kIntSize
#else
#define PSEUDO_SMI_LO_ALIGN kIntSize
#define PSEUDO_SMI_HI_ALIGN 0
#endif

// LO half: stores value << 1 with the tag bit masked off; the getter undoes
// the shift. The DCHECK on 0xC0000000 ensures the shift loses no
// information (the top two bits must already agree).
#define PSEUDO_SMI_ACCESSORS_LO(holder, name, offset)                          \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_LO_ALIGN);         \
  int holder::name() const {                                                   \
    int value = READ_INT_FIELD(this, offset);                                  \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & kHeapObjectTag) == 0);                                     \
    return value >> 1;                                                         \
  }                                                                            \
  void holder::set_##name(int value) {                                         \
    DCHECK(kHeapObjectTag == 1);                                               \
    DCHECK((value & 0xC0000000) == 0xC0000000 || (value & 0xC0000000) == 0x0); \
    WRITE_INT_FIELD(this, offset, (value << 1) & ~kHeapObjectTag);             \
  }

// HI half: a plain int living in the upper half of the slot.
#define PSEUDO_SMI_ACCESSORS_HI(holder, name, offset) \
  STATIC_ASSERT(holder::offset % kPointerSize == PSEUDO_SMI_HI_ALIGN); \
  INT_ACCESSORS(holder, name, offset)


PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, length, kLengthOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, internal_formal_parameter_count,
                        kFormalParameterCountOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        expected_nof_properties,
                        kExpectedNofPropertiesOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, num_literals, kNumLiteralsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo, end_position, kEndPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        start_position_and_type,
                        kStartPositionAndTypeOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        function_token_position,
                        kFunctionTokenPositionOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        compiler_hints,
                        kCompilerHintsOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        opt_count_and_bailout_reason,
                        kOptCountAndBailoutReasonOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo, counters, kCountersOffset)

PSEUDO_SMI_ACCESSORS_LO(SharedFunctionInfo,
                        ast_node_count,
                        kAstNodeCountOffset)
PSEUDO_SMI_ACCESSORS_HI(SharedFunctionInfo,
                        profiler_ticks,
                        kProfilerTicksOffset)

#endif
6145
6146
// Getter only; the setter is hand-written below so it can stay next to the
// related optimization-state logic.
BOOL_GETTER(SharedFunctionInfo,
            compiler_hints,
            optimization_disabled,
            kOptimizationDisabled)
6151
6152 AbstractCode* SharedFunctionInfo::abstract_code() {
6153 if (HasBytecodeArray()) {
6154 return AbstractCode::cast(bytecode_array());
6155 } else {
6156 return AbstractCode::cast(code());
6157 }
6158 }
6159
set_optimization_disabled(bool disable)6160 void SharedFunctionInfo::set_optimization_disabled(bool disable) {
6161 set_compiler_hints(BooleanBit::set(compiler_hints(),
6162 kOptimizationDisabled,
6163 disable));
6164 }
6165
6166
language_mode()6167 LanguageMode SharedFunctionInfo::language_mode() {
6168 STATIC_ASSERT(LANGUAGE_END == 2);
6169 return construct_language_mode(
6170 BooleanBit::get(compiler_hints(), kStrictModeFunction));
6171 }
6172
6173
set_language_mode(LanguageMode language_mode)6174 void SharedFunctionInfo::set_language_mode(LanguageMode language_mode) {
6175 STATIC_ASSERT(LANGUAGE_END == 2);
6176 // We only allow language mode transitions that set the same language mode
6177 // again or go up in the chain:
6178 DCHECK(is_sloppy(this->language_mode()) || is_strict(language_mode));
6179 int hints = compiler_hints();
6180 hints = BooleanBit::set(hints, kStrictModeFunction, is_strict(language_mode));
6181 set_compiler_hints(hints);
6182 }
6183
kind()6184 FunctionKind SharedFunctionInfo::kind() const {
6185 return FunctionKindBits::decode(compiler_hints());
6186 }
6187
6188
set_kind(FunctionKind kind)6189 void SharedFunctionInfo::set_kind(FunctionKind kind) {
6190 DCHECK(IsValidFunctionKind(kind));
6191 int hints = compiler_hints();
6192 hints = FunctionKindBits::update(hints, kind);
6193 set_compiler_hints(hints);
6194 }
6195
// More single-bit compiler-hint accessors.
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, needs_home_object,
               kNeedsHomeObject)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, native, kNative)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, force_inline, kForceInline)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints,
               name_should_print_as_anonymous,
               kNameShouldPrintAsAnonymous)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_anonymous_expression,
               kIsAnonymousExpression)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_function, kIsFunction)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_crankshaft,
               kDontCrankshaft)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, dont_flush, kDontFlush)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_asm_wasm_broken,
               kIsAsmWasmBroken)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, requires_class_field_init,
               kRequiresClassFieldInit)
BOOL_ACCESSORS(SharedFunctionInfo, compiler_hints, is_class_field_initializer,
               kIsClassFieldInitializer)
6215
6216 bool Script::HasValidSource() {
6217 Object* src = this->source();
6218 if (!src->IsString()) return true;
6219 String* src_str = String::cast(src);
6220 if (!StringShape(src_str).IsExternal()) return true;
6221 if (src_str->IsOneByteRepresentation()) {
6222 return ExternalOneByteString::cast(src)->resource() != NULL;
6223 } else if (src_str->IsTwoByteRepresentation()) {
6224 return ExternalTwoByteString::cast(src)->resource() != NULL;
6225 }
6226 return true;
6227 }
6228
6229
// Opts this function out of the arguments adaptor frame by storing the
// sentinel parameter count; only builtins/stubs may do this.
void SharedFunctionInfo::DontAdaptArguments() {
  DCHECK(code()->kind() == Code::BUILTIN || code()->kind() == Code::STUB);
  set_internal_formal_parameter_count(kDontAdaptArgumentsSentinel);
}


// The source start position is packed together with flag bits in
// start_position_and_type.
int SharedFunctionInfo::start_position() const {
  return start_position_and_type() >> kStartPositionShift;
}


void SharedFunctionInfo::set_start_position(int start_position) {
  set_start_position_and_type((start_position << kStartPositionShift)
    | (start_position_and_type() & ~kStartPositionMask));
}


// The currently installed (unoptimized or builtin) code object.
Code* SharedFunctionInfo::code() const {
  return Code::cast(READ_FIELD(this, kCodeOffset));
}


// Installs a new code object. Optimized code must never be installed here;
// it is per-native-context and lives in the optimized code map instead.
void SharedFunctionInfo::set_code(Code* value, WriteBarrierMode mode) {
  DCHECK(value->kind() != Code::OPTIMIZED_FUNCTION);
  WRITE_FIELD(this, kCodeOffset, value);
  CONDITIONAL_WRITE_BARRIER(value->GetHeap(), this, kCodeOffset, value, mode);
}
6257
6258
// Swaps in new code, first evicting this function from the GC's code
// flushing queue if it was enqueued there.
void SharedFunctionInfo::ReplaceCode(Code* value) {
  // If the GC metadata field is already used then the function was
  // enqueued as a code flushing candidate and we remove it now.
  if (code()->gc_metadata() != NULL) {
    CodeFlusher* flusher = GetHeap()->mark_compact_collector()->code_flusher();
    flusher->EvictCandidate(this);
  }

  DCHECK(code()->gc_metadata() == NULL && value->gc_metadata() == NULL);
#ifdef DEBUG
  Code::VerifyRecompiledCode(code(), value);
#endif  // DEBUG

  set_code(value);

  // Once real code is installed the function is no longer "never compiled".
  if (is_compiled()) set_never_compiled(false);
}
6276
IsInterpreted()6277 bool SharedFunctionInfo::IsInterpreted() const {
6278 return code()->is_interpreter_trampoline_builtin();
6279 }
6280
HasBaselineCode()6281 bool SharedFunctionInfo::HasBaselineCode() const {
6282 return code()->kind() == Code::FUNCTION;
6283 }
6284
// The ScopeInfo describing this function's scope.
ScopeInfo* SharedFunctionInfo::scope_info() const {
  return reinterpret_cast<ScopeInfo*>(READ_FIELD(this, kScopeInfoOffset));
}


void SharedFunctionInfo::set_scope_info(ScopeInfo* value,
                                        WriteBarrierMode mode) {
  WRITE_FIELD(this, kScopeInfoOffset, reinterpret_cast<Object*>(value));
  CONDITIONAL_WRITE_BARRIER(GetHeap(),
                            this,
                            kScopeInfoOffset,
                            reinterpret_cast<Object*>(value),
                            mode);
}

ACCESSORS(SharedFunctionInfo, outer_scope_info, HeapObject,
          kOuterScopeInfoOffset)

// A shared function is compiled as soon as its code is anything other than
// the lazy-compile builtin; the DCHECKs guard against marker builtins that
// must never be installed on a SharedFunctionInfo.
bool SharedFunctionInfo::is_compiled() const {
  Builtins* builtins = GetIsolate()->builtins();
  DCHECK(code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent));
  DCHECK(code() != builtins->builtin(Builtins::kCompileOptimized));
  DCHECK(code() != builtins->builtin(Builtins::kCompileBaseline));
  return code() != builtins->builtin(Builtins::kCompileLazy);
}


bool SharedFunctionInfo::has_simple_parameters() {
  return scope_info()->HasSimpleParameters();
}
6315
6316
HasDebugInfo()6317 bool SharedFunctionInfo::HasDebugInfo() {
6318 bool has_debug_info = debug_info()->IsStruct();
6319 DCHECK(!has_debug_info || HasDebugCode());
6320 return has_debug_info;
6321 }
6322
6323
GetDebugInfo()6324 DebugInfo* SharedFunctionInfo::GetDebugInfo() {
6325 DCHECK(HasDebugInfo());
6326 return DebugInfo::cast(debug_info());
6327 }
6328
6329
HasDebugCode()6330 bool SharedFunctionInfo::HasDebugCode() {
6331 if (HasBaselineCode()) return code()->has_debug_break_slots();
6332 return HasBytecodeArray();
6333 }
6334
6335
// The function_data slot is overloaded: it may hold a FunctionTemplateInfo
// (API function), a BytecodeArray (interpreted function), a FixedArray
// (validated asm.js-to-wasm data), or undefined.
bool SharedFunctionInfo::IsApiFunction() {
  return function_data()->IsFunctionTemplateInfo();
}


FunctionTemplateInfo* SharedFunctionInfo::get_api_func_data() {
  DCHECK(IsApiFunction());
  return FunctionTemplateInfo::cast(function_data());
}

void SharedFunctionInfo::set_api_func_data(FunctionTemplateInfo* data) {
  DCHECK(function_data()->IsUndefined(GetIsolate()));
  set_function_data(data);
}

bool SharedFunctionInfo::HasBytecodeArray() {
  return function_data()->IsBytecodeArray();
}

BytecodeArray* SharedFunctionInfo::bytecode_array() {
  DCHECK(HasBytecodeArray());
  return BytecodeArray::cast(function_data());
}

void SharedFunctionInfo::set_bytecode_array(BytecodeArray* bytecode) {
  DCHECK(function_data()->IsUndefined(GetIsolate()));
  set_function_data(bytecode);
}

void SharedFunctionInfo::ClearBytecodeArray() {
  DCHECK(function_data()->IsUndefined(GetIsolate()) || HasBytecodeArray());
  set_function_data(GetHeap()->undefined_value());
}

bool SharedFunctionInfo::HasAsmWasmData() {
  return function_data()->IsFixedArray();
}

FixedArray* SharedFunctionInfo::asm_wasm_data() {
  DCHECK(HasAsmWasmData());
  return FixedArray::cast(function_data());
}

void SharedFunctionInfo::set_asm_wasm_data(FixedArray* data) {
  DCHECK(function_data()->IsUndefined(GetIsolate()) || HasAsmWasmData());
  set_function_data(data);
}

void SharedFunctionInfo::ClearAsmWasmData() {
  DCHECK(function_data()->IsUndefined(GetIsolate()) || HasAsmWasmData());
  set_function_data(GetHeap()->undefined_value());
}

// The function_identifier slot holds either a BuiltinFunctionId (as a Smi)
// or an inferred name (a String); see the accessors further below.
bool SharedFunctionInfo::HasBuiltinFunctionId() {
  return function_identifier()->IsSmi();
}

BuiltinFunctionId SharedFunctionInfo::builtin_function_id() {
  DCHECK(HasBuiltinFunctionId());
  return static_cast<BuiltinFunctionId>(
      Smi::cast(function_identifier())->value());
}

void SharedFunctionInfo::set_builtin_function_id(BuiltinFunctionId id) {
  set_function_identifier(Smi::FromInt(id));
}
6402
HasInferredName()6403 bool SharedFunctionInfo::HasInferredName() {
6404 return function_identifier()->IsString();
6405 }
6406
inferred_name()6407 String* SharedFunctionInfo::inferred_name() {
6408 if (HasInferredName()) {
6409 return String::cast(function_identifier());
6410 }
6411 Isolate* isolate = GetIsolate();
6412 DCHECK(function_identifier()->IsUndefined(isolate) || HasBuiltinFunctionId());
6413 return isolate->heap()->empty_string();
6414 }
6415
set_inferred_name(String * inferred_name)6416 void SharedFunctionInfo::set_inferred_name(String* inferred_name) {
6417 DCHECK(function_identifier()->IsUndefined(GetIsolate()) || HasInferredName());
6418 set_function_identifier(inferred_name);
6419 }
6420
// The accessors below unpack sub-fields of the "counters" and
// "opt_count_and_bailout_reason" bit-field words.

int SharedFunctionInfo::ic_age() {
  return ICAgeBits::decode(counters());
}


void SharedFunctionInfo::set_ic_age(int ic_age) {
  set_counters(ICAgeBits::update(counters(), ic_age));
}


int SharedFunctionInfo::deopt_count() {
  return DeoptCountBits::decode(counters());
}


void SharedFunctionInfo::set_deopt_count(int deopt_count) {
  set_counters(DeoptCountBits::update(counters(), deopt_count));
}


// Bumps the deopt counter, wrapping at the bit field's capacity.
void SharedFunctionInfo::increment_deopt_count() {
  int value = counters();
  int deopt_count = DeoptCountBits::decode(value);
  deopt_count = (deopt_count + 1) & DeoptCountBits::kMax;
  set_counters(DeoptCountBits::update(value, deopt_count));
}


int SharedFunctionInfo::opt_reenable_tries() {
  return OptReenableTriesBits::decode(counters());
}


void SharedFunctionInfo::set_opt_reenable_tries(int tries) {
  set_counters(OptReenableTriesBits::update(counters(), tries));
}


int SharedFunctionInfo::opt_count() {
  return OptCountBits::decode(opt_count_and_bailout_reason());
}


void SharedFunctionInfo::set_opt_count(int opt_count) {
  set_opt_count_and_bailout_reason(
      OptCountBits::update(opt_count_and_bailout_reason(), opt_count));
}


BailoutReason SharedFunctionInfo::disable_optimization_reason() {
  return static_cast<BailoutReason>(
      DisabledOptimizationReasonBits::decode(opt_count_and_bailout_reason()));
}


// Deoptimization support requires full-codegen code compiled with the
// ability to materialize deopt frames.
bool SharedFunctionInfo::has_deoptimization_support() {
  Code* code = this->code();
  return code->kind() == Code::FUNCTION && code->has_deoptimization_support();
}
6480
6481
TryReenableOptimization()6482 void SharedFunctionInfo::TryReenableOptimization() {
6483 int tries = opt_reenable_tries();
6484 set_opt_reenable_tries((tries + 1) & OptReenableTriesBits::kMax);
6485 // We reenable optimization whenever the number of tries is a large
6486 // enough power of 2.
6487 if (tries >= 16 && (((tries - 1) & tries) == 0)) {
6488 set_optimization_disabled(false);
6489 set_opt_count(0);
6490 set_deopt_count(0);
6491 }
6492 }
6493
6494
// Records the bailout reason that caused optimization to be disabled.
void SharedFunctionInfo::set_disable_optimization_reason(BailoutReason reason) {
  set_opt_count_and_bailout_reason(DisabledOptimizationReasonBits::update(
      opt_count_and_bailout_reason(), reason));
}
6499
6500
IsBuiltin()6501 bool SharedFunctionInfo::IsBuiltin() {
6502 Object* script_obj = script();
6503 if (script_obj->IsUndefined(GetIsolate())) return true;
6504 Script* script = Script::cast(script_obj);
6505 Script::Type type = static_cast<Script::Type>(script->type());
6506 return type != Script::TYPE_NORMAL;
6507 }
6508
IsSubjectToDebugging()6509 bool SharedFunctionInfo::IsSubjectToDebugging() {
6510 return !IsBuiltin() && !HasAsmWasmData();
6511 }
6512
OptimizedCodeMapIsCleared()6513 bool SharedFunctionInfo::OptimizedCodeMapIsCleared() const {
6514 return optimized_code_map() == GetHeap()->empty_fixed_array();
6515 }
6516
6517
bool JSFunction::IsOptimized() {
  return code()->kind() == Code::OPTIMIZED_FUNCTION;
}

bool JSFunction::IsInterpreted() {
  return code()->is_interpreter_trampoline_builtin();
}

// The predicates below test whether the function's code entry currently
// points at one of the compilation marker builtins.
bool JSFunction::IsMarkedForBaseline() {
  return code() ==
         GetIsolate()->builtins()->builtin(Builtins::kCompileBaseline);
}

bool JSFunction::IsMarkedForOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimized);
}


bool JSFunction::IsMarkedForConcurrentOptimization() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kCompileOptimizedConcurrent);
}


bool JSFunction::IsInOptimizationQueue() {
  return code() == GetIsolate()->builtins()->builtin(
      Builtins::kInOptimizationQueue);
}


// Finalizes in-object slack tracking on the initial map if it is running.
void JSFunction::CompleteInobjectSlackTrackingIfActive() {
  if (has_initial_map() && initial_map()->IsInobjectSlackTrackingInProgress()) {
    initial_map()->CompleteInobjectSlackTracking();
  }
}


bool Map::IsInobjectSlackTrackingInProgress() {
  return construction_counter() != Map::kNoSlackTracking;
}


// Counts down one constructed instance; when the counter hits the end
// sentinel, the map's unused in-object space is reclaimed.
void Map::InobjectSlackTrackingStep() {
  if (!IsInobjectSlackTrackingInProgress()) return;
  int counter = construction_counter();
  set_construction_counter(counter - 1);
  if (counter == kSlackTrackingCounterEnd) {
    CompleteInobjectSlackTracking();
  }
}
6569
abstract_code()6570 AbstractCode* JSFunction::abstract_code() {
6571 if (IsInterpreted()) {
6572 return AbstractCode::cast(shared()->bytecode_array());
6573 } else {
6574 return AbstractCode::cast(code());
6575 }
6576 }
6577
// The function references its code indirectly through the code entry address
// (the start of the code object's instructions).
Code* JSFunction::code() {
  return Code::cast(
      Code::GetObjectFromEntryAddress(FIELD_ADDR(this, kCodeEntryOffset)));
}


void JSFunction::set_code(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
  // Code entries are not regular tagged slots, so the incremental marker
  // must be notified explicitly instead of via the normal write barrier.
  GetHeap()->incremental_marking()->RecordWriteOfCodeEntry(
      this,
      HeapObject::RawField(this, kCodeEntryOffset),
      value);
}


void JSFunction::set_code_no_write_barrier(Code* value) {
  DCHECK(!GetHeap()->InNewSpace(value));
  Address entry = value->entry();
  WRITE_INTPTR_FIELD(this, kCodeEntryOffset, reinterpret_cast<intptr_t>(entry));
}


// Installs new code and keeps the native context's optimized-function list
// consistent with the optimization state transition.
void JSFunction::ReplaceCode(Code* code) {
  bool was_optimized = IsOptimized();
  bool is_optimized = code->kind() == Code::OPTIMIZED_FUNCTION;

  if (was_optimized && is_optimized) {
    shared()->EvictFromOptimizedCodeMap(this->code(),
        "Replacing with another optimized code");
  }

  set_code(code);

  // Add/remove the function from the list of optimized functions for this
  // context based on the state change.
  if (!was_optimized && is_optimized) {
    context()->native_context()->AddOptimizedFunction(this);
  }
  if (was_optimized && !is_optimized) {
    // TODO(titzer): linear in the number of optimized functions; fix!
    context()->native_context()->RemoveOptimizedFunction(this);
  }
}


Context* JSFunction::context() {
  return Context::cast(READ_FIELD(this, kContextOffset));
}


JSObject* JSFunction::global_proxy() {
  return context()->global_proxy();
}


Context* JSFunction::native_context() { return context()->native_context(); }


void JSFunction::set_context(Object* value) {
  DCHECK(value->IsUndefined(GetIsolate()) || value->IsContext());
  WRITE_FIELD(this, kContextOffset, value);
  WRITE_BARRIER(GetHeap(), this, kContextOffset, value);
}

ACCESSORS(JSFunction, prototype_or_initial_map, Object,
          kPrototypeOrInitialMapOffset)
6646
6647
6648 Map* JSFunction::initial_map() {
6649 return Map::cast(prototype_or_initial_map());
6650 }
6651
6652
has_initial_map()6653 bool JSFunction::has_initial_map() {
6654 return prototype_or_initial_map()->IsMap();
6655 }
6656
6657
has_instance_prototype()6658 bool JSFunction::has_instance_prototype() {
6659 return has_initial_map() ||
6660 !prototype_or_initial_map()->IsTheHole(GetIsolate());
6661 }
6662
6663
has_prototype()6664 bool JSFunction::has_prototype() {
6665 return map()->has_non_instance_prototype() || has_instance_prototype();
6666 }
6667
6668
instance_prototype()6669 Object* JSFunction::instance_prototype() {
6670 DCHECK(has_instance_prototype());
6671 if (has_initial_map()) return initial_map()->prototype();
6672 // When there is no initial map and the prototype is a JSObject, the
6673 // initial map field is used for the prototype field.
6674 return prototype_or_initial_map();
6675 }
6676
6677
prototype()6678 Object* JSFunction::prototype() {
6679 DCHECK(has_prototype());
6680 // If the function's prototype property has been set to a non-JSObject
6681 // value, that value is stored in the constructor field of the map.
6682 if (map()->has_non_instance_prototype()) {
6683 Object* prototype = map()->GetConstructor();
6684 // The map must have a prototype in that field, not a back pointer.
6685 DCHECK(!prototype->IsMap());
6686 return prototype;
6687 }
6688 return instance_prototype();
6689 }
6690
6691
// A JSFunction is compiled unless its code entry still points at one of the
// lazy/marker compilation builtins.
bool JSFunction::is_compiled() {
  Builtins* builtins = GetIsolate()->builtins();
  return code() != builtins->builtin(Builtins::kCompileLazy) &&
         code() != builtins->builtin(Builtins::kCompileBaseline) &&
         code() != builtins->builtin(Builtins::kCompileOptimized) &&
         code() != builtins->builtin(Builtins::kCompileOptimizedConcurrent);
}

// The type feedback vector lives inside the function's literals array.
TypeFeedbackVector* JSFunction::feedback_vector() {
  LiteralsArray* array = literals();
  return array->feedback_vector();
}

ACCESSORS(JSProxy, target, JSReceiver, kTargetOffset)
ACCESSORS(JSProxy, handler, Object, kHandlerOffset)
ACCESSORS(JSProxy, hash, Object, kHashOffset)

// A revoked proxy has had its handler replaced by a non-receiver.
bool JSProxy::IsRevoked() const { return !handler()->IsJSReceiver(); }

ACCESSORS(JSCollection, table, Object, kTableOffset)


// Field accessors shared by the ordered-hash-table iterators
// (JSMapIterator/JSSetIterator).
#define ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(name, type, offset)    \
  template<class Derived, class TableType>                           \
  type* OrderedHashTableIterator<Derived, TableType>::name() const { \
    return type::cast(READ_FIELD(this, offset));                     \
  }                                                                  \
  template<class Derived, class TableType>                           \
  void OrderedHashTableIterator<Derived, TableType>::set_##name(     \
      type* value, WriteBarrierMode mode) {                          \
    WRITE_FIELD(this, offset, value);                                \
    CONDITIONAL_WRITE_BARRIER(GetHeap(), this, offset, value, mode); \
  }

ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(table, Object, kTableOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(index, Object, kIndexOffset)
ORDERED_HASH_TABLE_ITERATOR_ACCESSORS(kind, Object, kKindOffset)

#undef ORDERED_HASH_TABLE_ITERATOR_ACCESSORS


ACCESSORS(JSWeakCollection, table, Object, kTableOffset)
ACCESSORS(JSWeakCollection, next, Object, kNextOffset)


// Foreign objects wrap a raw external address stored as an intptr field.
Address Foreign::foreign_address() {
  return AddressFrom<Address>(READ_INTPTR_FIELD(this, kForeignAddressOffset));
}


void Foreign::set_foreign_address(Address value) {
  WRITE_INTPTR_FIELD(this, kForeignAddressOffset, OffsetFrom(value));
}


ACCESSORS(JSGeneratorObject, function, JSFunction, kFunctionOffset)
ACCESSORS(JSGeneratorObject, context, Context, kContextOffset)
ACCESSORS(JSGeneratorObject, receiver, Object, kReceiverOffset)
ACCESSORS(JSGeneratorObject, input_or_debug_pos, Object, kInputOrDebugPosOffset)
SMI_ACCESSORS(JSGeneratorObject, resume_mode, kResumeModeOffset)
SMI_ACCESSORS(JSGeneratorObject, continuation, kContinuationOffset)
ACCESSORS(JSGeneratorObject, operand_stack, FixedArray, kOperandStackOffset)
6754
6755 bool JSGeneratorObject::is_suspended() const {
6756 DCHECK_LT(kGeneratorExecuting, 0);
6757 DCHECK_LT(kGeneratorClosed, 0);
6758 return continuation() >= 0;
6759 }
6760
is_closed()6761 bool JSGeneratorObject::is_closed() const {
6762 return continuation() == kGeneratorClosed;
6763 }
6764
is_executing()6765 bool JSGeneratorObject::is_executing() const {
6766 return continuation() == kGeneratorExecuting;
6767 }
6768
TYPE_CHECKER(JSModuleNamespace, JS_MODULE_NAMESPACE_TYPE)

ACCESSORS(JSValue, value, Object, kValueOffset)


// HeapNumber::cast also accepts mutable heap numbers, which share the
// HeapNumber layout.
HeapNumber* HeapNumber::cast(Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<HeapNumber*>(object);
}


const HeapNumber* HeapNumber::cast(const Object* object) {
  SLOW_DCHECK(object->IsHeapNumber() || object->IsMutableHeapNumber());
  return reinterpret_cast<const HeapNumber*>(object);
}


// Accessors for JSDate's value and its per-component cache fields.
ACCESSORS(JSDate, value, Object, kValueOffset)
ACCESSORS(JSDate, cache_stamp, Object, kCacheStampOffset)
ACCESSORS(JSDate, year, Object, kYearOffset)
ACCESSORS(JSDate, month, Object, kMonthOffset)
ACCESSORS(JSDate, day, Object, kDayOffset)
ACCESSORS(JSDate, weekday, Object, kWeekdayOffset)
ACCESSORS(JSDate, hour, Object, kHourOffset)
ACCESSORS(JSDate, min, Object, kMinOffset)
ACCESSORS(JSDate, sec, Object, kSecOffset)


SMI_ACCESSORS(JSMessageObject, type, kTypeOffset)
ACCESSORS(JSMessageObject, argument, Object, kArgumentsOffset)
ACCESSORS(JSMessageObject, script, Object, kScriptOffset)
ACCESSORS(JSMessageObject, stack_frames, Object, kStackFramesOffset)
SMI_ACCESSORS(JSMessageObject, start_position, kStartPositionOffset)
SMI_ACCESSORS(JSMessageObject, end_position, kEndPositionOffset)


INT_ACCESSORS(Code, instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, prologue_offset, kPrologueOffset)
INT_ACCESSORS(Code, constant_pool_offset, kConstantPoolOffset)
ACCESSORS(Code, relocation_info, ByteArray, kRelocationInfoOffset)
ACCESSORS(Code, handler_table, FixedArray, kHandlerTableOffset)
ACCESSORS(Code, deoptimization_data, FixedArray, kDeoptimizationDataOffset)
ACCESSORS(Code, source_position_table, ByteArray, kSourcePositionTableOffset)
ACCESSORS(Code, raw_type_feedback_info, Object, kTypeFeedbackInfoOffset)
ACCESSORS(Code, next_code_link, Object, kNextCodeLinkOffset)

// Clears the code object's header fields with raw (barrier-free) writes.
void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, NULL);
  WRITE_FIELD(this, kHandlerTableOffset, NULL);
  WRITE_FIELD(this, kDeoptimizationDataOffset, NULL);
  WRITE_FIELD(this, kSourcePositionTableOffset, NULL);
  // Do not wipe out major/minor keys on a code stub or IC
  if (!READ_FIELD(this, kTypeFeedbackInfoOffset)->IsSmi()) {
    WRITE_FIELD(this, kTypeFeedbackInfoOffset, NULL);
  }
  WRITE_FIELD(this, kNextCodeLinkOffset, NULL);
  WRITE_FIELD(this, kGCMetadataOffset, NULL);
}
6827
6828
// The raw_type_feedback_info slot is overloaded: it holds the type feedback
// info for FUNCTION code and the stub key (a Smi) for stubs/ICs.
Object* Code::type_feedback_info() {
  DCHECK(kind() == FUNCTION);
  return raw_type_feedback_info();
}


void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
  DCHECK(kind() == FUNCTION);
  set_raw_type_feedback_info(value, mode);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kTypeFeedbackInfoOffset,
                            value, mode);
}


uint32_t Code::stub_key() {
  DCHECK(IsCodeStubOrIC());
  Smi* smi_key = Smi::cast(raw_type_feedback_info());
  return static_cast<uint32_t>(smi_key->value());
}


void Code::set_stub_key(uint32_t key) {
  DCHECK(IsCodeStubOrIC());
  set_raw_type_feedback_info(Smi::FromInt(key));
}


ACCESSORS(Code, gc_metadata, Object, kGCMetadataOffset)
INT_ACCESSORS(Code, ic_age, kICAgeOffset)


// Instructions start immediately after the code object header.
byte* Code::instruction_start() {
  return FIELD_ADDR(this, kHeaderSize);
}


byte* Code::instruction_end() {
  return instruction_start() + instruction_size();
}

// The unwinding info block (when present) follows the instructions,
// 8-byte aligned, and begins with a 64-bit size field.
int Code::GetUnwindingInfoSizeOffset() const {
  DCHECK(has_unwinding_info());
  return RoundUp(kHeaderSize + instruction_size(), kInt64Size);
}

int Code::unwinding_info_size() const {
  DCHECK(has_unwinding_info());
  return static_cast<int>(
      READ_UINT64_FIELD(this, GetUnwindingInfoSizeOffset()));
}

void Code::set_unwinding_info_size(int value) {
  DCHECK(has_unwinding_info());
  WRITE_UINT64_FIELD(this, GetUnwindingInfoSizeOffset(), value);
}

byte* Code::unwinding_info_start() {
  DCHECK(has_unwinding_info());
  return FIELD_ADDR(this, GetUnwindingInfoSizeOffset()) + kInt64Size;
}

byte* Code::unwinding_info_end() {
  DCHECK(has_unwinding_info());
  return unwinding_info_start() + unwinding_info_size();
}
6894
body_size()6895 int Code::body_size() {
6896 int unpadded_body_size =
6897 has_unwinding_info()
6898 ? static_cast<int>(unwinding_info_end() - instruction_start())
6899 : instruction_size();
6900 return RoundUp(unpadded_body_size, kObjectAlignment);
6901 }
6902
SizeIncludingMetadata()6903 int Code::SizeIncludingMetadata() {
6904 int size = CodeSize();
6905 size += relocation_info()->Size();
6906 size += deoptimization_data()->Size();
6907 size += handler_table()->Size();
6908 if (kind() == FUNCTION) size += source_position_table()->Size();
6909 return size;
6910 }
6911
// Like relocation_info(), but without the type check on the field value.
ByteArray* Code::unchecked_relocation_info() {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}


byte* Code::relocation_start() {
  return unchecked_relocation_info()->GetDataStartAddress();
}


int Code::relocation_size() {
  return unchecked_relocation_info()->length();
}


// The entry point coincides with the start of the instructions.
byte* Code::entry() {
  return instruction_start();
}
6930
6931
contains(byte * inner_pointer)6932 bool Code::contains(byte* inner_pointer) {
6933 return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
6934 }
6935
6936
ExecutableSize()6937 int Code::ExecutableSize() {
6938 // Check that the assumptions about the layout of the code object holds.
6939 DCHECK_EQ(static_cast<int>(instruction_start() - address()),
6940 Code::kHeaderSize);
6941 return instruction_size() + Code::kHeaderSize;
6942 }
6943
6944
CodeSize()6945 int Code::CodeSize() { return SizeFor(body_size()); }
6946
6947
ACCESSORS(JSArray, length, Object, kLengthOffset)


// The backing store pointer is kept as a raw intptr, not a tagged value.
void* JSArrayBuffer::backing_store() const {
  intptr_t ptr = READ_INTPTR_FIELD(this, kBackingStoreOffset);
  return reinterpret_cast<void*>(ptr);
}


void JSArrayBuffer::set_backing_store(void* value, WriteBarrierMode mode) {
  intptr_t ptr = reinterpret_cast<intptr_t>(value);
  WRITE_INTPTR_FIELD(this, kBackingStoreOffset, ptr);
}


ACCESSORS(JSArrayBuffer, byte_length, Object, kByteLengthOffset)


// The bit field occupies a 32-bit sub-slot; when pointers are wider than
// 32 bits the unused half of the slot is zeroed so the full word stays
// deterministic.
void JSArrayBuffer::set_bit_field(uint32_t bits) {
  if (kInt32Size != kPointerSize) {
#if V8_TARGET_LITTLE_ENDIAN
    WRITE_UINT32_FIELD(this, kBitFieldSlot + kInt32Size, 0);
#else
    WRITE_UINT32_FIELD(this, kBitFieldSlot, 0);
#endif
  }
  WRITE_UINT32_FIELD(this, kBitFieldOffset, bits);
}


uint32_t JSArrayBuffer::bit_field() const {
  return READ_UINT32_FIELD(this, kBitFieldOffset);
}
6981
6982
is_external()6983 bool JSArrayBuffer::is_external() { return IsExternal::decode(bit_field()); }
6984
6985
set_is_external(bool value)6986 void JSArrayBuffer::set_is_external(bool value) {
6987 set_bit_field(IsExternal::update(bit_field(), value));
6988 }
6989
6990
is_neuterable()6991 bool JSArrayBuffer::is_neuterable() {
6992 return IsNeuterable::decode(bit_field());
6993 }
6994
6995
set_is_neuterable(bool value)6996 void JSArrayBuffer::set_is_neuterable(bool value) {
6997 set_bit_field(IsNeuterable::update(bit_field(), value));
6998 }
6999
7000
was_neutered()7001 bool JSArrayBuffer::was_neutered() { return WasNeutered::decode(bit_field()); }
7002
7003
set_was_neutered(bool value)7004 void JSArrayBuffer::set_was_neutered(bool value) {
7005 set_bit_field(WasNeutered::update(bit_field(), value));
7006 }
7007
7008
is_shared()7009 bool JSArrayBuffer::is_shared() { return IsShared::decode(bit_field()); }
7010
7011
set_is_shared(bool value)7012 void JSArrayBuffer::set_is_shared(bool value) {
7013 set_bit_field(IsShared::update(bit_field(), value));
7014 }
7015
7016
// Byte offset of this view into its buffer. Reads as 0 once the underlying
// JSArrayBuffer has been neutered (detached).
Object* JSArrayBufferView::byte_offset() const {
  if (WasNeutered()) return Smi::kZero;
  return Object::cast(READ_FIELD(this, kByteOffsetOffset));
}


void JSArrayBufferView::set_byte_offset(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kByteOffsetOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteOffsetOffset, value, mode);
}


// Byte length of this view; reads as 0 once the buffer has been neutered.
Object* JSArrayBufferView::byte_length() const {
  if (WasNeutered()) return Smi::kZero;
  return Object::cast(READ_FIELD(this, kByteLengthOffset));
}


void JSArrayBufferView::set_byte_length(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kByteLengthOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kByteLengthOffset, value, mode);
}


ACCESSORS(JSArrayBufferView, buffer, Object, kBufferOffset)
#ifdef VERIFY_HEAP
// Raw accessors that bypass the WasNeutered() check, for heap verification.
ACCESSORS(JSArrayBufferView, raw_byte_offset, Object, kByteOffsetOffset)
ACCESSORS(JSArrayBufferView, raw_byte_length, Object, kByteLengthOffset)
#endif


// True if the buffer wrapped by this view has been neutered (detached).
bool JSArrayBufferView::WasNeutered() const {
  return JSArrayBuffer::cast(buffer())->was_neutered();
}
7051
7052
// Element count of the typed array; reads as 0 once the backing buffer has
// been neutered (detached).
Object* JSTypedArray::length() const {
  if (WasNeutered()) return Smi::kZero;
  return Object::cast(READ_FIELD(this, kLengthOffset));
}


// Element count as an untagged uint32. CHECKs that the stored length field
// is convertible to a valid array length.
uint32_t JSTypedArray::length_value() const {
  if (WasNeutered()) return 0;
  uint32_t index = 0;
  CHECK(Object::cast(READ_FIELD(this, kLengthOffset))->ToArrayLength(&index));
  return index;
}


void JSTypedArray::set_length(Object* value, WriteBarrierMode mode) {
  WRITE_FIELD(this, kLengthOffset, value);
  CONDITIONAL_WRITE_BARRIER(GetHeap(), this, kLengthOffset, value, mode);
}


#ifdef VERIFY_HEAP
// Raw accessor bypassing the WasNeutered() check, for heap verification.
ACCESSORS(JSTypedArray, raw_length, Object, kLengthOffset)
#endif
7076
7077
ACCESSORS(JSRegExp, data, Object, kDataOffset)
ACCESSORS(JSRegExp, flags, Object, kFlagsOffset)
ACCESSORS(JSRegExp, source, Object, kSourceOffset)


// Returns the compilation type recorded in the data fixed array, or
// NOT_COMPILED when no data has been attached yet (data is undefined).
JSRegExp::Type JSRegExp::TypeTag() {
  Object* data = this->data();
  if (data->IsUndefined(GetIsolate())) return JSRegExp::NOT_COMPILED;
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kTagIndex));
  return static_cast<JSRegExp::Type>(smi->value());
}


// Number of capture groups. ATOM regexps have none by construction; the
// IRREGEXP count is read from the data array.
int JSRegExp::CaptureCount() {
  switch (TypeTag()) {
    case ATOM:
      return 0;
    case IRREGEXP:
      return Smi::cast(DataAt(kIrregexpCaptureCountIndex))->value();
    default:
      UNREACHABLE();
      return -1;
  }
}


// Decodes the regexp flags stored as a Smi in the data array.
JSRegExp::Flags JSRegExp::GetFlags() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  Smi* smi = Smi::cast(FixedArray::cast(data)->get(kFlagsIndex));
  return Flags(smi->value());
}


// Returns the source pattern string stored in the data array.
String* JSRegExp::Pattern() {
  DCHECK(this->data()->IsFixedArray());
  Object* data = this->data();
  String* pattern = String::cast(FixedArray::cast(data)->get(kSourceIndex));
  return pattern;
}


// Reads one slot of the implementation data array; only valid once compiled.
Object* JSRegExp::DataAt(int index) {
  DCHECK(TypeTag() != NOT_COMPILED);
  return FixedArray::cast(data())->get(index);
}


void JSRegExp::SetDataAt(int index, Object* value) {
  DCHECK(TypeTag() != NOT_COMPILED);
  DCHECK(index >= kDataIndex);  // Only implementation data can be set this way.
  FixedArray::cast(data())->set(index, value);
}

// The lastIndex property lives in an in-object field laid out directly after
// the regular JSRegExp fields (at kLastIndexFieldIndex).
void JSRegExp::SetLastIndex(int index) {
  static const int offset =
      kSize + JSRegExp::kLastIndexFieldIndex * kPointerSize;
  Smi* value = Smi::FromInt(index);
  WRITE_FIELD(this, offset, value);
}

Object* JSRegExp::LastIndex() {
  static const int offset =
      kSize + JSRegExp::kLastIndexFieldIndex * kPointerSize;
  return READ_FIELD(this, offset);
}
7144
// Reads the elements kind from the map. In verification builds, additionally
// checks that the elements backing store is consistent with that kind.
ElementsKind JSObject::GetElementsKind() {
  ElementsKind kind = map()->elements_kind();
#if VERIFY_HEAP && DEBUG
  FixedArrayBase* fixed_array =
      reinterpret_cast<FixedArrayBase*>(READ_FIELD(this, kElementsOffset));

  // If a GC was caused while constructing this object, the elements
  // pointer may point to a one pointer filler map.
  if (ElementsAreSafeToExamine()) {
    Map* map = fixed_array->map();
    if (IsFastSmiOrObjectElementsKind(kind)) {
      DCHECK(map == GetHeap()->fixed_array_map() ||
             map == GetHeap()->fixed_cow_array_map());
    } else if (IsFastDoubleElementsKind(kind)) {
      DCHECK(fixed_array->IsFixedDoubleArray() ||
             fixed_array == GetHeap()->empty_fixed_array());
    } else if (kind == DICTIONARY_ELEMENTS) {
      DCHECK(fixed_array->IsFixedArray());
      DCHECK(fixed_array->IsDictionary());
    } else {
      DCHECK(kind > DICTIONARY_ELEMENTS);
    }
    // Sloppy-arguments backing stores are FixedArrays with at least the
    // context and the arguments store.
    DCHECK(!IsSloppyArgumentsElements(kind) ||
           (elements()->IsFixedArray() && elements()->length() >= 2));
  }
#endif
  return kind;
}
7173
7174
// Elements-kind predicates: each forwards to GetElementsKind() (or to the
// map) and tests for a specific ElementsKind or kind family.
bool JSObject::HasFastObjectElements() {
  return IsFastObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiElements() {
  return IsFastSmiElementsKind(GetElementsKind());
}


bool JSObject::HasFastSmiOrObjectElements() {
  return IsFastSmiOrObjectElementsKind(GetElementsKind());
}


bool JSObject::HasFastDoubleElements() {
  return IsFastDoubleElementsKind(GetElementsKind());
}


bool JSObject::HasFastHoleyElements() {
  return IsFastHoleyElementsKind(GetElementsKind());
}


bool JSObject::HasFastElements() {
  return IsFastElementsKind(GetElementsKind());
}


bool JSObject::HasDictionaryElements() {
  return GetElementsKind() == DICTIONARY_ELEMENTS;
}


bool JSObject::HasFastArgumentsElements() {
  return GetElementsKind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasSlowArgumentsElements() {
  return GetElementsKind() == SLOW_SLOPPY_ARGUMENTS_ELEMENTS;
}


bool JSObject::HasSloppyArgumentsElements() {
  return IsSloppyArgumentsElements(GetElementsKind());
}

bool JSObject::HasStringWrapperElements() {
  return IsStringWrapperElementsKind(GetElementsKind());
}

bool JSObject::HasFastStringWrapperElements() {
  return GetElementsKind() == FAST_STRING_WRAPPER_ELEMENTS;
}

bool JSObject::HasSlowStringWrapperElements() {
  return GetElementsKind() == SLOW_STRING_WRAPPER_ELEMENTS;
}

// This one consults the map directly rather than GetElementsKind().
bool JSObject::HasFixedTypedArrayElements() {
  DCHECK_NOT_NULL(elements());
  return map()->has_fixed_typed_array_elements();
}
7240
// Generates one JSObject::HasFixed<Type>Elements() predicate per typed-array
// element type, implemented by comparing the elements' map instance type.
#define FIXED_TYPED_ELEMENTS_CHECK(Type, type, TYPE, ctype, size)      \
  bool JSObject::HasFixed##Type##Elements() {                          \
    HeapObject* array = elements();                                    \
    DCHECK(array != NULL);                                             \
    if (!array->IsHeapObject()) return false;                          \
    return array->map()->instance_type() == FIXED_##TYPE##_ARRAY_TYPE; \
  }

TYPED_ARRAYS(FIXED_TYPED_ELEMENTS_CHECK)

#undef FIXED_TYPED_ELEMENTS_CHECK
7252
7253
// True if the map declares a named-property interceptor.
bool JSObject::HasNamedInterceptor() {
  return map()->has_named_interceptor();
}


// True if the map declares an indexed-property interceptor.
bool JSObject::HasIndexedInterceptor() {
  return map()->has_indexed_interceptor();
}


// The properties dictionary of a global object; only valid for slow-mode
// JSGlobalObjects, whose properties are a GlobalDictionary.
GlobalDictionary* JSObject::global_dictionary() {
  DCHECK(!HasFastProperties());
  DCHECK(IsJSGlobalObject());
  return GlobalDictionary::cast(properties());
}


// The elements dictionary; only valid when elements are in dictionary mode
// (or this is a slow string wrapper).
SeededNumberDictionary* JSObject::element_dictionary() {
  DCHECK(HasDictionaryElements() || HasSlowStringWrapperElements());
  return SeededNumberDictionary::cast(elements());
}
7275
7276
// A hash field value with the "not computed" bit clear holds a valid hash.
bool Name::IsHashFieldComputed(uint32_t field) {
  return (field & kHashNotComputedMask) == 0;
}


bool Name::HasHashCode() {
  return IsHashFieldComputed(hash_field());
}


// Returns the name's hash, computing and caching it on first use.
uint32_t Name::Hash() {
  // Fast case: has hash code already been computed?
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field)) return field >> kHashShift;
  // Slow case: compute hash code and set it. Has to be a string.
  return String::cast(this)->ComputeAndSetHash();
}


// Private symbols are not exposed to property enumeration.
bool Name::IsPrivate() {
  return this->IsSymbol() && Symbol::cast(this)->is_private();
}


// Starts a hash computation seeded with the isolate's hash seed. The string
// is a candidate array index only if its length fits in an array index.
StringHasher::StringHasher(int length, uint32_t seed)
    : length_(length),
      raw_running_hash_(seed),
      array_index_(0),
      is_array_index_(0 < length_ && length_ <= String::kMaxArrayIndexSize),
      is_first_char_(true) {
  DCHECK(FLAG_randomize_hashes || raw_running_hash_ == 0);
}


// Strings beyond kMaxHashCalcLength get a trivial (length-based) hash.
bool StringHasher::has_trivial_hash() {
  return length_ > String::kMaxHashCalcLength;
}
7314
7315
AddCharacterCore(uint32_t running_hash,uint16_t c)7316 uint32_t StringHasher::AddCharacterCore(uint32_t running_hash, uint16_t c) {
7317 running_hash += c;
7318 running_hash += (running_hash << 10);
7319 running_hash ^= (running_hash >> 6);
7320 return running_hash;
7321 }
7322
7323
GetHashCore(uint32_t running_hash)7324 uint32_t StringHasher::GetHashCore(uint32_t running_hash) {
7325 running_hash += (running_hash << 3);
7326 running_hash ^= (running_hash >> 11);
7327 running_hash += (running_hash << 15);
7328 if ((running_hash & String::kHashBitMask) == 0) {
7329 return kZeroHash;
7330 }
7331 return running_hash;
7332 }
7333
7334
ComputeRunningHash(uint32_t running_hash,const uc16 * chars,int length)7335 uint32_t StringHasher::ComputeRunningHash(uint32_t running_hash,
7336 const uc16* chars, int length) {
7337 DCHECK_NOT_NULL(chars);
7338 DCHECK(length >= 0);
7339 for (int i = 0; i < length; ++i) {
7340 running_hash = AddCharacterCore(running_hash, *chars++);
7341 }
7342 return running_hash;
7343 }
7344
7345
ComputeRunningHashOneByte(uint32_t running_hash,const char * chars,int length)7346 uint32_t StringHasher::ComputeRunningHashOneByte(uint32_t running_hash,
7347 const char* chars,
7348 int length) {
7349 DCHECK_NOT_NULL(chars);
7350 DCHECK(length >= 0);
7351 for (int i = 0; i < length; ++i) {
7352 uint16_t c = static_cast<uint16_t>(*chars++);
7353 running_hash = AddCharacterCore(running_hash, c);
7354 }
7355 return running_hash;
7356 }
7357
7358
void StringHasher::AddCharacter(uint16_t c) {
  // Use the Jenkins one-at-a-time hash function to update the hash
  // for the given character.
  raw_running_hash_ = AddCharacterCore(raw_running_hash_, c);
}


// Feeds one character into the array-index interpretation of the string.
// Returns false (and permanently clears is_array_index_) as soon as the
// prefix seen so far can no longer be a valid array index: a non-digit, a
// leading zero followed by more digits, or a value that would overflow.
bool StringHasher::UpdateIndex(uint16_t c) {
  DCHECK(is_array_index_);
  if (c < '0' || c > '9') {
    is_array_index_ = false;
    return false;
  }
  int d = c - '0';
  if (is_first_char_) {
    is_first_char_ = false;
    if (c == '0' && length_ > 1) {
      is_array_index_ = false;
      return false;
    }
  }
  // 429496729 == 2^32 / 10; the (d + 3) >> 3 correction (1 for d >= 5, else
  // 0) makes this equivalent to "array_index_ * 10 + d would exceed
  // 4294967294, the maximal array index (2^32 - 2)".
  if (array_index_ > 429496729U - ((d + 3) >> 3)) {
    is_array_index_ = false;
    return false;
  }
  array_index_ = array_index_ * 10 + d;
  return true;
}


// Adds a run of characters, tracking the array-index state until it fails
// and then falling through to plain hash accumulation for the remainder.
template<typename Char>
inline void StringHasher::AddCharacters(const Char* chars, int length) {
  DCHECK(sizeof(Char) == 1 || sizeof(Char) == 2);
  int i = 0;
  if (is_array_index_) {
    for (; i < length; i++) {
      AddCharacter(chars[i]);
      if (!UpdateIndex(chars[i])) {
        i++;
        break;
      }
    }
  }
  for (; i < length; i++) {
    DCHECK(!is_array_index_);
    AddCharacter(chars[i]);
  }
}


// Hashes a flat character sequence. Long strings get a trivial hash and
// skip character accumulation entirely.
template <typename schar>
uint32_t StringHasher::HashSequentialString(const schar* chars,
                                            int length,
                                            uint32_t seed) {
  StringHasher hasher(length, seed);
  if (!hasher.has_trivial_hash()) hasher.AddCharacters(chars, length);
  return hasher.GetHashField();
}
7417
7418
IteratingStringHasher::IteratingStringHasher(int len, uint32_t seed)
    : StringHasher(len, seed) {}


// Hashes an arbitrary (possibly non-flat) string by visiting its flat
// pieces; a remaining ConsString tail is handled by VisitConsString.
uint32_t IteratingStringHasher::Hash(String* string, uint32_t seed) {
  IteratingStringHasher hasher(string->length(), seed);
  // Nothing to do.
  if (hasher.has_trivial_hash()) return hasher.GetHashField();
  ConsString* cons_string = String::VisitFlat(&hasher, string);
  if (cons_string == nullptr) return hasher.GetHashField();
  hasher.VisitConsString(cons_string);
  return hasher.GetHashField();
}


// Visitor callbacks invoked by String::VisitFlat for flat segments.
void IteratingStringHasher::VisitOneByteString(const uint8_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}


void IteratingStringHasher::VisitTwoByteString(const uint16_t* chars,
                                               int length) {
  AddCharacters(chars, length);
}
7444
7445
// Only strings can be array indices; symbols never are.
bool Name::AsArrayIndex(uint32_t* index) {
  return IsString() && String::cast(this)->AsArrayIndex(index);
}


// Fast-path rejection via the cached hash field (which records whether the
// string is an array index); otherwise falls back to the slow parse.
bool String::AsArrayIndex(uint32_t* index) {
  uint32_t field = hash_field();
  if (IsHashFieldComputed(field) && (field & kIsNotArrayIndexMask)) {
    return false;
  }
  return SlowAsArrayIndex(index);
}
7458
7459
// Redirects this internalized string to its canonical duplicate by storing
// the canonical string's pointer in the hash field slot.
void String::SetForwardedInternalizedString(String* canonical) {
  DCHECK(IsInternalizedString());
  DCHECK(HasHashCode());
  if (canonical == this) return;  // No need to forward.
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(canonical->HasHashCode());
  WRITE_FIELD(this, kHashFieldSlot, canonical);
  // Setting the hash field to a tagged value sets the LSB, causing the hash
  // code to be interpreted as uninitialized.  We use this fact to recognize
  // that we have a forwarded string.
  DCHECK(!HasHashCode());
}


// Returns the canonical string this one forwards to, or |this| if it is not
// forwarded (signalled by the hash field still holding a valid hash).
String* String::GetForwardedInternalizedString() {
  DCHECK(IsInternalizedString());
  if (HasHashCode()) return this;
  String* canonical = String::cast(READ_FIELD(this, kHashFieldSlot));
  DCHECK(canonical->IsInternalizedString());
  DCHECK(SlowEquals(canonical));
  DCHECK(canonical->HasHashCode());
  return canonical;
}
7484
7485
7486 // static
GreaterThan(Handle<Object> x,Handle<Object> y)7487 Maybe<bool> Object::GreaterThan(Handle<Object> x, Handle<Object> y) {
7488 Maybe<ComparisonResult> result = Compare(x, y);
7489 if (result.IsJust()) {
7490 switch (result.FromJust()) {
7491 case ComparisonResult::kGreaterThan:
7492 return Just(true);
7493 case ComparisonResult::kLessThan:
7494 case ComparisonResult::kEqual:
7495 case ComparisonResult::kUndefined:
7496 return Just(false);
7497 }
7498 }
7499 return Nothing<bool>();
7500 }
7501
7502
7503 // static
GreaterThanOrEqual(Handle<Object> x,Handle<Object> y)7504 Maybe<bool> Object::GreaterThanOrEqual(Handle<Object> x, Handle<Object> y) {
7505 Maybe<ComparisonResult> result = Compare(x, y);
7506 if (result.IsJust()) {
7507 switch (result.FromJust()) {
7508 case ComparisonResult::kEqual:
7509 case ComparisonResult::kGreaterThan:
7510 return Just(true);
7511 case ComparisonResult::kLessThan:
7512 case ComparisonResult::kUndefined:
7513 return Just(false);
7514 }
7515 }
7516 return Nothing<bool>();
7517 }
7518
7519
7520 // static
LessThan(Handle<Object> x,Handle<Object> y)7521 Maybe<bool> Object::LessThan(Handle<Object> x, Handle<Object> y) {
7522 Maybe<ComparisonResult> result = Compare(x, y);
7523 if (result.IsJust()) {
7524 switch (result.FromJust()) {
7525 case ComparisonResult::kLessThan:
7526 return Just(true);
7527 case ComparisonResult::kEqual:
7528 case ComparisonResult::kGreaterThan:
7529 case ComparisonResult::kUndefined:
7530 return Just(false);
7531 }
7532 }
7533 return Nothing<bool>();
7534 }
7535
7536
7537 // static
LessThanOrEqual(Handle<Object> x,Handle<Object> y)7538 Maybe<bool> Object::LessThanOrEqual(Handle<Object> x, Handle<Object> y) {
7539 Maybe<ComparisonResult> result = Compare(x, y);
7540 if (result.IsJust()) {
7541 switch (result.FromJust()) {
7542 case ComparisonResult::kEqual:
7543 case ComparisonResult::kLessThan:
7544 return Just(true);
7545 case ComparisonResult::kGreaterThan:
7546 case ComparisonResult::kUndefined:
7547 return Just(false);
7548 }
7549 }
7550 return Nothing<bool>();
7551 }
7552
// Looks up |name| on |object|, treating integer-like names as element
// accesses (hence "PropertyOrElement").
MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name) {
  LookupIterator it =
      LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
  return GetProperty(&it);
}

// Stores |value| at |name| on |object|; returns the value on success or
// nothing if the store failed (e.g. threw in strict mode).
MaybeHandle<Object> Object::SetPropertyOrElement(Handle<Object> object,
                                                 Handle<Name> name,
                                                 Handle<Object> value,
                                                 LanguageMode language_mode,
                                                 StoreFromKeyed store_mode) {
  LookupIterator it =
      LookupIterator::PropertyOrElement(name->GetIsolate(), object, name);
  MAYBE_RETURN_NULL(SetProperty(&it, value, language_mode, store_mode));
  return value;
}

// Variant that starts the lookup at an explicit |holder| while keeping
// |receiver| as the receiver for accessor calls.
MaybeHandle<Object> Object::GetPropertyOrElement(Handle<Object> receiver,
                                                 Handle<Name> name,
                                                 Handle<JSReceiver> holder) {
  LookupIterator it = LookupIterator::PropertyOrElement(
      name->GetIsolate(), receiver, name, holder);
  return GetProperty(&it);
}
7578
7579
// Installs the canonical empty properties store matching the map's mode.
// Both empty stores live outside new space, so no write barrier is needed.
void JSReceiver::initialize_properties() {
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_fixed_array()));
  DCHECK(!GetHeap()->InNewSpace(GetHeap()->empty_properties_dictionary()));
  if (map()->is_dictionary_map()) {
    WRITE_FIELD(this, kPropertiesOffset,
                GetHeap()->empty_properties_dictionary());
  } else {
    WRITE_FIELD(this, kPropertiesOffset, GetHeap()->empty_fixed_array());
  }
}


// Fast properties means the properties store is a plain FixedArray rather
// than a dictionary; this must agree with the map's dictionary bit.
bool JSReceiver::HasFastProperties() {
  DCHECK_EQ(properties()->IsDictionary(), map()->is_dictionary_map());
  return !properties()->IsDictionary();
}


// The slow-mode properties dictionary; global objects use
// global_dictionary() instead.
NameDictionary* JSReceiver::property_dictionary() {
  DCHECK(!HasFastProperties());
  DCHECK(!IsJSGlobalObject());
  return NameDictionary::cast(properties());
}
7603
// [[HasProperty]] including the prototype chain.
Maybe<bool> JSReceiver::HasProperty(Handle<JSReceiver> object,
                                    Handle<Name> name) {
  LookupIterator it = LookupIterator::PropertyOrElement(object->GetIsolate(),
                                                        object, name, object);
  return HasProperty(&it);
}


// Own-property check by name. Plain JSObjects use a direct OWN lookup; other
// receivers (e.g. proxies) go through GetOwnPropertyAttributes so traps run.
Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
                                       Handle<Name> name) {
  if (object->IsJSObject()) {  // Shortcut
    LookupIterator it = LookupIterator::PropertyOrElement(
        object->GetIsolate(), object, name, object, LookupIterator::OWN);
    return HasProperty(&it);
  }

  Maybe<PropertyAttributes> attributes =
      JSReceiver::GetOwnPropertyAttributes(object, name);
  MAYBE_RETURN(attributes, Nothing<bool>());
  return Just(attributes.FromJust() != ABSENT);
}

// Own-property check by element index; same shortcut strategy as above.
Maybe<bool> JSReceiver::HasOwnProperty(Handle<JSReceiver> object,
                                       uint32_t index) {
  if (object->IsJSObject()) {  // Shortcut
    LookupIterator it(object->GetIsolate(), object, index, object,
                      LookupIterator::OWN);
    return HasProperty(&it);
  }

  Maybe<PropertyAttributes> attributes =
      JSReceiver::GetOwnPropertyAttributes(object, index);
  MAYBE_RETURN(attributes, Nothing<bool>());
  return Just(attributes.FromJust() != ABSENT);
}
7639
// Attributes of |name| on |object|, searching the prototype chain.
Maybe<PropertyAttributes> JSReceiver::GetPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  LookupIterator it = LookupIterator::PropertyOrElement(name->GetIsolate(),
                                                        object, name, object);
  return GetPropertyAttributes(&it);
}


// Attributes of |name| as an own property only (LookupIterator::OWN).
Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
    Handle<JSReceiver> object, Handle<Name> name) {
  LookupIterator it = LookupIterator::PropertyOrElement(
      name->GetIsolate(), object, name, object, LookupIterator::OWN);
  return GetPropertyAttributes(&it);
}

// Attributes of element |index| as an own property only.
Maybe<PropertyAttributes> JSReceiver::GetOwnPropertyAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  LookupIterator it(object->GetIsolate(), object, index, object,
                    LookupIterator::OWN);
  return GetPropertyAttributes(&it);
}

// [[HasProperty]] for element |index|, including the prototype chain.
Maybe<bool> JSReceiver::HasElement(Handle<JSReceiver> object, uint32_t index) {
  LookupIterator it(object->GetIsolate(), object, index, object);
  return HasProperty(&it);
}


// Attributes of element |index|, searching the prototype chain.
Maybe<PropertyAttributes> JSReceiver::GetElementAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator it(isolate, object, index, object);
  return GetPropertyAttributes(&it);
}


// Attributes of element |index| as an own property only.
Maybe<PropertyAttributes> JSReceiver::GetOwnElementAttributes(
    Handle<JSReceiver> object, uint32_t index) {
  Isolate* isolate = object->GetIsolate();
  LookupIterator it(isolate, object, index, object, LookupIterator::OWN);
  return GetPropertyAttributes(&it);
}
7682
7683
// A global object is detached when its proxy no longer points back at it.
bool JSGlobalObject::IsDetached() {
  return JSGlobalProxy::cast(global_proxy())->IsDetachedFrom(this);
}


// A proxy is detached from |global| when |global| is no longer the next
// object on the proxy's prototype chain.
bool JSGlobalProxy::IsDetachedFrom(JSGlobalObject* global) const {
  const PrototypeIterator iter(this->GetIsolate(),
                               const_cast<JSGlobalProxy*>(this));
  return iter.GetCurrent() != global;
}

// Object size including trailing internal (embedder) fields.
inline int JSGlobalProxy::SizeWithInternalFields(int internal_field_count) {
  DCHECK_GE(internal_field_count, 0);
  return kSize + internal_field_count * kPointerSize;
}

// Dispatches to the proxy or plain-object implementation of identity
// hash creation.
Smi* JSReceiver::GetOrCreateIdentityHash(Isolate* isolate,
                                         Handle<JSReceiver> object) {
  return object->IsJSProxy() ? JSProxy::GetOrCreateIdentityHash(
                                   isolate, Handle<JSProxy>::cast(object))
                             : JSObject::GetOrCreateIdentityHash(
                                   isolate, Handle<JSObject>::cast(object));
}

// Dispatches to the proxy or plain-object identity hash lookup (which may
// return undefined when no hash has been created yet).
Object* JSReceiver::GetIdentityHash(Isolate* isolate,
                                    Handle<JSReceiver> receiver) {
  return receiver->IsJSProxy()
             ? JSProxy::GetIdentityHash(Handle<JSProxy>::cast(receiver))
             : JSObject::GetIdentityHash(isolate,
                                         Handle<JSObject>::cast(receiver));
}
7715
7716
// AccessorInfo boolean flags, all packed as bits into the flag() Smi.
bool AccessorInfo::all_can_read() {
  return BooleanBit::get(flag(), kAllCanReadBit);
}


void AccessorInfo::set_all_can_read(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanReadBit, value));
}


bool AccessorInfo::all_can_write() {
  return BooleanBit::get(flag(), kAllCanWriteBit);
}


void AccessorInfo::set_all_can_write(bool value) {
  set_flag(BooleanBit::set(flag(), kAllCanWriteBit, value));
}


bool AccessorInfo::is_special_data_property() {
  return BooleanBit::get(flag(), kSpecialDataProperty);
}


void AccessorInfo::set_is_special_data_property(bool value) {
  set_flag(BooleanBit::set(flag(), kSpecialDataProperty, value));
}

bool AccessorInfo::replace_on_access() {
  return BooleanBit::get(flag(), kReplaceOnAccess);
}

void AccessorInfo::set_replace_on_access(bool value) {
  set_flag(BooleanBit::set(flag(), kReplaceOnAccess, value));
}

bool AccessorInfo::is_sloppy() { return BooleanBit::get(flag(), kIsSloppy); }

void AccessorInfo::set_is_sloppy(bool value) {
  set_flag(BooleanBit::set(flag(), kIsSloppy, value));
}

// Property attributes occupy a multi-bit field within the same flag word.
PropertyAttributes AccessorInfo::property_attributes() {
  return AttributesField::decode(static_cast<uint32_t>(flag()));
}


void AccessorInfo::set_property_attributes(PropertyAttributes attributes) {
  set_flag(AttributesField::update(flag(), attributes));
}
7768
// Convenience overload forwarding to the map-based check.
bool FunctionTemplateInfo::IsTemplateFor(JSObject* object) {
  return IsTemplateFor(object->map());
}

// A receiver is compatible when no expected receiver type is declared, or
// when the receiver is a JSObject instantiated from that template.
bool AccessorInfo::IsCompatibleReceiver(Object* receiver) {
  if (!HasExpectedReceiverType()) return true;
  if (!receiver->IsJSObject()) return false;
  return FunctionTemplateInfo::cast(expected_receiver_type())
      ->IsTemplateFor(JSObject::cast(receiver)->map());
}


bool AccessorInfo::HasExpectedReceiverType() {
  return expected_receiver_type()->IsFunctionTemplateInfo();
}
7784
7785
// Selects the getter or setter slot by component.
Object* AccessorPair::get(AccessorComponent component) {
  return component == ACCESSOR_GETTER ? getter() : setter();
}


void AccessorPair::set(AccessorComponent component, Object* value) {
  if (component == ACCESSOR_GETTER) {
    set_getter(value);
  } else {
    set_setter(value);
  }
}


// Installs both components at once; null arguments leave the corresponding
// slot untouched rather than clearing it.
void AccessorPair::SetComponents(Object* getter, Object* setter) {
  Isolate* isolate = GetIsolate();
  if (!getter->IsNull(isolate)) set_getter(getter);
  if (!setter->IsNull(isolate)) set_setter(setter);
}


bool AccessorPair::Equals(AccessorPair* pair) {
  return (this == pair) || pair->Equals(getter(), setter());
}


bool AccessorPair::Equals(Object* getter_value, Object* setter_value) {
  return (getter() == getter_value) && (setter() == setter_value);
}


// True if at least one component holds an actual accessor (see IsJSAccessor).
bool AccessorPair::ContainsAccessor() {
  return IsJSAccessor(getter()) || IsJSAccessor(setter());
}


// Callable or undefined counts as a JS accessor slot value.
bool AccessorPair::IsJSAccessor(Object* obj) {
  return obj->IsCallable() || obj->IsUndefined(GetIsolate());
}
7825
7826
7827 template<typename Derived, typename Shape, typename Key>
SetEntry(int entry,Handle<Object> key,Handle<Object> value)7828 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7829 Handle<Object> key,
7830 Handle<Object> value) {
7831 this->SetEntry(entry, key, value, PropertyDetails(Smi::kZero));
7832 }
7833
7834
7835 template<typename Derived, typename Shape, typename Key>
SetEntry(int entry,Handle<Object> key,Handle<Object> value,PropertyDetails details)7836 void Dictionary<Derived, Shape, Key>::SetEntry(int entry,
7837 Handle<Object> key,
7838 Handle<Object> value,
7839 PropertyDetails details) {
7840 Shape::SetEntry(static_cast<Derived*>(this), entry, key, value, details);
7841 }
7842
7843
7844 template <typename Key>
7845 template <typename Dictionary>
SetEntry(Dictionary * dict,int entry,Handle<Object> key,Handle<Object> value,PropertyDetails details)7846 void BaseDictionaryShape<Key>::SetEntry(Dictionary* dict, int entry,
7847 Handle<Object> key,
7848 Handle<Object> value,
7849 PropertyDetails details) {
7850 STATIC_ASSERT(Dictionary::kEntrySize == 2 || Dictionary::kEntrySize == 3);
7851 DCHECK(!key->IsName() || details.dictionary_index() > 0);
7852 int index = dict->EntryToIndex(entry);
7853 DisallowHeapAllocation no_gc;
7854 WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
7855 dict->set(index + Dictionary::kEntryKeyIndex, *key, mode);
7856 dict->set(index + Dictionary::kEntryValueIndex, *value, mode);
7857 if (Dictionary::kEntrySize == 3) {
7858 dict->set(index + Dictionary::kEntryDetailsIndex, details.AsSmi());
7859 }
7860 }
7861
7862
7863 template <typename Dictionary>
SetEntry(Dictionary * dict,int entry,Handle<Object> key,Handle<Object> value,PropertyDetails details)7864 void GlobalDictionaryShape::SetEntry(Dictionary* dict, int entry,
7865 Handle<Object> key, Handle<Object> value,
7866 PropertyDetails details) {
7867 STATIC_ASSERT(Dictionary::kEntrySize == 2);
7868 DCHECK(!key->IsName() || details.dictionary_index() > 0);
7869 DCHECK(value->IsPropertyCell());
7870 int index = dict->EntryToIndex(entry);
7871 DisallowHeapAllocation no_gc;
7872 WriteBarrierMode mode = dict->GetWriteBarrierMode(no_gc);
7873 dict->set(index + Dictionary::kEntryKeyIndex, *key, mode);
7874 dict->set(index + Dictionary::kEntryValueIndex, *value, mode);
7875 PropertyCell::cast(*value)->set_property_details(details);
7876 }
7877
7878
IsMatch(uint32_t key,Object * other)7879 bool NumberDictionaryShape::IsMatch(uint32_t key, Object* other) {
7880 DCHECK(other->IsNumber());
7881 return key == static_cast<uint32_t>(other->Number());
7882 }
7883
7884
Hash(uint32_t key)7885 uint32_t UnseededNumberDictionaryShape::Hash(uint32_t key) {
7886 return ComputeIntegerHash(key, 0);
7887 }
7888
7889
HashForObject(uint32_t key,Object * other)7890 uint32_t UnseededNumberDictionaryShape::HashForObject(uint32_t key,
7891 Object* other) {
7892 DCHECK(other->IsNumber());
7893 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), 0);
7894 }
7895
GetMap(Isolate * isolate)7896 Map* UnseededNumberDictionaryShape::GetMap(Isolate* isolate) {
7897 return isolate->heap()->unseeded_number_dictionary_map();
7898 }
7899
SeededHash(uint32_t key,uint32_t seed)7900 uint32_t SeededNumberDictionaryShape::SeededHash(uint32_t key, uint32_t seed) {
7901 return ComputeIntegerHash(key, seed);
7902 }
7903
7904
SeededHashForObject(uint32_t key,uint32_t seed,Object * other)7905 uint32_t SeededNumberDictionaryShape::SeededHashForObject(uint32_t key,
7906 uint32_t seed,
7907 Object* other) {
7908 DCHECK(other->IsNumber());
7909 return ComputeIntegerHash(static_cast<uint32_t>(other->Number()), seed);
7910 }
7911
7912
AsHandle(Isolate * isolate,uint32_t key)7913 Handle<Object> NumberDictionaryShape::AsHandle(Isolate* isolate, uint32_t key) {
7914 return isolate->factory()->NewNumberFromUint(key);
7915 }
7916
7917
IsMatch(Handle<Name> key,Object * other)7918 bool NameDictionaryShape::IsMatch(Handle<Name> key, Object* other) {
7919 // We know that all entries in a hash table had their hash keys created.
7920 // Use that knowledge to have fast failure.
7921 if (key->Hash() != Name::cast(other)->Hash()) return false;
7922 return key->Equals(Name::cast(other));
7923 }
7924
7925
Hash(Handle<Name> key)7926 uint32_t NameDictionaryShape::Hash(Handle<Name> key) {
7927 return key->Hash();
7928 }
7929
7930
HashForObject(Handle<Name> key,Object * other)7931 uint32_t NameDictionaryShape::HashForObject(Handle<Name> key, Object* other) {
7932 return Name::cast(other)->Hash();
7933 }
7934
7935
AsHandle(Isolate * isolate,Handle<Name> key)7936 Handle<Object> NameDictionaryShape::AsHandle(Isolate* isolate,
7937 Handle<Name> key) {
7938 DCHECK(key->IsUniqueName());
7939 return key;
7940 }
7941
7942
DoGenerateNewEnumerationIndices(Handle<NameDictionary> dictionary)7943 Handle<FixedArray> NameDictionary::DoGenerateNewEnumerationIndices(
7944 Handle<NameDictionary> dictionary) {
7945 return DerivedDictionary::GenerateNewEnumerationIndices(dictionary);
7946 }
7947
7948
// In a global dictionary the details live on the value's PropertyCell,
// so reading them means dereferencing the cell at |entry|.
template <typename Dictionary>
PropertyDetails GlobalDictionaryShape::DetailsAt(Dictionary* dict, int entry) {
  DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
  Object* raw_value = dict->ValueAt(entry);
  DCHECK(raw_value->IsPropertyCell());
  PropertyCell* cell = PropertyCell::cast(raw_value);
  return cell->property_details();
}


// Writes new details onto the PropertyCell stored at |entry|.
template <typename Dictionary>
void GlobalDictionaryShape::DetailsAtPut(Dictionary* dict, int entry,
                                         PropertyDetails value) {
  DCHECK(entry >= 0);  // Not found is -1, which is not caught by get().
  Object* raw_value = dict->ValueAt(entry);
  DCHECK(raw_value->IsPropertyCell());
  PropertyCell* cell = PropertyCell::cast(raw_value);
  cell->set_property_details(value);
}


// An entry is considered deleted when its cell's value is the hole.
template <typename Dictionary>
bool GlobalDictionaryShape::IsDeleted(Dictionary* dict, int entry) {
  DCHECK(dict->ValueAt(entry)->IsPropertyCell());
  Isolate* isolate = dict->GetIsolate();
  return PropertyCell::cast(dict->ValueAt(entry))->value()->IsTheHole(isolate);
}
7976
7977
IsMatch(Handle<Object> key,Object * other)7978 bool ObjectHashTableShape::IsMatch(Handle<Object> key, Object* other) {
7979 return key->SameValue(other);
7980 }
7981
7982
Hash(Handle<Object> key)7983 uint32_t ObjectHashTableShape::Hash(Handle<Object> key) {
7984 return Smi::cast(key->GetHash())->value();
7985 }
7986
7987
HashForObject(Handle<Object> key,Object * other)7988 uint32_t ObjectHashTableShape::HashForObject(Handle<Object> key,
7989 Object* other) {
7990 return Smi::cast(other->GetHash())->value();
7991 }
7992
7993
AsHandle(Isolate * isolate,Handle<Object> key)7994 Handle<Object> ObjectHashTableShape::AsHandle(Isolate* isolate,
7995 Handle<Object> key) {
7996 return key;
7997 }
7998
7999
Shrink(Handle<ObjectHashTable> table,Handle<Object> key)8000 Handle<ObjectHashTable> ObjectHashTable::Shrink(
8001 Handle<ObjectHashTable> table, Handle<Object> key) {
8002 return DerivedHashTable::Shrink(table, key);
8003 }
8004
8005
ValueAt(int entry)8006 Object* OrderedHashMap::ValueAt(int entry) {
8007 return get(EntryToIndex(entry) + kValueOffset);
8008 }
8009
8010
8011 template <int entrysize>
IsMatch(Handle<Object> key,Object * other)8012 bool WeakHashTableShape<entrysize>::IsMatch(Handle<Object> key, Object* other) {
8013 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
8014 return key->IsWeakCell() ? WeakCell::cast(*key)->value() == other
8015 : *key == other;
8016 }
8017
8018
8019 template <int entrysize>
Hash(Handle<Object> key)8020 uint32_t WeakHashTableShape<entrysize>::Hash(Handle<Object> key) {
8021 intptr_t hash =
8022 key->IsWeakCell()
8023 ? reinterpret_cast<intptr_t>(WeakCell::cast(*key)->value())
8024 : reinterpret_cast<intptr_t>(*key);
8025 return (uint32_t)(hash & 0xFFFFFFFF);
8026 }
8027
8028
8029 template <int entrysize>
HashForObject(Handle<Object> key,Object * other)8030 uint32_t WeakHashTableShape<entrysize>::HashForObject(Handle<Object> key,
8031 Object* other) {
8032 if (other->IsWeakCell()) other = WeakCell::cast(other)->value();
8033 intptr_t hash = reinterpret_cast<intptr_t>(other);
8034 return (uint32_t)(hash & 0xFFFFFFFF);
8035 }
8036
8037
8038 template <int entrysize>
AsHandle(Isolate * isolate,Handle<Object> key)8039 Handle<Object> WeakHashTableShape<entrysize>::AsHandle(Isolate* isolate,
8040 Handle<Object> key) {
8041 return key;
8042 }
8043
8044
IsAsmModule()8045 bool ScopeInfo::IsAsmModule() { return AsmModuleField::decode(Flags()); }
8046
8047
IsAsmFunction()8048 bool ScopeInfo::IsAsmFunction() { return AsmFunctionField::decode(Flags()); }
8049
8050
HasSimpleParameters()8051 bool ScopeInfo::HasSimpleParameters() {
8052 return HasSimpleParametersField::decode(Flags());
8053 }
8054
8055
// Defines a setter and a getter for each numeric ScopeInfo field. The getter
// returns 0 for the empty (zero-length) ScopeInfo, which has no field slots.
// (No comments inside the macro body: they would break the line
// continuations.)
#define SCOPE_INFO_FIELD_ACCESSORS(name)                                      \
  void ScopeInfo::Set##name(int value) { set(k##name, Smi::FromInt(value)); } \
  int ScopeInfo::name() {                                                     \
    if (length() > 0) {                                                       \
      return Smi::cast(get(k##name))->value();                                \
    } else {                                                                  \
      return 0;                                                               \
    }                                                                         \
  }
FOR_EACH_SCOPE_INFO_NUMERIC_FIELD(SCOPE_INFO_FIELD_ACCESSORS)
#undef SCOPE_INFO_FIELD_ACCESSORS
8067
ACCESSORS(ModuleInfoEntry,export_name,Object,kExportNameOffset)8068 ACCESSORS(ModuleInfoEntry, export_name, Object, kExportNameOffset)
8069 ACCESSORS(ModuleInfoEntry, local_name, Object, kLocalNameOffset)
8070 ACCESSORS(ModuleInfoEntry, import_name, Object, kImportNameOffset)
8071 SMI_ACCESSORS(ModuleInfoEntry, module_request, kModuleRequestOffset)
8072 SMI_ACCESSORS(ModuleInfoEntry, cell_index, kCellIndexOffset)
8073 SMI_ACCESSORS(ModuleInfoEntry, beg_pos, kBegPosOffset)
8074 SMI_ACCESSORS(ModuleInfoEntry, end_pos, kEndPosOffset)
8075
// ModuleInfo is a fixed array with well-known slots; each getter below reads
// one slot and casts it to the FixedArray stored there.
FixedArray* ModuleInfo::module_requests() const {
  return FixedArray::cast(get(kModuleRequestsIndex));
}

FixedArray* ModuleInfo::special_exports() const {
  return FixedArray::cast(get(kSpecialExportsIndex));
}

FixedArray* ModuleInfo::regular_exports() const {
  return FixedArray::cast(get(kRegularExportsIndex));
}

FixedArray* ModuleInfo::regular_imports() const {
  return FixedArray::cast(get(kRegularImportsIndex));
}

FixedArray* ModuleInfo::namespace_imports() const {
  return FixedArray::cast(get(kNamespaceImportsIndex));
}
8095
#ifdef DEBUG
// Debug-only structural comparison: two ModuleInfos are equal when their four
// component arrays are pointer-identical. Note module_requests() is not
// compared here.
bool ModuleInfo::Equals(ModuleInfo* other) const {
  return regular_exports() == other->regular_exports() &&
         regular_imports() == other->regular_imports() &&
         special_exports() == other->special_exports() &&
         namespace_imports() == other->namespace_imports();
}
#endif
8104
ClearCodeCache(Heap * heap)8105 void Map::ClearCodeCache(Heap* heap) {
8106 // No write barrier is needed since empty_fixed_array is not in new space.
8107 // Please note this function is used during marking:
8108 // - MarkCompactCollector::MarkUnmarkedObject
8109 // - IncrementalMarking::Step
8110 WRITE_FIELD(this, kCodeCacheOffset, heap->empty_fixed_array());
8111 }
8112
8113
SlackForArraySize(int old_size,int size_limit)8114 int Map::SlackForArraySize(int old_size, int size_limit) {
8115 const int max_slack = size_limit - old_size;
8116 CHECK_LE(0, max_slack);
8117 if (old_size < 4) {
8118 DCHECK_LE(1, max_slack);
8119 return 1;
8120 }
8121 return Min(max_slack, old_size / 4);
8122 }
8123
8124
set_length(Smi * length)8125 void JSArray::set_length(Smi* length) {
8126 // Don't need a write barrier for a Smi.
8127 set_length(static_cast<Object*>(length), SKIP_WRITE_BARRIER);
8128 }
8129
8130
SetLengthWouldNormalize(Heap * heap,uint32_t new_length)8131 bool JSArray::SetLengthWouldNormalize(Heap* heap, uint32_t new_length) {
8132 // If the new array won't fit in a some non-trivial fraction of the max old
8133 // space size, then force it to go dictionary mode.
8134 uint32_t max_fast_array_size =
8135 static_cast<uint32_t>((heap->MaxOldGenerationSize() / kDoubleSize) / 4);
8136 return new_length >= max_fast_array_size;
8137 }
8138
8139
AllowsSetLength()8140 bool JSArray::AllowsSetLength() {
8141 bool result = elements()->IsFixedArray() || elements()->IsFixedDoubleArray();
8142 DCHECK(result == !HasFixedTypedArrayElements());
8143 return result;
8144 }
8145
8146
SetContent(Handle<JSArray> array,Handle<FixedArrayBase> storage)8147 void JSArray::SetContent(Handle<JSArray> array,
8148 Handle<FixedArrayBase> storage) {
8149 EnsureCanContainElements(array, storage, storage->length(),
8150 ALLOW_COPIED_DOUBLE_ELEMENTS);
8151
8152 DCHECK((storage->map() == array->GetHeap()->fixed_double_array_map() &&
8153 IsFastDoubleElementsKind(array->GetElementsKind())) ||
8154 ((storage->map() != array->GetHeap()->fixed_double_array_map()) &&
8155 (IsFastObjectElementsKind(array->GetElementsKind()) ||
8156 (IsFastSmiElementsKind(array->GetElementsKind()) &&
8157 Handle<FixedArray>::cast(storage)->ContainsOnlySmisOrHoles()))));
8158 array->set_elements(*storage);
8159 array->set_length(Smi::FromInt(storage->length()));
8160 }
8161
8162
HasArrayPrototype(Isolate * isolate)8163 bool JSArray::HasArrayPrototype(Isolate* isolate) {
8164 return map()->prototype() == *isolate->initial_array_prototype();
8165 }
8166
8167
ic_total_count()8168 int TypeFeedbackInfo::ic_total_count() {
8169 int current = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
8170 return ICTotalCountField::decode(current);
8171 }
8172
8173
set_ic_total_count(int count)8174 void TypeFeedbackInfo::set_ic_total_count(int count) {
8175 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
8176 value = ICTotalCountField::update(value,
8177 ICTotalCountField::decode(count));
8178 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
8179 }
8180
8181
ic_with_type_info_count()8182 int TypeFeedbackInfo::ic_with_type_info_count() {
8183 int current = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
8184 return ICsWithTypeInfoCountField::decode(current);
8185 }
8186
8187
change_ic_with_type_info_count(int delta)8188 void TypeFeedbackInfo::change_ic_with_type_info_count(int delta) {
8189 if (delta == 0) return;
8190 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
8191 int new_count = ICsWithTypeInfoCountField::decode(value) + delta;
8192 // We can get negative count here when the type-feedback info is
8193 // shared between two code objects. The can only happen when
8194 // the debugger made a shallow copy of code object (see Heap::CopyCode).
8195 // Since we do not optimize when the debugger is active, we can skip
8196 // this counter update.
8197 if (new_count >= 0) {
8198 new_count &= ICsWithTypeInfoCountField::kMask;
8199 value = ICsWithTypeInfoCountField::update(value, new_count);
8200 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
8201 }
8202 }
8203
8204
ic_generic_count()8205 int TypeFeedbackInfo::ic_generic_count() {
8206 return Smi::cast(READ_FIELD(this, kStorage3Offset))->value();
8207 }
8208
8209
change_ic_generic_count(int delta)8210 void TypeFeedbackInfo::change_ic_generic_count(int delta) {
8211 if (delta == 0) return;
8212 int new_count = ic_generic_count() + delta;
8213 if (new_count >= 0) {
8214 new_count &= ~Smi::kMinValue;
8215 WRITE_FIELD(this, kStorage3Offset, Smi::FromInt(new_count));
8216 }
8217 }
8218
8219
initialize_storage()8220 void TypeFeedbackInfo::initialize_storage() {
8221 WRITE_FIELD(this, kStorage1Offset, Smi::kZero);
8222 WRITE_FIELD(this, kStorage2Offset, Smi::kZero);
8223 WRITE_FIELD(this, kStorage3Offset, Smi::kZero);
8224 }
8225
8226
change_own_type_change_checksum()8227 void TypeFeedbackInfo::change_own_type_change_checksum() {
8228 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
8229 int checksum = OwnTypeChangeChecksum::decode(value);
8230 checksum = (checksum + 1) % (1 << kTypeChangeChecksumBits);
8231 value = OwnTypeChangeChecksum::update(value, checksum);
8232 // Ensure packed bit field is in Smi range.
8233 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
8234 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
8235 WRITE_FIELD(this, kStorage1Offset, Smi::FromInt(value));
8236 }
8237
8238
set_inlined_type_change_checksum(int checksum)8239 void TypeFeedbackInfo::set_inlined_type_change_checksum(int checksum) {
8240 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
8241 int mask = (1 << kTypeChangeChecksumBits) - 1;
8242 value = InlinedTypeChangeChecksum::update(value, checksum & mask);
8243 // Ensure packed bit field is in Smi range.
8244 if (value > Smi::kMaxValue) value |= Smi::kMinValue;
8245 if (value < Smi::kMinValue) value &= ~Smi::kMinValue;
8246 WRITE_FIELD(this, kStorage2Offset, Smi::FromInt(value));
8247 }
8248
8249
own_type_change_checksum()8250 int TypeFeedbackInfo::own_type_change_checksum() {
8251 int value = Smi::cast(READ_FIELD(this, kStorage1Offset))->value();
8252 return OwnTypeChangeChecksum::decode(value);
8253 }
8254
8255
matches_inlined_type_change_checksum(int checksum)8256 bool TypeFeedbackInfo::matches_inlined_type_change_checksum(int checksum) {
8257 int value = Smi::cast(READ_FIELD(this, kStorage2Offset))->value();
8258 int mask = (1 << kTypeChangeChecksumBits) - 1;
8259 return InlinedTypeChangeChecksum::decode(value) == (checksum & mask);
8260 }
8261
8262
SMI_ACCESSORS(AliasedArgumentsEntry,aliased_context_slot,kAliasedContextSlot)8263 SMI_ACCESSORS(AliasedArgumentsEntry, aliased_context_slot, kAliasedContextSlot)
8264
8265
8266 Relocatable::Relocatable(Isolate* isolate) {
8267 isolate_ = isolate;
8268 prev_ = isolate->relocatable_top();
8269 isolate->set_relocatable_top(this);
8270 }
8271
8272
~Relocatable()8273 Relocatable::~Relocatable() {
8274 DCHECK_EQ(isolate_->relocatable_top(), this);
8275 isolate_->set_relocatable_top(prev_);
8276 }
8277
8278
// Returns the key at the iterator's current position; the iterator never
// rests on a deleted (hole) entry.
template<class Derived, class TableType>
Object* OrderedHashTableIterator<Derived, TableType>::CurrentKey() {
  TableType* table(TableType::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* key = table->KeyAt(index);
  DCHECK(!key->IsTheHole(table->GetIsolate()));
  return key;
}


// Set iterators produce single-element results: just the key.
void JSSetIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
}


// Map iterators produce (key, value) pairs.
void JSMapIterator::PopulateValueArray(FixedArray* array) {
  array->set(0, CurrentKey());
  array->set(1, CurrentValue());
}


// Value counterpart of CurrentKey() for map iteration.
Object* JSMapIterator::CurrentValue() {
  OrderedHashMap* table(OrderedHashMap::cast(this->table()));
  int index = Smi::cast(this->index())->value();
  Object* value = table->ValueAt(index);
  DCHECK(!value->IsTheHole(table->GetIsolate()));
  return value;
}
8307
8308
SubStringRange(String * string,int first,int length)8309 String::SubStringRange::SubStringRange(String* string, int first, int length)
8310 : string_(string),
8311 first_(first),
8312 length_(length == -1 ? string->length() : length) {}
8313
8314
// Forward iterator over the characters of a SubStringRange. Holds a
// FlatContent snapshot of the string, so it must not outlive a GC that
// could move the underlying characters.
class String::SubStringRange::iterator final {
 public:
  typedef std::forward_iterator_tag iterator_category;
  typedef int difference_type;
  typedef uc16 value_type;
  typedef uc16* pointer;
  typedef uc16& reference;

  iterator(const iterator& other)
      : content_(other.content_), offset_(other.offset_) {}

  // Returns the character at the current offset.
  uc16 operator*() { return content_.Get(offset_); }
  // Iterators compare equal only when they view the same string and offset.
  bool operator==(const iterator& other) const {
    return content_.UsesSameString(other.content_) && offset_ == other.offset_;
  }
  bool operator!=(const iterator& other) const {
    return !content_.UsesSameString(other.content_) || offset_ != other.offset_;
  }
  iterator& operator++() {
    ++offset_;
    return *this;
  }
  iterator operator++(int);

 private:
  friend class String;
  // Only String (via SubStringRange::begin/end) can construct iterators.
  iterator(String* from, int offset)
      : content_(from->GetFlatContent()), offset_(offset) {}
  String::FlatContent content_;
  int offset_;
};
8346
8347
begin()8348 String::SubStringRange::iterator String::SubStringRange::begin() {
8349 return String::SubStringRange::iterator(string_, first_);
8350 }
8351
8352
end()8353 String::SubStringRange::iterator String::SubStringRange::end() {
8354 return String::SubStringRange::iterator(string_, first_ + length_);
8355 }
8356
8357
8358 // Predictably converts HeapObject* or Address to uint32 by calculating
8359 // offset of the address in respective MemoryChunk.
ObjectAddressForHashing(void * object)8360 static inline uint32_t ObjectAddressForHashing(void* object) {
8361 uint32_t value = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(object));
8362 return value & MemoryChunk::kAlignmentMask;
8363 }
8364
MakeEntryPair(Isolate * isolate,uint32_t index,Handle<Object> value)8365 static inline Handle<Object> MakeEntryPair(Isolate* isolate, uint32_t index,
8366 Handle<Object> value) {
8367 Handle<Object> key = isolate->factory()->Uint32ToString(index);
8368 Handle<FixedArray> entry_storage =
8369 isolate->factory()->NewUninitializedFixedArray(2);
8370 {
8371 entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
8372 entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
8373 }
8374 return isolate->factory()->NewJSArrayWithElements(entry_storage,
8375 FAST_ELEMENTS, 2);
8376 }
8377
MakeEntryPair(Isolate * isolate,Handle<Name> key,Handle<Object> value)8378 static inline Handle<Object> MakeEntryPair(Isolate* isolate, Handle<Name> key,
8379 Handle<Object> value) {
8380 Handle<FixedArray> entry_storage =
8381 isolate->factory()->NewUninitializedFixedArray(2);
8382 {
8383 entry_storage->set(0, *key, SKIP_WRITE_BARRIER);
8384 entry_storage->set(1, *value, SKIP_WRITE_BARRIER);
8385 }
8386 return isolate->factory()->NewJSArrayWithElements(entry_storage,
8387 FAST_ELEMENTS, 2);
8388 }
8389
// The {value, done} pair of an iterator result object.
ACCESSORS(JSIteratorResult, value, Object, kValueOffset)
ACCESSORS(JSIteratorResult, done, Object, kDoneOffset)

// Array iterator state: iterated object, next index, and the map the object
// had at iterator creation (presumably to detect elements-kind transitions —
// confirm against the iterator implementation).
ACCESSORS(JSArrayIterator, object, Object, kIteratedObjectOffset)
ACCESSORS(JSArrayIterator, index, Object, kNextIndexOffset)
ACCESSORS(JSArrayIterator, object_map, Object, kIteratedObjectMapOffset)

// String iterator state: the string and the next code-point index.
ACCESSORS(JSStringIterator, string, String, kStringOffset)
SMI_ACCESSORS(JSStringIterator, index, kNextIndexOffset)
8399
8400 #undef TYPE_CHECKER
8401 #undef CAST_ACCESSOR
8402 #undef INT_ACCESSORS
8403 #undef ACCESSORS
8404 #undef SMI_ACCESSORS
8405 #undef SYNCHRONIZED_SMI_ACCESSORS
8406 #undef NOBARRIER_SMI_ACCESSORS
8407 #undef BOOL_GETTER
8408 #undef BOOL_ACCESSORS
8409 #undef FIELD_ADDR
8410 #undef FIELD_ADDR_CONST
8411 #undef READ_FIELD
8412 #undef NOBARRIER_READ_FIELD
8413 #undef WRITE_FIELD
8414 #undef NOBARRIER_WRITE_FIELD
8415 #undef WRITE_BARRIER
8416 #undef CONDITIONAL_WRITE_BARRIER
8417 #undef READ_DOUBLE_FIELD
8418 #undef WRITE_DOUBLE_FIELD
8419 #undef READ_INT_FIELD
8420 #undef WRITE_INT_FIELD
8421 #undef READ_INTPTR_FIELD
8422 #undef WRITE_INTPTR_FIELD
8423 #undef READ_UINT8_FIELD
8424 #undef WRITE_UINT8_FIELD
8425 #undef READ_INT8_FIELD
8426 #undef WRITE_INT8_FIELD
8427 #undef READ_UINT16_FIELD
8428 #undef WRITE_UINT16_FIELD
8429 #undef READ_INT16_FIELD
8430 #undef WRITE_INT16_FIELD
8431 #undef READ_UINT32_FIELD
8432 #undef WRITE_UINT32_FIELD
8433 #undef READ_INT32_FIELD
8434 #undef WRITE_INT32_FIELD
8435 #undef READ_FLOAT_FIELD
8436 #undef WRITE_FLOAT_FIELD
8437 #undef READ_UINT64_FIELD
8438 #undef WRITE_UINT64_FIELD
8439 #undef READ_INT64_FIELD
8440 #undef WRITE_INT64_FIELD
8441 #undef READ_BYTE_FIELD
8442 #undef WRITE_BYTE_FIELD
8443 #undef NOBARRIER_READ_BYTE_FIELD
8444 #undef NOBARRIER_WRITE_BYTE_FIELD
8445
8446 } // namespace internal
8447 } // namespace v8
8448
8449 #endif // V8_OBJECTS_INL_H_
8450