// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/code-stub-assembler.h"
#include "src/code-factory.h"
#include "src/frames-inl.h"
#include "src/frames.h"
#include "src/ic/handler-configuration.h"
#include "src/ic/stub-cache.h"

namespace v8 {
namespace internal {

using compiler::Node;

CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     const CallInterfaceDescriptor& descriptor,
                                     Code::Flags flags, const char* name,
                                     size_t result_size)
    : compiler::CodeAssembler(isolate, zone, descriptor, flags, name,
                              result_size) {}

CodeStubAssembler::CodeStubAssembler(Isolate* isolate, Zone* zone,
                                     int parameter_count, Code::Flags flags,
                                     const char* name)
    : compiler::CodeAssembler(isolate, zone, parameter_count, flags, name) {}
void CodeStubAssembler::Assert(ConditionBody condition_body,
                               const char* message, const char* file,
                               int line) {
#if defined(DEBUG)
  Label ok(this);
  Label not_ok(this, Label::kDeferred);
  if (message != nullptr && FLAG_code_comments) {
    Comment("[ Assert: %s", message);
  } else {
    Comment("[ Assert");
  }
  Node* condition = condition_body();
  DCHECK_NOT_NULL(condition);
  Branch(condition, &ok, &not_ok);
  Bind(&not_ok);
  if (message != nullptr) {
    char chars[1024];
    Vector<char> buffer(chars);
    if (file != nullptr) {
      SNPrintF(buffer, "CSA_ASSERT failed: %s [%s:%d]\n", message, file, line);
    } else {
      SNPrintF(buffer, "CSA_ASSERT failed: %s\n", message);
    }
    CallRuntime(
        Runtime::kGlobalPrint, SmiConstant(Smi::kZero),
        HeapConstant(factory()->NewStringFromAsciiChecked(&(buffer[0]))));
  }
  DebugBreak();
  Goto(&ok);
  Bind(&ok);
  Comment("] Assert");
#endif
}

Node* CodeStubAssembler::NoContextConstant() { return NumberConstant(0); }

#define HEAP_CONSTANT_ACCESSOR(rootName, name)     \
  Node* CodeStubAssembler::name##Constant() {      \
    return LoadRoot(Heap::k##rootName##RootIndex); \
  }
HEAP_CONSTANT_LIST(HEAP_CONSTANT_ACCESSOR);
#undef HEAP_CONSTANT_ACCESSOR

#define HEAP_CONSTANT_TEST(rootName, name)         \
  Node* CodeStubAssembler::Is##name(Node* value) { \
    return WordEqual(value, name##Constant());     \
  }
HEAP_CONSTANT_LIST(HEAP_CONSTANT_TEST);
#undef HEAP_CONSTANT_TEST

Node* CodeStubAssembler::HashSeed() {
  return LoadAndUntagToWord32Root(Heap::kHashSeedRootIndex);
}

Node* CodeStubAssembler::StaleRegisterConstant() {
  return LoadRoot(Heap::kStaleRegisterRootIndex);
}

Node* CodeStubAssembler::IntPtrOrSmiConstant(int value, ParameterMode mode) {
  if (mode == SMI_PARAMETERS) {
    return SmiConstant(Smi::FromInt(value));
  } else {
    DCHECK(mode == INTEGER_PARAMETERS || mode == INTPTR_PARAMETERS);
    return IntPtrConstant(value);
  }
}

Node* CodeStubAssembler::IntPtrAddFoldConstants(Node* left, Node* right) {
  int32_t left_constant;
  bool is_left_constant = ToInt32Constant(left, left_constant);
  int32_t right_constant;
  bool is_right_constant = ToInt32Constant(right, right_constant);
  if (is_left_constant) {
    if (is_right_constant) {
      return IntPtrConstant(left_constant + right_constant);
    }
    if (left_constant == 0) {
      return right;
    }
  } else if (is_right_constant) {
    if (right_constant == 0) {
      return left;
    }
  }
  return IntPtrAdd(left, right);
}

Node* CodeStubAssembler::IntPtrSubFoldConstants(Node* left, Node* right) {
  int32_t left_constant;
  bool is_left_constant = ToInt32Constant(left, left_constant);
  int32_t right_constant;
  bool is_right_constant = ToInt32Constant(right, right_constant);
  if (is_left_constant) {
    if (is_right_constant) {
      return IntPtrConstant(left_constant - right_constant);
    }
  } else if (is_right_constant) {
    if (right_constant == 0) {
      return left;
    }
  }
  return IntPtrSub(left, right);
}

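// Rounds {value} up to the next power of two with the classic bit-smear
// technique: after subtracting one, each shift-and-or pass copies the highest
// set bit into every lower position, so adding one afterwards yields a power
// of two. For example, value = 5 (0b101) becomes 4 (0b100), smears to 0b111,
// and the final add returns 8.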
Node* CodeStubAssembler::IntPtrRoundUpToPowerOfTwo32(Node* value) {
  Comment("IntPtrRoundUpToPowerOfTwo32");
  CSA_ASSERT(this, UintPtrLessThanOrEqual(value, IntPtrConstant(0x80000000u)));
  value = IntPtrSub(value, IntPtrConstant(1));
  for (int i = 1; i <= 16; i *= 2) {
    value = WordOr(value, WordShr(value, IntPtrConstant(i)));
  }
  return IntPtrAdd(value, IntPtrConstant(1));
}

Node* CodeStubAssembler::WordIsPowerOfTwo(Node* value) {
  // value && !(value & (value - 1))
  return WordEqual(
      Select(WordEqual(value, IntPtrConstant(0)), IntPtrConstant(1),
             WordAnd(value, IntPtrSub(value, IntPtrConstant(1))),
             MachineType::PointerRepresentation()),
      IntPtrConstant(0));
}

Node* CodeStubAssembler::Float64Round(Node* x) {
  Node* one = Float64Constant(1.0);
  Node* one_half = Float64Constant(0.5);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this);

  // Round up {x} towards Infinity.
  var_x.Bind(Float64Ceil(x));

  GotoIf(Float64LessThanOrEqual(Float64Sub(var_x.value(), one_half), x),
         &return_x);
  var_x.Bind(Float64Sub(var_x.value(), one));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

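// When the machine has no rounding instruction, Float64Ceil (and the related
// rounding routines below) rely on the fact that adding and then subtracting
// 2^52 forces a double in ]0,2^52[ to an integer, because a double has exactly
// 52 fraction bits. A correction step then adjusts the result in case the
// implicit round-to-nearest went the wrong way for the desired direction.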
Node* CodeStubAssembler::Float64Ceil(Node* x) {
  if (IsFloat64RoundUpSupported()) {
    return Float64RoundUp(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this), return_minus_x(this);
  var_x.Bind(x);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoUnless(Float64LessThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoUnless(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_minus_x);
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

Node* CodeStubAssembler::Float64Floor(Node* x) {
  if (IsFloat64RoundDownSupported()) {
    return Float64RoundDown(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this), return_minus_x(this);
  var_x.Bind(x);

  // Check if {x} is greater than zero.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]0,2^52[.
    GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

    // Round positive {x} towards -Infinity.
    var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
    GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
    var_x.Bind(Float64Sub(var_x.value(), one));
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    // Just return {x} unless it's in the range ]-2^52,0[
    GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
    GotoUnless(Float64LessThan(x, zero), &return_x);

    // Round negated {x} towards -Infinity and return the result negated.
    Node* minus_x = Float64Neg(x);
    var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
    GotoUnless(Float64LessThan(var_x.value(), minus_x), &return_minus_x);
    var_x.Bind(Float64Add(var_x.value(), one));
    Goto(&return_minus_x);
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

Node* CodeStubAssembler::Float64RoundToEven(Node* x) {
  if (IsFloat64RoundTiesEvenSupported()) {
    return Float64RoundTiesEven(x);
  }
  // See ES#sec-touint8clamp for details.
  Node* f = Float64Floor(x);
  Node* f_and_half = Float64Add(f, Float64Constant(0.5));

  Variable var_result(this, MachineRepresentation::kFloat64);
  Label return_f(this), return_f_plus_one(this), done(this);

  GotoIf(Float64LessThan(f_and_half, x), &return_f_plus_one);
  GotoIf(Float64LessThan(x, f_and_half), &return_f);
  {
    Node* f_mod_2 = Float64Mod(f, Float64Constant(2.0));
    Branch(Float64Equal(f_mod_2, Float64Constant(0.0)), &return_f,
           &return_f_plus_one);
  }

  Bind(&return_f);
  var_result.Bind(f);
  Goto(&done);

  Bind(&return_f_plus_one);
  var_result.Bind(Float64Add(f, Float64Constant(1.0)));
  Goto(&done);

  Bind(&done);
  return var_result.value();
}

Node* CodeStubAssembler::Float64Trunc(Node* x) {
  if (IsFloat64RoundTruncateSupported()) {
    return Float64RoundTruncate(x);
  }

  Node* one = Float64Constant(1.0);
  Node* zero = Float64Constant(0.0);
  Node* two_52 = Float64Constant(4503599627370496.0E0);
  Node* minus_two_52 = Float64Constant(-4503599627370496.0E0);

  Variable var_x(this, MachineRepresentation::kFloat64);
  Label return_x(this), return_minus_x(this);
  var_x.Bind(x);

  // Check if {x} is greater than 0.
  Label if_xgreaterthanzero(this), if_xnotgreaterthanzero(this);
  Branch(Float64GreaterThan(x, zero), &if_xgreaterthanzero,
         &if_xnotgreaterthanzero);

  Bind(&if_xgreaterthanzero);
  {
    if (IsFloat64RoundDownSupported()) {
      var_x.Bind(Float64RoundDown(x));
    } else {
      // Just return {x} unless it's in the range ]0,2^52[.
      GotoIf(Float64GreaterThanOrEqual(x, two_52), &return_x);

      // Round positive {x} towards -Infinity.
      var_x.Bind(Float64Sub(Float64Add(two_52, x), two_52));
      GotoUnless(Float64GreaterThan(var_x.value(), x), &return_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
    }
    Goto(&return_x);
  }

  Bind(&if_xnotgreaterthanzero);
  {
    if (IsFloat64RoundUpSupported()) {
      var_x.Bind(Float64RoundUp(x));
      Goto(&return_x);
    } else {
      // Just return {x} unless it's in the range ]-2^52,0[.
      GotoIf(Float64LessThanOrEqual(x, minus_two_52), &return_x);
      GotoUnless(Float64LessThan(x, zero), &return_x);

      // Round negated {x} towards -Infinity and return result negated.
      Node* minus_x = Float64Neg(x);
      var_x.Bind(Float64Sub(Float64Add(two_52, minus_x), two_52));
      GotoUnless(Float64GreaterThan(var_x.value(), minus_x), &return_minus_x);
      var_x.Bind(Float64Sub(var_x.value(), one));
      Goto(&return_minus_x);
    }
  }

  Bind(&return_minus_x);
  var_x.Bind(Float64Neg(var_x.value()));
  Goto(&return_x);

  Bind(&return_x);
  return var_x.value();
}

Node* CodeStubAssembler::SmiShiftBitsConstant() {
  return IntPtrConstant(kSmiShiftSize + kSmiTagSize);
}

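// Smi encoding recap: a Smi stores its integer payload shifted left by
// kSmiShiftSize + kSmiTagSize with a zero tag bit. On 64-bit targets that
// shift is 32, so the payload occupies the upper word; on 32-bit targets the
// shift is 1. SmiShiftBitsConstant above computes the shift; the tag/untag
// helpers below apply it.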
Node* CodeStubAssembler::SmiFromWord32(Node* value) {
  value = ChangeInt32ToIntPtr(value);
  return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
}

Node* CodeStubAssembler::SmiTag(Node* value) {
  int32_t constant_value;
  if (ToInt32Constant(value, constant_value) && Smi::IsValid(constant_value)) {
    return SmiConstant(Smi::FromInt(constant_value));
  }
  return BitcastWordToTaggedSigned(WordShl(value, SmiShiftBitsConstant()));
}

Node* CodeStubAssembler::SmiUntag(Node* value) {
  return WordSar(BitcastTaggedToWord(value), SmiShiftBitsConstant());
}

Node* CodeStubAssembler::SmiToWord32(Node* value) {
  Node* result = SmiUntag(value);
  if (Is64()) {
    result = TruncateInt64ToInt32(result);
  }
  return result;
}

Node* CodeStubAssembler::SmiToFloat64(Node* value) {
  return ChangeInt32ToFloat64(SmiToWord32(value));
}

Node* CodeStubAssembler::SmiAdd(Node* a, Node* b) {
  return BitcastWordToTaggedSigned(
      IntPtrAdd(BitcastTaggedToWord(a), BitcastTaggedToWord(b)));
}

Node* CodeStubAssembler::SmiSub(Node* a, Node* b) {
  return BitcastWordToTaggedSigned(
      IntPtrSub(BitcastTaggedToWord(a), BitcastTaggedToWord(b)));
}

Node* CodeStubAssembler::SmiEqual(Node* a, Node* b) {
  return WordEqual(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
}

Node* CodeStubAssembler::SmiAbove(Node* a, Node* b) {
  return UintPtrGreaterThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
}

Node* CodeStubAssembler::SmiAboveOrEqual(Node* a, Node* b) {
  return UintPtrGreaterThanOrEqual(BitcastTaggedToWord(a),
                                   BitcastTaggedToWord(b));
}

Node* CodeStubAssembler::SmiBelow(Node* a, Node* b) {
  return UintPtrLessThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
}

Node* CodeStubAssembler::SmiLessThan(Node* a, Node* b) {
  return IntPtrLessThan(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
}

Node* CodeStubAssembler::SmiLessThanOrEqual(Node* a, Node* b) {
  return IntPtrLessThanOrEqual(BitcastTaggedToWord(a), BitcastTaggedToWord(b));
}

Node* CodeStubAssembler::SmiMax(Node* a, Node* b) {
  return Select(SmiLessThan(a, b), b, a);
}

Node* CodeStubAssembler::SmiMin(Node* a, Node* b) {
  return Select(SmiLessThan(a, b), a, b);
}

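// SmiMod implements the JavaScript % semantics for Smi inputs: the result
// takes the sign of the dividend {a}, any modulus by zero produces NaN, and a
// zero result with a negative dividend must be -0. Those last two results
// cannot be represented as Smis, which is why the result is a tagged value.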
Node* CodeStubAssembler::SmiMod(Node* a, Node* b) {
  Variable var_result(this, MachineRepresentation::kTagged);
  Label return_result(this, &var_result),
      return_minuszero(this, Label::kDeferred),
      return_nan(this, Label::kDeferred);

  // Untag {a} and {b}.
  a = SmiToWord32(a);
  b = SmiToWord32(b);

  // Return NaN if {b} is zero.
  GotoIf(Word32Equal(b, Int32Constant(0)), &return_nan);

  // Check if {a} is non-negative.
  Label if_aisnotnegative(this), if_aisnegative(this, Label::kDeferred);
  Branch(Int32LessThanOrEqual(Int32Constant(0), a), &if_aisnotnegative,
         &if_aisnegative);

  Bind(&if_aisnotnegative);
  {
    // Fast case, don't need to check any other edge cases.
    Node* r = Int32Mod(a, b);
    var_result.Bind(SmiFromWord32(r));
    Goto(&return_result);
  }

  Bind(&if_aisnegative);
  {
    if (SmiValuesAre32Bits()) {
      // Check if {a} is kMinInt and {b} is -1 (only relevant if the
      // kMinInt is actually representable as a Smi).
      Label join(this);
      GotoUnless(Word32Equal(a, Int32Constant(kMinInt)), &join);
      GotoIf(Word32Equal(b, Int32Constant(-1)), &return_minuszero);
      Goto(&join);
      Bind(&join);
    }

    // Perform the integer modulus operation.
    Node* r = Int32Mod(a, b);

    // Check if {r} is zero, and if so return -0, because we have to
    // take the sign of the left hand side {a}, which is negative.
    GotoIf(Word32Equal(r, Int32Constant(0)), &return_minuszero);

    // The remainder {r} can be outside the valid Smi range on 32bit
    // architectures, so we cannot just say SmiFromWord32(r) here.
    var_result.Bind(ChangeInt32ToTagged(r));
    Goto(&return_result);
  }

  Bind(&return_minuszero);
  var_result.Bind(MinusZeroConstant());
  Goto(&return_result);

  Bind(&return_nan);
  var_result.Bind(NanConstant());
  Goto(&return_result);

  Bind(&return_result);
  return var_result.value();
}

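// SmiMul multiplies in 32-bit space and inspects the (value, overflow) pair
// returned by Int32MulWithOverflow: projection 0 is the truncated product,
// projection 1 is the overflow flag. On overflow the product is recomputed in
// double precision and boxed as a HeapNumber.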
Node* CodeStubAssembler::SmiMul(Node* a, Node* b) {
  Variable var_result(this, MachineRepresentation::kTagged);
  Variable var_lhs_float64(this, MachineRepresentation::kFloat64),
      var_rhs_float64(this, MachineRepresentation::kFloat64);
  Label return_result(this, &var_result);

  // Both {a} and {b} are Smis. Convert them to integers and multiply.
  Node* lhs32 = SmiToWord32(a);
  Node* rhs32 = SmiToWord32(b);
  Node* pair = Int32MulWithOverflow(lhs32, rhs32);

  Node* overflow = Projection(1, pair);

  // Check if the multiplication overflowed.
  Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
  Branch(overflow, &if_overflow, &if_notoverflow);
  Bind(&if_notoverflow);
  {
    // If the answer is zero, we may need to return -0.0, depending on the
    // input.
    Label answer_zero(this), answer_not_zero(this);
    Node* answer = Projection(0, pair);
    Node* zero = Int32Constant(0);
    Branch(WordEqual(answer, zero), &answer_zero, &answer_not_zero);
    Bind(&answer_not_zero);
    {
      var_result.Bind(ChangeInt32ToTagged(answer));
      Goto(&return_result);
    }
    Bind(&answer_zero);
    {
      Node* or_result = Word32Or(lhs32, rhs32);
      Label if_should_be_negative_zero(this), if_should_be_zero(this);
      Branch(Int32LessThan(or_result, zero), &if_should_be_negative_zero,
             &if_should_be_zero);
      Bind(&if_should_be_negative_zero);
      {
        var_result.Bind(MinusZeroConstant());
        Goto(&return_result);
      }
      Bind(&if_should_be_zero);
      {
        var_result.Bind(zero);
        Goto(&return_result);
      }
    }
  }
  Bind(&if_overflow);
  {
    var_lhs_float64.Bind(SmiToFloat64(a));
    var_rhs_float64.Bind(SmiToFloat64(b));
    Node* value = Float64Mul(var_lhs_float64.value(), var_rhs_float64.value());
    Node* result = AllocateHeapNumberWithValue(value);
    var_result.Bind(result);
    Goto(&return_result);
  }

  Bind(&return_result);
  return var_result.value();
}

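// A tagged value is a Smi iff its low tag bit is clear (kSmiTag == 0), so
// masking with kSmiTagMask and comparing against zero suffices.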
Node* CodeStubAssembler::TaggedIsSmi(Node* a) {
  return WordEqual(
      WordAnd(BitcastTaggedToWord(a), IntPtrConstant(kSmiTagMask)),
      IntPtrConstant(0));
}

Node* CodeStubAssembler::WordIsPositiveSmi(Node* a) {
  return WordEqual(WordAnd(a, IntPtrConstant(kSmiTagMask | kSmiSignMask)),
                   IntPtrConstant(0));
}

Node* CodeStubAssembler::WordIsWordAligned(Node* word) {
  return WordEqual(IntPtrConstant(0),
                   WordAnd(word, IntPtrConstant((1 << kPointerSizeLog2) - 1)));
}

void CodeStubAssembler::BranchIfSimd128Equal(Node* lhs, Node* lhs_map,
                                             Node* rhs, Node* rhs_map,
                                             Label* if_equal,
                                             Label* if_notequal) {
  Label if_mapsame(this), if_mapnotsame(this);
  Branch(WordEqual(lhs_map, rhs_map), &if_mapsame, &if_mapnotsame);

  Bind(&if_mapsame);
  {
    // Both {lhs} and {rhs} are Simd128Values with the same map, need special
    // handling for Float32x4 because of NaN comparisons.
    Label if_float32x4(this), if_notfloat32x4(this);
    Node* float32x4_map = HeapConstant(factory()->float32x4_map());
    Branch(WordEqual(lhs_map, float32x4_map), &if_float32x4, &if_notfloat32x4);

    Bind(&if_float32x4);
    {
      // Both {lhs} and {rhs} are Float32x4, compare the lanes individually
      // using a floating point comparison.
      for (int offset = Float32x4::kValueOffset - kHeapObjectTag;
           offset < Float32x4::kSize - kHeapObjectTag;
           offset += sizeof(float)) {
        // Load the floating point values for {lhs} and {rhs}.
        Node* lhs_value =
            Load(MachineType::Float32(), lhs, IntPtrConstant(offset));
        Node* rhs_value =
            Load(MachineType::Float32(), rhs, IntPtrConstant(offset));

        // Perform a floating point comparison.
        Label if_valueequal(this), if_valuenotequal(this);
        Branch(Float32Equal(lhs_value, rhs_value), &if_valueequal,
               &if_valuenotequal);
        Bind(&if_valuenotequal);
        Goto(if_notequal);
        Bind(&if_valueequal);
      }

      // All 4 lanes match, {lhs} and {rhs} considered equal.
      Goto(if_equal);
    }

    Bind(&if_notfloat32x4);
    {
      // For other Simd128Values we just perform a bitwise comparison.
      for (int offset = Simd128Value::kValueOffset - kHeapObjectTag;
           offset < Simd128Value::kSize - kHeapObjectTag;
           offset += kPointerSize) {
        // Load the word values for {lhs} and {rhs}.
        Node* lhs_value =
            Load(MachineType::Pointer(), lhs, IntPtrConstant(offset));
        Node* rhs_value =
            Load(MachineType::Pointer(), rhs, IntPtrConstant(offset));

        // Perform a bitwise word-comparison.
        Label if_valueequal(this), if_valuenotequal(this);
        Branch(WordEqual(lhs_value, rhs_value), &if_valueequal,
               &if_valuenotequal);
        Bind(&if_valuenotequal);
        Goto(if_notequal);
        Bind(&if_valueequal);
      }

      // Bitwise comparison succeeded, {lhs} and {rhs} considered equal.
      Goto(if_equal);
    }
  }

  Bind(&if_mapnotsame);
  Goto(if_notequal);
}

void CodeStubAssembler::BranchIfPrototypesHaveNoElements(
    Node* receiver_map, Label* definitely_no_elements,
    Label* possibly_elements) {
  Variable var_map(this, MachineRepresentation::kTagged);
  var_map.Bind(receiver_map);
  Label loop_body(this, &var_map);
  Node* empty_elements = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
  Goto(&loop_body);

  Bind(&loop_body);
  {
    Node* map = var_map.value();
    Node* prototype = LoadMapPrototype(map);
    GotoIf(WordEqual(prototype, NullConstant()), definitely_no_elements);
    Node* prototype_map = LoadMap(prototype);
    // Pessimistically assume elements if a Proxy, Special API Object,
    // or JSValue wrapper is found on the prototype chain. After this
    // instance type check, it's not necessary to check for interceptors or
    // access checks.
    GotoIf(Int32LessThanOrEqual(LoadMapInstanceType(prototype_map),
                                Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
           possibly_elements);
    GotoIf(WordNotEqual(LoadElements(prototype), empty_elements),
           possibly_elements);
    var_map.Bind(prototype_map);
    Goto(&loop_body);
  }
}

void CodeStubAssembler::BranchIfJSReceiver(Node* object, Label* if_true,
                                           Label* if_false) {
  GotoIf(TaggedIsSmi(object), if_false);
  STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
  Branch(Int32GreaterThanOrEqual(LoadInstanceType(object),
                                 Int32Constant(FIRST_JS_RECEIVER_TYPE)),
         if_true, if_false);
}

void CodeStubAssembler::BranchIfJSObject(Node* object, Label* if_true,
                                         Label* if_false) {
  GotoIf(TaggedIsSmi(object), if_false);
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  Branch(Int32GreaterThanOrEqual(LoadInstanceType(object),
                                 Int32Constant(FIRST_JS_OBJECT_TYPE)),
         if_true, if_false);
}

void CodeStubAssembler::BranchIfFastJSArray(Node* object, Node* context,
                                            Label* if_true, Label* if_false) {
  // Bailout if receiver is a Smi.
  GotoIf(TaggedIsSmi(object), if_false);

  Node* map = LoadMap(object);

  // Bailout if instance type is not JS_ARRAY_TYPE.
  GotoIf(WordNotEqual(LoadMapInstanceType(map), Int32Constant(JS_ARRAY_TYPE)),
         if_false);

  Node* elements_kind = LoadMapElementsKind(map);

  // Bailout if receiver has slow elements.
  GotoUnless(IsFastElementsKind(elements_kind), if_false);

  // Check prototype chain if receiver does not have packed elements.
  GotoUnless(IsHoleyFastElementsKind(elements_kind), if_true);

  BranchIfPrototypesHaveNoElements(map, if_true, if_false);
}

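// Inline bump-pointer allocation: the current allocation top and limit live at
// fixed external addresses. If the requested size fits below the limit, the
// top is simply advanced and the old top (plus the heap object tag) becomes
// the result; otherwise the runtime is called, possibly triggering a GC.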
Node* CodeStubAssembler::AllocateRawUnaligned(Node* size_in_bytes,
                                              AllocationFlags flags,
                                              Node* top_address,
                                              Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);

  // If there's not enough space, call the runtime.
  Variable result(this, MachineRepresentation::kTagged);
  Label runtime_call(this, Label::kDeferred), no_runtime_call(this);
  Label merge_runtime(this, &result);

  Node* new_top = IntPtrAdd(top, size_in_bytes);
  Branch(UintPtrGreaterThanOrEqual(new_top, limit), &runtime_call,
         &no_runtime_call);

  Bind(&runtime_call);
  Node* runtime_result;
  if (flags & kPretenured) {
    Node* runtime_flags = SmiConstant(
        Smi::FromInt(AllocateDoubleAlignFlag::encode(false) |
                     AllocateTargetSpace::encode(AllocationSpace::OLD_SPACE)));
    runtime_result =
        CallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
                    SmiTag(size_in_bytes), runtime_flags);
  } else {
    runtime_result = CallRuntime(Runtime::kAllocateInNewSpace,
                                 NoContextConstant(), SmiTag(size_in_bytes));
  }
  result.Bind(runtime_result);
  Goto(&merge_runtime);

  // When there is enough space, return `top' and bump it up.
  Bind(&no_runtime_call);
  Node* no_runtime_result = top;
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      new_top);
  no_runtime_result = BitcastWordToTagged(
      IntPtrAdd(no_runtime_result, IntPtrConstant(kHeapObjectTag)));
  result.Bind(no_runtime_result);
  Goto(&merge_runtime);

  Bind(&merge_runtime);
  return result.value();
}

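// On 32-bit targets doubles must be 8-byte aligned, but the allocation top is
// only guaranteed to be pointer aligned. When the top is misaligned this path
// allocates one extra word, places a one-pointer filler at the start, and
// returns the properly aligned address after it.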
Node* CodeStubAssembler::AllocateRawAligned(Node* size_in_bytes,
                                            AllocationFlags flags,
                                            Node* top_address,
                                            Node* limit_address) {
  Node* top = Load(MachineType::Pointer(), top_address);
  Node* limit = Load(MachineType::Pointer(), limit_address);
  Variable adjusted_size(this, MachineType::PointerRepresentation());
  adjusted_size.Bind(size_in_bytes);
  if (flags & kDoubleAlignment) {
    // TODO(epertoso): Simd128 alignment.
    Label aligned(this), not_aligned(this), merge(this, &adjusted_size);
    Branch(WordAnd(top, IntPtrConstant(kDoubleAlignmentMask)), &not_aligned,
           &aligned);

    Bind(&not_aligned);
    Node* not_aligned_size =
        IntPtrAdd(size_in_bytes, IntPtrConstant(kPointerSize));
    adjusted_size.Bind(not_aligned_size);
    Goto(&merge);

    Bind(&aligned);
    Goto(&merge);

    Bind(&merge);
  }

  Variable address(this, MachineRepresentation::kTagged);
  address.Bind(AllocateRawUnaligned(adjusted_size.value(), kNone, top, limit));

  Label needs_filler(this), doesnt_need_filler(this),
      merge_address(this, &address);
  Branch(IntPtrEqual(adjusted_size.value(), size_in_bytes), &doesnt_need_filler,
         &needs_filler);

  Bind(&needs_filler);
  // Store a filler and increase the address by kPointerSize.
  // TODO(epertoso): this code assumes that we only align to kDoubleSize. Change
  // it when Simd128 alignment is supported.
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top,
                      LoadRoot(Heap::kOnePointerFillerMapRootIndex));
  address.Bind(BitcastWordToTagged(
      IntPtrAdd(address.value(), IntPtrConstant(kPointerSize))));
  Goto(&merge_address);

  Bind(&doesnt_need_filler);
  Goto(&merge_address);

  Bind(&merge_address);
  // Update the top.
  StoreNoWriteBarrier(MachineType::PointerRepresentation(), top_address,
                      IntPtrAdd(top, adjusted_size.value()));
  return address.value();
}

Node* CodeStubAssembler::Allocate(Node* size_in_bytes, AllocationFlags flags) {
  Comment("Allocate");
  bool const new_space = !(flags & kPretenured);
  Node* top_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_top_address(isolate())
          : ExternalReference::old_space_allocation_top_address(isolate()));
  Node* limit_address = ExternalConstant(
      new_space
          ? ExternalReference::new_space_allocation_limit_address(isolate())
          : ExternalReference::old_space_allocation_limit_address(isolate()));

#ifdef V8_HOST_ARCH_32_BIT
  if (flags & kDoubleAlignment) {
    return AllocateRawAligned(size_in_bytes, flags, top_address, limit_address);
  }
#endif

  return AllocateRawUnaligned(size_in_bytes, flags, top_address, limit_address);
}

Node* CodeStubAssembler::Allocate(int size_in_bytes, AllocationFlags flags) {
  return CodeStubAssembler::Allocate(IntPtrConstant(size_in_bytes), flags);
}

Node* CodeStubAssembler::InnerAllocate(Node* previous, Node* offset) {
  return BitcastWordToTagged(IntPtrAdd(previous, offset));
}

Node* CodeStubAssembler::InnerAllocate(Node* previous, int offset) {
  return InnerAllocate(previous, IntPtrConstant(offset));
}

Node* CodeStubAssembler::IsRegularHeapObjectSize(Node* size) {
  return UintPtrLessThanOrEqual(size,
                                IntPtrConstant(kMaxRegularHeapObjectSize));
}

void CodeStubAssembler::BranchIfToBooleanIsTrue(Node* value, Label* if_true,
                                                Label* if_false) {
  Label if_valueissmi(this), if_valueisnotsmi(this), if_valueisstring(this),
      if_valueisheapnumber(this), if_valueisother(this);

  // Fast check for Boolean {value}s (common case).
  GotoIf(WordEqual(value, BooleanConstant(true)), if_true);
  GotoIf(WordEqual(value, BooleanConstant(false)), if_false);

  // Check if {value} is a Smi or a HeapObject.
  Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);

  Bind(&if_valueissmi);
  {
    // The {value} is a Smi, only need to check against zero.
    BranchIfSmiEqual(value, SmiConstant(0), if_false, if_true);
  }

  Bind(&if_valueisnotsmi);
  {
    // The {value} is a HeapObject, load its map.
    Node* value_map = LoadMap(value);

    // Load the {value}s instance type.
    Node* value_instance_type = LoadMapInstanceType(value_map);

    // Dispatch based on the instance type; we distinguish all String instance
    // types, the HeapNumber type and everything else.
    GotoIf(Word32Equal(value_instance_type, Int32Constant(HEAP_NUMBER_TYPE)),
           &if_valueisheapnumber);
    Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
           &if_valueisother);

    Bind(&if_valueisstring);
    {
      // Load the string length field of the {value}.
      Node* value_length = LoadObjectField(value, String::kLengthOffset);

      // Check if the {value} is the empty string.
      BranchIfSmiEqual(value_length, SmiConstant(0), if_false, if_true);
    }

    Bind(&if_valueisheapnumber);
    {
      // Load the floating point value of {value}.
      Node* value_value = LoadObjectField(value, HeapNumber::kValueOffset,
                                          MachineType::Float64());

      // Check if the floating point {value} is neither 0.0, -0.0 nor NaN.
      Branch(Float64LessThan(Float64Constant(0.0), Float64Abs(value_value)),
             if_true, if_false);
    }

    Bind(&if_valueisother);
    {
      // Load the bit field from the {value}s map. The {value} is now either
      // Null or Undefined, which have the undetectable bit set (so we always
      // return false for those), or a Symbol or Simd128Value, whose maps never
      // have the undetectable bit set (so we always return true for those), or
      // a JSReceiver, which may or may not have the undetectable bit set.
      Node* value_map_bitfield = LoadMapBitField(value_map);
      Node* value_map_undetectable = Word32And(
          value_map_bitfield, Int32Constant(1 << Map::kIsUndetectable));

      // Check if the {value} is undetectable.
      Branch(Word32Equal(value_map_undetectable, Int32Constant(0)), if_true,
             if_false);
    }
  }
}

compiler::Node* CodeStubAssembler::LoadFromFrame(int offset, MachineType rep) {
  Node* frame_pointer = LoadFramePointer();
  return Load(rep, frame_pointer, IntPtrConstant(offset));
}

compiler::Node* CodeStubAssembler::LoadFromParentFrame(int offset,
                                                       MachineType rep) {
  Node* frame_pointer = LoadParentFramePointer();
  return Load(rep, frame_pointer, IntPtrConstant(offset));
}

Node* CodeStubAssembler::LoadBufferObject(Node* buffer, int offset,
                                          MachineType rep) {
  return Load(rep, buffer, IntPtrConstant(offset));
}

Node* CodeStubAssembler::LoadObjectField(Node* object, int offset,
                                         MachineType rep) {
  return Load(rep, object, IntPtrConstant(offset - kHeapObjectTag));
}

Node* CodeStubAssembler::LoadObjectField(Node* object, Node* offset,
                                         MachineType rep) {
  return Load(rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)));
}

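// On 64-bit targets the Smi payload occupies the upper 32 bits of the field,
// so on little-endian machines it can be read directly as an Int32 from
// offset + kPointerSize / 2, avoiding the untagging shift. The untagging
// loads below all use this trick.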
Node* CodeStubAssembler::LoadAndUntagObjectField(Node* object, int offset) {
  if (Is64()) {
#if V8_TARGET_LITTLE_ENDIAN
    offset += kPointerSize / 2;
#endif
    return ChangeInt32ToInt64(
        LoadObjectField(object, offset, MachineType::Int32()));
  } else {
    return SmiToWord(LoadObjectField(object, offset, MachineType::AnyTagged()));
  }
}

Node* CodeStubAssembler::LoadAndUntagToWord32ObjectField(Node* object,
                                                         int offset) {
  if (Is64()) {
#if V8_TARGET_LITTLE_ENDIAN
    offset += kPointerSize / 2;
#endif
    return LoadObjectField(object, offset, MachineType::Int32());
  } else {
    return SmiToWord32(
        LoadObjectField(object, offset, MachineType::AnyTagged()));
  }
}

Node* CodeStubAssembler::LoadAndUntagSmi(Node* base, int index) {
  if (Is64()) {
#if V8_TARGET_LITTLE_ENDIAN
    index += kPointerSize / 2;
#endif
    return ChangeInt32ToInt64(
        Load(MachineType::Int32(), base, IntPtrConstant(index)));
  } else {
    return SmiToWord(
        Load(MachineType::AnyTagged(), base, IntPtrConstant(index)));
  }
}

Node* CodeStubAssembler::LoadAndUntagToWord32Root(
    Heap::RootListIndex root_index) {
  Node* roots_array_start =
      ExternalConstant(ExternalReference::roots_array_start(isolate()));
  int index = root_index * kPointerSize;
  if (Is64()) {
#if V8_TARGET_LITTLE_ENDIAN
    index += kPointerSize / 2;
#endif
    return Load(MachineType::Int32(), roots_array_start, IntPtrConstant(index));
  } else {
    return SmiToWord32(Load(MachineType::AnyTagged(), roots_array_start,
                            IntPtrConstant(index)));
  }
}

Node* CodeStubAssembler::LoadHeapNumberValue(Node* object) {
  return LoadObjectField(object, HeapNumber::kValueOffset,
                         MachineType::Float64());
}

Node* CodeStubAssembler::LoadMap(Node* object) {
  return LoadObjectField(object, HeapObject::kMapOffset);
}

Node* CodeStubAssembler::LoadInstanceType(Node* object) {
  return LoadMapInstanceType(LoadMap(object));
}

Node* CodeStubAssembler::HasInstanceType(Node* object,
                                         InstanceType instance_type) {
  return Word32Equal(LoadInstanceType(object), Int32Constant(instance_type));
}

Node* CodeStubAssembler::LoadProperties(Node* object) {
  return LoadObjectField(object, JSObject::kPropertiesOffset);
}

Node* CodeStubAssembler::LoadElements(Node* object) {
  return LoadObjectField(object, JSObject::kElementsOffset);
}

Node* CodeStubAssembler::LoadJSArrayLength(Node* array) {
  CSA_ASSERT(this, IsJSArray(array));
  return LoadObjectField(array, JSArray::kLengthOffset);
}

Node* CodeStubAssembler::LoadFixedArrayBaseLength(Node* array) {
  return LoadObjectField(array, FixedArrayBase::kLengthOffset);
}

Node* CodeStubAssembler::LoadAndUntagFixedArrayBaseLength(Node* array) {
  return LoadAndUntagObjectField(array, FixedArrayBase::kLengthOffset);
}

Node* CodeStubAssembler::LoadMapBitField(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return LoadObjectField(map, Map::kBitFieldOffset, MachineType::Uint8());
}

Node* CodeStubAssembler::LoadMapBitField2(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return LoadObjectField(map, Map::kBitField2Offset, MachineType::Uint8());
}

Node* CodeStubAssembler::LoadMapBitField3(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return LoadObjectField(map, Map::kBitField3Offset, MachineType::Uint32());
}

Node* CodeStubAssembler::LoadMapInstanceType(Node* map) {
  return LoadObjectField(map, Map::kInstanceTypeOffset, MachineType::Uint8());
}

Node* CodeStubAssembler::LoadMapElementsKind(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  Node* bit_field2 = LoadMapBitField2(map);
  return DecodeWord32<Map::ElementsKindBits>(bit_field2);
}

Node* CodeStubAssembler::LoadMapDescriptors(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return LoadObjectField(map, Map::kDescriptorsOffset);
}

Node* CodeStubAssembler::LoadMapPrototype(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return LoadObjectField(map, Map::kPrototypeOffset);
}

Node* CodeStubAssembler::LoadMapPrototypeInfo(Node* map,
                                              Label* if_no_proto_info) {
  CSA_ASSERT(this, IsMap(map));
  Node* prototype_info =
      LoadObjectField(map, Map::kTransitionsOrPrototypeInfoOffset);
  GotoIf(TaggedIsSmi(prototype_info), if_no_proto_info);
  GotoUnless(WordEqual(LoadMap(prototype_info),
                       LoadRoot(Heap::kPrototypeInfoMapRootIndex)),
             if_no_proto_info);
  return prototype_info;
}

Node* CodeStubAssembler::LoadMapInstanceSize(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  return ChangeUint32ToWord(
      LoadObjectField(map, Map::kInstanceSizeOffset, MachineType::Uint8()));
}

Node* CodeStubAssembler::LoadMapInobjectProperties(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  // See Map::GetInObjectProperties() for details.
  STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
  CSA_ASSERT(this,
             Int32GreaterThanOrEqual(LoadMapInstanceType(map),
                                     Int32Constant(FIRST_JS_OBJECT_TYPE)));
  return ChangeUint32ToWord(LoadObjectField(
      map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
      MachineType::Uint8()));
}

Node* CodeStubAssembler::LoadMapConstructorFunctionIndex(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  // See Map::GetConstructorFunctionIndex() for details.
  STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
  CSA_ASSERT(this, Int32LessThanOrEqual(LoadMapInstanceType(map),
                                        Int32Constant(LAST_PRIMITIVE_TYPE)));
  return ChangeUint32ToWord(LoadObjectField(
      map, Map::kInObjectPropertiesOrConstructorFunctionIndexOffset,
      MachineType::Uint8()));
}

Node* CodeStubAssembler::LoadMapConstructor(Node* map) {
  CSA_SLOW_ASSERT(this, IsMap(map));
  Variable result(this, MachineRepresentation::kTagged);
  result.Bind(LoadObjectField(map, Map::kConstructorOrBackPointerOffset));

  Label done(this), loop(this, &result);
  Goto(&loop);
  Bind(&loop);
  {
    GotoIf(TaggedIsSmi(result.value()), &done);
    Node* is_map_type =
        Word32Equal(LoadInstanceType(result.value()), Int32Constant(MAP_TYPE));
    GotoUnless(is_map_type, &done);
    result.Bind(
        LoadObjectField(result.value(), Map::kConstructorOrBackPointerOffset));
    Goto(&loop);
  }
  Bind(&done);
  return result.value();
}

Node* CodeStubAssembler::LoadNameHashField(Node* name) {
  CSA_ASSERT(this, IsName(name));
  return LoadObjectField(name, Name::kHashFieldOffset, MachineType::Uint32());
}

Node* CodeStubAssembler::LoadNameHash(Node* name, Label* if_hash_not_computed) {
  Node* hash_field = LoadNameHashField(name);
  if (if_hash_not_computed != nullptr) {
    GotoIf(Word32Equal(
               Word32And(hash_field, Int32Constant(Name::kHashNotComputedMask)),
               Int32Constant(0)),
           if_hash_not_computed);
  }
  return Word32Shr(hash_field, Int32Constant(Name::kHashShift));
}

Node* CodeStubAssembler::LoadStringLength(Node* object) {
  CSA_ASSERT(this, IsString(object));
  return LoadObjectField(object, String::kLengthOffset);
}

Node* CodeStubAssembler::LoadJSValueValue(Node* object) {
  CSA_ASSERT(this, IsJSValue(object));
  return LoadObjectField(object, JSValue::kValueOffset);
}

Node* CodeStubAssembler::LoadWeakCellValueUnchecked(Node* weak_cell) {
  // TODO(ishell): fix callers.
  return LoadObjectField(weak_cell, WeakCell::kValueOffset);
}

Node* CodeStubAssembler::LoadWeakCellValue(Node* weak_cell, Label* if_cleared) {
  CSA_ASSERT(this, IsWeakCell(weak_cell));
  Node* value = LoadWeakCellValueUnchecked(weak_cell);
  if (if_cleared != nullptr) {
    GotoIf(WordEqual(value, IntPtrConstant(0)), if_cleared);
  }
  return value;
}

Node* CodeStubAssembler::LoadFixedArrayElement(Node* object, Node* index_node,
                                               int additional_offset,
                                               ParameterMode parameter_mode) {
  int32_t header_size =
      FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  return Load(MachineType::AnyTagged(), object, offset);
}

Node* CodeStubAssembler::LoadFixedTypedArrayElement(
    Node* data_pointer, Node* index_node, ElementsKind elements_kind,
    ParameterMode parameter_mode) {
  Node* offset =
      ElementOffsetFromIndex(index_node, elements_kind, parameter_mode, 0);
  MachineType type;
  switch (elements_kind) {
    case UINT8_ELEMENTS: /* fall through */
    case UINT8_CLAMPED_ELEMENTS:
      type = MachineType::Uint8();
      break;
    case INT8_ELEMENTS:
      type = MachineType::Int8();
      break;
    case UINT16_ELEMENTS:
      type = MachineType::Uint16();
      break;
    case INT16_ELEMENTS:
      type = MachineType::Int16();
      break;
    case UINT32_ELEMENTS:
      type = MachineType::Uint32();
      break;
    case INT32_ELEMENTS:
      type = MachineType::Int32();
      break;
    case FLOAT32_ELEMENTS:
      type = MachineType::Float32();
      break;
    case FLOAT64_ELEMENTS:
      type = MachineType::Float64();
      break;
    default:
      UNREACHABLE();
  }
  return Load(type, data_pointer, offset);
}

Node* CodeStubAssembler::LoadAndUntagToWord32FixedArrayElement(
    Node* object, Node* index_node, int additional_offset,
    ParameterMode parameter_mode) {
  int32_t header_size =
      FixedArray::kHeaderSize + additional_offset - kHeapObjectTag;
#if V8_TARGET_LITTLE_ENDIAN
  if (Is64()) {
    header_size += kPointerSize / 2;
  }
#endif
  Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS,
                                        parameter_mode, header_size);
  if (Is64()) {
    return Load(MachineType::Int32(), object, offset);
  } else {
    return SmiToWord32(Load(MachineType::AnyTagged(), object, offset));
  }
}

Node* CodeStubAssembler::LoadFixedDoubleArrayElement(
    Node* object, Node* index_node, MachineType machine_type,
    int additional_offset, ParameterMode parameter_mode, Label* if_hole) {
  CSA_ASSERT(this, IsFixedDoubleArray(object));
  int32_t header_size =
      FixedDoubleArray::kHeaderSize + additional_offset - kHeapObjectTag;
  Node* offset = ElementOffsetFromIndex(index_node, FAST_HOLEY_DOUBLE_ELEMENTS,
                                        parameter_mode, header_size);
  return LoadDoubleWithHoleCheck(object, offset, if_hole, machine_type);
}

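// Holes in holey double arrays are encoded as a specific NaN bit pattern
// (kHoleNanInt64). A floating point comparison cannot detect it, since
// NaN != NaN, so the bits are compared as integers; on 32-bit targets
// comparing the upper word against kHoleNanUpper32 is sufficient.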
LoadDoubleWithHoleCheck(Node * base,Node * offset,Label * if_hole,MachineType machine_type)1266 Node* CodeStubAssembler::LoadDoubleWithHoleCheck(Node* base, Node* offset,
1267 Label* if_hole,
1268 MachineType machine_type) {
1269 if (if_hole) {
1270 // TODO(ishell): Compare only the upper part for the hole once the
1271 // compiler is able to fold addition of already complex |offset| with
1272 // |kIeeeDoubleExponentWordOffset| into one addressing mode.
1273 if (Is64()) {
1274 Node* element = Load(MachineType::Uint64(), base, offset);
1275 GotoIf(Word64Equal(element, Int64Constant(kHoleNanInt64)), if_hole);
1276 } else {
1277 Node* element_upper = Load(
1278 MachineType::Uint32(), base,
1279 IntPtrAdd(offset, IntPtrConstant(kIeeeDoubleExponentWordOffset)));
1280 GotoIf(Word32Equal(element_upper, Int32Constant(kHoleNanUpper32)),
1281 if_hole);
1282 }
1283 }
1284 if (machine_type.IsNone()) {
1285 // This means the actual value is not needed.
1286 return nullptr;
1287 }
1288 return Load(machine_type, base, offset);
1289 }
1290
LoadContextElement(Node * context,int slot_index)1291 Node* CodeStubAssembler::LoadContextElement(Node* context, int slot_index) {
1292 int offset = Context::SlotOffset(slot_index);
1293 return Load(MachineType::AnyTagged(), context, IntPtrConstant(offset));
1294 }
1295
LoadContextElement(Node * context,Node * slot_index)1296 Node* CodeStubAssembler::LoadContextElement(Node* context, Node* slot_index) {
1297 Node* offset =
1298 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
1299 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
1300 return Load(MachineType::AnyTagged(), context, offset);
1301 }
1302
StoreContextElement(Node * context,int slot_index,Node * value)1303 Node* CodeStubAssembler::StoreContextElement(Node* context, int slot_index,
1304 Node* value) {
1305 int offset = Context::SlotOffset(slot_index);
1306 return Store(MachineRepresentation::kTagged, context, IntPtrConstant(offset),
1307 value);
1308 }
1309
StoreContextElement(Node * context,Node * slot_index,Node * value)1310 Node* CodeStubAssembler::StoreContextElement(Node* context, Node* slot_index,
1311 Node* value) {
1312 Node* offset =
1313 IntPtrAdd(WordShl(slot_index, kPointerSizeLog2),
1314 IntPtrConstant(Context::kHeaderSize - kHeapObjectTag));
1315 return Store(MachineRepresentation::kTagged, context, offset, value);
1316 }
1317
LoadNativeContext(Node * context)1318 Node* CodeStubAssembler::LoadNativeContext(Node* context) {
1319 return LoadContextElement(context, Context::NATIVE_CONTEXT_INDEX);
1320 }
1321
LoadJSArrayElementsMap(ElementsKind kind,Node * native_context)1322 Node* CodeStubAssembler::LoadJSArrayElementsMap(ElementsKind kind,
1323 Node* native_context) {
1324 CSA_ASSERT(this, IsNativeContext(native_context));
1325 return LoadFixedArrayElement(native_context,
1326 IntPtrConstant(Context::ArrayMapIndex(kind)));
1327 }
1328
StoreHeapNumberValue(Node * object,Node * value)1329 Node* CodeStubAssembler::StoreHeapNumberValue(Node* object, Node* value) {
1330 return StoreObjectFieldNoWriteBarrier(object, HeapNumber::kValueOffset, value,
1331 MachineRepresentation::kFloat64);
1332 }
1333
StoreObjectField(Node * object,int offset,Node * value)1334 Node* CodeStubAssembler::StoreObjectField(
1335 Node* object, int offset, Node* value) {
1336 return Store(MachineRepresentation::kTagged, object,
1337 IntPtrConstant(offset - kHeapObjectTag), value);
1338 }
1339
StoreObjectField(Node * object,Node * offset,Node * value)1340 Node* CodeStubAssembler::StoreObjectField(Node* object, Node* offset,
1341 Node* value) {
1342 int const_offset;
1343 if (ToInt32Constant(offset, const_offset)) {
1344 return StoreObjectField(object, const_offset, value);
1345 }
1346 return Store(MachineRepresentation::kTagged, object,
1347 IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
1348 }
1349
StoreObjectFieldNoWriteBarrier(Node * object,int offset,Node * value,MachineRepresentation rep)1350 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
1351 Node* object, int offset, Node* value, MachineRepresentation rep) {
1352 return StoreNoWriteBarrier(rep, object,
1353 IntPtrConstant(offset - kHeapObjectTag), value);
1354 }
1355
StoreObjectFieldNoWriteBarrier(Node * object,Node * offset,Node * value,MachineRepresentation rep)1356 Node* CodeStubAssembler::StoreObjectFieldNoWriteBarrier(
1357 Node* object, Node* offset, Node* value, MachineRepresentation rep) {
1358 int const_offset;
1359 if (ToInt32Constant(offset, const_offset)) {
1360 return StoreObjectFieldNoWriteBarrier(object, const_offset, value, rep);
1361 }
1362 return StoreNoWriteBarrier(
1363 rep, object, IntPtrSub(offset, IntPtrConstant(kHeapObjectTag)), value);
1364 }
1365
StoreMapNoWriteBarrier(Node * object,Node * map)1366 Node* CodeStubAssembler::StoreMapNoWriteBarrier(Node* object, Node* map) {
1367 return StoreNoWriteBarrier(
1368 MachineRepresentation::kTagged, object,
1369 IntPtrConstant(HeapNumber::kMapOffset - kHeapObjectTag), map);
1370 }
1371
StoreObjectFieldRoot(Node * object,int offset,Heap::RootListIndex root_index)1372 Node* CodeStubAssembler::StoreObjectFieldRoot(Node* object, int offset,
1373 Heap::RootListIndex root_index) {
1374 if (Heap::RootIsImmortalImmovable(root_index)) {
1375 return StoreObjectFieldNoWriteBarrier(object, offset, LoadRoot(root_index));
1376 } else {
1377 return StoreObjectField(object, offset, LoadRoot(root_index));
1378 }
1379 }
1380
StoreFixedArrayElement(Node * object,Node * index_node,Node * value,WriteBarrierMode barrier_mode,ParameterMode parameter_mode)1381 Node* CodeStubAssembler::StoreFixedArrayElement(Node* object, Node* index_node,
1382 Node* value,
1383 WriteBarrierMode barrier_mode,
1384 ParameterMode parameter_mode) {
1385 DCHECK(barrier_mode == SKIP_WRITE_BARRIER ||
1386 barrier_mode == UPDATE_WRITE_BARRIER);
1387 Node* offset =
1388 ElementOffsetFromIndex(index_node, FAST_HOLEY_ELEMENTS, parameter_mode,
1389 FixedArray::kHeaderSize - kHeapObjectTag);
1390 MachineRepresentation rep = MachineRepresentation::kTagged;
1391 if (barrier_mode == SKIP_WRITE_BARRIER) {
1392 return StoreNoWriteBarrier(rep, object, offset, value);
1393 } else {
1394 return Store(rep, object, offset, value);
1395 }
1396 }
1397
StoreFixedDoubleArrayElement(Node * object,Node * index_node,Node * value,ParameterMode parameter_mode)1398 Node* CodeStubAssembler::StoreFixedDoubleArrayElement(
1399 Node* object, Node* index_node, Node* value, ParameterMode parameter_mode) {
1400 CSA_ASSERT(this, IsFixedDoubleArray(object));
1401 Node* offset =
1402 ElementOffsetFromIndex(index_node, FAST_DOUBLE_ELEMENTS, parameter_mode,
1403 FixedArray::kHeaderSize - kHeapObjectTag);
1404 MachineRepresentation rep = MachineRepresentation::kFloat64;
1405 return StoreNoWriteBarrier(rep, object, offset, value);
1406 }
1407
AllocateHeapNumber(MutableMode mode)1408 Node* CodeStubAssembler::AllocateHeapNumber(MutableMode mode) {
1409 Node* result = Allocate(HeapNumber::kSize, kNone);
1410 Heap::RootListIndex heap_map_index =
1411 mode == IMMUTABLE ? Heap::kHeapNumberMapRootIndex
1412 : Heap::kMutableHeapNumberMapRootIndex;
1413 Node* map = LoadRoot(heap_map_index);
1414 StoreMapNoWriteBarrier(result, map);
1415 return result;
1416 }
1417
AllocateHeapNumberWithValue(Node * value,MutableMode mode)1418 Node* CodeStubAssembler::AllocateHeapNumberWithValue(Node* value,
1419 MutableMode mode) {
1420 Node* result = AllocateHeapNumber(mode);
1421 StoreHeapNumberValue(result, value);
1422 return result;
1423 }
1424
AllocateSeqOneByteString(int length,AllocationFlags flags)1425 Node* CodeStubAssembler::AllocateSeqOneByteString(int length,
1426 AllocationFlags flags) {
1427 Comment("AllocateSeqOneByteString");
1428 Node* result = Allocate(SeqOneByteString::SizeFor(length), flags);
1429 DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
1430 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kOneByteStringMapRootIndex));
1431 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kLengthOffset,
1432 SmiConstant(Smi::FromInt(length)));
1433 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
1434 IntPtrConstant(String::kEmptyHashField),
1435 MachineRepresentation::kWord32);
1436 return result;
1437 }
1438
AllocateSeqOneByteString(Node * context,Node * length,ParameterMode mode,AllocationFlags flags)1439 Node* CodeStubAssembler::AllocateSeqOneByteString(Node* context, Node* length,
1440 ParameterMode mode,
1441 AllocationFlags flags) {
1442 Comment("AllocateSeqOneByteString");
1443 Variable var_result(this, MachineRepresentation::kTagged);
1444
1445 // Compute the SeqOneByteString size and check if it fits into new space.
1446 Label if_sizeissmall(this), if_notsizeissmall(this, Label::kDeferred),
1447 if_join(this);
1448 Node* raw_size = GetArrayAllocationSize(
1449 length, UINT8_ELEMENTS, mode,
1450 SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
1451 Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
1452 Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
1453 &if_sizeissmall, &if_notsizeissmall);
1454
1455 Bind(&if_sizeissmall);
1456 {
1457 // Just allocate the SeqOneByteString in new space.
1458 Node* result = Allocate(size, flags);
1459 DCHECK(Heap::RootIsImmortalImmovable(Heap::kOneByteStringMapRootIndex));
1460 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kOneByteStringMapRootIndex));
1461 StoreObjectFieldNoWriteBarrier(
1462 result, SeqOneByteString::kLengthOffset,
1463 mode == SMI_PARAMETERS ? length : SmiFromWord(length));
1464 StoreObjectFieldNoWriteBarrier(result, SeqOneByteString::kHashFieldOffset,
1465 IntPtrConstant(String::kEmptyHashField),
1466 MachineRepresentation::kWord32);
1467 var_result.Bind(result);
1468 Goto(&if_join);
1469 }
1470
1471 Bind(&if_notsizeissmall);
1472 {
1473 // We might need to allocate in large object space, so go to the runtime.
1474 Node* result =
1475 CallRuntime(Runtime::kAllocateSeqOneByteString, context,
1476 mode == SMI_PARAMETERS ? length : SmiFromWord(length));
1477 var_result.Bind(result);
1478 Goto(&if_join);
1479 }
1480
1481 Bind(&if_join);
1482 return var_result.value();
1483 }
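// Worked example of the size computation above, assuming 8-byte object
// alignment (kObjectAlignmentMask == 7) and a 13-character one-byte string:
//
//   raw_size = SeqOneByteString::kHeaderSize + 13 + 7
//   size     = raw_size & ~7
//
// Adding the mask first and then clearing the low bits rounds the allocation
// size up to the next multiple of the object alignment.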
1484
1485 Node* CodeStubAssembler::AllocateSeqTwoByteString(int length,
1486 AllocationFlags flags) {
1487 Comment("AllocateSeqTwoByteString");
1488 Node* result = Allocate(SeqTwoByteString::SizeFor(length), flags);
1489 DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
1490 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
1491 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kLengthOffset,
1492 SmiConstant(Smi::FromInt(length)));
1493 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
1494 IntPtrConstant(String::kEmptyHashField),
1495 MachineRepresentation::kWord32);
1496 return result;
1497 }
1498
1499 Node* CodeStubAssembler::AllocateSeqTwoByteString(Node* context, Node* length,
1500 ParameterMode mode,
1501 AllocationFlags flags) {
1502 Comment("AllocateSeqTwoByteString");
1503 Variable var_result(this, MachineRepresentation::kTagged);
1504
1505 // Compute the SeqTwoByteString size and check if it fits into new space.
1506 Label if_sizeissmall(this), if_notsizeissmall(this, Label::kDeferred),
1507 if_join(this);
1508 Node* raw_size = GetArrayAllocationSize(
1509 length, UINT16_ELEMENTS, mode,
1510 SeqOneByteString::kHeaderSize + kObjectAlignmentMask);
1511 Node* size = WordAnd(raw_size, IntPtrConstant(~kObjectAlignmentMask));
1512 Branch(IntPtrLessThanOrEqual(size, IntPtrConstant(kMaxRegularHeapObjectSize)),
1513 &if_sizeissmall, &if_notsizeissmall);
1514
1515 Bind(&if_sizeissmall);
1516 {
1517 // Just allocate the SeqTwoByteString in new space.
1518 Node* result = Allocate(size, flags);
1519 DCHECK(Heap::RootIsImmortalImmovable(Heap::kStringMapRootIndex));
1520 StoreMapNoWriteBarrier(result, LoadRoot(Heap::kStringMapRootIndex));
1521 StoreObjectFieldNoWriteBarrier(
1522 result, SeqTwoByteString::kLengthOffset,
1523 mode == SMI_PARAMETERS ? length : SmiFromWord(length));
1524 StoreObjectFieldNoWriteBarrier(result, SeqTwoByteString::kHashFieldOffset,
1525 IntPtrConstant(String::kEmptyHashField),
1526 MachineRepresentation::kWord32);
1527 var_result.Bind(result);
1528 Goto(&if_join);
1529 }
1530
1531 Bind(&if_notsizeissmall);
1532 {
1533 // We might need to allocate in large object space, so go to the runtime.
1534 Node* result =
1535 CallRuntime(Runtime::kAllocateSeqTwoByteString, context,
1536 mode == SMI_PARAMETERS ? length : SmiFromWord(length));
1537 var_result.Bind(result);
1538 Goto(&if_join);
1539 }
1540
1541 Bind(&if_join);
1542 return var_result.value();
1543 }
1544
1545 Node* CodeStubAssembler::AllocateSlicedString(
1546 Heap::RootListIndex map_root_index, Node* length, Node* parent,
1547 Node* offset) {
1548 CSA_ASSERT(this, TaggedIsSmi(length));
1549 Node* result = Allocate(SlicedString::kSize);
1550 Node* map = LoadRoot(map_root_index);
1551 DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
1552 StoreMapNoWriteBarrier(result, map);
1553 StoreObjectFieldNoWriteBarrier(result, SlicedString::kLengthOffset, length,
1554 MachineRepresentation::kTagged);
1555 StoreObjectFieldNoWriteBarrier(result, SlicedString::kHashFieldOffset,
1556 Int32Constant(String::kEmptyHashField),
1557 MachineRepresentation::kWord32);
1558 StoreObjectFieldNoWriteBarrier(result, SlicedString::kParentOffset, parent,
1559 MachineRepresentation::kTagged);
1560 StoreObjectFieldNoWriteBarrier(result, SlicedString::kOffsetOffset, offset,
1561 MachineRepresentation::kTagged);
1562 return result;
1563 }
1564
1565 Node* CodeStubAssembler::AllocateSlicedOneByteString(Node* length, Node* parent,
1566 Node* offset) {
1567 return AllocateSlicedString(Heap::kSlicedOneByteStringMapRootIndex, length,
1568 parent, offset);
1569 }
1570
1571 Node* CodeStubAssembler::AllocateSlicedTwoByteString(Node* length, Node* parent,
1572 Node* offset) {
1573 return AllocateSlicedString(Heap::kSlicedStringMapRootIndex, length, parent,
1574 offset);
1575 }
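// Illustrative use: a substring can be represented without copying by
// wrapping the parent string. The Smi inputs match the TaggedIsSmi assert in
// AllocateSlicedString above; the concrete values are hypothetical.
//
//   Node* slice = AllocateSlicedOneByteString(
//       SmiConstant(Smi::FromInt(3)), parent, SmiConstant(Smi::FromInt(1)));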
1576
1577 Node* CodeStubAssembler::AllocateConsString(Heap::RootListIndex map_root_index,
1578 Node* length, Node* first,
1579 Node* second,
1580 AllocationFlags flags) {
1581 CSA_ASSERT(this, TaggedIsSmi(length));
1582 Node* result = Allocate(ConsString::kSize, flags);
1583 Node* map = LoadRoot(map_root_index);
1584 DCHECK(Heap::RootIsImmortalImmovable(map_root_index));
1585 StoreMapNoWriteBarrier(result, map);
1586 StoreObjectFieldNoWriteBarrier(result, ConsString::kLengthOffset, length,
1587 MachineRepresentation::kTagged);
1588 StoreObjectFieldNoWriteBarrier(result, ConsString::kHashFieldOffset,
1589 Int32Constant(String::kEmptyHashField),
1590 MachineRepresentation::kWord32);
1591 bool const new_space = !(flags & kPretenured);
1592 if (new_space) {
1593 StoreObjectFieldNoWriteBarrier(result, ConsString::kFirstOffset, first,
1594 MachineRepresentation::kTagged);
1595 StoreObjectFieldNoWriteBarrier(result, ConsString::kSecondOffset, second,
1596 MachineRepresentation::kTagged);
1597 } else {
1598 StoreObjectField(result, ConsString::kFirstOffset, first);
1599 StoreObjectField(result, ConsString::kSecondOffset, second);
1600 }
1601 return result;
1602 }
1603
1604 Node* CodeStubAssembler::AllocateOneByteConsString(Node* length, Node* first,
1605 Node* second,
1606 AllocationFlags flags) {
1607 return AllocateConsString(Heap::kConsOneByteStringMapRootIndex, length, first,
1608 second, flags);
1609 }
1610
1611 Node* CodeStubAssembler::AllocateTwoByteConsString(Node* length, Node* first,
1612 Node* second,
1613 AllocationFlags flags) {
1614 return AllocateConsString(Heap::kConsStringMapRootIndex, length, first,
1615 second, flags);
1616 }
1617
1618 Node* CodeStubAssembler::NewConsString(Node* context, Node* length, Node* left,
1619 Node* right, AllocationFlags flags) {
1620 CSA_ASSERT(this, TaggedIsSmi(length));
1621 // The result of a string addition can be a cons string.
1622 Comment("Allocating ConsString");
1623 Node* left_instance_type = LoadInstanceType(left);
1624 Node* right_instance_type = LoadInstanceType(right);
1625
1626 // Compute intersection and difference of instance types.
1627 Node* anded_instance_types = WordAnd(left_instance_type, right_instance_type);
1628 Node* xored_instance_types = WordXor(left_instance_type, right_instance_type);
1629
1630 // We create a one-byte cons string if
1631 // 1. both strings are one-byte, or
1632 // 2. at least one of the strings is two-byte, but happens to contain only
1633 // one-byte characters.
1634 // To do this, we check
1635 // 1. if both strings are one-byte, or if the one-byte data hint is set in
1636 // both strings, or
1637 // 2. if one of the strings has the one-byte data hint set and the other
1638 // string is one-byte.
1639 STATIC_ASSERT(kOneByteStringTag != 0);
1640 STATIC_ASSERT(kOneByteDataHintTag != 0);
1641 Label one_byte_map(this);
1642 Label two_byte_map(this);
1643 Variable result(this, MachineRepresentation::kTagged);
1644 Label done(this, &result);
1645 GotoIf(WordNotEqual(
1646 WordAnd(anded_instance_types,
1647 IntPtrConstant(kStringEncodingMask | kOneByteDataHintTag)),
1648 IntPtrConstant(0)),
1649 &one_byte_map);
1650 Branch(WordNotEqual(WordAnd(xored_instance_types,
1651 IntPtrConstant(kStringEncodingMask |
1652 kOneByteDataHintMask)),
1653 IntPtrConstant(kOneByteStringTag | kOneByteDataHintTag)),
1654 &two_byte_map, &one_byte_map);
1655
1656 Bind(&one_byte_map);
1657 Comment("One-byte ConsString");
1658 result.Bind(AllocateOneByteConsString(length, left, right, flags));
1659 Goto(&done);
1660
1661 Bind(&two_byte_map);
1662 Comment("Two-byte ConsString");
1663 result.Bind(AllocateTwoByteConsString(length, left, right, flags));
1664 Goto(&done);
1665
1666 Bind(&done);
1667
1668 return result.value();
1669 }
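// Breakdown of the two bit tests above (a sketch; only the relations checked
// by the STATIC_ASSERTs are relied upon):
//
//   anded & (kStringEncodingMask | kOneByteDataHintTag) != 0
//     => both inputs are one-byte, or both carry the one-byte data hint.
//
//   xored & (kStringEncodingMask | kOneByteDataHintMask)
//       == kOneByteStringTag | kOneByteDataHintTag
//     => the inputs differ in exactly those bits, i.e. one is one-byte and
//        the other has the one-byte data hint, so a one-byte cons is safe.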
1670
1671 Node* CodeStubAssembler::AllocateRegExpResult(Node* context, Node* length,
1672 Node* index, Node* input) {
1673 Node* const max_length =
1674 SmiConstant(Smi::FromInt(JSArray::kInitialMaxFastElementArray));
1675 CSA_ASSERT(this, SmiLessThanOrEqual(length, max_length));
1676 USE(max_length);
1677
1678 // Allocate the JSRegExpResult.
1679 // TODO(jgruber): Fold JSArray and FixedArray allocations, then remove
1680 // unneeded store of elements.
1681 Node* const result = Allocate(JSRegExpResult::kSize);
1682
1683 // TODO(jgruber): Store map as Heap constant?
1684 Node* const native_context = LoadNativeContext(context);
1685 Node* const map =
1686 LoadContextElement(native_context, Context::REGEXP_RESULT_MAP_INDEX);
1687 StoreMapNoWriteBarrier(result, map);
1688
1689 // Initialize the header before allocating the elements.
1690 Node* const empty_array = EmptyFixedArrayConstant();
1691 DCHECK(Heap::RootIsImmortalImmovable(Heap::kEmptyFixedArrayRootIndex));
1692 StoreObjectFieldNoWriteBarrier(result, JSArray::kPropertiesOffset,
1693 empty_array);
1694 StoreObjectFieldNoWriteBarrier(result, JSArray::kElementsOffset, empty_array);
1695 StoreObjectFieldNoWriteBarrier(result, JSArray::kLengthOffset, length);
1696
1697 StoreObjectFieldNoWriteBarrier(result, JSRegExpResult::kIndexOffset, index);
1698 StoreObjectField(result, JSRegExpResult::kInputOffset, input);
1699
1700 Node* const zero = IntPtrConstant(0);
1701 Node* const length_intptr = SmiUntag(length);
1702 const ElementsKind elements_kind = FAST_ELEMENTS;
1703 const ParameterMode parameter_mode = INTPTR_PARAMETERS;
1704
1705 Node* const elements =
1706 AllocateFixedArray(elements_kind, length_intptr, parameter_mode);
1707 StoreObjectField(result, JSArray::kElementsOffset, elements);
1708
1709 // Fill in the elements with undefined.
1710 FillFixedArrayWithValue(elements_kind, elements, zero, length_intptr,
1711 Heap::kUndefinedValueRootIndex, parameter_mode);
1712
1713 return result;
1714 }
1715
1716 Node* CodeStubAssembler::AllocateNameDictionary(int at_least_space_for) {
1717 return AllocateNameDictionary(IntPtrConstant(at_least_space_for));
1718 }
1719
1720 Node* CodeStubAssembler::AllocateNameDictionary(Node* at_least_space_for) {
1721 CSA_ASSERT(this, UintPtrLessThanOrEqual(
1722 at_least_space_for,
1723 IntPtrConstant(NameDictionary::kMaxCapacity)));
1724
1725 Node* capacity = HashTableComputeCapacity(at_least_space_for);
1726 CSA_ASSERT(this, WordIsPowerOfTwo(capacity));
1727
1728 Node* length = EntryToIndex<NameDictionary>(capacity);
1729 Node* store_size =
1730 IntPtrAddFoldConstants(WordShl(length, IntPtrConstant(kPointerSizeLog2)),
1731 IntPtrConstant(NameDictionary::kHeaderSize));
1732
1733 Node* result = Allocate(store_size);
1734 Comment("Initialize NameDictionary");
1735 // Initialize FixedArray fields.
1736 StoreObjectFieldRoot(result, FixedArray::kMapOffset,
1737 Heap::kHashTableMapRootIndex);
1738 StoreObjectFieldNoWriteBarrier(result, FixedArray::kLengthOffset,
1739 SmiFromWord(length));
1740 // Initialize HashTable fields.
1741 Node* zero = SmiConstant(0);
1742 StoreFixedArrayElement(result, NameDictionary::kNumberOfElementsIndex, zero,
1743 SKIP_WRITE_BARRIER);
1744 StoreFixedArrayElement(result, NameDictionary::kNumberOfDeletedElementsIndex,
1745 zero, SKIP_WRITE_BARRIER);
1746 StoreFixedArrayElement(result, NameDictionary::kCapacityIndex,
1747 SmiTag(capacity), SKIP_WRITE_BARRIER);
1748 // Initialize Dictionary fields.
1749 Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex);
1750 StoreFixedArrayElement(result, NameDictionary::kMaxNumberKeyIndex, filler,
1751 SKIP_WRITE_BARRIER);
1752 StoreFixedArrayElement(result, NameDictionary::kNextEnumerationIndexIndex,
1753 SmiConstant(PropertyDetails::kInitialIndex),
1754 SKIP_WRITE_BARRIER);
1755
1756 // Initialize NameDictionary elements.
1757 result = BitcastTaggedToWord(result);
1758 Node* start_address = IntPtrAdd(
1759 result, IntPtrConstant(NameDictionary::OffsetOfElementAt(
1760 NameDictionary::kElementsStartIndex) -
1761 kHeapObjectTag));
1762 Node* end_address = IntPtrAdd(
1763 result,
1764 IntPtrSubFoldConstants(store_size, IntPtrConstant(kHeapObjectTag)));
1765 StoreFieldsNoWriteBarrier(start_address, end_address, filler);
1766 return result;
1767 }
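// Sizing sketch for the allocation above (the concrete numbers are
// hypothetical): with at_least_space_for == 16, HashTableComputeCapacity
// might return a capacity of 32, and then
//
//   length     = EntryToIndex<NameDictionary>(32)
//   store_size = length * kPointerSize + NameDictionary::kHeaderSize
//
// i.e. the dictionary is one flat, FixedArray-shaped object whose element
// region is pre-filled with undefined by StoreFieldsNoWriteBarrier.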
1768
1769 Node* CodeStubAssembler::AllocateJSObjectFromMap(Node* map, Node* properties,
1770 Node* elements) {
1771 CSA_ASSERT(this, IsMap(map));
1772 Node* size =
1773 IntPtrMul(LoadMapInstanceSize(map), IntPtrConstant(kPointerSize));
1774 CSA_ASSERT(this, IsRegularHeapObjectSize(size));
1775 Node* object = Allocate(size);
1776 StoreMapNoWriteBarrier(object, map);
1777 InitializeJSObjectFromMap(object, map, size, properties, elements);
1778 return object;
1779 }
1780
1781 void CodeStubAssembler::InitializeJSObjectFromMap(Node* object, Node* map,
1782 Node* size, Node* properties,
1783 Node* elements) {
1784 // This helper assumes that the object is in new-space, as guarded by the
1785 // check in AllocateJSObjectFromMap.
1786 if (properties == nullptr) {
1787 CSA_ASSERT(this, Word32BinaryNot(IsDictionaryMap((map))));
1788 StoreObjectFieldRoot(object, JSObject::kPropertiesOffset,
1789 Heap::kEmptyFixedArrayRootIndex);
1790 } else {
1791 StoreObjectFieldNoWriteBarrier(object, JSObject::kPropertiesOffset,
1792 properties);
1793 }
1794 if (elements == nullptr) {
1795 StoreObjectFieldRoot(object, JSObject::kElementsOffset,
1796 Heap::kEmptyFixedArrayRootIndex);
1797 } else {
1798 StoreObjectFieldNoWriteBarrier(object, JSObject::kElementsOffset, elements);
1799 }
1800 InitializeJSObjectBody(object, map, size, JSObject::kHeaderSize);
1801 }
1802
1803 void CodeStubAssembler::InitializeJSObjectBody(Node* object, Node* map,
1804 Node* size, int start_offset) {
1805 // TODO(cbruni): activate in-object slack tracking machinery.
1806 Comment("InitializeJSObjectBody");
1807 Node* filler = LoadRoot(Heap::kUndefinedValueRootIndex);
1808 // Calculate the untagged field addresses.
1809 Node* start_address =
1810 IntPtrAdd(object, IntPtrConstant(start_offset - kHeapObjectTag));
1811 Node* end_address =
1812 IntPtrSub(IntPtrAdd(object, size), IntPtrConstant(kHeapObjectTag));
1813 StoreFieldsNoWriteBarrier(start_address, end_address, filler);
1814 }
1815
1816 void CodeStubAssembler::StoreFieldsNoWriteBarrier(Node* start_address,
1817 Node* end_address,
1818 Node* value) {
1819 Comment("StoreFieldsNoWriteBarrier");
1820 CSA_ASSERT(this, WordIsWordAligned(start_address));
1821 CSA_ASSERT(this, WordIsWordAligned(end_address));
1822 BuildFastLoop(
1823 MachineType::PointerRepresentation(), start_address, end_address,
1824 [value](CodeStubAssembler* a, Node* current) {
1825 a->StoreNoWriteBarrier(MachineRepresentation::kTagged, current, value);
1826 },
1827 kPointerSize, IndexAdvanceMode::kPost);
1828 }
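// The loop above is effectively a word-wise memset over the untagged address
// range [start_address, end_address), which is why both
// InitializeJSObjectBody and AllocateNameDictionary can use it to pre-fill
// freshly allocated objects with undefined.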
1829
1830 Node* CodeStubAssembler::AllocateUninitializedJSArrayWithoutElements(
1831 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site) {
1832 Comment("begin allocation of JSArray without elements");
1833 int base_size = JSArray::kSize;
1834 if (allocation_site != nullptr) {
1835 base_size += AllocationMemento::kSize;
1836 }
1837
1838 Node* size = IntPtrConstant(base_size);
1839 Node* array = AllocateUninitializedJSArray(kind, array_map, length,
1840 allocation_site, size);
1841 return array;
1842 }
1843
1844 std::pair<Node*, Node*>
1845 CodeStubAssembler::AllocateUninitializedJSArrayWithElements(
1846 ElementsKind kind, Node* array_map, Node* length, Node* allocation_site,
1847 Node* capacity, ParameterMode capacity_mode) {
1848 Comment("begin allocation of JSArray with elements");
1849 int base_size = JSArray::kSize;
1850
1851 if (allocation_site != nullptr) {
1852 base_size += AllocationMemento::kSize;
1853 }
1854
1855 int elements_offset = base_size;
1856
1857 // Compute space for the elements.
1858 base_size += FixedArray::kHeaderSize;
1859 Node* size = ElementOffsetFromIndex(capacity, kind, capacity_mode, base_size);
1860
1861 Node* array = AllocateUninitializedJSArray(kind, array_map, length,
1862 allocation_site, size);
1863
1864 // The bitcast here is safe because InnerAllocate doesn't actually allocate.
1865 Node* elements = InnerAllocate(BitcastTaggedToWord(array), elements_offset);
1866 StoreObjectField(array, JSObject::kElementsOffset, elements);
1867
1868 return {array, elements};
1869 }
1870
1871 Node* CodeStubAssembler::AllocateUninitializedJSArray(ElementsKind kind,
1872 Node* array_map,
1873 Node* length,
1874 Node* allocation_site,
1875 Node* size_in_bytes) {
1876 Node* array = Allocate(size_in_bytes);
1877
1878 Comment("write JSArray headers");
1879 StoreMapNoWriteBarrier(array, array_map);
1880
1881 StoreObjectFieldNoWriteBarrier(array, JSArray::kLengthOffset, length);
1882
1883 StoreObjectFieldRoot(array, JSArray::kPropertiesOffset,
1884 Heap::kEmptyFixedArrayRootIndex);
1885
1886 if (allocation_site != nullptr) {
1887 InitializeAllocationMemento(array, JSArray::kSize, allocation_site);
1888 }
1889 return array;
1890 }
1891
1892 Node* CodeStubAssembler::AllocateJSArray(ElementsKind kind, Node* array_map,
1893 Node* capacity, Node* length,
1894 Node* allocation_site,
1895 ParameterMode capacity_mode) {
1896 bool is_double = IsFastDoubleElementsKind(kind);
1897
1898 // Allocate both array and elements object, and initialize the JSArray.
1899 Node *array, *elements;
1900 std::tie(array, elements) = AllocateUninitializedJSArrayWithElements(
1901 kind, array_map, length, allocation_site, capacity, capacity_mode);
1902 // Set up the elements object.
1903 Heap* heap = isolate()->heap();
1904 Handle<Map> elements_map(is_double ? heap->fixed_double_array_map()
1905 : heap->fixed_array_map());
1906 StoreMapNoWriteBarrier(elements, HeapConstant(elements_map));
1907 StoreObjectFieldNoWriteBarrier(elements, FixedArray::kLengthOffset,
1908 TagParameter(capacity, capacity_mode));
1909
1910 // Fill in the elements with holes.
1911 FillFixedArrayWithValue(
1912 kind, elements, capacity_mode == SMI_PARAMETERS ? SmiConstant(Smi::kZero)
1913 : IntPtrConstant(0),
1914 capacity, Heap::kTheHoleValueRootIndex, capacity_mode);
1915
1916 return array;
1917 }
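// Usage sketch (hypothetical call site): a FAST_ELEMENTS array with capacity
// and length 4 and no allocation site.
//
//   Node* array =
//       AllocateJSArray(FAST_ELEMENTS, array_map, IntPtrConstant(4),
//                       SmiConstant(Smi::FromInt(4)), nullptr,
//                       INTPTR_PARAMETERS);
//
// The backing store comes back hole-filled, so callers may populate the
// elements lazily.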
1918
1919 Node* CodeStubAssembler::AllocateFixedArray(ElementsKind kind,
1920 Node* capacity_node,
1921 ParameterMode mode,
1922 AllocationFlags flags) {
1923 CSA_ASSERT(this,
1924 IntPtrGreaterThan(capacity_node, IntPtrOrSmiConstant(0, mode)));
1925 Node* total_size = GetFixedArrayAllocationSize(capacity_node, kind, mode);
1926
1927 // Allocate the FixedArray.
1928 Node* array = Allocate(total_size, flags);
1929 Heap* heap = isolate()->heap();
1930 Handle<Map> map(IsFastDoubleElementsKind(kind)
1931 ? heap->fixed_double_array_map()
1932 : heap->fixed_array_map());
1933 if (flags & kPretenured) {
1934 StoreObjectField(array, JSObject::kMapOffset, HeapConstant(map));
1935 } else {
1936 StoreMapNoWriteBarrier(array, HeapConstant(map));
1937 }
1938 StoreObjectFieldNoWriteBarrier(array, FixedArray::kLengthOffset,
1939 TagParameter(capacity_node, mode));
1940 return array;
1941 }
1942
1943 void CodeStubAssembler::FillFixedArrayWithValue(
1944 ElementsKind kind, Node* array, Node* from_node, Node* to_node,
1945 Heap::RootListIndex value_root_index, ParameterMode mode) {
1946 bool is_double = IsFastDoubleElementsKind(kind);
1947 DCHECK(value_root_index == Heap::kTheHoleValueRootIndex ||
1948 value_root_index == Heap::kUndefinedValueRootIndex);
1949 DCHECK_IMPLIES(is_double, value_root_index == Heap::kTheHoleValueRootIndex);
1950 STATIC_ASSERT(kHoleNanLower32 == kHoleNanUpper32);
1951 Node* double_hole =
1952 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
1953 Node* value = LoadRoot(value_root_index);
1954
1955 BuildFastFixedArrayForEach(
1956 array, kind, from_node, to_node,
1957 [value, is_double, double_hole](CodeStubAssembler* assembler, Node* array,
1958 Node* offset) {
1959 if (is_double) {
1960 // Don't use doubles to store the hole double, since manipulating the
1961 // signaling NaN used for the hole in C++, e.g. with bit_cast, will
1962 // change its value on ia32 (the x87 stack is used to return values
1963 // and stores to the stack silently clear the signalling bit).
1964 //
1965 // TODO(danno): When we have a Float32/Float64 wrapper class that
1966 // preserves double bits during manipulation, remove this code/change
1967 // this to an indexed Float64 store.
1968 if (assembler->Is64()) {
1969 assembler->StoreNoWriteBarrier(MachineRepresentation::kWord64,
1970 array, offset, double_hole);
1971 } else {
1972 assembler->StoreNoWriteBarrier(MachineRepresentation::kWord32,
1973 array, offset, double_hole);
1974 assembler->StoreNoWriteBarrier(
1975 MachineRepresentation::kWord32, array,
1976 assembler->IntPtrAdd(offset,
1977 assembler->IntPtrConstant(kPointerSize)),
1978 double_hole);
1979 }
1980 } else {
1981 assembler->StoreNoWriteBarrier(MachineRepresentation::kTagged, array,
1982 offset, value);
1983 }
1984 },
1985 mode);
1986 }
1987
1988 void CodeStubAssembler::CopyFixedArrayElements(
1989 ElementsKind from_kind, Node* from_array, ElementsKind to_kind,
1990 Node* to_array, Node* element_count, Node* capacity,
1991 WriteBarrierMode barrier_mode, ParameterMode mode) {
1992 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
1993 const int first_element_offset = FixedArray::kHeaderSize - kHeapObjectTag;
1994 Comment("[ CopyFixedArrayElements");
1995
1996 // Typed array elements are not supported.
1997 DCHECK(!IsFixedTypedArrayElementsKind(from_kind));
1998 DCHECK(!IsFixedTypedArrayElementsKind(to_kind));
1999
2000 Label done(this);
2001 bool from_double_elements = IsFastDoubleElementsKind(from_kind);
2002 bool to_double_elements = IsFastDoubleElementsKind(to_kind);
2003 bool element_size_matches =
2004 Is64() ||
2005 IsFastDoubleElementsKind(from_kind) == IsFastDoubleElementsKind(to_kind);
2006 bool doubles_to_objects_conversion =
2007 IsFastDoubleElementsKind(from_kind) && IsFastObjectElementsKind(to_kind);
2008 bool needs_write_barrier =
2009 doubles_to_objects_conversion || (barrier_mode == UPDATE_WRITE_BARRIER &&
2010 IsFastObjectElementsKind(to_kind));
2011 Node* double_hole =
2012 Is64() ? Int64Constant(kHoleNanInt64) : Int32Constant(kHoleNanLower32);
2013
2014 if (doubles_to_objects_conversion) {
2015 // If the copy might trigger a GC, ensure that the FixedArray is
2016 // pre-initialized with holes so that it is always in a consistent
2017 // state.
2018 FillFixedArrayWithValue(to_kind, to_array, IntPtrOrSmiConstant(0, mode),
2019 capacity, Heap::kTheHoleValueRootIndex, mode);
2020 } else if (element_count != capacity) {
2021 FillFixedArrayWithValue(to_kind, to_array, element_count, capacity,
2022 Heap::kTheHoleValueRootIndex, mode);
2023 }
2024
2025 Node* limit_offset = ElementOffsetFromIndex(
2026 IntPtrOrSmiConstant(0, mode), from_kind, mode, first_element_offset);
2027 Variable var_from_offset(this, MachineType::PointerRepresentation());
2028 var_from_offset.Bind(ElementOffsetFromIndex(element_count, from_kind, mode,
2029 first_element_offset));
2030 // This second variable is used only when the element sizes of source and
2031 // destination arrays do not match.
2032 Variable var_to_offset(this, MachineType::PointerRepresentation());
2033 if (element_size_matches) {
2034 var_to_offset.Bind(var_from_offset.value());
2035 } else {
2036 var_to_offset.Bind(ElementOffsetFromIndex(element_count, to_kind, mode,
2037 first_element_offset));
2038 }
2039
2040 Variable* vars[] = {&var_from_offset, &var_to_offset};
2041 Label decrement(this, 2, vars);
2042
2043 Branch(WordEqual(var_from_offset.value(), limit_offset), &done, &decrement);
2044
2045 Bind(&decrement);
2046 {
2047 Node* from_offset = IntPtrSub(
2048 var_from_offset.value(),
2049 IntPtrConstant(from_double_elements ? kDoubleSize : kPointerSize));
2050 var_from_offset.Bind(from_offset);
2051
2052 Node* to_offset;
2053 if (element_size_matches) {
2054 to_offset = from_offset;
2055 } else {
2056 to_offset = IntPtrSub(
2057 var_to_offset.value(),
2058 IntPtrConstant(to_double_elements ? kDoubleSize : kPointerSize));
2059 var_to_offset.Bind(to_offset);
2060 }
2061
2062 Label next_iter(this), store_double_hole(this);
2063 Label* if_hole;
2064 if (doubles_to_objects_conversion) {
2065 // The target elements array is already preinitialized with holes, so we
2066 // can just proceed with the next iteration.
2067 if_hole = &next_iter;
2068 } else if (IsFastDoubleElementsKind(to_kind)) {
2069 if_hole = &store_double_hole;
2070 } else {
2071 // In all other cases, don't check for holes and copy the data as is.
2072 if_hole = nullptr;
2073 }
2074
2075 Node* value = LoadElementAndPrepareForStore(
2076 from_array, var_from_offset.value(), from_kind, to_kind, if_hole);
2077
2078 if (needs_write_barrier) {
2079 Store(MachineRepresentation::kTagged, to_array, to_offset, value);
2080 } else if (to_double_elements) {
2081 StoreNoWriteBarrier(MachineRepresentation::kFloat64, to_array, to_offset,
2082 value);
2083 } else {
2084 StoreNoWriteBarrier(MachineRepresentation::kTagged, to_array, to_offset,
2085 value);
2086 }
2087 Goto(&next_iter);
2088
2089 if (if_hole == &store_double_hole) {
2090 Bind(&store_double_hole);
2091 // Don't use doubles to store the hole double, since manipulating the
2092 // signaling NaN used for the hole in C++, e.g. with bit_cast, will
2093 // change its value on ia32 (the x87 stack is used to return values
2094 // and stores to the stack silently clear the signalling bit).
2095 //
2096 // TODO(danno): When we have a Float32/Float64 wrapper class that
2097 // preserves double bits during manipulation, remove this code/change
2098 // this to an indexed Float64 store.
2099 if (Is64()) {
2100 StoreNoWriteBarrier(MachineRepresentation::kWord64, to_array, to_offset,
2101 double_hole);
2102 } else {
2103 StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array, to_offset,
2104 double_hole);
2105 StoreNoWriteBarrier(MachineRepresentation::kWord32, to_array,
2106 IntPtrAdd(to_offset, IntPtrConstant(kPointerSize)),
2107 double_hole);
2108 }
2109 Goto(&next_iter);
2110 }
2111
2112 Bind(&next_iter);
2113 Node* compare = WordNotEqual(from_offset, limit_offset);
2114 Branch(compare, &decrement, &done);
2115 }
2116
2117 Bind(&done);
2118 IncrementCounter(isolate()->counters()->inlined_copied_elements(), 1);
2119 Comment("] CopyFixedArrayElements");
2120 }
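// Usage sketch: the grow path copies `capacity` used elements into a larger,
// hole-initialized store and may skip the write barrier because the
// destination is freshly allocated in new space:
//
//   CopyFixedArrayElements(kind, elements, kind, new_elements, capacity,
//                          new_capacity, SKIP_WRITE_BARRIER, mode);
//
// This mirrors the call made from GrowElementsCapacity further down.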
2121
2122 void CodeStubAssembler::CopyStringCharacters(
2123 compiler::Node* from_string, compiler::Node* to_string,
2124 compiler::Node* from_index, compiler::Node* to_index,
2125 compiler::Node* character_count, String::Encoding from_encoding,
2126 String::Encoding to_encoding, ParameterMode mode) {
2127 bool from_one_byte = from_encoding == String::ONE_BYTE_ENCODING;
2128 bool to_one_byte = to_encoding == String::ONE_BYTE_ENCODING;
2129 DCHECK_IMPLIES(to_one_byte, from_one_byte);
2130 Comment("CopyStringCharacters %s -> %s",
2131 from_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING",
2132 to_one_byte ? "ONE_BYTE_ENCODING" : "TWO_BYTE_ENCODING");
2133
2134 ElementsKind from_kind = from_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
2135 ElementsKind to_kind = to_one_byte ? UINT8_ELEMENTS : UINT16_ELEMENTS;
2136 STATIC_ASSERT(SeqOneByteString::kHeaderSize == SeqTwoByteString::kHeaderSize);
2137 int header_size = SeqOneByteString::kHeaderSize - kHeapObjectTag;
2138 Node* from_offset =
2139 ElementOffsetFromIndex(from_index, from_kind, mode, header_size);
2140 Node* to_offset =
2141 ElementOffsetFromIndex(to_index, to_kind, mode, header_size);
2142 Node* byte_count = ElementOffsetFromIndex(character_count, from_kind, mode);
2143 Node* limit_offset = IntPtrAddFoldConstants(from_offset, byte_count);
2144
2145 // Prepare the fast loop.
2146 MachineType type =
2147 from_one_byte ? MachineType::Uint8() : MachineType::Uint16();
2148 MachineRepresentation rep = to_one_byte ? MachineRepresentation::kWord8
2149 : MachineRepresentation::kWord16;
2150 int from_increment = 1 << ElementsKindToShiftSize(from_kind);
2151 int to_increment = 1 << ElementsKindToShiftSize(to_kind);
2152
2153 Variable current_to_offset(this, MachineType::PointerRepresentation());
2154 VariableList vars({&current_to_offset}, zone());
2155 current_to_offset.Bind(to_offset);
2156 int to_index_constant = 0, from_index_constant = 0;
2157 Smi* to_index_smi = nullptr;
2158 Smi* from_index_smi = nullptr;
2159 bool index_same = (from_encoding == to_encoding) &&
2160 (from_index == to_index ||
2161 (ToInt32Constant(from_index, from_index_constant) &&
2162 ToInt32Constant(to_index, to_index_constant) &&
2163 from_index_constant == to_index_constant) ||
2164 (ToSmiConstant(from_index, from_index_smi) &&
2165 ToSmiConstant(to_index, to_index_smi) &&
2166 to_index_smi == from_index_smi));
2167 BuildFastLoop(vars, MachineType::PointerRepresentation(), from_offset,
2168 limit_offset,
2169 [from_string, to_string, &current_to_offset, to_increment, type,
2170 rep, index_same](CodeStubAssembler* assembler, Node* offset) {
2171 Node* value = assembler->Load(type, from_string, offset);
2172 assembler->StoreNoWriteBarrier(
2173 rep, to_string,
2174 index_same ? offset : current_to_offset.value(), value);
2175 if (!index_same) {
2176 current_to_offset.Bind(assembler->IntPtrAdd(
2177 current_to_offset.value(),
2178 assembler->IntPtrConstant(to_increment)));
2179 }
2180 },
2181 from_increment, IndexAdvanceMode::kPost);
2182 }
2183
2184 Node* CodeStubAssembler::LoadElementAndPrepareForStore(Node* array,
2185 Node* offset,
2186 ElementsKind from_kind,
2187 ElementsKind to_kind,
2188 Label* if_hole) {
2189 if (IsFastDoubleElementsKind(from_kind)) {
2190 Node* value =
2191 LoadDoubleWithHoleCheck(array, offset, if_hole, MachineType::Float64());
2192 if (!IsFastDoubleElementsKind(to_kind)) {
2193 value = AllocateHeapNumberWithValue(value);
2194 }
2195 return value;
2196
2197 } else {
2198 Node* value = Load(MachineType::AnyTagged(), array, offset);
2199 if (if_hole) {
2200 GotoIf(WordEqual(value, TheHoleConstant()), if_hole);
2201 }
2202 if (IsFastDoubleElementsKind(to_kind)) {
2203 if (IsFastSmiElementsKind(from_kind)) {
2204 value = SmiToFloat64(value);
2205 } else {
2206 value = LoadHeapNumberValue(value);
2207 }
2208 }
2209 return value;
2210 }
2211 }
2212
2213 Node* CodeStubAssembler::CalculateNewElementsCapacity(Node* old_capacity,
2214 ParameterMode mode) {
2215 Node* half_old_capacity = WordShr(old_capacity, IntPtrConstant(1));
2216 Node* new_capacity = IntPtrAdd(half_old_capacity, old_capacity);
2217 Node* unconditioned_result =
2218 IntPtrAdd(new_capacity, IntPtrOrSmiConstant(16, mode));
2219 if (mode == INTEGER_PARAMETERS || mode == INTPTR_PARAMETERS) {
2220 return unconditioned_result;
2221 } else {
2222 int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
2223 return WordAnd(unconditioned_result,
2224 IntPtrConstant(static_cast<size_t>(-1) << kSmiShiftBits));
2225 }
2226 }
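// The computation above implements the backing-store growth policy
//
//   new_capacity = old_capacity + old_capacity / 2 + 16
//
// In SMI_PARAMETERS mode the inputs are tagged, and the unchecked WordShr
// shifts payload bits into the tag area; the final WordAnd clears everything
// below kSmiShiftBits so the result is a valid Smi again.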
2227
2228 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
2229 ElementsKind kind, Node* key,
2230 Label* bailout) {
2231 Node* capacity = LoadFixedArrayBaseLength(elements);
2232
2233 ParameterMode mode = OptimalParameterMode();
2234 capacity = UntagParameter(capacity, mode);
2235 key = UntagParameter(key, mode);
2236
2237 return TryGrowElementsCapacity(object, elements, kind, key, capacity, mode,
2238 bailout);
2239 }
2240
2241 Node* CodeStubAssembler::TryGrowElementsCapacity(Node* object, Node* elements,
2242 ElementsKind kind, Node* key,
2243 Node* capacity,
2244 ParameterMode mode,
2245 Label* bailout) {
2246 Comment("TryGrowElementsCapacity");
2247
2248 // If the gap growth is too big, fall back to the runtime.
2249 Node* max_gap = IntPtrOrSmiConstant(JSObject::kMaxGap, mode);
2250 Node* max_capacity = IntPtrAdd(capacity, max_gap);
2251 GotoIf(UintPtrGreaterThanOrEqual(key, max_capacity), bailout);
2252
2253 // Calculate the capacity of the new backing store.
2254 Node* new_capacity = CalculateNewElementsCapacity(
2255 IntPtrAdd(key, IntPtrOrSmiConstant(1, mode)), mode);
2256 return GrowElementsCapacity(object, elements, kind, kind, capacity,
2257 new_capacity, mode, bailout);
2258 }
2259
2260 Node* CodeStubAssembler::GrowElementsCapacity(
2261 Node* object, Node* elements, ElementsKind from_kind, ElementsKind to_kind,
2262 Node* capacity, Node* new_capacity, ParameterMode mode, Label* bailout) {
2263 Comment("[ GrowElementsCapacity");
2264 // If the size of the allocation for the new capacity doesn't fit in a page
2265 // that we can bump-pointer allocate from, fall back to the runtime.
2266 int max_size = FixedArrayBase::GetMaxLengthForNewSpaceAllocation(to_kind);
2267 GotoIf(UintPtrGreaterThanOrEqual(new_capacity,
2268 IntPtrOrSmiConstant(max_size, mode)),
2269 bailout);
2270
2271 // Allocate the new backing store.
2272 Node* new_elements = AllocateFixedArray(to_kind, new_capacity, mode);
2273
2274 // Copy the elements from the old elements store to the new.
2275 // The size check above guarantees that |new_elements| is allocated
2276 // in new space so we can skip the write barrier.
2277 CopyFixedArrayElements(from_kind, elements, to_kind, new_elements, capacity,
2278 new_capacity, SKIP_WRITE_BARRIER, mode);
2279
2280 StoreObjectField(object, JSObject::kElementsOffset, new_elements);
2281 Comment("] GrowElementsCapacity");
2282 return new_elements;
2283 }
2284
2285 void CodeStubAssembler::InitializeAllocationMemento(
2286 compiler::Node* base_allocation, int base_allocation_size,
2287 compiler::Node* allocation_site) {
2288 StoreObjectFieldNoWriteBarrier(
2289 base_allocation, AllocationMemento::kMapOffset + base_allocation_size,
2290 HeapConstant(Handle<Map>(isolate()->heap()->allocation_memento_map())));
2291 StoreObjectFieldNoWriteBarrier(
2292 base_allocation,
2293 AllocationMemento::kAllocationSiteOffset + base_allocation_size,
2294 allocation_site);
2295 if (FLAG_allocation_site_pretenuring) {
2296 Node* count = LoadObjectField(allocation_site,
2297 AllocationSite::kPretenureCreateCountOffset);
2298 Node* incremented_count = SmiAdd(count, SmiConstant(Smi::FromInt(1)));
2299 StoreObjectFieldNoWriteBarrier(allocation_site,
2300 AllocationSite::kPretenureCreateCountOffset,
2301 incremented_count);
2302 }
2303 }
2304
2305 Node* CodeStubAssembler::TryTaggedToFloat64(Node* value,
2306 Label* if_valueisnotnumber) {
2307 Label out(this);
2308 Variable var_result(this, MachineRepresentation::kFloat64);
2309
2310 // Check if the {value} is a Smi or a HeapObject.
2311 Label if_valueissmi(this), if_valueisnotsmi(this);
2312 Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
2313
2314 Bind(&if_valueissmi);
2315 {
2316 // Convert the Smi {value}.
2317 var_result.Bind(SmiToFloat64(value));
2318 Goto(&out);
2319 }
2320
2321 Bind(&if_valueisnotsmi);
2322 {
2323 // Check if {value} is a HeapNumber.
2324 Label if_valueisheapnumber(this);
2325 Branch(IsHeapNumberMap(LoadMap(value)), &if_valueisheapnumber,
2326 if_valueisnotnumber);
2327
2328 Bind(&if_valueisheapnumber);
2329 {
2330 // Load the floating point value.
2331 var_result.Bind(LoadHeapNumberValue(value));
2332 Goto(&out);
2333 }
2334 }
2335 Bind(&out);
2336 return var_result.value();
2337 }
2338
2339 Node* CodeStubAssembler::TruncateTaggedToFloat64(Node* context, Node* value) {
2340 // We might need to loop once due to ToNumber conversion.
2341 Variable var_value(this, MachineRepresentation::kTagged),
2342 var_result(this, MachineRepresentation::kFloat64);
2343 Label loop(this, &var_value), done_loop(this, &var_result);
2344 var_value.Bind(value);
2345 Goto(&loop);
2346 Bind(&loop);
2347 {
2348 Label if_valueisnotnumber(this, Label::kDeferred);
2349
2350 // Load the current {value}.
2351 value = var_value.value();
2352
2353 // Convert {value} to Float64 if it is a number; otherwise, jump to the
2354 // deferred non-number conversion below and loop.
2355 Node* const result = TryTaggedToFloat64(value, &if_valueisnotnumber);
2356 var_result.Bind(result);
2357 Goto(&done_loop);
2358
2359 Bind(&if_valueisnotnumber);
2360 {
2361 // Convert the {value} to a Number first.
2362 Callable callable = CodeFactory::NonNumberToNumber(isolate());
2363 var_value.Bind(CallStub(callable, context, value));
2364 Goto(&loop);
2365 }
2366 }
2367 Bind(&done_loop);
2368 return var_result.value();
2369 }
2370
2371 Node* CodeStubAssembler::TruncateTaggedToWord32(Node* context, Node* value) {
2372 // We might need to loop once due to ToNumber conversion.
2373 Variable var_value(this, MachineRepresentation::kTagged),
2374 var_result(this, MachineRepresentation::kWord32);
2375 Label loop(this, &var_value), done_loop(this, &var_result);
2376 var_value.Bind(value);
2377 Goto(&loop);
2378 Bind(&loop);
2379 {
2380 // Load the current {value}.
2381 value = var_value.value();
2382
2383 // Check if the {value} is a Smi or a HeapObject.
2384 Label if_valueissmi(this), if_valueisnotsmi(this);
2385 Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
2386
2387 Bind(&if_valueissmi);
2388 {
2389 // Convert the Smi {value}.
2390 var_result.Bind(SmiToWord32(value));
2391 Goto(&done_loop);
2392 }
2393
2394 Bind(&if_valueisnotsmi);
2395 {
2396 // Check if {value} is a HeapNumber.
2397 Label if_valueisheapnumber(this),
2398 if_valueisnotheapnumber(this, Label::kDeferred);
2399 Branch(WordEqual(LoadMap(value), HeapNumberMapConstant()),
2400 &if_valueisheapnumber, &if_valueisnotheapnumber);
2401
2402 Bind(&if_valueisheapnumber);
2403 {
2404 // Truncate the floating point value.
2405 var_result.Bind(TruncateHeapNumberValueToWord32(value));
2406 Goto(&done_loop);
2407 }
2408
2409 Bind(&if_valueisnotheapnumber);
2410 {
2411 // Convert the {value} to a Number first.
2412 Callable callable = CodeFactory::NonNumberToNumber(isolate());
2413 var_value.Bind(CallStub(callable, context, value));
2414 Goto(&loop);
2415 }
2416 }
2417 }
2418 Bind(&done_loop);
2419 return var_result.value();
2420 }
2421
2422 Node* CodeStubAssembler::TruncateHeapNumberValueToWord32(Node* object) {
2423 Node* value = LoadHeapNumberValue(object);
2424 return TruncateFloat64ToWord32(value);
2425 }
2426
2427 Node* CodeStubAssembler::ChangeFloat64ToTagged(Node* value) {
2428 Node* value32 = RoundFloat64ToInt32(value);
2429 Node* value64 = ChangeInt32ToFloat64(value32);
2430
2431 Label if_valueisint32(this), if_valueisheapnumber(this), if_join(this);
2432
2433 Label if_valueisequal(this), if_valueisnotequal(this);
2434 Branch(Float64Equal(value, value64), &if_valueisequal, &if_valueisnotequal);
2435 Bind(&if_valueisequal);
2436 {
2437 GotoUnless(Word32Equal(value32, Int32Constant(0)), &if_valueisint32);
2438 Branch(Int32LessThan(Float64ExtractHighWord32(value), Int32Constant(0)),
2439 &if_valueisheapnumber, &if_valueisint32);
2440 }
2441 Bind(&if_valueisnotequal);
2442 Goto(&if_valueisheapnumber);
2443
2444 Variable var_result(this, MachineRepresentation::kTagged);
2445 Bind(&if_valueisint32);
2446 {
2447 if (Is64()) {
2448 Node* result = SmiTag(ChangeInt32ToInt64(value32));
2449 var_result.Bind(result);
2450 Goto(&if_join);
2451 } else {
2452 Node* pair = Int32AddWithOverflow(value32, value32);
2453 Node* overflow = Projection(1, pair);
2454 Label if_overflow(this, Label::kDeferred), if_notoverflow(this);
2455 Branch(overflow, &if_overflow, &if_notoverflow);
2456 Bind(&if_overflow);
2457 Goto(&if_valueisheapnumber);
2458 Bind(&if_notoverflow);
2459 {
2460 Node* result = Projection(0, pair);
2461 var_result.Bind(result);
2462 Goto(&if_join);
2463 }
2464 }
2465 }
2466 Bind(&if_valueisheapnumber);
2467 {
2468 Node* result = AllocateHeapNumberWithValue(value);
2469 var_result.Bind(result);
2470 Goto(&if_join);
2471 }
2472 Bind(&if_join);
2473 return var_result.value();
2474 }
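// Note on the Smi check above: a double that round-trips through int32
// exactly is Smi-representable unless it is -0.0. value32 == 0 with the sign
// bit set in the high word identifies exactly -0.0, which has no Smi
// encoding and is boxed as a HeapNumber instead.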
2475
2476 Node* CodeStubAssembler::ChangeInt32ToTagged(Node* value) {
2477 if (Is64()) {
2478 return SmiTag(ChangeInt32ToInt64(value));
2479 }
2480 Variable var_result(this, MachineRepresentation::kTagged);
2481 Node* pair = Int32AddWithOverflow(value, value);
2482 Node* overflow = Projection(1, pair);
2483 Label if_overflow(this, Label::kDeferred), if_notoverflow(this),
2484 if_join(this);
2485 Branch(overflow, &if_overflow, &if_notoverflow);
2486 Bind(&if_overflow);
2487 {
2488 Node* value64 = ChangeInt32ToFloat64(value);
2489 Node* result = AllocateHeapNumberWithValue(value64);
2490 var_result.Bind(result);
2491 }
2492 Goto(&if_join);
2493 Bind(&if_notoverflow);
2494 {
2495 Node* result = Projection(0, pair);
2496 var_result.Bind(result);
2497 }
2498 Goto(&if_join);
2499 Bind(&if_join);
2500 return var_result.value();
2501 }
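// The overflow trick above is just Smi tagging: on 32-bit targets a Smi is
// the payload shifted left by one with a zero tag bit, so
//
//   SmiTag(v) == (v << 1) == v + v
//
// and the overflow flag of the addition fires exactly when v is outside the
// 31-bit payload range. On 64-bit targets every int32 fits in the Smi
// payload, hence the unconditional SmiTag fast path.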
2502
2503 Node* CodeStubAssembler::ChangeUint32ToTagged(Node* value) {
2504 Label if_overflow(this, Label::kDeferred), if_not_overflow(this),
2505 if_join(this);
2506 Variable var_result(this, MachineRepresentation::kTagged);
2507 // If {value} > 2^31 - 1, we need to store it in a HeapNumber.
2508 Branch(Uint32LessThan(Int32Constant(Smi::kMaxValue), value), &if_overflow,
2509 &if_not_overflow);
2510
2511 Bind(&if_not_overflow);
2512 {
2513 if (Is64()) {
2514 var_result.Bind(SmiTag(ChangeUint32ToUint64(value)));
2515 } else {
2516 // If tagging {value} results in an overflow, we need to use a HeapNumber
2517 // to represent it.
2518 Node* pair = Int32AddWithOverflow(value, value);
2519 Node* overflow = Projection(1, pair);
2520 GotoIf(overflow, &if_overflow);
2521
2522 Node* result = Projection(0, pair);
2523 var_result.Bind(result);
2524 }
2525 }
2526 Goto(&if_join);
2527
2528 Bind(&if_overflow);
2529 {
2530 Node* float64_value = ChangeUint32ToFloat64(value);
2531 var_result.Bind(AllocateHeapNumberWithValue(float64_value));
2532 }
2533 Goto(&if_join);
2534
2535 Bind(&if_join);
2536 return var_result.value();
2537 }
2538
2539 Node* CodeStubAssembler::ToThisString(Node* context, Node* value,
2540 char const* method_name) {
2541 Variable var_value(this, MachineRepresentation::kTagged);
2542 var_value.Bind(value);
2543
2544 // Check if the {value} is a Smi or a HeapObject.
2545 Label if_valueissmi(this, Label::kDeferred), if_valueisnotsmi(this),
2546 if_valueisstring(this);
2547 Branch(TaggedIsSmi(value), &if_valueissmi, &if_valueisnotsmi);
2548 Bind(&if_valueisnotsmi);
2549 {
2550 // Load the instance type of the {value}.
2551 Node* value_instance_type = LoadInstanceType(value);
2552
2553 // Check if the {value} is already a String.
2554 Label if_valueisnotstring(this, Label::kDeferred);
2555 Branch(IsStringInstanceType(value_instance_type), &if_valueisstring,
2556 &if_valueisnotstring);
2557 Bind(&if_valueisnotstring);
2558 {
2559 // Check if the {value} is null.
2560 Label if_valueisnullorundefined(this, Label::kDeferred),
2561 if_valueisnotnullorundefined(this, Label::kDeferred),
2562 if_valueisnotnull(this, Label::kDeferred);
2563 Branch(WordEqual(value, NullConstant()), &if_valueisnullorundefined,
2564 &if_valueisnotnull);
2565 Bind(&if_valueisnotnull);
2566 {
2567 // Check if the {value} is undefined.
2568 Branch(WordEqual(value, UndefinedConstant()),
2569 &if_valueisnullorundefined, &if_valueisnotnullorundefined);
2570 Bind(&if_valueisnotnullorundefined);
2571 {
2572 // Convert the {value} to a String.
2573 Callable callable = CodeFactory::ToString(isolate());
2574 var_value.Bind(CallStub(callable, context, value));
2575 Goto(&if_valueisstring);
2576 }
2577 }
2578
2579 Bind(&if_valueisnullorundefined);
2580 {
2581 // The {value} is either null or undefined.
2582 CallRuntime(Runtime::kThrowCalledOnNullOrUndefined, context,
2583 HeapConstant(factory()->NewStringFromAsciiChecked(
2584 method_name, TENURED)));
2585 Goto(&if_valueisstring); // Never reached.
2586 }
2587 }
2588 }
2589 Bind(&if_valueissmi);
2590 {
2591 // The {value} is a Smi, convert it to a String.
2592 Callable callable = CodeFactory::NumberToString(isolate());
2593 var_value.Bind(CallStub(callable, context, value));
2594 Goto(&if_valueisstring);
2595 }
2596 Bind(&if_valueisstring);
2597 return var_value.value();
2598 }
2599
2600 Node* CodeStubAssembler::ToThisValue(Node* context, Node* value,
2601 PrimitiveType primitive_type,
2602 char const* method_name) {
2603 // We might need to loop once due to JSValue unboxing.
2604 Variable var_value(this, MachineRepresentation::kTagged);
2605 Label loop(this, &var_value), done_loop(this),
2606 done_throw(this, Label::kDeferred);
2607 var_value.Bind(value);
2608 Goto(&loop);
2609 Bind(&loop);
2610 {
2611 // Load the current {value}.
2612 value = var_value.value();
2613
2614 // Check if the {value} is a Smi or a HeapObject.
2615 GotoIf(TaggedIsSmi(value), (primitive_type == PrimitiveType::kNumber)
2616 ? &done_loop
2617 : &done_throw);
2618
2619 // Load the map of the {value}.
2620 Node* value_map = LoadMap(value);
2621
2622 // Load the instance type of the {value}.
2623 Node* value_instance_type = LoadMapInstanceType(value_map);
2624
2625 // Check if {value} is a JSValue.
2626 Label if_valueisvalue(this, Label::kDeferred), if_valueisnotvalue(this);
2627 Branch(Word32Equal(value_instance_type, Int32Constant(JS_VALUE_TYPE)),
2628 &if_valueisvalue, &if_valueisnotvalue);
2629
2630 Bind(&if_valueisvalue);
2631 {
2632 // Load the actual value from the {value}.
2633 var_value.Bind(LoadObjectField(value, JSValue::kValueOffset));
2634 Goto(&loop);
2635 }
2636
2637 Bind(&if_valueisnotvalue);
2638 {
2639 switch (primitive_type) {
2640 case PrimitiveType::kBoolean:
2641 GotoIf(WordEqual(value_map, BooleanMapConstant()), &done_loop);
2642 break;
2643 case PrimitiveType::kNumber:
2644 GotoIf(
2645 Word32Equal(value_instance_type, Int32Constant(HEAP_NUMBER_TYPE)),
2646 &done_loop);
2647 break;
2648 case PrimitiveType::kString:
2649 GotoIf(IsStringInstanceType(value_instance_type), &done_loop);
2650 break;
2651 case PrimitiveType::kSymbol:
2652 GotoIf(Word32Equal(value_instance_type, Int32Constant(SYMBOL_TYPE)),
2653 &done_loop);
2654 break;
2655 }
2656 Goto(&done_throw);
2657 }
2658 }
2659
2660 Bind(&done_throw);
2661 {
2662 // The {value} is not a compatible receiver for this method.
2663 CallRuntime(Runtime::kThrowNotGeneric, context,
2664 HeapConstant(factory()->NewStringFromAsciiChecked(method_name,
2665 TENURED)));
2666 Goto(&done_loop); // Never reached.
2667 }
2668
2669 Bind(&done_loop);
2670 return var_value.value();
2671 }
2672
2673 Node* CodeStubAssembler::ThrowIfNotInstanceType(Node* context, Node* value,
2674 InstanceType instance_type,
2675 char const* method_name) {
2676 Label out(this), throw_exception(this, Label::kDeferred);
2677 Variable var_value_map(this, MachineRepresentation::kTagged);
2678
2679 GotoIf(TaggedIsSmi(value), &throw_exception);
2680
2681 // Load the instance type of the {value}.
2682 var_value_map.Bind(LoadMap(value));
2683 Node* const value_instance_type = LoadMapInstanceType(var_value_map.value());
2684
2685 Branch(Word32Equal(value_instance_type, Int32Constant(instance_type)), &out,
2686 &throw_exception);
2687
2688 // The {value} is not a compatible receiver for this method.
2689 Bind(&throw_exception);
2690 CallRuntime(
2691 Runtime::kThrowIncompatibleMethodReceiver, context,
2692 HeapConstant(factory()->NewStringFromAsciiChecked(method_name, TENURED)),
2693 value);
2694 var_value_map.Bind(UndefinedConstant());
2695 Goto(&out); // Never reached.
2696
2697 Bind(&out);
2698 return var_value_map.value();
2699 }
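// Usage sketch (hypothetical call site): guarding a builtin against
// receivers of the wrong instance type; the method name shown is only an
// example.
//
//   ThrowIfNotInstanceType(context, receiver, JS_VALUE_TYPE,
//                          "Example.prototype.method");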
2700
2701 Node* CodeStubAssembler::IsSpecialReceiverMap(Node* map) {
2702 Node* is_special = IsSpecialReceiverInstanceType(LoadMapInstanceType(map));
2703 uint32_t mask =
2704 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
2705 USE(mask);
2706 // Interceptors or access checks imply a special receiver.
2707 CSA_ASSERT(this, Select(IsSetWord32(LoadMapBitField(map), mask), is_special,
2708 Int32Constant(1), MachineRepresentation::kWord32));
2709 return is_special;
2710 }
2711
2712 Node* CodeStubAssembler::IsDictionaryMap(Node* map) {
2713 CSA_SLOW_ASSERT(this, IsMap(map));
2714 Node* bit_field3 = LoadMapBitField3(map);
2715 return Word32NotEqual(IsSetWord32<Map::DictionaryMap>(bit_field3),
2716 Int32Constant(0));
2717 }
2718
2719 Node* CodeStubAssembler::IsCallableMap(Node* map) {
2720 CSA_ASSERT(this, IsMap(map));
2721 return Word32NotEqual(
2722 Word32And(LoadMapBitField(map), Int32Constant(1 << Map::kIsCallable)),
2723 Int32Constant(0));
2724 }
2725
2726 Node* CodeStubAssembler::IsSpecialReceiverInstanceType(Node* instance_type) {
2727 STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
2728 return Int32LessThanOrEqual(instance_type,
2729 Int32Constant(LAST_SPECIAL_RECEIVER_TYPE));
2730 }
2731
2732 Node* CodeStubAssembler::IsStringInstanceType(Node* instance_type) {
2733 STATIC_ASSERT(INTERNALIZED_STRING_TYPE == FIRST_TYPE);
2734 return Int32LessThan(instance_type, Int32Constant(FIRST_NONSTRING_TYPE));
2735 }
2736
2737 Node* CodeStubAssembler::IsJSReceiverInstanceType(Node* instance_type) {
2738 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
2739 return Int32GreaterThanOrEqual(instance_type,
2740 Int32Constant(FIRST_JS_RECEIVER_TYPE));
2741 }
2742
2743 Node* CodeStubAssembler::IsJSReceiver(Node* object) {
2744 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
2745 return IsJSReceiverInstanceType(LoadInstanceType(object));
2746 }
2747
2748 Node* CodeStubAssembler::IsJSObject(Node* object) {
2749 STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
2750 return Int32GreaterThanOrEqual(LoadInstanceType(object),
2751 Int32Constant(FIRST_JS_OBJECT_TYPE));
2752 }
2753
2754 Node* CodeStubAssembler::IsJSGlobalProxy(Node* object) {
2755 return Word32Equal(LoadInstanceType(object),
2756 Int32Constant(JS_GLOBAL_PROXY_TYPE));
2757 }
2758
2759 Node* CodeStubAssembler::IsMap(Node* map) {
2760 return HasInstanceType(map, MAP_TYPE);
2761 }
2762
2763 Node* CodeStubAssembler::IsJSValue(Node* map) {
2764 return HasInstanceType(map, JS_VALUE_TYPE);
2765 }
2766
2767 Node* CodeStubAssembler::IsJSArray(Node* object) {
2768 return HasInstanceType(object, JS_ARRAY_TYPE);
2769 }
2770
2771 Node* CodeStubAssembler::IsWeakCell(Node* object) {
2772 return HasInstanceType(object, WEAK_CELL_TYPE);
2773 }
2774
2775 Node* CodeStubAssembler::IsName(Node* object) {
2776 return Int32LessThanOrEqual(LoadInstanceType(object),
2777 Int32Constant(LAST_NAME_TYPE));
2778 }
2779
2780 Node* CodeStubAssembler::IsString(Node* object) {
2781 return Int32LessThan(LoadInstanceType(object),
2782 Int32Constant(FIRST_NONSTRING_TYPE));
2783 }
2784
2785 Node* CodeStubAssembler::IsNativeContext(Node* object) {
2786 return WordEqual(LoadMap(object), LoadRoot(Heap::kNativeContextMapRootIndex));
2787 }
2788
2789 Node* CodeStubAssembler::IsFixedDoubleArray(Node* object) {
2790 return WordEqual(LoadMap(object), FixedDoubleArrayMapConstant());
2791 }
2792
2793 Node* CodeStubAssembler::IsHashTable(Node* object) {
2794 return WordEqual(LoadMap(object), LoadRoot(Heap::kHashTableMapRootIndex));
2795 }
2796
2797 Node* CodeStubAssembler::IsDictionary(Node* object) {
2798 return WordOr(IsHashTable(object), IsUnseededNumberDictionary(object));
2799 }
2800
2801 Node* CodeStubAssembler::IsUnseededNumberDictionary(Node* object) {
2802 return WordEqual(LoadMap(object),
2803 LoadRoot(Heap::kUnseededNumberDictionaryMapRootIndex));
2804 }
2805
2806 Node* CodeStubAssembler::StringCharCodeAt(Node* string, Node* index) {
2807 CSA_ASSERT(this, IsString(string));
2808 // Translate the {index} into a Word.
2809 index = SmiToWord(index);
2810
2811 // We may need to loop in case of cons or sliced strings.
2812 Variable var_index(this, MachineType::PointerRepresentation());
2813 Variable var_result(this, MachineRepresentation::kWord32);
2814 Variable var_string(this, MachineRepresentation::kTagged);
2815 Variable* loop_vars[] = {&var_index, &var_string};
2816 Label done_loop(this, &var_result), loop(this, 2, loop_vars);
2817 var_string.Bind(string);
2818 var_index.Bind(index);
2819 Goto(&loop);
2820 Bind(&loop);
2821 {
2822 // Load the current {index}.
2823 index = var_index.value();
2824
2825 // Load the current {string}.
2826 string = var_string.value();
2827
2828 // Load the instance type of the {string}.
2829 Node* string_instance_type = LoadInstanceType(string);
2830
2831 // Check if the {string} is a SeqString.
2832 Label if_stringissequential(this), if_stringisnotsequential(this);
2833 Branch(Word32Equal(Word32And(string_instance_type,
2834 Int32Constant(kStringRepresentationMask)),
2835 Int32Constant(kSeqStringTag)),
2836 &if_stringissequential, &if_stringisnotsequential);
2837
2838 Bind(&if_stringissequential);
2839 {
2840 // Check if the {string} is a TwoByteSeqString or a OneByteSeqString.
2841 Label if_stringistwobyte(this), if_stringisonebyte(this);
2842 Branch(Word32Equal(Word32And(string_instance_type,
2843 Int32Constant(kStringEncodingMask)),
2844 Int32Constant(kTwoByteStringTag)),
2845 &if_stringistwobyte, &if_stringisonebyte);
2846
2847 Bind(&if_stringisonebyte);
2848 {
2849 var_result.Bind(
2850 Load(MachineType::Uint8(), string,
2851 IntPtrAdd(index, IntPtrConstant(SeqOneByteString::kHeaderSize -
2852 kHeapObjectTag))));
2853 Goto(&done_loop);
2854 }
2855
2856 Bind(&if_stringistwobyte);
2857 {
2858 var_result.Bind(
2859 Load(MachineType::Uint16(), string,
2860 IntPtrAdd(WordShl(index, IntPtrConstant(1)),
2861 IntPtrConstant(SeqTwoByteString::kHeaderSize -
2862 kHeapObjectTag))));
2863 Goto(&done_loop);
2864 }
2865 }
2866
2867 Bind(&if_stringisnotsequential);
2868 {
2869 // Check if the {string} is a ConsString.
2870 Label if_stringiscons(this), if_stringisnotcons(this);
2871 Branch(Word32Equal(Word32And(string_instance_type,
2872 Int32Constant(kStringRepresentationMask)),
2873 Int32Constant(kConsStringTag)),
2874 &if_stringiscons, &if_stringisnotcons);
2875
2876 Bind(&if_stringiscons);
2877 {
2878 // Check whether the right hand side is the empty string (i.e. if
2879 // this is really a flat string in a cons string). If that is not
2880 // the case, we flatten the string first.
2881 Label if_rhsisempty(this), if_rhsisnotempty(this, Label::kDeferred);
2882 Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
2883 Branch(WordEqual(rhs, EmptyStringConstant()), &if_rhsisempty,
2884 &if_rhsisnotempty);
2885
2886 Bind(&if_rhsisempty);
2887 {
2888 // Just operate on the left hand side of the {string}.
2889 var_string.Bind(LoadObjectField(string, ConsString::kFirstOffset));
2890 Goto(&loop);
2891 }
2892
2893 Bind(&if_rhsisnotempty);
2894 {
2895 // Flatten the {string} and lookup in the resulting string.
2896 var_string.Bind(CallRuntime(Runtime::kFlattenString,
2897 NoContextConstant(), string));
2898 Goto(&loop);
2899 }
2900 }
2901
2902 Bind(&if_stringisnotcons);
2903 {
2904 // Check if the {string} is an ExternalString.
2905 Label if_stringisexternal(this), if_stringisnotexternal(this);
2906 Branch(Word32Equal(Word32And(string_instance_type,
2907 Int32Constant(kStringRepresentationMask)),
2908 Int32Constant(kExternalStringTag)),
2909 &if_stringisexternal, &if_stringisnotexternal);
2910
2911 Bind(&if_stringisexternal);
2912 {
2913 // Check if the {string} is a short external string.
2914 Label if_stringisnotshort(this),
2915 if_stringisshort(this, Label::kDeferred);
2916 Branch(Word32Equal(Word32And(string_instance_type,
2917 Int32Constant(kShortExternalStringMask)),
2918 Int32Constant(0)),
2919 &if_stringisnotshort, &if_stringisshort);
2920
2921 Bind(&if_stringisnotshort);
2922 {
2923 // Load the actual resource data from the {string}.
2924 Node* string_resource_data =
2925 LoadObjectField(string, ExternalString::kResourceDataOffset,
2926 MachineType::Pointer());
2927
2928 // Check if the {string} is a TwoByteExternalString or a
2929 // OneByteExternalString.
2930 Label if_stringistwobyte(this), if_stringisonebyte(this);
2931 Branch(Word32Equal(Word32And(string_instance_type,
2932 Int32Constant(kStringEncodingMask)),
2933 Int32Constant(kTwoByteStringTag)),
2934 &if_stringistwobyte, &if_stringisonebyte);
2935
2936 Bind(&if_stringisonebyte);
2937 {
2938 var_result.Bind(
2939 Load(MachineType::Uint8(), string_resource_data, index));
2940 Goto(&done_loop);
2941 }
2942
2943 Bind(&if_stringistwobyte);
2944 {
2945 var_result.Bind(Load(MachineType::Uint16(), string_resource_data,
2946 WordShl(index, IntPtrConstant(1))));
2947 Goto(&done_loop);
2948 }
2949 }
2950
2951 Bind(&if_stringisshort);
2952 {
2953 // The {string} might be compressed; call the runtime.
2954 var_result.Bind(SmiToWord32(
2955 CallRuntime(Runtime::kExternalStringGetChar,
2956 NoContextConstant(), string, SmiTag(index))));
2957 Goto(&done_loop);
2958 }
2959 }
2960
2961 Bind(&if_stringisnotexternal);
2962 {
2963 // The {string} is a SlicedString, continue with its parent.
2964 Node* string_offset =
2965 LoadAndUntagObjectField(string, SlicedString::kOffsetOffset);
2966 Node* string_parent =
2967 LoadObjectField(string, SlicedString::kParentOffset);
2968 var_index.Bind(IntPtrAdd(index, string_offset));
2969 var_string.Bind(string_parent);
2970 Goto(&loop);
2971 }
2972 }
2973 }
2974 }
2975
2976 Bind(&done_loop);
2977 return var_result.value();
2978 }
2979
2980 Node* CodeStubAssembler::StringFromCharCode(Node* code) {
2981 Variable var_result(this, MachineRepresentation::kTagged);
2982
2983 // Check if the {code} is a one-byte char code.
2984 Label if_codeisonebyte(this), if_codeistwobyte(this, Label::kDeferred),
2985 if_done(this);
2986 Branch(Int32LessThanOrEqual(code, Int32Constant(String::kMaxOneByteCharCode)),
2987 &if_codeisonebyte, &if_codeistwobyte);
2988 Bind(&if_codeisonebyte);
2989 {
2990 // Load the isolate wide single character string cache.
2991 Node* cache = LoadRoot(Heap::kSingleCharacterStringCacheRootIndex);
2992
2993 // Check if we have an entry for the {code} in the single character string
2994 // cache already.
2995 Label if_entryisundefined(this, Label::kDeferred),
2996 if_entryisnotundefined(this);
2997 Node* entry = LoadFixedArrayElement(cache, code);
2998 Branch(WordEqual(entry, UndefinedConstant()), &if_entryisundefined,
2999 &if_entryisnotundefined);
3000
3001 Bind(&if_entryisundefined);
3002 {
3003 // Allocate a new SeqOneByteString for {code} and store it in the {cache}.
3004 Node* result = AllocateSeqOneByteString(1);
3005 StoreNoWriteBarrier(
3006 MachineRepresentation::kWord8, result,
3007 IntPtrConstant(SeqOneByteString::kHeaderSize - kHeapObjectTag), code);
3008 StoreFixedArrayElement(cache, code, result);
3009 var_result.Bind(result);
3010 Goto(&if_done);
3011 }
3012
3013 Bind(&if_entryisnotundefined);
3014 {
3015 // Return the entry from the {cache}.
3016 var_result.Bind(entry);
3017 Goto(&if_done);
3018 }
3019 }
3020
3021 Bind(&if_codeistwobyte);
3022 {
3023 // Allocate a new SeqTwoByteString for {code}.
3024 Node* result = AllocateSeqTwoByteString(1);
3025 StoreNoWriteBarrier(
3026 MachineRepresentation::kWord16, result,
3027 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag), code);
3028 var_result.Bind(result);
3029 Goto(&if_done);
3030 }
3031
3032 Bind(&if_done);
3033 return var_result.value();
3034 }
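
// Usage sketch: the two helpers above compose into a charAt-style fast
// path, exactly as the single-character case of SubString below does:
//
//   Node* code = StringCharCodeAt(string, index);  // Word32 character code.
//   Node* result = StringFromCharCode(code);       // One-character string.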
3035
3036 namespace {
3037
3038 // A wrapper around CopyStringCharacters which determines the correct string
3039 // encoding, allocates a corresponding sequential string, and then copies the
3040 // given character range using CopyStringCharacters.
3041 // |from_string| must be a sequential string. |from_index| and
3042 // |character_count| must be Smis s.t.
3043 // 0 <= |from_index| <= |from_index| + |character_count| < from_string.length.
3044 Node* AllocAndCopyStringCharacters(CodeStubAssembler* a, Node* context,
3045 Node* from, Node* from_instance_type,
3046 Node* from_index, Node* character_count) {
3047 typedef CodeStubAssembler::Label Label;
3048 typedef CodeStubAssembler::Variable Variable;
3049
3050 Label end(a), two_byte_sequential(a);
3051 Variable var_result(a, MachineRepresentation::kTagged);
3052
3053 Node* const smi_zero = a->SmiConstant(Smi::kZero);
3054
3055 STATIC_ASSERT((kOneByteStringTag & kStringEncodingMask) != 0);
3056 a->GotoIf(a->Word32Equal(a->Word32And(from_instance_type,
3057 a->Int32Constant(kStringEncodingMask)),
3058 a->Int32Constant(0)),
3059 &two_byte_sequential);
3060
3061 // The subject string is a sequential one-byte string.
3062 {
3063 Node* result =
3064 a->AllocateSeqOneByteString(context, a->SmiToWord(character_count));
3065 a->CopyStringCharacters(from, result, from_index, smi_zero, character_count,
3066 String::ONE_BYTE_ENCODING,
3067 String::ONE_BYTE_ENCODING,
3068 CodeStubAssembler::SMI_PARAMETERS);
3069 var_result.Bind(result);
3070
3071 a->Goto(&end);
3072 }
3073
3074 // The subject string is a sequential two-byte string.
3075 a->Bind(&two_byte_sequential);
3076 {
3077 Node* result =
3078 a->AllocateSeqTwoByteString(context, a->SmiToWord(character_count));
3079 a->CopyStringCharacters(from, result, from_index, smi_zero, character_count,
3080 String::TWO_BYTE_ENCODING,
3081 String::TWO_BYTE_ENCODING,
3082 CodeStubAssembler::SMI_PARAMETERS);
3083 var_result.Bind(result);
3084
3085 a->Goto(&end);
3086 }
3087
3088 a->Bind(&end);
3089 return var_result.value();
3090 }
3091
3092 } // namespace
3093
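// Computes String.prototype.substring-style slices, dispatching on the
// result length: a single character goes through the char-code cache, a
// result covering the entire input returns the input itself, a result
// shorter than SlicedString::kMinLength (13 at the time of writing) is
// copied, and anything longer becomes a SlicedString over the unpacked
// underlying string. Unusual cases (short external strings, invalid
// indices) fall back to Runtime::kSubString.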
3094 Node* CodeStubAssembler::SubString(Node* context, Node* string, Node* from,
3095 Node* to) {
3096 Label end(this);
3097 Label runtime(this);
3098
3099 Variable var_instance_type(this, MachineRepresentation::kWord8); // Int32.
3100 Variable var_result(this, MachineRepresentation::kTagged); // String.
3101 Variable var_from(this, MachineRepresentation::kTagged); // Smi.
3102 Variable var_string(this, MachineRepresentation::kTagged); // String.
3103
3104 var_instance_type.Bind(Int32Constant(0));
3105 var_string.Bind(string);
3106 var_from.Bind(from);
3107
3108 // Make sure first argument is a string.
3109
3110 // Bail out if the receiver is a Smi.
3111 GotoIf(TaggedIsSmi(string), &runtime);
3112
3113 // Load the instance type of the {string}.
3114 Node* const instance_type = LoadInstanceType(string);
3115 var_instance_type.Bind(instance_type);
3116
3117 // Check if {string} is a String.
3118 GotoUnless(IsStringInstanceType(instance_type), &runtime);
3119
3120 // Make sure that both from and to are non-negative smis.
3121
3122 GotoUnless(WordIsPositiveSmi(from), &runtime);
3123 GotoUnless(WordIsPositiveSmi(to), &runtime);
3124
3125 Node* const substr_length = SmiSub(to, from);
3126 Node* const string_length = LoadStringLength(string);
3127
3128 // Begin dispatching based on substring length.
3129
3130 Label original_string_or_invalid_length(this);
3131 GotoIf(SmiAboveOrEqual(substr_length, string_length),
3132 &original_string_or_invalid_length);
3133
3134 // A real substring (substr_length < string_length).
3135
3136 Label single_char(this);
3137 GotoIf(SmiEqual(substr_length, SmiConstant(Smi::FromInt(1))), &single_char);
3138
3139 // TODO(jgruber): Add an additional case for substring of length == 0?
3140
3141 // Deal with different string types: update the index if necessary
3142 // and put the underlying string into var_string.
3143
3144 // If the string is not indirect, it can only be sequential or external.
3145 STATIC_ASSERT(kIsIndirectStringMask == (kSlicedStringTag & kConsStringTag));
3146 STATIC_ASSERT(kIsIndirectStringMask != 0);
3147 Label underlying_unpacked(this);
3148 GotoIf(Word32Equal(
3149 Word32And(instance_type, Int32Constant(kIsIndirectStringMask)),
3150 Int32Constant(0)),
3151 &underlying_unpacked);
3152
3153 // The subject string is either a sliced or cons string.
3154
3155 Label sliced_string(this);
3156 GotoIf(Word32NotEqual(
3157 Word32And(instance_type, Int32Constant(kSlicedNotConsMask)),
3158 Int32Constant(0)),
3159 &sliced_string);
3160
3161 // Cons string. Check whether it is flat, then fetch first part.
3162 // Flat cons strings have an empty second part.
3163 {
3164 GotoIf(WordNotEqual(LoadObjectField(string, ConsString::kSecondOffset),
3165 EmptyStringConstant()),
3166 &runtime);
3167
3168 Node* first_string_part = LoadObjectField(string, ConsString::kFirstOffset);
3169 var_string.Bind(first_string_part);
3170 var_instance_type.Bind(LoadInstanceType(first_string_part));
3171
3172 Goto(&underlying_unpacked);
3173 }
3174
3175 Bind(&sliced_string);
3176 {
3177 // Fetch parent and correct start index by offset.
3178 Node* sliced_offset = LoadObjectField(string, SlicedString::kOffsetOffset);
3179 var_from.Bind(SmiAdd(from, sliced_offset));
3180
3181 Node* slice_parent = LoadObjectField(string, SlicedString::kParentOffset);
3182 var_string.Bind(slice_parent);
3183
3184 Node* slice_parent_instance_type = LoadInstanceType(slice_parent);
3185 var_instance_type.Bind(slice_parent_instance_type);
3186
3187 Goto(&underlying_unpacked);
3188 }
3189
3190 // The subject string can only be an external or a sequential string of
3191 // either encoding at this point.
3192 Label external_string(this);
3193 Bind(&underlying_unpacked);
3194 {
3195 if (FLAG_string_slices) {
3196 Label copy_routine(this);
3197
3198 // Short slice. Copy instead of slicing.
3199 GotoIf(SmiLessThan(substr_length,
3200 SmiConstant(Smi::FromInt(SlicedString::kMinLength))),
3201 ©_routine);
3202
3203 // Allocate new sliced string.
3204
3205 Label two_byte_slice(this);
3206 STATIC_ASSERT((kStringEncodingMask & kOneByteStringTag) != 0);
3207 STATIC_ASSERT((kStringEncodingMask & kTwoByteStringTag) == 0);
3208
3209 Counters* counters = isolate()->counters();
3210 IncrementCounter(counters->sub_string_native(), 1);
3211
3212 GotoIf(Word32Equal(Word32And(var_instance_type.value(),
3213 Int32Constant(kStringEncodingMask)),
3214 Int32Constant(0)),
3215 &two_byte_slice);
3216
3217 var_result.Bind(AllocateSlicedOneByteString(
3218 substr_length, var_string.value(), var_from.value()));
3219 Goto(&end);
3220
3221 Bind(&two_byte_slice);
3222
3223 var_result.Bind(AllocateSlicedTwoByteString(
3224 substr_length, var_string.value(), var_from.value()));
3225 Goto(&end);
3226
3227 Bind(©_routine);
3228 }
3229
3230 // The subject string can only be an external or a sequential string of
3231 // either encoding at this point.
3232 STATIC_ASSERT(kExternalStringTag != 0);
3233 STATIC_ASSERT(kSeqStringTag == 0);
3234 GotoUnless(Word32Equal(Word32And(var_instance_type.value(),
3235 Int32Constant(kExternalStringTag)),
3236 Int32Constant(0)),
3237 &external_string);
3238
3239 var_result.Bind(AllocAndCopyStringCharacters(
3240 this, context, var_string.value(), var_instance_type.value(),
3241 var_from.value(), substr_length));
3242
3243 Counters* counters = isolate()->counters();
3244 IncrementCounter(counters->sub_string_native(), 1);
3245
3246 Goto(&end);
3247 }
3248
3249 // Handle external string.
3250 Bind(&external_string);
3251 {
3252 // Rule out short external strings.
3253 STATIC_ASSERT(kShortExternalStringTag != 0);
3254 GotoIf(Word32NotEqual(Word32And(var_instance_type.value(),
3255 Int32Constant(kShortExternalStringMask)),
3256 Int32Constant(0)),
3257 &runtime);
3258
3259 // Move the pointer so that offset-wise, it looks like a sequential string.
3260 STATIC_ASSERT(SeqTwoByteString::kHeaderSize ==
3261 SeqOneByteString::kHeaderSize);
3262
3263 Node* resource_data = LoadObjectField(var_string.value(),
3264 ExternalString::kResourceDataOffset);
3265 Node* const fake_sequential_string = IntPtrSub(
3266 resource_data,
3267 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
3268
3269 var_result.Bind(AllocAndCopyStringCharacters(
3270 this, context, fake_sequential_string, var_instance_type.value(),
3271 var_from.value(), substr_length));
3272
3273 Counters* counters = isolate()->counters();
3274 IncrementCounter(counters->sub_string_native(), 1);
3275
3276 Goto(&end);
3277 }
3278
3279 // Substrings of length 1 are generated through CharCodeAt and FromCharCode.
3280 Bind(&single_char);
3281 {
3282 Node* char_code = StringCharCodeAt(var_string.value(), var_from.value());
3283 var_result.Bind(StringFromCharCode(char_code));
3284 Goto(&end);
3285 }
3286
3287 Bind(&original_string_or_invalid_length);
3288 {
3289 // Longer than original string's length or negative: unsafe arguments.
3290 GotoIf(SmiAbove(substr_length, string_length), &runtime);
3291
3292 // Equal length - check if {from, to} == {0, str.length}.
3293 GotoIf(SmiAbove(from, SmiConstant(Smi::kZero)), &runtime);
3294
3295 // Return the original string (substr_length == string_length).
3296
3297 Counters* counters = isolate()->counters();
3298 IncrementCounter(counters->sub_string_native(), 1);
3299
3300 var_result.Bind(string);
3301 Goto(&end);
3302 }
3303
3304 // Fall back to a runtime call.
3305 Bind(&runtime);
3306 {
3307 var_result.Bind(
3308 CallRuntime(Runtime::kSubString, context, string, from, to));
3309 Goto(&end);
3310 }
3311
3312 Bind(&end);
3313 return var_result.value();
3314 }
3315
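// Concatenates two strings: returns the other operand when either side is
// empty, builds a ConsString for results of at least ConsString::kMinLength
// characters, copies both operands into a fresh sequential string when they
// share encoding and representation, and otherwise (or on length overflow)
// falls back to Runtime::kStringAdd.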
3316 Node* CodeStubAssembler::StringAdd(Node* context, Node* left, Node* right,
3317 AllocationFlags flags) {
3318 Label check_right(this);
3319 Label runtime(this, Label::kDeferred);
3320 Label cons(this);
3321 Label non_cons(this);
3322 Variable result(this, MachineRepresentation::kTagged);
3323 Label done(this, &result);
3324 Label done_native(this, &result);
3325 Counters* counters = isolate()->counters();
3326
3327 Node* left_length = LoadStringLength(left);
3328 GotoIf(WordNotEqual(IntPtrConstant(0), left_length), &check_right);
3329 result.Bind(right);
3330 Goto(&done_native);
3331
3332 Bind(&check_right);
3333 Node* right_length = LoadStringLength(right);
3334 GotoIf(WordNotEqual(IntPtrConstant(0), right_length), &cons);
3335 result.Bind(left);
3336 Goto(&done_native);
3337
3338 Bind(&cons);
3339 CSA_ASSERT(this, TaggedIsSmi(left_length));
3340 CSA_ASSERT(this, TaggedIsSmi(right_length));
3341 Node* new_length = SmiAdd(left_length, right_length);
3342 GotoIf(UintPtrGreaterThanOrEqual(
3343 new_length, SmiConstant(Smi::FromInt(String::kMaxLength))),
3344 &runtime);
3345
3346 GotoIf(IntPtrLessThan(new_length,
3347 SmiConstant(Smi::FromInt(ConsString::kMinLength))),
3348 &non_cons);
3349
3350 result.Bind(NewConsString(context, new_length, left, right, flags));
3351 Goto(&done_native);
3352
3353 Bind(&non_cons);
3354
3355 Comment("Full string concatenate");
3356 Node* left_instance_type = LoadInstanceType(left);
3357 Node* right_instance_type = LoadInstanceType(right);
3358 // Compute intersection and difference of instance types.
3359
3360 Node* ored_instance_types = WordOr(left_instance_type, right_instance_type);
3361 Node* xored_instance_types = WordXor(left_instance_type, right_instance_type);
3362
3363 // Check if both strings have the same encoding and both are sequential.
3364 GotoIf(WordNotEqual(
3365 WordAnd(xored_instance_types, IntPtrConstant(kStringEncodingMask)),
3366 IntPtrConstant(0)),
3367 &runtime);
3368 GotoIf(WordNotEqual(WordAnd(ored_instance_types,
3369 IntPtrConstant(kStringRepresentationMask)),
3370 IntPtrConstant(0)),
3371 &runtime);
3372
3373 Label two_byte(this);
3374 GotoIf(WordEqual(
3375 WordAnd(ored_instance_types, IntPtrConstant(kStringEncodingMask)),
3376 IntPtrConstant(kTwoByteStringTag)),
3377 &two_byte);
3378 // One-byte sequential string case
3379 Node* new_string =
3380 AllocateSeqOneByteString(context, new_length, SMI_PARAMETERS);
3381 CopyStringCharacters(left, new_string, SmiConstant(Smi::kZero),
3382 SmiConstant(Smi::kZero), left_length,
3383 String::ONE_BYTE_ENCODING, String::ONE_BYTE_ENCODING,
3384 SMI_PARAMETERS);
3385 CopyStringCharacters(right, new_string, SmiConstant(Smi::kZero), left_length,
3386 right_length, String::ONE_BYTE_ENCODING,
3387 String::ONE_BYTE_ENCODING, SMI_PARAMETERS);
3388 result.Bind(new_string);
3389 Goto(&done_native);
3390
3391 Bind(&two_byte);
3392 {
3393 // Two-byte sequential string case
3394 new_string = AllocateSeqTwoByteString(context, new_length, SMI_PARAMETERS);
3395 CopyStringCharacters(left, new_string, SmiConstant(Smi::kZero),
3396 SmiConstant(Smi::kZero), left_length,
3397 String::TWO_BYTE_ENCODING, String::TWO_BYTE_ENCODING,
3398 SMI_PARAMETERS);
3399 CopyStringCharacters(right, new_string, SmiConstant(Smi::kZero),
3400 left_length, right_length, String::TWO_BYTE_ENCODING,
3401 String::TWO_BYTE_ENCODING, SMI_PARAMETERS);
3402 result.Bind(new_string);
3403 Goto(&done_native);
3404 }
3405
3406 Bind(&runtime);
3407 {
3408 result.Bind(CallRuntime(Runtime::kStringAdd, context, left, right));
3409 Goto(&done);
3410 }
3411
3412 Bind(&done_native);
3413 {
3414 IncrementCounter(counters->string_add_native(), 1);
3415 Goto(&done);
3416 }
3417
3418 Bind(&done);
3419 return result.value();
3420 }
3421
3422 Node* CodeStubAssembler::StringIndexOfChar(Node* context, Node* string,
3423 Node* needle_char, Node* from) {
3424 CSA_ASSERT(this, IsString(string));
3425 Variable var_result(this, MachineRepresentation::kTagged);
3426
3427 Label out(this), runtime(this, Label::kDeferred);
3428
3429 // Let runtime handle non-one-byte {needle_char}.
3430
3431 Node* const one_byte_char_mask = IntPtrConstant(0xFF);
3432 GotoUnless(WordEqual(WordAnd(needle_char, one_byte_char_mask), needle_char),
3433 &runtime);
3434
3435 // TODO(jgruber): Handle external and two-byte strings.
3436
3437 Node* const one_byte_seq_mask = Int32Constant(
3438 kIsIndirectStringMask | kExternalStringTag | kStringEncodingMask);
3439 Node* const expected_masked = Int32Constant(kOneByteStringTag);
3440
3441 Node* const string_instance_type = LoadInstanceType(string);
3442 GotoUnless(Word32Equal(Word32And(string_instance_type, one_byte_seq_mask),
3443 expected_masked),
3444 &runtime);
3445
3446 // If we reach this, {string} is a non-indirect, non-external one-byte string.
3447
3448 Node* const length = LoadStringLength(string);
3449 Node* const search_range_length = SmiUntag(SmiSub(length, from));
3450
3451 const int offset = SeqOneByteString::kHeaderSize - kHeapObjectTag;
3452 Node* const begin = IntPtrConstant(offset);
3453 Node* const cursor = IntPtrAdd(begin, SmiUntag(from));
3454 Node* const end = IntPtrAdd(cursor, search_range_length);
3455
3456 var_result.Bind(SmiConstant(Smi::FromInt(-1)));
3457
3458 BuildFastLoop(MachineType::PointerRepresentation(), cursor, end,
3459 [string, needle_char, begin, &var_result, &out](
3460 CodeStubAssembler* csa, Node* cursor) {
3461 Label next(csa);
3462 Node* value = csa->Load(MachineType::Uint8(), string, cursor);
3463 csa->GotoUnless(csa->WordEqual(value, needle_char), &next);
3464
3465 // Found a match.
3466 Node* index = csa->SmiTag(csa->IntPtrSub(cursor, begin));
3467 var_result.Bind(index);
3468 csa->Goto(&out);
3469
3470 csa->Bind(&next);
3471 },
3472 1, IndexAdvanceMode::kPost);
3473 Goto(&out);
3474
3475 Bind(&runtime);
3476 {
3477 Node* const pattern = StringFromCharCode(needle_char);
3478 Node* const result =
3479 CallRuntime(Runtime::kStringIndexOf, context, string, pattern, from);
3480 var_result.Bind(result);
3481 Goto(&out);
3482 }
3483
3484 Bind(&out);
3485 return var_result.value();
3486 }
3487
3488 Node* CodeStubAssembler::StringFromCodePoint(compiler::Node* codepoint,
3489 UnicodeEncoding encoding) {
3490 Variable var_result(this, MachineRepresentation::kTagged);
3491 var_result.Bind(EmptyStringConstant());
3492
3493 Label if_isword16(this), if_isword32(this), return_result(this);
3494
3495 Branch(Uint32LessThan(codepoint, Int32Constant(0x10000)), &if_isword16,
3496 &if_isword32);
3497
3498 Bind(&if_isword16);
3499 {
3500 var_result.Bind(StringFromCharCode(codepoint));
3501 Goto(&return_result);
3502 }
3503
3504 Bind(&if_isword32);
3505 {
3506 switch (encoding) {
3507 case UnicodeEncoding::UTF16:
3508 break;
3509 case UnicodeEncoding::UTF32: {
3510 // Convert UTF32 to UTF16 code units, and store as a 32-bit word.
3511 Node* lead_offset = Int32Constant(0xD800 - (0x10000 >> 10));
3512
3513 // lead = (codepoint >> 10) + LEAD_OFFSET
3514 Node* lead =
3515 Int32Add(WordShr(codepoint, Int32Constant(10)), lead_offset);
3516
3517 // trail = (codepoint & 0x3FF) + 0xDC00;
3518 Node* trail = Int32Add(Word32And(codepoint, Int32Constant(0x3FF)),
3519 Int32Constant(0xDC00));
3520
3521 // codepoint = (trail << 16) | lead;
3522 codepoint = Word32Or(WordShl(trail, Int32Constant(16)), lead);
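// Worked example: for codepoint 0x1F600,
//   lead = (0x1F600 >> 10) + 0xD7C0 = 0xD83D
//   trail = (0x1F600 & 0x3FF) + 0xDC00 = 0xDE00
// so the combined word is 0xDE00D83D; the lead surrogate sits in the low
// 16 bits and thus comes first in memory on little-endian platforms.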
3523 break;
3524 }
3525 }
3526
3527 Node* value = AllocateSeqTwoByteString(2);
3528 StoreNoWriteBarrier(
3529 MachineRepresentation::kWord32, value,
3530 IntPtrConstant(SeqTwoByteString::kHeaderSize - kHeapObjectTag),
3531 codepoint);
3532 var_result.Bind(value);
3533 Goto(&return_result);
3534 }
3535
3536 Bind(&return_result);
3537 return var_result.value();
3538 }
3539
3540 Node* CodeStubAssembler::StringToNumber(Node* context, Node* input) {
3541 Label runtime(this, Label::kDeferred);
3542 Label end(this);
3543
3544 Variable var_result(this, MachineRepresentation::kTagged);
3545
3546 // Check if string has a cached array index.
3547 Node* hash = LoadNameHashField(input);
3548 Node* bit =
3549 Word32And(hash, Int32Constant(String::kContainsCachedArrayIndexMask));
3550 GotoIf(Word32NotEqual(bit, Int32Constant(0)), &runtime);
3551
3552 var_result.Bind(
3553 SmiTag(DecodeWordFromWord32<String::ArrayIndexValueBits>(hash)));
3554 Goto(&end);
3555
3556 Bind(&runtime);
3557 {
3558 var_result.Bind(CallRuntime(Runtime::kStringToNumber, context, input));
3559 Goto(&end);
3560 }
3561
3562 Bind(&end);
3563 return var_result.value();
3564 }
3565
3566 Node* CodeStubAssembler::NumberToString(compiler::Node* context,
3567 compiler::Node* argument) {
3568 Variable result(this, MachineRepresentation::kTagged);
3569 Label runtime(this, Label::kDeferred);
3570 Label smi(this);
3571 Label done(this, &result);
3572
3573 // Load the number string cache.
3574 Node* number_string_cache = LoadRoot(Heap::kNumberStringCacheRootIndex);
3575
3576 // Make the hash mask from the length of the number string cache. It
3577 // contains two elements (number and string) for each cache entry.
3578 Node* mask = LoadFixedArrayBaseLength(number_string_cache);
3579 Node* one = IntPtrConstant(1);
3580 mask = IntPtrSub(mask, one);
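// E.g. a cache of length 128 holds 64 number/string pairs: keys live at
// even indices with the matching string at the following odd index, which
// is why the hash is shifted left by one bit before masking below.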
3581
3582 GotoIf(TaggedIsSmi(argument), &smi);
3583
3584 // Argument isn't a Smi; check whether it's a HeapNumber.
3585 Node* map = LoadMap(argument);
3586 GotoUnless(WordEqual(map, HeapNumberMapConstant()), &runtime);
3587
3588 // Make a hash from the two 32-bit values of the double.
3589 Node* low =
3590 LoadObjectField(argument, HeapNumber::kValueOffset, MachineType::Int32());
3591 Node* high = LoadObjectField(argument, HeapNumber::kValueOffset + kIntSize,
3592 MachineType::Int32());
3593 Node* hash = Word32Xor(low, high);
3594 if (Is64()) hash = ChangeInt32ToInt64(hash);
3595 hash = WordShl(hash, one);
3596 Node* index = WordAnd(hash, SmiToWord(mask));
3597
3598 // The cache entry's key must be a HeapNumber.
3599 Node* number_key =
3600 LoadFixedArrayElement(number_string_cache, index, 0, INTPTR_PARAMETERS);
3601 GotoIf(TaggedIsSmi(number_key), &runtime);
3602 map = LoadMap(number_key);
3603 GotoUnless(WordEqual(map, HeapNumberMapConstant()), &runtime);
3604
3605 // Cache entry's key must match the heap number value we're looking for.
3606 Node* low_compare = LoadObjectField(number_key, HeapNumber::kValueOffset,
3607 MachineType::Int32());
3608 Node* high_compare = LoadObjectField(
3609 number_key, HeapNumber::kValueOffset + kIntSize, MachineType::Int32());
3610 GotoUnless(WordEqual(low, low_compare), &runtime);
3611 GotoUnless(WordEqual(high, high_compare), &runtime);
3612
3613 // Heap number match; return the value from the cache entry.
3614 IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
3615 result.Bind(LoadFixedArrayElement(number_string_cache, index, kPointerSize,
3616 INTPTR_PARAMETERS));
3617 Goto(&done);
3618
3619 Bind(&runtime);
3620 {
3621 // No cache entry, go to the runtime.
3622 result.Bind(CallRuntime(Runtime::kNumberToString, context, argument));
3623 }
3624 Goto(&done);
3625
3626 Bind(&smi);
3627 {
3628 // Load the Smi key and make sure it matches the Smi we're looking for.
3629 Node* smi_index = WordAnd(WordShl(argument, one), mask);
3630 Node* smi_key = LoadFixedArrayElement(number_string_cache, smi_index, 0,
3631 SMI_PARAMETERS);
3632 GotoIf(WordNotEqual(smi_key, argument), &runtime);
3633
3634 // Smi match, return value from cache entry.
3635 IncrementCounter(isolate()->counters()->number_to_string_native(), 1);
3636 result.Bind(LoadFixedArrayElement(number_string_cache, smi_index,
3637 kPointerSize, SMI_PARAMETERS));
3638 Goto(&done);
3639 }
3640
3641 Bind(&done);
3642 return result.value();
3643 }
3644
3645 Node* CodeStubAssembler::ToName(Node* context, Node* value) {
3646 typedef CodeStubAssembler::Label Label;
3647 typedef CodeStubAssembler::Variable Variable;
3648
3649 Label end(this);
3650 Variable var_result(this, MachineRepresentation::kTagged);
3651
3652 Label is_number(this);
3653 GotoIf(TaggedIsSmi(value), &is_number);
3654
3655 Label not_name(this);
3656 Node* value_instance_type = LoadInstanceType(value);
3657 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
3658 GotoIf(Int32GreaterThan(value_instance_type, Int32Constant(LAST_NAME_TYPE)),
3659 ¬_name);
3660
3661 var_result.Bind(value);
3662 Goto(&end);
3663
3664 Bind(&is_number);
3665 {
3666 Callable callable = CodeFactory::NumberToString(isolate());
3667 var_result.Bind(CallStub(callable, context, value));
3668 Goto(&end);
3669 }
3670
3671 Bind(¬_name);
3672 {
3673 GotoIf(Word32Equal(value_instance_type, Int32Constant(HEAP_NUMBER_TYPE)),
3674 &is_number);
3675
3676 Label not_oddball(this);
3677 GotoIf(Word32NotEqual(value_instance_type, Int32Constant(ODDBALL_TYPE)),
3678 ¬_oddball);
3679
3680 var_result.Bind(LoadObjectField(value, Oddball::kToStringOffset));
3681 Goto(&end);
3682
3683 Bind(¬_oddball);
3684 {
3685 var_result.Bind(CallRuntime(Runtime::kToName, context, value));
3686 Goto(&end);
3687 }
3688 }
3689
3690 Bind(&end);
3691 return var_result.value();
3692 }
3693
3694 Node* CodeStubAssembler::NonNumberToNumber(Node* context, Node* input) {
3695 // Assert that the input is a HeapObject (not a Smi or a HeapNumber).
3696 CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(input)));
3697 CSA_ASSERT(this, Word32NotEqual(LoadMap(input), HeapNumberMapConstant()));
3698
3699 // We might need to loop once here due to ToPrimitive conversions.
3700 Variable var_input(this, MachineRepresentation::kTagged);
3701 Variable var_result(this, MachineRepresentation::kTagged);
3702 Label loop(this, &var_input);
3703 Label end(this);
3704 var_input.Bind(input);
3705 Goto(&loop);
3706 Bind(&loop);
3707 {
3708 // Load the current {input} value (known to be a HeapObject).
3709 Node* input = var_input.value();
3710
3711 // Dispatch on the {input} instance type.
3712 Node* input_instance_type = LoadInstanceType(input);
3713 Label if_inputisstring(this), if_inputisoddball(this),
3714 if_inputisreceiver(this, Label::kDeferred),
3715 if_inputisother(this, Label::kDeferred);
3716 GotoIf(IsStringInstanceType(input_instance_type), &if_inputisstring);
3717 GotoIf(Word32Equal(input_instance_type, Int32Constant(ODDBALL_TYPE)),
3718 &if_inputisoddball);
3719 Branch(IsJSReceiverInstanceType(input_instance_type), &if_inputisreceiver,
3720 &if_inputisother);
3721
3722 Bind(&if_inputisstring);
3723 {
3724 // The {input} is a String, use the fast stub to convert it to a Number.
3725 var_result.Bind(StringToNumber(context, input));
3726 Goto(&end);
3727 }
3728
3729 Bind(&if_inputisoddball);
3730 {
3731 // The {input} is an Oddball; we just need to load its Number value.
3732 var_result.Bind(LoadObjectField(input, Oddball::kToNumberOffset));
3733 Goto(&end);
3734 }
3735
3736 Bind(&if_inputisreceiver);
3737 {
3738 // The {input} is a JSReceiver, we need to convert it to a Primitive first
3739 // using the ToPrimitive type conversion, preferably yielding a Number.
3740 Callable callable = CodeFactory::NonPrimitiveToPrimitive(
3741 isolate(), ToPrimitiveHint::kNumber);
3742 Node* result = CallStub(callable, context, input);
3743
3744 // Check if the {result} is already a Number.
3745 Label if_resultisnumber(this), if_resultisnotnumber(this);
3746 GotoIf(TaggedIsSmi(result), &if_resultisnumber);
3747 Node* result_map = LoadMap(result);
3748 Branch(WordEqual(result_map, HeapNumberMapConstant()), &if_resultisnumber,
3749 &if_resultisnotnumber);
3750
3751 Bind(&if_resultisnumber);
3752 {
3753 // The ToPrimitive conversion already gave us a Number, so we're done.
3754 var_result.Bind(result);
3755 Goto(&end);
3756 }
3757
3758 Bind(&if_resultisnotnumber);
3759 {
3760 // We now have a Primitive {result}, but it's not yet a Number.
3761 var_input.Bind(result);
3762 Goto(&loop);
3763 }
3764 }
3765
3766 Bind(&if_inputisother);
3767 {
3768 // The {input} is something else (i.e. Symbol or Simd128Value), let the
3769 // runtime figure out the correct exception.
3770 // Note: We cannot tail call to the runtime here, as js-to-wasm
3771 // trampolines also use this code currently, and they declare all
3772 // outgoing parameters as untagged, while we would push a tagged
3773 // object here.
3774 var_result.Bind(CallRuntime(Runtime::kToNumber, context, input));
3775 Goto(&end);
3776 }
3777 }
3778
3779 Bind(&end);
3780 return var_result.value();
3781 }
3782
3783 Node* CodeStubAssembler::ToNumber(Node* context, Node* input) {
3784 Variable var_result(this, MachineRepresentation::kTagged);
3785 Label end(this);
3786
3787 Label not_smi(this, Label::kDeferred);
3788 GotoUnless(TaggedIsSmi(input), ¬_smi);
3789 var_result.Bind(input);
3790 Goto(&end);
3791
3792 Bind(¬_smi);
3793 {
3794 Label not_heap_number(this, Label::kDeferred);
3795 Node* input_map = LoadMap(input);
3796 GotoIf(Word32NotEqual(input_map, HeapNumberMapConstant()),
3797 ¬_heap_number);
3798
3799 var_result.Bind(input);
3800 Goto(&end);
3801
3802 Bind(¬_heap_number);
3803 {
3804 var_result.Bind(NonNumberToNumber(context, input));
3805 Goto(&end);
3806 }
3807 }
3808
3809 Bind(&end);
3810 return var_result.value();
3811 }
3812
3813 Node* CodeStubAssembler::ToString(Node* context, Node* input) {
3814 Label is_number(this);
3815 Label runtime(this, Label::kDeferred);
3816 Variable result(this, MachineRepresentation::kTagged);
3817 Label done(this, &result);
3818
3819 GotoIf(TaggedIsSmi(input), &is_number);
3820
3821 Node* input_map = LoadMap(input);
3822 Node* input_instance_type = LoadMapInstanceType(input_map);
3823
3824 result.Bind(input);
3825 GotoIf(IsStringInstanceType(input_instance_type), &done);
3826
3827 Label not_heap_number(this);
3828 Branch(WordNotEqual(input_map, HeapNumberMapConstant()), ¬_heap_number,
3829 &is_number);
3830
3831 Bind(&is_number);
3832 result.Bind(NumberToString(context, input));
3833 Goto(&done);
3834
3835 Bind(¬_heap_number);
3836 {
3837 GotoIf(Word32NotEqual(input_instance_type, Int32Constant(ODDBALL_TYPE)),
3838 &runtime);
3839 result.Bind(LoadObjectField(input, Oddball::kToStringOffset));
3840 Goto(&done);
3841 }
3842
3843 Bind(&runtime);
3844 {
3845 result.Bind(CallRuntime(Runtime::kToString, context, input));
3846 Goto(&done);
3847 }
3848
3849 Bind(&done);
3850 return result.value();
3851 }
3852
3853 Node* CodeStubAssembler::FlattenString(Node* string) {
3854 CSA_ASSERT(this, IsString(string));
3855 Variable var_result(this, MachineRepresentation::kTagged);
3856 var_result.Bind(string);
3857
3858 Node* instance_type = LoadInstanceType(string);
3859
3860 // Check if the {string} is not a ConsString (i.e. already flat).
3861 Label is_cons(this, Label::kDeferred), is_flat_in_cons(this), end(this);
3862 {
3863 GotoUnless(Word32Equal(Word32And(instance_type,
3864 Int32Constant(kStringRepresentationMask)),
3865 Int32Constant(kConsStringTag)),
3866 &end);
3867
3868 // Check whether the right hand side is the empty string (i.e. if
3869 // this is really a flat string in a cons string).
3870 Node* rhs = LoadObjectField(string, ConsString::kSecondOffset);
3871 Branch(WordEqual(rhs, EmptyStringConstant()), &is_flat_in_cons, &is_cons);
3872 }
3873
3874 // Bail out to the runtime.
3875 Bind(&is_cons);
3876 {
3877 var_result.Bind(
3878 CallRuntime(Runtime::kFlattenString, NoContextConstant(), string));
3879 Goto(&end);
3880 }
3881
3882 Bind(&is_flat_in_cons);
3883 {
3884 var_result.Bind(LoadObjectField(string, ConsString::kFirstOffset));
3885 Goto(&end);
3886 }
3887
3888 Bind(&end);
3889 return var_result.value();
3890 }
3891
3892 Node* CodeStubAssembler::JSReceiverToPrimitive(Node* context, Node* input) {
3893 Label if_isreceiver(this, Label::kDeferred), if_isnotreceiver(this);
3894 Variable result(this, MachineRepresentation::kTagged);
3895 Label done(this, &result);
3896
3897 BranchIfJSReceiver(input, &if_isreceiver, &if_isnotreceiver);
3898
3899 Bind(&if_isreceiver);
3900 {
3901 // Convert {input} to a primitive first passing Number hint.
3902 Callable callable = CodeFactory::NonPrimitiveToPrimitive(isolate());
3903 result.Bind(CallStub(callable, context, input));
3904 Goto(&done);
3905 }
3906
3907 Bind(&if_isnotreceiver);
3908 {
3909 result.Bind(input);
3910 Goto(&done);
3911 }
3912
3913 Bind(&done);
3914 return result.value();
3915 }
3916
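// Implements the ToInteger abstract operation (ES2015): converts {input} to
// a Number and truncates it towards zero, e.g. 42.7 maps to 42, -3.9 to -3
// and NaN to 0; with kTruncateMinusZero, -0.0 is also normalized to Smi 0.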
3917 Node* CodeStubAssembler::ToInteger(Node* context, Node* input,
3918 ToIntegerTruncationMode mode) {
3919 // We might need to loop once for ToNumber conversion.
3920 Variable var_arg(this, MachineRepresentation::kTagged);
3921 Label loop(this, &var_arg), out(this);
3922 var_arg.Bind(input);
3923 Goto(&loop);
3924 Bind(&loop);
3925 {
3926 // Shared entry points.
3927 Label return_zero(this, Label::kDeferred);
3928
3929 // Load the current {arg} value.
3930 Node* arg = var_arg.value();
3931
3932 // Check if {arg} is a Smi.
3933 GotoIf(TaggedIsSmi(arg), &out);
3934
3935 // Check if {arg} is a HeapNumber.
3936 Label if_argisheapnumber(this),
3937 if_argisnotheapnumber(this, Label::kDeferred);
3938 Branch(WordEqual(LoadMap(arg), HeapNumberMapConstant()),
3939 &if_argisheapnumber, &if_argisnotheapnumber);
3940
3941 Bind(&if_argisheapnumber);
3942 {
3943 // Load the floating-point value of {arg}.
3944 Node* arg_value = LoadHeapNumberValue(arg);
3945
3946 // Check if {arg} is NaN.
3947 GotoUnless(Float64Equal(arg_value, arg_value), &return_zero);
3948
3949 // Truncate {arg} towards zero.
3950 Node* value = Float64Trunc(arg_value);
3951
3952 if (mode == kTruncateMinusZero) {
3953 // Truncate -0.0 to 0.
3954 GotoIf(Float64Equal(value, Float64Constant(0.0)), &return_zero);
3955 }
3956
3957 var_arg.Bind(ChangeFloat64ToTagged(value));
3958 Goto(&out);
3959 }
3960
3961 Bind(&if_argisnotheapnumber);
3962 {
3963 // Need to convert {arg} to a Number first.
3964 Callable callable = CodeFactory::NonNumberToNumber(isolate());
3965 var_arg.Bind(CallStub(callable, context, arg));
3966 Goto(&loop);
3967 }
3968
3969 Bind(&return_zero);
3970 var_arg.Bind(SmiConstant(Smi::kZero));
3971 Goto(&out);
3972 }
3973
3974 Bind(&out);
3975 return var_arg.value();
3976 }
3977
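// Decodes a bit field from a 32-bit word: masks out the field's bits and
// shifts them down to bit 0, i.e. (word32 & mask) >> shift. Worked example
// for a hypothetical two-bit field at bit positions 3..4 (shift == 3,
// mask == 0x18): given word32 == 0b10110, (0b10110 & 0x18) >> 3 == 2.
// DecodeWord below is the pointer-sized counterpart.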
3978 Node* CodeStubAssembler::DecodeWord32(Node* word32, uint32_t shift,
3979 uint32_t mask) {
3980 return Word32Shr(Word32And(word32, Int32Constant(mask)),
3981 static_cast<int>(shift));
3982 }
3983
3984 Node* CodeStubAssembler::DecodeWord(Node* word, uint32_t shift, uint32_t mask) {
3985 return WordShr(WordAnd(word, IntPtrConstant(mask)), static_cast<int>(shift));
3986 }
3987
3988 void CodeStubAssembler::SetCounter(StatsCounter* counter, int value) {
3989 if (FLAG_native_code_counters && counter->Enabled()) {
3990 Node* counter_address = ExternalConstant(ExternalReference(counter));
3991 StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address,
3992 Int32Constant(value));
3993 }
3994 }
3995
3996 void CodeStubAssembler::IncrementCounter(StatsCounter* counter, int delta) {
3997 DCHECK(delta > 0);
3998 if (FLAG_native_code_counters && counter->Enabled()) {
3999 Node* counter_address = ExternalConstant(ExternalReference(counter));
4000 Node* value = Load(MachineType::Int32(), counter_address);
4001 value = Int32Add(value, Int32Constant(delta));
4002 StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
4003 }
4004 }
4005
4006 void CodeStubAssembler::DecrementCounter(StatsCounter* counter, int delta) {
4007 DCHECK(delta > 0);
4008 if (FLAG_native_code_counters && counter->Enabled()) {
4009 Node* counter_address = ExternalConstant(ExternalReference(counter));
4010 Node* value = Load(MachineType::Int32(), counter_address);
4011 value = Int32Sub(value, Int32Constant(delta));
4012 StoreNoWriteBarrier(MachineRepresentation::kWord32, counter_address, value);
4013 }
4014 }
4015
4016 void CodeStubAssembler::Use(Label* label) {
4017 GotoIf(Word32Equal(Int32Constant(0), Int32Constant(1)), label);
4018 }
4019
4020 void CodeStubAssembler::TryToName(Node* key, Label* if_keyisindex,
4021 Variable* var_index, Label* if_keyisunique,
4022 Label* if_bailout) {
4023 DCHECK_EQ(MachineType::PointerRepresentation(), var_index->rep());
4024 Comment("TryToName");
4025
4026 Label if_hascachedindex(this), if_keyisnotindex(this);
4027 // Handle Smi and HeapNumber keys.
4028 var_index->Bind(TryToIntptr(key, &if_keyisnotindex));
4029 Goto(if_keyisindex);
4030
4031 Bind(&if_keyisnotindex);
4032 Node* key_instance_type = LoadInstanceType(key);
4033 // Symbols are unique.
4034 GotoIf(Word32Equal(key_instance_type, Int32Constant(SYMBOL_TYPE)),
4035 if_keyisunique);
4036 // Miss if |key| is not a String.
4037 STATIC_ASSERT(FIRST_NAME_TYPE == FIRST_TYPE);
4038 GotoUnless(IsStringInstanceType(key_instance_type), if_bailout);
4039 // |key| is a String. Check if it has a cached array index.
4040 Node* hash = LoadNameHashField(key);
4041 Node* contains_index =
4042 Word32And(hash, Int32Constant(Name::kContainsCachedArrayIndexMask));
4043 GotoIf(Word32Equal(contains_index, Int32Constant(0)), &if_hascachedindex);
4044 // No cached array index. If the string knows that it contains an index,
4045 // then it must be an uncacheable index. Handle this case in the runtime.
4046 Node* not_an_index =
4047 Word32And(hash, Int32Constant(Name::kIsNotArrayIndexMask));
4048 GotoIf(Word32Equal(not_an_index, Int32Constant(0)), if_bailout);
4049 // Finally, check if |key| is internalized.
4050 STATIC_ASSERT(kNotInternalizedTag != 0);
4051 Node* not_internalized =
4052 Word32And(key_instance_type, Int32Constant(kIsNotInternalizedMask));
4053 GotoIf(Word32NotEqual(not_internalized, Int32Constant(0)), if_bailout);
4054 Goto(if_keyisunique);
4055
4056 Bind(&if_hascachedindex);
4057 var_index->Bind(DecodeWordFromWord32<Name::ArrayIndexValueBits>(hash));
4058 Goto(if_keyisindex);
4059 }
4060
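// Maps a dictionary entry number to an index into the backing FixedArray:
//   index = entry * Dictionary::kEntrySize + Dictionary::kElementsStartIndex
//           + field_index
// E.g. in a NameDictionary (kEntrySize == 3: key, value, details), the
// value of entry n lives at kElementsStartIndex + 3 * n + 1.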
4061 template <typename Dictionary>
4062 Node* CodeStubAssembler::EntryToIndex(Node* entry, int field_index) {
4063 Node* entry_index = IntPtrMul(entry, IntPtrConstant(Dictionary::kEntrySize));
4064 return IntPtrAdd(entry_index, IntPtrConstant(Dictionary::kElementsStartIndex +
4065 field_index));
4066 }
4067
4068 template Node* CodeStubAssembler::EntryToIndex<NameDictionary>(Node*, int);
4069 template Node* CodeStubAssembler::EntryToIndex<GlobalDictionary>(Node*, int);
4070
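// Computes a backing-store capacity: twice the requested element count,
// rounded up to the next power of two, with HashTableBase::kMinCapacity as
// the floor. For example, space for 17 elements yields
// RoundUpToPowerOfTwo32(2 * 17) == 64.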
4071 Node* CodeStubAssembler::HashTableComputeCapacity(Node* at_least_space_for) {
4072 Node* capacity = IntPtrRoundUpToPowerOfTwo32(
4073 WordShl(at_least_space_for, IntPtrConstant(1)));
4074 return IntPtrMax(capacity, IntPtrConstant(HashTableBase::kMinCapacity));
4075 }
4076
4077 Node* CodeStubAssembler::IntPtrMax(Node* left, Node* right) {
4078 return Select(IntPtrGreaterThanOrEqual(left, right), left, right,
4079 MachineType::PointerRepresentation());
4080 }
4081
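// Probes the dictionary with the quadratic probing scheme of
// Dictionary::FirstProbe()/NextProbe(): probe i inspects entry
//   (hash + i * (i + 1) / 2) & mask
// so successive probe offsets grow by 1, 2, 3, ... The first
// |inlined_probes| probes are unrolled; the remainder run in a loop.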
4082 template <typename Dictionary>
4083 void CodeStubAssembler::NameDictionaryLookup(Node* dictionary,
4084 Node* unique_name, Label* if_found,
4085 Variable* var_name_index,
4086 Label* if_not_found,
4087 int inlined_probes) {
4088 CSA_ASSERT(this, IsDictionary(dictionary));
4089 DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
4090 Comment("NameDictionaryLookup");
4091
4092 Node* capacity = SmiUntag(LoadFixedArrayElement(
4093 dictionary, IntPtrConstant(Dictionary::kCapacityIndex), 0,
4094 INTPTR_PARAMETERS));
4095 Node* mask = IntPtrSub(capacity, IntPtrConstant(1));
4096 Node* hash = ChangeUint32ToWord(LoadNameHash(unique_name));
4097
4098 // See Dictionary::FirstProbe().
4099 Node* count = IntPtrConstant(0);
4100 Node* entry = WordAnd(hash, mask);
4101
4102 for (int i = 0; i < inlined_probes; i++) {
4103 Node* index = EntryToIndex<Dictionary>(entry);
4104 var_name_index->Bind(index);
4105
4106 Node* current =
4107 LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS);
4108 GotoIf(WordEqual(current, unique_name), if_found);
4109
4110 // See Dictionary::NextProbe().
4111 count = IntPtrConstant(i + 1);
4112 entry = WordAnd(IntPtrAdd(entry, count), mask);
4113 }
4114
4115 Node* undefined = UndefinedConstant();
4116
4117 Variable var_count(this, MachineType::PointerRepresentation());
4118 Variable var_entry(this, MachineType::PointerRepresentation());
4119 Variable* loop_vars[] = {&var_count, &var_entry, var_name_index};
4120 Label loop(this, 3, loop_vars);
4121 var_count.Bind(count);
4122 var_entry.Bind(entry);
4123 Goto(&loop);
4124 Bind(&loop);
4125 {
4126 Node* count = var_count.value();
4127 Node* entry = var_entry.value();
4128
4129 Node* index = EntryToIndex<Dictionary>(entry);
4130 var_name_index->Bind(index);
4131
4132 Node* current =
4133 LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS);
4134 GotoIf(WordEqual(current, undefined), if_not_found);
4135 GotoIf(WordEqual(current, unique_name), if_found);
4136
4137 // See Dictionary::NextProbe().
4138 count = IntPtrAdd(count, IntPtrConstant(1));
4139 entry = WordAnd(IntPtrAdd(entry, count), mask);
4140
4141 var_count.Bind(count);
4142 var_entry.Bind(entry);
4143 Goto(&loop);
4144 }
4145 }
4146
4147 // Instantiate template methods to work around a GCC compilation issue.
4148 template void CodeStubAssembler::NameDictionaryLookup<NameDictionary>(
4149 Node*, Node*, Label*, Variable*, Label*, int);
4150 template void CodeStubAssembler::NameDictionaryLookup<GlobalDictionary>(
4151 Node*, Node*, Label*, Variable*, Label*, int);
4152
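// Computes the seeded integer hash used by the number dictionaries. The bit
// mixing follows v8::internal::ComputeIntegerHash(); the final mask keeps
// the result within 30 bits, i.e. in the non-negative Smi range even on
// 32-bit platforms.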
4153 Node* CodeStubAssembler::ComputeIntegerHash(Node* key, Node* seed) {
4154 // See v8::internal::ComputeIntegerHash()
4155 Node* hash = key;
4156 hash = Word32Xor(hash, seed);
4157 hash = Int32Add(Word32Xor(hash, Int32Constant(0xffffffff)),
4158 Word32Shl(hash, Int32Constant(15)));
4159 hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(12)));
4160 hash = Int32Add(hash, Word32Shl(hash, Int32Constant(2)));
4161 hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(4)));
4162 hash = Int32Mul(hash, Int32Constant(2057));
4163 hash = Word32Xor(hash, Word32Shr(hash, Int32Constant(16)));
4164 return Word32And(hash, Int32Constant(0x3fffffff));
4165 }
4166
4167 template <typename Dictionary>
4168 void CodeStubAssembler::NumberDictionaryLookup(Node* dictionary,
4169 Node* intptr_index,
4170 Label* if_found,
4171 Variable* var_entry,
4172 Label* if_not_found) {
4173 CSA_ASSERT(this, IsDictionary(dictionary));
4174 DCHECK_EQ(MachineType::PointerRepresentation(), var_entry->rep());
4175 Comment("NumberDictionaryLookup");
4176
4177 Node* capacity = SmiUntag(LoadFixedArrayElement(
4178 dictionary, IntPtrConstant(Dictionary::kCapacityIndex), 0,
4179 INTPTR_PARAMETERS));
4180 Node* mask = IntPtrSub(capacity, IntPtrConstant(1));
4181
4182 Node* int32_seed;
4183 if (Dictionary::ShapeT::UsesSeed) {
4184 int32_seed = HashSeed();
4185 } else {
4186 int32_seed = Int32Constant(kZeroHashSeed);
4187 }
4188 Node* hash = ChangeUint32ToWord(ComputeIntegerHash(intptr_index, int32_seed));
4189 Node* key_as_float64 = RoundIntPtrToFloat64(intptr_index);
4190
4191 // See Dictionary::FirstProbe().
4192 Node* count = IntPtrConstant(0);
4193 Node* entry = WordAnd(hash, mask);
4194
4195 Node* undefined = UndefinedConstant();
4196 Node* the_hole = TheHoleConstant();
4197
4198 Variable var_count(this, MachineType::PointerRepresentation());
4199 Variable* loop_vars[] = {&var_count, var_entry};
4200 Label loop(this, 2, loop_vars);
4201 var_count.Bind(count);
4202 var_entry->Bind(entry);
4203 Goto(&loop);
4204 Bind(&loop);
4205 {
4206 Node* count = var_count.value();
4207 Node* entry = var_entry->value();
4208
4209 Node* index = EntryToIndex<Dictionary>(entry);
4210 Node* current =
4211 LoadFixedArrayElement(dictionary, index, 0, INTPTR_PARAMETERS);
4212 GotoIf(WordEqual(current, undefined), if_not_found);
4213 Label next_probe(this);
4214 {
4215 Label if_currentissmi(this), if_currentisnotsmi(this);
4216 Branch(TaggedIsSmi(current), &if_currentissmi, &if_currentisnotsmi);
4217 Bind(&if_currentissmi);
4218 {
4219 Node* current_value = SmiUntag(current);
4220 Branch(WordEqual(current_value, intptr_index), if_found, &next_probe);
4221 }
4222 Bind(&if_currentisnotsmi);
4223 {
4224 GotoIf(WordEqual(current, the_hole), &next_probe);
4225 // Otherwise {current} must be a HeapNumber.
4226 Node* current_value = LoadHeapNumberValue(current);
4227 Branch(Float64Equal(current_value, key_as_float64), if_found,
4228 &next_probe);
4229 }
4230 }
4231
4232 Bind(&next_probe);
4233 // See Dictionary::NextProbe().
4234 count = IntPtrAdd(count, IntPtrConstant(1));
4235 entry = WordAnd(IntPtrAdd(entry, count), mask);
4236
4237 var_count.Bind(count);
4238 var_entry->Bind(entry);
4239 Goto(&loop);
4240 }
4241 }
4242
4243 void CodeStubAssembler::DescriptorLookupLinear(Node* unique_name,
4244 Node* descriptors, Node* nof,
4245 Label* if_found,
4246 Variable* var_name_index,
4247 Label* if_not_found) {
4248 Node* first_inclusive = IntPtrConstant(DescriptorArray::ToKeyIndex(0));
4249 Node* factor = IntPtrConstant(DescriptorArray::kDescriptorSize);
4250 Node* last_exclusive = IntPtrAdd(first_inclusive, IntPtrMul(nof, factor));
4251
4252 BuildFastLoop(
4253 MachineType::PointerRepresentation(), last_exclusive, first_inclusive,
4254 [descriptors, unique_name, if_found, var_name_index](
4255 CodeStubAssembler* assembler, Node* name_index) {
4256 Node* candidate_name = assembler->LoadFixedArrayElement(
4257 descriptors, name_index, 0, INTPTR_PARAMETERS);
4258 var_name_index->Bind(name_index);
4259 assembler->GotoIf(assembler->WordEqual(candidate_name, unique_name),
4260 if_found);
4261 },
4262 -DescriptorArray::kDescriptorSize, IndexAdvanceMode::kPre);
4263 Goto(if_not_found);
4264 }
4265
4266 void CodeStubAssembler::TryLookupProperty(
4267 Node* object, Node* map, Node* instance_type, Node* unique_name,
4268 Label* if_found_fast, Label* if_found_dict, Label* if_found_global,
4269 Variable* var_meta_storage, Variable* var_name_index, Label* if_not_found,
4270 Label* if_bailout) {
4271 DCHECK_EQ(MachineRepresentation::kTagged, var_meta_storage->rep());
4272 DCHECK_EQ(MachineType::PointerRepresentation(), var_name_index->rep());
4273
4274 Label if_objectisspecial(this);
4275 STATIC_ASSERT(JS_GLOBAL_OBJECT_TYPE <= LAST_SPECIAL_RECEIVER_TYPE);
4276 GotoIf(Int32LessThanOrEqual(instance_type,
4277 Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
4278 &if_objectisspecial);
4279
4280 uint32_t mask =
4281 1 << Map::kHasNamedInterceptor | 1 << Map::kIsAccessCheckNeeded;
4282 CSA_ASSERT(this, Word32BinaryNot(IsSetWord32(LoadMapBitField(map), mask)));
4283 USE(mask);
4284
4285 Node* bit_field3 = LoadMapBitField3(map);
4286 Label if_isfastmap(this), if_isslowmap(this);
4287 Branch(IsSetWord32<Map::DictionaryMap>(bit_field3), &if_isslowmap,
4288 &if_isfastmap);
4289 Bind(&if_isfastmap);
4290 {
4291 Comment("DescriptorArrayLookup");
4292 Node* nof =
4293 DecodeWordFromWord32<Map::NumberOfOwnDescriptorsBits>(bit_field3);
4294 // Bail out to the runtime for large numbers of own descriptors. The stub
4295 // only does linear search, which becomes too expensive in that case.
4296 {
4297 static const int32_t kMaxLinear = 210;
4298 GotoIf(UintPtrGreaterThan(nof, IntPtrConstant(kMaxLinear)), if_bailout);
4299 }
4300 Node* descriptors = LoadMapDescriptors(map);
4301 var_meta_storage->Bind(descriptors);
4302
4303 DescriptorLookupLinear(unique_name, descriptors, nof, if_found_fast,
4304 var_name_index, if_not_found);
4305 }
4306 Bind(&if_isslowmap);
4307 {
4308 Node* dictionary = LoadProperties(object);
4309 var_meta_storage->Bind(dictionary);
4310
4311 NameDictionaryLookup<NameDictionary>(dictionary, unique_name, if_found_dict,
4312 var_name_index, if_not_found);
4313 }
4314 Bind(&if_objectisspecial);
4315 {
4316 // Handle global object here and other special objects in runtime.
4317 GotoUnless(Word32Equal(instance_type, Int32Constant(JS_GLOBAL_OBJECT_TYPE)),
4318 if_bailout);
4319
4320 // Handle interceptors and access checks in runtime.
4321 Node* bit_field = LoadMapBitField(map);
4322 Node* mask = Int32Constant(1 << Map::kHasNamedInterceptor |
4323 1 << Map::kIsAccessCheckNeeded);
4324 GotoIf(Word32NotEqual(Word32And(bit_field, mask), Int32Constant(0)),
4325 if_bailout);
4326
4327 Node* dictionary = LoadProperties(object);
4328 var_meta_storage->Bind(dictionary);
4329
4330 NameDictionaryLookup<GlobalDictionary>(
4331 dictionary, unique_name, if_found_global, var_name_index, if_not_found);
4332 }
4333 }
4334
4335 void CodeStubAssembler::TryHasOwnProperty(compiler::Node* object,
4336 compiler::Node* map,
4337 compiler::Node* instance_type,
4338 compiler::Node* unique_name,
4339 Label* if_found, Label* if_not_found,
4340 Label* if_bailout) {
4341 Comment("TryHasOwnProperty");
4342 Variable var_meta_storage(this, MachineRepresentation::kTagged);
4343 Variable var_name_index(this, MachineType::PointerRepresentation());
4344
4345 Label if_found_global(this);
4346 TryLookupProperty(object, map, instance_type, unique_name, if_found, if_found,
4347 &if_found_global, &var_meta_storage, &var_name_index,
4348 if_not_found, if_bailout);
4349 Bind(&if_found_global);
4350 {
4351 Variable var_value(this, MachineRepresentation::kTagged);
4352 Variable var_details(this, MachineRepresentation::kWord32);
4353 // Bail out to {if_not_found} if the property cell has been deleted.
4354 LoadPropertyFromGlobalDictionary(var_meta_storage.value(),
4355 var_name_index.value(), &var_value,
4356 &var_details, if_not_found);
4357 Goto(if_found);
4358 }
4359 }
4360
4361 void CodeStubAssembler::LoadPropertyFromFastObject(Node* object, Node* map,
4362 Node* descriptors,
4363 Node* name_index,
4364 Variable* var_details,
4365 Variable* var_value) {
4366 DCHECK_EQ(MachineRepresentation::kWord32, var_details->rep());
4367 DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
4368 Comment("[ LoadPropertyFromFastObject");
4369
4370 const int name_to_details_offset =
4371 (DescriptorArray::kDescriptorDetails - DescriptorArray::kDescriptorKey) *
4372 kPointerSize;
4373 const int name_to_value_offset =
4374 (DescriptorArray::kDescriptorValue - DescriptorArray::kDescriptorKey) *
4375 kPointerSize;
4376
4377 Node* details = LoadAndUntagToWord32FixedArrayElement(descriptors, name_index,
4378 name_to_details_offset);
4379 var_details->Bind(details);
4380
4381 Node* location = DecodeWord32<PropertyDetails::LocationField>(details);
4382
4383 Label if_in_field(this), if_in_descriptor(this), done(this);
4384 Branch(Word32Equal(location, Int32Constant(kField)), &if_in_field,
4385 &if_in_descriptor);
4386 Bind(&if_in_field);
4387 {
4388 Node* field_index =
4389 DecodeWordFromWord32<PropertyDetails::FieldIndexField>(details);
4390 Node* representation =
4391 DecodeWord32<PropertyDetails::RepresentationField>(details);
4392
4393 Node* inobject_properties = LoadMapInobjectProperties(map);
4394
4395 Label if_inobject(this), if_backing_store(this);
4396 Variable var_double_value(this, MachineRepresentation::kFloat64);
4397 Label rebox_double(this, &var_double_value);
4398 Branch(UintPtrLessThan(field_index, inobject_properties), &if_inobject,
4399 &if_backing_store);
4400 Bind(&if_inobject);
4401 {
4402 Comment("if_inobject");
4403 Node* field_offset =
4404 IntPtrMul(IntPtrSub(LoadMapInstanceSize(map),
4405 IntPtrSub(inobject_properties, field_index)),
4406 IntPtrConstant(kPointerSize));
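      // In-object fields occupy the last |inobject_properties| words of the
      // object, so the formula above yields, e.g., for instance_size == 5
      // words, inobject_properties == 2 and field_index == 0:
      //   (5 - (2 - 0)) * kPointerSize == 3 * kPointerSize.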
4407
4408 Label if_double(this), if_tagged(this);
4409 Branch(Word32NotEqual(representation,
4410 Int32Constant(Representation::kDouble)),
4411 &if_tagged, &if_double);
4412 Bind(&if_tagged);
4413 {
4414 var_value->Bind(LoadObjectField(object, field_offset));
4415 Goto(&done);
4416 }
4417 Bind(&if_double);
4418 {
4419 if (FLAG_unbox_double_fields) {
4420 var_double_value.Bind(
4421 LoadObjectField(object, field_offset, MachineType::Float64()));
4422 } else {
4423 Node* mutable_heap_number = LoadObjectField(object, field_offset);
4424 var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
4425 }
4426 Goto(&rebox_double);
4427 }
4428 }
4429 Bind(&if_backing_store);
4430 {
4431 Comment("if_backing_store");
4432 Node* properties = LoadProperties(object);
4433 field_index = IntPtrSub(field_index, inobject_properties);
4434 Node* value = LoadFixedArrayElement(properties, field_index);
4435
4436 Label if_double(this), if_tagged(this);
4437 Branch(Word32NotEqual(representation,
4438 Int32Constant(Representation::kDouble)),
4439 &if_tagged, &if_double);
4440 Bind(&if_tagged);
4441 {
4442 var_value->Bind(value);
4443 Goto(&done);
4444 }
4445 Bind(&if_double);
4446 {
4447 var_double_value.Bind(LoadHeapNumberValue(value));
4448 Goto(&rebox_double);
4449 }
4450 }
4451 Bind(&rebox_double);
4452 {
4453 Comment("rebox_double");
4454 Node* heap_number = AllocateHeapNumberWithValue(var_double_value.value());
4455 var_value->Bind(heap_number);
4456 Goto(&done);
4457 }
4458 }
4459 Bind(&if_in_descriptor);
4460 {
4461 Node* value =
4462 LoadFixedArrayElement(descriptors, name_index, name_to_value_offset);
4463 var_value->Bind(value);
4464 Goto(&done);
4465 }
4466 Bind(&done);
4467
4468 Comment("] LoadPropertyFromFastObject");
4469 }
4470
4471 void CodeStubAssembler::LoadPropertyFromNameDictionary(Node* dictionary,
4472 Node* name_index,
4473 Variable* var_details,
4474 Variable* var_value) {
4475 Comment("LoadPropertyFromNameDictionary");
4476 CSA_ASSERT(this, IsDictionary(dictionary));
4477 const int name_to_details_offset =
4478 (NameDictionary::kEntryDetailsIndex - NameDictionary::kEntryKeyIndex) *
4479 kPointerSize;
4480 const int name_to_value_offset =
4481 (NameDictionary::kEntryValueIndex - NameDictionary::kEntryKeyIndex) *
4482 kPointerSize;
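  // As with descriptor arrays, a NameDictionary entry is assumed to be a
  // (key, value, details) triple, so both offsets are small pointer-size
  // multiples relative to the key slot at |name_index|.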
4483
4484 Node* details = LoadAndUntagToWord32FixedArrayElement(dictionary, name_index,
4485 name_to_details_offset);
4486
4487 var_details->Bind(details);
4488 var_value->Bind(
4489 LoadFixedArrayElement(dictionary, name_index, name_to_value_offset));
4490
4491 Comment("] LoadPropertyFromNameDictionary");
4492 }
4493
4494 void CodeStubAssembler::LoadPropertyFromGlobalDictionary(Node* dictionary,
4495 Node* name_index,
4496 Variable* var_details,
4497 Variable* var_value,
4498 Label* if_deleted) {
4499 Comment("[ LoadPropertyFromGlobalDictionary");
4500 CSA_ASSERT(this, IsDictionary(dictionary));
4501
4502 const int name_to_value_offset =
4503 (GlobalDictionary::kEntryValueIndex - GlobalDictionary::kEntryKeyIndex) *
4504 kPointerSize;
4505
4506 Node* property_cell =
4507 LoadFixedArrayElement(dictionary, name_index, name_to_value_offset);
4508
4509 Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
4510 GotoIf(WordEqual(value, TheHoleConstant()), if_deleted);
4511
4512 var_value->Bind(value);
4513
4514 Node* details = LoadAndUntagToWord32ObjectField(property_cell,
4515 PropertyCell::kDetailsOffset);
4516 var_details->Bind(details);
4517
4518 Comment("] LoadPropertyFromGlobalDictionary");
4519 }
4520
4521 // |value| is the property backing store's contents, which is either a value
4522 // or an accessor pair, as specified by |details|.
4523 // Returns either the original value, or the result of the getter call.
4524 Node* CodeStubAssembler::CallGetterIfAccessor(Node* value, Node* details,
4525 Node* context, Node* receiver,
4526 Label* if_bailout) {
4527 Variable var_value(this, MachineRepresentation::kTagged);
4528 var_value.Bind(value);
4529 Label done(this);
4530
4531 Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
4532 GotoIf(Word32Equal(kind, Int32Constant(kData)), &done);
4533
4534 // Accessor case.
4535 {
4536 Node* accessor_pair = value;
4537 GotoIf(Word32Equal(LoadInstanceType(accessor_pair),
4538 Int32Constant(ACCESSOR_INFO_TYPE)),
4539 if_bailout);
4540 CSA_ASSERT(this, HasInstanceType(accessor_pair, ACCESSOR_PAIR_TYPE));
4541 Node* getter = LoadObjectField(accessor_pair, AccessorPair::kGetterOffset);
4542 Node* getter_map = LoadMap(getter);
4543 Node* instance_type = LoadMapInstanceType(getter_map);
4544 // FunctionTemplateInfo getters are not supported yet.
4545 GotoIf(
4546 Word32Equal(instance_type, Int32Constant(FUNCTION_TEMPLATE_INFO_TYPE)),
4547 if_bailout);
4548
4549 // Return undefined if the {getter} is not callable.
4550 var_value.Bind(UndefinedConstant());
4551 GotoUnless(IsCallableMap(getter_map), &done);
4552
4553 // Call the accessor.
4554 Callable callable = CodeFactory::Call(isolate());
4555 Node* result = CallJS(callable, context, getter, receiver);
4556 var_value.Bind(result);
4557 Goto(&done);
4558 }
4559
4560 Bind(&done);
4561 return var_value.value();
4562 }
4563
4564 void CodeStubAssembler::TryGetOwnProperty(
4565 Node* context, Node* receiver, Node* object, Node* map, Node* instance_type,
4566 Node* unique_name, Label* if_found_value, Variable* var_value,
4567 Label* if_not_found, Label* if_bailout) {
4568 DCHECK_EQ(MachineRepresentation::kTagged, var_value->rep());
4569 Comment("TryGetOwnProperty");
4570
4571 Variable var_meta_storage(this, MachineRepresentation::kTagged);
4572 Variable var_entry(this, MachineType::PointerRepresentation());
4573
4574 Label if_found_fast(this), if_found_dict(this), if_found_global(this);
4575
4576 Variable var_details(this, MachineRepresentation::kWord32);
4577 Variable* vars[] = {var_value, &var_details};
4578 Label if_found(this, 2, vars);
4579
4580 TryLookupProperty(object, map, instance_type, unique_name, &if_found_fast,
4581 &if_found_dict, &if_found_global, &var_meta_storage,
4582 &var_entry, if_not_found, if_bailout);
4583 Bind(&if_found_fast);
4584 {
4585 Node* descriptors = var_meta_storage.value();
4586 Node* name_index = var_entry.value();
4587
4588 LoadPropertyFromFastObject(object, map, descriptors, name_index,
4589 &var_details, var_value);
4590 Goto(&if_found);
4591 }
4592 Bind(&if_found_dict);
4593 {
4594 Node* dictionary = var_meta_storage.value();
4595 Node* entry = var_entry.value();
4596 LoadPropertyFromNameDictionary(dictionary, entry, &var_details, var_value);
4597 Goto(&if_found);
4598 }
4599 Bind(&if_found_global);
4600 {
4601 Node* dictionary = var_meta_storage.value();
4602 Node* entry = var_entry.value();
4603
4604 LoadPropertyFromGlobalDictionary(dictionary, entry, &var_details, var_value,
4605 if_not_found);
4606 Goto(&if_found);
4607 }
4608   // Here we have the property details and the value, which could be an accessor.
4609 Bind(&if_found);
4610 {
4611 Node* value = CallGetterIfAccessor(var_value->value(), var_details.value(),
4612 context, receiver, if_bailout);
4613 var_value->Bind(value);
4614 Goto(if_found_value);
4615 }
4616 }
4617
4618 void CodeStubAssembler::TryLookupElement(Node* object, Node* map,
4619 Node* instance_type,
4620 Node* intptr_index, Label* if_found,
4621 Label* if_not_found,
4622 Label* if_bailout) {
4623 // Handle special objects in runtime.
4624 GotoIf(Int32LessThanOrEqual(instance_type,
4625 Int32Constant(LAST_SPECIAL_RECEIVER_TYPE)),
4626 if_bailout);
4627
4628 Node* elements_kind = LoadMapElementsKind(map);
4629
4630 // TODO(verwaest): Support other elements kinds as well.
4631 Label if_isobjectorsmi(this), if_isdouble(this), if_isdictionary(this),
4632 if_isfaststringwrapper(this), if_isslowstringwrapper(this), if_oob(this);
4633 // clang-format off
4634 int32_t values[] = {
4635 // Handled by {if_isobjectorsmi}.
4636 FAST_SMI_ELEMENTS, FAST_HOLEY_SMI_ELEMENTS, FAST_ELEMENTS,
4637 FAST_HOLEY_ELEMENTS,
4638 // Handled by {if_isdouble}.
4639 FAST_DOUBLE_ELEMENTS, FAST_HOLEY_DOUBLE_ELEMENTS,
4640 // Handled by {if_isdictionary}.
4641 DICTIONARY_ELEMENTS,
4642 // Handled by {if_isfaststringwrapper}.
4643 FAST_STRING_WRAPPER_ELEMENTS,
4644 // Handled by {if_isslowstringwrapper}.
4645 SLOW_STRING_WRAPPER_ELEMENTS,
4646 // Handled by {if_not_found}.
4647 NO_ELEMENTS,
4648 };
4649 Label* labels[] = {
4650 &if_isobjectorsmi, &if_isobjectorsmi, &if_isobjectorsmi,
4651 &if_isobjectorsmi,
4652 &if_isdouble, &if_isdouble,
4653 &if_isdictionary,
4654 &if_isfaststringwrapper,
4655 &if_isslowstringwrapper,
4656 if_not_found,
4657 };
4658 // clang-format on
4659 STATIC_ASSERT(arraysize(values) == arraysize(labels));
4660 Switch(elements_kind, if_bailout, values, labels, arraysize(values));
4661
4662 Bind(&if_isobjectorsmi);
4663 {
4664 Node* elements = LoadElements(object);
4665 Node* length = LoadAndUntagFixedArrayBaseLength(elements);
4666
4667 GotoUnless(UintPtrLessThan(intptr_index, length), &if_oob);
4668
4669 Node* element =
4670 LoadFixedArrayElement(elements, intptr_index, 0, INTPTR_PARAMETERS);
4671 Node* the_hole = TheHoleConstant();
4672 Branch(WordEqual(element, the_hole), if_not_found, if_found);
4673 }
4674 Bind(&if_isdouble);
4675 {
4676 Node* elements = LoadElements(object);
4677 Node* length = LoadAndUntagFixedArrayBaseLength(elements);
4678
4679 GotoUnless(UintPtrLessThan(intptr_index, length), &if_oob);
4680
4681 // Check if the element is a double hole, but don't load it.
4682 LoadFixedDoubleArrayElement(elements, intptr_index, MachineType::None(), 0,
4683 INTPTR_PARAMETERS, if_not_found);
4684 Goto(if_found);
4685 }
4686 Bind(&if_isdictionary);
4687 {
4688 // Negative keys must be converted to property names.
4689 GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
4690
4691 Variable var_entry(this, MachineType::PointerRepresentation());
4692 Node* elements = LoadElements(object);
4693 NumberDictionaryLookup<SeededNumberDictionary>(
4694 elements, intptr_index, if_found, &var_entry, if_not_found);
4695 }
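  // For the two String-wrapper cases below, indices below the wrapped
  // string's length are backed by the string itself and count as found;
  // larger indices fall through to the wrapper's regular elements, which
  // are handled by the object/smi or dictionary paths above.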
4696 Bind(&if_isfaststringwrapper);
4697 {
4698 CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
4699 Node* string = LoadJSValueValue(object);
4700 CSA_ASSERT(this, IsStringInstanceType(LoadInstanceType(string)));
4701 Node* length = LoadStringLength(string);
4702 GotoIf(UintPtrLessThan(intptr_index, SmiUntag(length)), if_found);
4703 Goto(&if_isobjectorsmi);
4704 }
4705 Bind(&if_isslowstringwrapper);
4706 {
4707 CSA_ASSERT(this, HasInstanceType(object, JS_VALUE_TYPE));
4708 Node* string = LoadJSValueValue(object);
4709 CSA_ASSERT(this, IsStringInstanceType(LoadInstanceType(string)));
4710 Node* length = LoadStringLength(string);
4711 GotoIf(UintPtrLessThan(intptr_index, SmiUntag(length)), if_found);
4712 Goto(&if_isdictionary);
4713 }
4714 Bind(&if_oob);
4715 {
4716     // Positive OOB indices mean "not found"; negative indices must be
4717     // converted to property names.
4718 GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), if_bailout);
4719 Goto(if_not_found);
4720 }
4721 }
4722
4723 // Instantiate template methods to work around a GCC compilation issue.
4724 template void CodeStubAssembler::NumberDictionaryLookup<SeededNumberDictionary>(
4725 Node*, Node*, Label*, Variable*, Label*);
4726 template void CodeStubAssembler::NumberDictionaryLookup<
4727 UnseededNumberDictionary>(Node*, Node*, Label*, Variable*, Label*);
4728
4729 void CodeStubAssembler::TryPrototypeChainLookup(
4730 Node* receiver, Node* key, LookupInHolder& lookup_property_in_holder,
4731 LookupInHolder& lookup_element_in_holder, Label* if_end,
4732 Label* if_bailout) {
4733   // Ensure the receiver is a JSReceiver; otherwise bail out.
4734 Label if_objectisnotsmi(this);
4735 Branch(TaggedIsSmi(receiver), if_bailout, &if_objectisnotsmi);
4736 Bind(&if_objectisnotsmi);
4737
4738 Node* map = LoadMap(receiver);
4739 Node* instance_type = LoadMapInstanceType(map);
4740 {
4741 Label if_objectisreceiver(this);
4742 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
4743 STATIC_ASSERT(FIRST_JS_RECEIVER_TYPE == JS_PROXY_TYPE);
4744 Branch(
4745 Int32GreaterThan(instance_type, Int32Constant(FIRST_JS_RECEIVER_TYPE)),
4746 &if_objectisreceiver, if_bailout);
4747 Bind(&if_objectisreceiver);
4748 }
4749
4750 Variable var_index(this, MachineType::PointerRepresentation());
4751
4752 Label if_keyisindex(this), if_iskeyunique(this);
4753 TryToName(key, &if_keyisindex, &var_index, &if_iskeyunique, if_bailout);
4754
4755 Bind(&if_iskeyunique);
4756 {
4757 Variable var_holder(this, MachineRepresentation::kTagged);
4758 Variable var_holder_map(this, MachineRepresentation::kTagged);
4759 Variable var_holder_instance_type(this, MachineRepresentation::kWord8);
4760
4761 Variable* merged_variables[] = {&var_holder, &var_holder_map,
4762 &var_holder_instance_type};
4763 Label loop(this, arraysize(merged_variables), merged_variables);
4764 var_holder.Bind(receiver);
4765 var_holder_map.Bind(map);
4766 var_holder_instance_type.Bind(instance_type);
4767 Goto(&loop);
4768 Bind(&loop);
4769 {
4770 Node* holder_map = var_holder_map.value();
4771 Node* holder_instance_type = var_holder_instance_type.value();
4772
4773 Label next_proto(this);
4774 lookup_property_in_holder(receiver, var_holder.value(), holder_map,
4775 holder_instance_type, key, &next_proto,
4776 if_bailout);
4777 Bind(&next_proto);
4778
4779       // Bail out if this could be an integer-indexed exotic case (typed array).
4780 GotoIf(
4781 Word32Equal(holder_instance_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
4782 if_bailout);
4783
4784 Node* proto = LoadMapPrototype(holder_map);
4785
4786 Label if_not_null(this);
4787 Branch(WordEqual(proto, NullConstant()), if_end, &if_not_null);
4788 Bind(&if_not_null);
4789
4790 Node* map = LoadMap(proto);
4791 Node* instance_type = LoadMapInstanceType(map);
4792
4793 var_holder.Bind(proto);
4794 var_holder_map.Bind(map);
4795 var_holder_instance_type.Bind(instance_type);
4796 Goto(&loop);
4797 }
4798 }
4799 Bind(&if_keyisindex);
4800 {
4801 Variable var_holder(this, MachineRepresentation::kTagged);
4802 Variable var_holder_map(this, MachineRepresentation::kTagged);
4803 Variable var_holder_instance_type(this, MachineRepresentation::kWord8);
4804
4805 Variable* merged_variables[] = {&var_holder, &var_holder_map,
4806 &var_holder_instance_type};
4807 Label loop(this, arraysize(merged_variables), merged_variables);
4808 var_holder.Bind(receiver);
4809 var_holder_map.Bind(map);
4810 var_holder_instance_type.Bind(instance_type);
4811 Goto(&loop);
4812 Bind(&loop);
4813 {
4814 Label next_proto(this);
4815 lookup_element_in_holder(receiver, var_holder.value(),
4816 var_holder_map.value(),
4817 var_holder_instance_type.value(),
4818 var_index.value(), &next_proto, if_bailout);
4819 Bind(&next_proto);
4820
4821 Node* proto = LoadMapPrototype(var_holder_map.value());
4822
4823 Label if_not_null(this);
4824 Branch(WordEqual(proto, NullConstant()), if_end, &if_not_null);
4825 Bind(&if_not_null);
4826
4827 Node* map = LoadMap(proto);
4828 Node* instance_type = LoadMapInstanceType(map);
4829
4830 var_holder.Bind(proto);
4831 var_holder_map.Bind(map);
4832 var_holder_instance_type.Bind(instance_type);
4833 Goto(&loop);
4834 }
4835 }
4836 }
4837
4838 Node* CodeStubAssembler::OrdinaryHasInstance(Node* context, Node* callable,
4839 Node* object) {
4840 Variable var_result(this, MachineRepresentation::kTagged);
4841 Label return_false(this), return_true(this),
4842 return_runtime(this, Label::kDeferred), return_result(this);
4843
4844 // Goto runtime if {object} is a Smi.
4845 GotoIf(TaggedIsSmi(object), &return_runtime);
4846
4847 // Load map of {object}.
4848 Node* object_map = LoadMap(object);
4849
4850 // Lookup the {callable} and {object} map in the global instanceof cache.
4851 // Note: This is safe because we clear the global instanceof cache whenever
4852 // we change the prototype of any object.
4853 Node* instanceof_cache_function =
4854 LoadRoot(Heap::kInstanceofCacheFunctionRootIndex);
4855 Node* instanceof_cache_map = LoadRoot(Heap::kInstanceofCacheMapRootIndex);
4856 {
4857 Label instanceof_cache_miss(this);
4858 GotoUnless(WordEqual(instanceof_cache_function, callable),
4859 &instanceof_cache_miss);
4860 GotoUnless(WordEqual(instanceof_cache_map, object_map),
4861 &instanceof_cache_miss);
4862 var_result.Bind(LoadRoot(Heap::kInstanceofCacheAnswerRootIndex));
4863 Goto(&return_result);
4864 Bind(&instanceof_cache_miss);
4865 }
4866
4867 // Goto runtime if {callable} is a Smi.
4868 GotoIf(TaggedIsSmi(callable), &return_runtime);
4869
4870 // Load map of {callable}.
4871 Node* callable_map = LoadMap(callable);
4872
4873 // Goto runtime if {callable} is not a JSFunction.
4874 Node* callable_instance_type = LoadMapInstanceType(callable_map);
4875 GotoUnless(
4876 Word32Equal(callable_instance_type, Int32Constant(JS_FUNCTION_TYPE)),
4877 &return_runtime);
4878
4879 // Goto runtime if {callable} is not a constructor or has
4880 // a non-instance "prototype".
4881 Node* callable_bitfield = LoadMapBitField(callable_map);
4882 GotoUnless(
4883 Word32Equal(Word32And(callable_bitfield,
4884 Int32Constant((1 << Map::kHasNonInstancePrototype) |
4885 (1 << Map::kIsConstructor))),
4886 Int32Constant(1 << Map::kIsConstructor)),
4887 &return_runtime);
4888
4889 // Get the "prototype" (or initial map) of the {callable}.
4890 Node* callable_prototype =
4891 LoadObjectField(callable, JSFunction::kPrototypeOrInitialMapOffset);
4892 {
4893 Variable var_callable_prototype(this, MachineRepresentation::kTagged);
4894 Label callable_prototype_valid(this);
4895 var_callable_prototype.Bind(callable_prototype);
4896
4897 // Resolve the "prototype" if the {callable} has an initial map. Afterwards
4898 // the {callable_prototype} will be either the JSReceiver prototype object
4899 // or the hole value, which means that no instances of the {callable} were
4900 // created so far and hence we should return false.
4901 Node* callable_prototype_instance_type =
4902 LoadInstanceType(callable_prototype);
4903 GotoUnless(
4904 Word32Equal(callable_prototype_instance_type, Int32Constant(MAP_TYPE)),
4905 &callable_prototype_valid);
4906 var_callable_prototype.Bind(
4907 LoadObjectField(callable_prototype, Map::kPrototypeOffset));
4908 Goto(&callable_prototype_valid);
4909 Bind(&callable_prototype_valid);
4910 callable_prototype = var_callable_prototype.value();
4911 }
4912
4913 // Update the global instanceof cache with the current {object} map and
4914 // {callable}. The cached answer will be set when it is known below.
4915 StoreRoot(Heap::kInstanceofCacheFunctionRootIndex, callable);
4916 StoreRoot(Heap::kInstanceofCacheMapRootIndex, object_map);
4917
4918 // Loop through the prototype chain looking for the {callable} prototype.
4919 Variable var_object_map(this, MachineRepresentation::kTagged);
4920 var_object_map.Bind(object_map);
4921 Label loop(this, &var_object_map);
4922 Goto(&loop);
4923 Bind(&loop);
4924 {
4925 Node* object_map = var_object_map.value();
4926
4927 // Check if the current {object} needs to be access checked.
4928 Node* object_bitfield = LoadMapBitField(object_map);
4929 GotoUnless(
4930 Word32Equal(Word32And(object_bitfield,
4931 Int32Constant(1 << Map::kIsAccessCheckNeeded)),
4932 Int32Constant(0)),
4933 &return_runtime);
4934
4935 // Check if the current {object} is a proxy.
4936 Node* object_instance_type = LoadMapInstanceType(object_map);
4937 GotoIf(Word32Equal(object_instance_type, Int32Constant(JS_PROXY_TYPE)),
4938 &return_runtime);
4939
4940 // Check the current {object} prototype.
4941 Node* object_prototype = LoadMapPrototype(object_map);
4942 GotoIf(WordEqual(object_prototype, NullConstant()), &return_false);
4943 GotoIf(WordEqual(object_prototype, callable_prototype), &return_true);
4944
4945 // Continue with the prototype.
4946 var_object_map.Bind(LoadMap(object_prototype));
4947 Goto(&loop);
4948 }
4949
4950 Bind(&return_true);
4951 StoreRoot(Heap::kInstanceofCacheAnswerRootIndex, BooleanConstant(true));
4952 var_result.Bind(BooleanConstant(true));
4953 Goto(&return_result);
4954
4955 Bind(&return_false);
4956 StoreRoot(Heap::kInstanceofCacheAnswerRootIndex, BooleanConstant(false));
4957 var_result.Bind(BooleanConstant(false));
4958 Goto(&return_result);
4959
4960 Bind(&return_runtime);
4961 {
4962 // Invalidate the global instanceof cache.
4963 StoreRoot(Heap::kInstanceofCacheFunctionRootIndex, SmiConstant(0));
4964     // Fall back to the runtime implementation.
4965 var_result.Bind(
4966 CallRuntime(Runtime::kOrdinaryHasInstance, context, callable, object));
4967 }
4968 Goto(&return_result);
4969
4970 Bind(&return_result);
4971 return var_result.value();
4972 }
4973
4974 compiler::Node* CodeStubAssembler::ElementOffsetFromIndex(Node* index_node,
4975 ElementsKind kind,
4976 ParameterMode mode,
4977 int base_size) {
4978 int element_size_shift = ElementsKindToShiftSize(kind);
4979 int element_size = 1 << element_size_shift;
4980 int const kSmiShiftBits = kSmiShiftSize + kSmiTagSize;
4981 intptr_t index = 0;
4982 bool constant_index = false;
4983 if (mode == SMI_PARAMETERS) {
4984 element_size_shift -= kSmiShiftBits;
4985 Smi* smi_index;
4986 constant_index = ToSmiConstant(index_node, smi_index);
4987 if (constant_index) index = smi_index->value();
4988 index_node = BitcastTaggedToWord(index_node);
4989 } else if (mode == INTEGER_PARAMETERS) {
4990 int32_t temp = 0;
4991 constant_index = ToInt32Constant(index_node, temp);
4992 index = static_cast<intptr_t>(temp);
4993 } else {
4994 DCHECK(mode == INTPTR_PARAMETERS);
4995 constant_index = ToIntPtrConstant(index_node, index);
4996 }
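  // For SMI_PARAMETERS the Smi untag is folded into the element shift:
  // e.g. on a 64-bit target with 32 Smi shift bits and pointer-size
  // elements (shift 3), element_size_shift is 3 - 32 = -29, so the WordShr
  // below maps a tagged index (value << 32) straight to value * 8. (A
  // sketch for that configuration; the constants differ on 32-bit targets.)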
4997 if (constant_index) {
4998 return IntPtrConstant(base_size + element_size * index);
4999 }
5000 if (Is64() && mode == INTEGER_PARAMETERS) {
5001 index_node = ChangeInt32ToInt64(index_node);
5002 }
5003
5004 Node* shifted_index =
5005 (element_size_shift == 0)
5006 ? index_node
5007 : ((element_size_shift > 0)
5008 ? WordShl(index_node, IntPtrConstant(element_size_shift))
5009 : WordShr(index_node, IntPtrConstant(-element_size_shift)));
5010 return IntPtrAddFoldConstants(IntPtrConstant(base_size), shifted_index);
5011 }
5012
5013 compiler::Node* CodeStubAssembler::LoadTypeFeedbackVectorForStub() {
5014 Node* function =
5015 LoadFromParentFrame(JavaScriptFrameConstants::kFunctionOffset);
5016 Node* literals = LoadObjectField(function, JSFunction::kLiteralsOffset);
5017 return LoadObjectField(literals, LiteralsArray::kFeedbackVectorOffset);
5018 }
5019
5020 void CodeStubAssembler::UpdateFeedback(compiler::Node* feedback,
5021 compiler::Node* type_feedback_vector,
5022 compiler::Node* slot_id) {
5023   // This method is used for binary op and compare feedback. These
5024   // vector nodes are initialized with Smi zero, so we can simply OR
5025   // our new feedback in place.
5026 // TODO(interpreter): Consider passing the feedback as Smi already to avoid
5027 // the tagging completely.
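  // Since the feedback lattice only gains bits, OR-ing new feedback into the
  // previous Smi never loses information, and because the combined value is
  // still a Smi, skipping the write barrier in the store below is safe.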
5028 Node* previous_feedback =
5029 LoadFixedArrayElement(type_feedback_vector, slot_id);
5030 Node* combined_feedback = SmiOr(previous_feedback, SmiFromWord32(feedback));
5031 StoreFixedArrayElement(type_feedback_vector, slot_id, combined_feedback,
5032 SKIP_WRITE_BARRIER);
5033 }
5034
5035 compiler::Node* CodeStubAssembler::LoadReceiverMap(compiler::Node* receiver) {
5036 Variable var_receiver_map(this, MachineRepresentation::kTagged);
5037 Label load_smi_map(this, Label::kDeferred), load_receiver_map(this),
5038 if_result(this);
5039
5040 Branch(TaggedIsSmi(receiver), &load_smi_map, &load_receiver_map);
5041 Bind(&load_smi_map);
5042 {
5043 var_receiver_map.Bind(LoadRoot(Heap::kHeapNumberMapRootIndex));
5044 Goto(&if_result);
5045 }
5046 Bind(&load_receiver_map);
5047 {
5048 var_receiver_map.Bind(LoadMap(receiver));
5049 Goto(&if_result);
5050 }
5051 Bind(&if_result);
5052 return var_receiver_map.value();
5053 }
5054
5055 compiler::Node* CodeStubAssembler::TryMonomorphicCase(
5056 compiler::Node* slot, compiler::Node* vector, compiler::Node* receiver_map,
5057 Label* if_handler, Variable* var_handler, Label* if_miss) {
5058 Comment("TryMonomorphicCase");
5059 DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep());
5060
5061 // TODO(ishell): add helper class that hides offset computations for a series
5062 // of loads.
5063 int32_t header_size = FixedArray::kHeaderSize - kHeapObjectTag;
5064 // Adding |header_size| with a separate IntPtrAdd rather than passing it
5065 // into ElementOffsetFromIndex() allows it to be folded into a single
5066 // [base, index, offset] indirect memory access on x64.
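  // A sketch of the resulting accesses, assuming a Smi {slot} and tagged
  // pointer-size vector entries: the map weak cell is loaded from
  //   vector + (FixedArray::kHeaderSize - kHeapObjectTag) + slot * kPointerSize
  // and its handler from the immediately following slot (+kPointerSize).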
5067 Node* offset =
5068 ElementOffsetFromIndex(slot, FAST_HOLEY_ELEMENTS, SMI_PARAMETERS);
5069 Node* feedback = Load(MachineType::AnyTagged(), vector,
5070 IntPtrAdd(offset, IntPtrConstant(header_size)));
5071
5072 // Try to quickly handle the monomorphic case without knowing for sure
5073 // if we have a weak cell in feedback. We do know it's safe to look
5074 // at WeakCell::kValueOffset.
5075 GotoIf(WordNotEqual(receiver_map, LoadWeakCellValueUnchecked(feedback)),
5076 if_miss);
5077
5078 Node* handler =
5079 Load(MachineType::AnyTagged(), vector,
5080 IntPtrAdd(offset, IntPtrConstant(header_size + kPointerSize)));
5081
5082 var_handler->Bind(handler);
5083 Goto(if_handler);
5084 return feedback;
5085 }
5086
5087 void CodeStubAssembler::HandlePolymorphicCase(
5088 compiler::Node* receiver_map, compiler::Node* feedback, Label* if_handler,
5089 Variable* var_handler, Label* if_miss, int unroll_count) {
5090 Comment("HandlePolymorphicCase");
5091 DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep());
5092
5093 // Iterate {feedback} array.
5094 const int kEntrySize = 2;
5095
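  // The polymorphic feedback array is thus assumed to have the shape
  //   [weak_cell(map_0), handler_0, weak_cell(map_1), handler_1, ...],
  // which both the unrolled checks and the loop below walk in lock step.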
5096 for (int i = 0; i < unroll_count; i++) {
5097 Label next_entry(this);
5098 Node* cached_map = LoadWeakCellValue(LoadFixedArrayElement(
5099 feedback, IntPtrConstant(i * kEntrySize), 0, INTPTR_PARAMETERS));
5100 GotoIf(WordNotEqual(receiver_map, cached_map), &next_entry);
5101
5102 // Found, now call handler.
5103 Node* handler = LoadFixedArrayElement(
5104 feedback, IntPtrConstant(i * kEntrySize + 1), 0, INTPTR_PARAMETERS);
5105 var_handler->Bind(handler);
5106 Goto(if_handler);
5107
5108 Bind(&next_entry);
5109 }
5110
5111 // Loop from {unroll_count}*kEntrySize to {length}.
5112 Node* init = IntPtrConstant(unroll_count * kEntrySize);
5113 Node* length = LoadAndUntagFixedArrayBaseLength(feedback);
5114 BuildFastLoop(
5115 MachineType::PointerRepresentation(), init, length,
5116 [receiver_map, feedback, if_handler, var_handler](CodeStubAssembler* csa,
5117 Node* index) {
5118 Node* cached_map = csa->LoadWeakCellValue(
5119 csa->LoadFixedArrayElement(feedback, index, 0, INTPTR_PARAMETERS));
5120
5121 Label next_entry(csa);
5122 csa->GotoIf(csa->WordNotEqual(receiver_map, cached_map), &next_entry);
5123
5124 // Found, now call handler.
5125 Node* handler = csa->LoadFixedArrayElement(
5126 feedback, index, kPointerSize, INTPTR_PARAMETERS);
5127 var_handler->Bind(handler);
5128 csa->Goto(if_handler);
5129
5130 csa->Bind(&next_entry);
5131 },
5132 kEntrySize, IndexAdvanceMode::kPost);
5133 // The loop falls through if no handler was found.
5134 Goto(if_miss);
5135 }
5136
5137 void CodeStubAssembler::HandleKeyedStorePolymorphicCase(
5138 compiler::Node* receiver_map, compiler::Node* feedback, Label* if_handler,
5139 Variable* var_handler, Label* if_transition_handler,
5140 Variable* var_transition_map_cell, Label* if_miss) {
5141 DCHECK_EQ(MachineRepresentation::kTagged, var_handler->rep());
5142 DCHECK_EQ(MachineRepresentation::kTagged, var_transition_map_cell->rep());
5143
5144 const int kEntrySize = 3;
5145
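  // Each keyed-store feedback entry is assumed to be a triple
  //   [weak_cell(map), transition_map_cell_or_undefined, handler],
  // matching the three loads per entry in the loop body below.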
5146 Node* init = IntPtrConstant(0);
5147 Node* length = LoadAndUntagFixedArrayBaseLength(feedback);
5148 BuildFastLoop(
5149 MachineType::PointerRepresentation(), init, length,
5150 [receiver_map, feedback, if_handler, var_handler, if_transition_handler,
5151 var_transition_map_cell](CodeStubAssembler* csa, Node* index) {
5152 Node* cached_map = csa->LoadWeakCellValue(
5153 csa->LoadFixedArrayElement(feedback, index, 0, INTPTR_PARAMETERS));
5154 Label next_entry(csa);
5155 csa->GotoIf(csa->WordNotEqual(receiver_map, cached_map), &next_entry);
5156
5157 Node* maybe_transition_map_cell = csa->LoadFixedArrayElement(
5158 feedback, index, kPointerSize, INTPTR_PARAMETERS);
5159
5160 var_handler->Bind(csa->LoadFixedArrayElement(
5161 feedback, index, 2 * kPointerSize, INTPTR_PARAMETERS));
5162 csa->GotoIf(
5163 csa->WordEqual(maybe_transition_map_cell,
5164 csa->LoadRoot(Heap::kUndefinedValueRootIndex)),
5165 if_handler);
5166 var_transition_map_cell->Bind(maybe_transition_map_cell);
5167 csa->Goto(if_transition_handler);
5168
5169 csa->Bind(&next_entry);
5170 },
5171 kEntrySize, IndexAdvanceMode::kPost);
5172 // The loop falls through if no handler was found.
5173 Goto(if_miss);
5174 }
5175
5176 compiler::Node* CodeStubAssembler::StubCachePrimaryOffset(compiler::Node* name,
5177 compiler::Node* map) {
5178 // See v8::internal::StubCache::PrimaryOffset().
5179 STATIC_ASSERT(StubCache::kCacheIndexShift == Name::kHashShift);
5180 // Compute the hash of the name (use entire hash field).
5181 Node* hash_field = LoadNameHashField(name);
5182 CSA_ASSERT(this,
5183 Word32Equal(Word32And(hash_field,
5184 Int32Constant(Name::kHashNotComputedMask)),
5185 Int32Constant(0)));
5186
5187 // Using only the low bits in 64-bit mode is unlikely to increase the
5188 // risk of collision even if the heap is spread over an area larger than
5189 // 4Gb (and not at all if it isn't).
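  // As a sketch, the offset computed below is
  //   ((hash_field + map) ^ kPrimaryMagic)
  //       & ((kPrimaryTableSize - 1) << kCacheIndexShift),
  // i.e. an entry index pre-scaled by kCacheIndexShift; it is converted to a
  // byte offset later, in TryProbeStubCacheTable.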
5190 Node* hash = Int32Add(hash_field, map);
5191 // Base the offset on a simple combination of name and map.
5192 hash = Word32Xor(hash, Int32Constant(StubCache::kPrimaryMagic));
5193 uint32_t mask = (StubCache::kPrimaryTableSize - 1)
5194 << StubCache::kCacheIndexShift;
5195 return ChangeUint32ToWord(Word32And(hash, Int32Constant(mask)));
5196 }
5197
5198 compiler::Node* CodeStubAssembler::StubCacheSecondaryOffset(
5199 compiler::Node* name, compiler::Node* seed) {
5200 // See v8::internal::StubCache::SecondaryOffset().
5201
5202 // Use the seed from the primary cache in the secondary cache.
5203 Node* hash = Int32Sub(seed, name);
5204 hash = Int32Add(hash, Int32Constant(StubCache::kSecondaryMagic));
5205 int32_t mask = (StubCache::kSecondaryTableSize - 1)
5206 << StubCache::kCacheIndexShift;
5207 return ChangeUint32ToWord(Word32And(hash, Int32Constant(mask)));
5208 }
5209
5210 enum CodeStubAssembler::StubCacheTable : int {
5211 kPrimary = static_cast<int>(StubCache::kPrimary),
5212 kSecondary = static_cast<int>(StubCache::kSecondary)
5213 };
5214
5215 void CodeStubAssembler::TryProbeStubCacheTable(
5216 StubCache* stub_cache, StubCacheTable table_id,
5217 compiler::Node* entry_offset, compiler::Node* name, compiler::Node* map,
5218 Label* if_handler, Variable* var_handler, Label* if_miss) {
5219 StubCache::Table table = static_cast<StubCache::Table>(table_id);
5220 #ifdef DEBUG
5221 if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) {
5222 Goto(if_miss);
5223 return;
5224 } else if (FLAG_test_primary_stub_cache && table == StubCache::kSecondary) {
5225 Goto(if_miss);
5226 return;
5227 }
5228 #endif
5229   // The {entry_offset} holds the entry offset times four (due to masking
5230   // and shifting optimizations).
5231 const int kMultiplier = sizeof(StubCache::Entry) >> Name::kHashShift;
5232 entry_offset = IntPtrMul(entry_offset, IntPtrConstant(kMultiplier));
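  // Sketch of the scaling: the probed offsets arrive pre-shifted by
  // Name::kHashShift, and a StubCache::Entry is assumed to consist of three
  // pointers (key, value, map), so kMultiplier turns the masked hash into a
  // byte offset into the flat entry array.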
5233
5234 // Check that the key in the entry matches the name.
5235 Node* key_base =
5236 ExternalConstant(ExternalReference(stub_cache->key_reference(table)));
5237 Node* entry_key = Load(MachineType::Pointer(), key_base, entry_offset);
5238 GotoIf(WordNotEqual(name, entry_key), if_miss);
5239
5240 // Get the map entry from the cache.
5241 DCHECK_EQ(kPointerSize * 2, stub_cache->map_reference(table).address() -
5242 stub_cache->key_reference(table).address());
5243 Node* entry_map =
5244 Load(MachineType::Pointer(), key_base,
5245 IntPtrAdd(entry_offset, IntPtrConstant(kPointerSize * 2)));
5246 GotoIf(WordNotEqual(map, entry_map), if_miss);
5247
5248 DCHECK_EQ(kPointerSize, stub_cache->value_reference(table).address() -
5249 stub_cache->key_reference(table).address());
5250 Node* handler = Load(MachineType::TaggedPointer(), key_base,
5251 IntPtrAdd(entry_offset, IntPtrConstant(kPointerSize)));
5252
5253 // We found the handler.
5254 var_handler->Bind(handler);
5255 Goto(if_handler);
5256 }
5257
5258 void CodeStubAssembler::TryProbeStubCache(
5259 StubCache* stub_cache, compiler::Node* receiver, compiler::Node* name,
5260 Label* if_handler, Variable* var_handler, Label* if_miss) {
5261 Label try_secondary(this), miss(this);
5262
5263 Counters* counters = isolate()->counters();
5264 IncrementCounter(counters->megamorphic_stub_cache_probes(), 1);
5265
5266 // Check that the {receiver} isn't a smi.
5267 GotoIf(TaggedIsSmi(receiver), &miss);
5268
5269 Node* receiver_map = LoadMap(receiver);
5270
5271 // Probe the primary table.
5272 Node* primary_offset = StubCachePrimaryOffset(name, receiver_map);
5273 TryProbeStubCacheTable(stub_cache, kPrimary, primary_offset, name,
5274 receiver_map, if_handler, var_handler, &try_secondary);
5275
5276 Bind(&try_secondary);
5277 {
5278 // Probe the secondary table.
5279 Node* secondary_offset = StubCacheSecondaryOffset(name, primary_offset);
5280 TryProbeStubCacheTable(stub_cache, kSecondary, secondary_offset, name,
5281 receiver_map, if_handler, var_handler, &miss);
5282 }
5283
5284 Bind(&miss);
5285 {
5286 IncrementCounter(counters->megamorphic_stub_cache_misses(), 1);
5287 Goto(if_miss);
5288 }
5289 }
5290
5291 Node* CodeStubAssembler::TryToIntptr(Node* key, Label* miss) {
5292 Variable var_intptr_key(this, MachineType::PointerRepresentation());
5293 Label done(this, &var_intptr_key), key_is_smi(this);
5294 GotoIf(TaggedIsSmi(key), &key_is_smi);
5295   // Try to convert a heap number key to an intptr index.
5296 GotoUnless(WordEqual(LoadMap(key), HeapNumberMapConstant()), miss);
5297 {
5298 Node* value = LoadHeapNumberValue(key);
5299 Node* int_value = RoundFloat64ToInt32(value);
5300 GotoUnless(Float64Equal(value, ChangeInt32ToFloat64(int_value)), miss);
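    // The double -> int32 -> double round-trip compares equal only when the
    // key is an exact integer in int32 range: e.g. 3.0 passes, while 3.5,
    // NaN, or 2^40 take the miss path.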
5301 var_intptr_key.Bind(ChangeInt32ToIntPtr(int_value));
5302 Goto(&done);
5303 }
5304
5305 Bind(&key_is_smi);
5306 {
5307 var_intptr_key.Bind(SmiUntag(key));
5308 Goto(&done);
5309 }
5310
5311 Bind(&done);
5312 return var_intptr_key.value();
5313 }
5314
5315 void CodeStubAssembler::EmitFastElementsBoundsCheck(Node* object,
5316 Node* elements,
5317 Node* intptr_index,
5318 Node* is_jsarray_condition,
5319 Label* miss) {
5320 Variable var_length(this, MachineType::PointerRepresentation());
5321 Comment("Fast elements bounds check");
5322 Label if_array(this), length_loaded(this, &var_length);
5323 GotoIf(is_jsarray_condition, &if_array);
5324 {
5325 var_length.Bind(SmiUntag(LoadFixedArrayBaseLength(elements)));
5326 Goto(&length_loaded);
5327 }
5328 Bind(&if_array);
5329 {
5330 var_length.Bind(SmiUntag(LoadJSArrayLength(object)));
5331 Goto(&length_loaded);
5332 }
5333 Bind(&length_loaded);
5334 GotoUnless(UintPtrLessThan(intptr_index, var_length.value()), miss);
5335 }
5336
5337 void CodeStubAssembler::EmitElementLoad(Node* object, Node* elements,
5338 Node* elements_kind, Node* intptr_index,
5339 Node* is_jsarray_condition,
5340 Label* if_hole, Label* rebox_double,
5341 Variable* var_double_value,
5342 Label* unimplemented_elements_kind,
5343 Label* out_of_bounds, Label* miss) {
5344 Label if_typed_array(this), if_fast_packed(this), if_fast_holey(this),
5345 if_fast_double(this), if_fast_holey_double(this), if_nonfast(this),
5346 if_dictionary(this);
5347 GotoIf(
5348 IntPtrGreaterThan(elements_kind, IntPtrConstant(LAST_FAST_ELEMENTS_KIND)),
5349 &if_nonfast);
5350
5351 EmitFastElementsBoundsCheck(object, elements, intptr_index,
5352 is_jsarray_condition, out_of_bounds);
5353 int32_t kinds[] = {// Handled by if_fast_packed.
5354 FAST_SMI_ELEMENTS, FAST_ELEMENTS,
5355 // Handled by if_fast_holey.
5356 FAST_HOLEY_SMI_ELEMENTS, FAST_HOLEY_ELEMENTS,
5357 // Handled by if_fast_double.
5358 FAST_DOUBLE_ELEMENTS,
5359 // Handled by if_fast_holey_double.
5360 FAST_HOLEY_DOUBLE_ELEMENTS};
5361 Label* labels[] = {// FAST_{SMI,}_ELEMENTS
5362 &if_fast_packed, &if_fast_packed,
5363 // FAST_HOLEY_{SMI,}_ELEMENTS
5364 &if_fast_holey, &if_fast_holey,
5365 // FAST_DOUBLE_ELEMENTS
5366 &if_fast_double,
5367 // FAST_HOLEY_DOUBLE_ELEMENTS
5368 &if_fast_holey_double};
5369 Switch(elements_kind, unimplemented_elements_kind, kinds, labels,
5370 arraysize(kinds));
5371
5372 Bind(&if_fast_packed);
5373 {
5374 Comment("fast packed elements");
5375 Return(LoadFixedArrayElement(elements, intptr_index, 0, INTPTR_PARAMETERS));
5376 }
5377
5378 Bind(&if_fast_holey);
5379 {
5380 Comment("fast holey elements");
5381 Node* element =
5382 LoadFixedArrayElement(elements, intptr_index, 0, INTPTR_PARAMETERS);
5383 GotoIf(WordEqual(element, TheHoleConstant()), if_hole);
5384 Return(element);
5385 }
5386
5387 Bind(&if_fast_double);
5388 {
5389 Comment("packed double elements");
5390 var_double_value->Bind(LoadFixedDoubleArrayElement(
5391 elements, intptr_index, MachineType::Float64(), 0, INTPTR_PARAMETERS));
5392 Goto(rebox_double);
5393 }
5394
5395 Bind(&if_fast_holey_double);
5396 {
5397 Comment("holey double elements");
5398 Node* value = LoadFixedDoubleArrayElement(elements, intptr_index,
5399 MachineType::Float64(), 0,
5400 INTPTR_PARAMETERS, if_hole);
5401 var_double_value->Bind(value);
5402 Goto(rebox_double);
5403 }
5404
5405 Bind(&if_nonfast);
5406 {
5407 STATIC_ASSERT(LAST_ELEMENTS_KIND == LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND);
5408 GotoIf(IntPtrGreaterThanOrEqual(
5409 elements_kind,
5410 IntPtrConstant(FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND)),
5411 &if_typed_array);
5412 GotoIf(IntPtrEqual(elements_kind, IntPtrConstant(DICTIONARY_ELEMENTS)),
5413 &if_dictionary);
5414 Goto(unimplemented_elements_kind);
5415 }
5416
5417 Bind(&if_dictionary);
5418 {
5419 Comment("dictionary elements");
5420 GotoIf(IntPtrLessThan(intptr_index, IntPtrConstant(0)), out_of_bounds);
5421 Variable var_entry(this, MachineType::PointerRepresentation());
5422 Label if_found(this);
5423 NumberDictionaryLookup<SeededNumberDictionary>(
5424 elements, intptr_index, &if_found, &var_entry, if_hole);
5425 Bind(&if_found);
5426 // Check that the value is a data property.
5427 Node* details_index = EntryToIndex<SeededNumberDictionary>(
5428 var_entry.value(), SeededNumberDictionary::kEntryDetailsIndex);
5429 Node* details = SmiToWord32(
5430 LoadFixedArrayElement(elements, details_index, 0, INTPTR_PARAMETERS));
5431 Node* kind = DecodeWord32<PropertyDetails::KindField>(details);
5432 // TODO(jkummerow): Support accessors without missing?
5433 GotoUnless(Word32Equal(kind, Int32Constant(kData)), miss);
5434 // Finally, load the value.
5435 Node* value_index = EntryToIndex<SeededNumberDictionary>(
5436 var_entry.value(), SeededNumberDictionary::kEntryValueIndex);
5437 Return(LoadFixedArrayElement(elements, value_index, 0, INTPTR_PARAMETERS));
5438 }
5439
5440 Bind(&if_typed_array);
5441 {
5442 Comment("typed elements");
5443 // Check if buffer has been neutered.
5444 Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
5445 Node* bitfield = LoadObjectField(buffer, JSArrayBuffer::kBitFieldOffset,
5446 MachineType::Uint32());
5447 Node* neutered_bit =
5448 Word32And(bitfield, Int32Constant(JSArrayBuffer::WasNeutered::kMask));
5449 GotoUnless(Word32Equal(neutered_bit, Int32Constant(0)), miss);
5450
5451 // Bounds check.
5452 Node* length =
5453 SmiUntag(LoadObjectField(object, JSTypedArray::kLengthOffset));
5454 GotoUnless(UintPtrLessThan(intptr_index, length), out_of_bounds);
5455
5456 // Backing store = external_pointer + base_pointer.
5457 Node* external_pointer =
5458 LoadObjectField(elements, FixedTypedArrayBase::kExternalPointerOffset,
5459 MachineType::Pointer());
5460 Node* base_pointer =
5461 LoadObjectField(elements, FixedTypedArrayBase::kBasePointerOffset);
5462 Node* backing_store = IntPtrAdd(external_pointer, base_pointer);
5463
5464 Label uint8_elements(this), int8_elements(this), uint16_elements(this),
5465 int16_elements(this), uint32_elements(this), int32_elements(this),
5466 float32_elements(this), float64_elements(this);
5467 Label* elements_kind_labels[] = {
5468 &uint8_elements, &uint8_elements, &int8_elements,
5469 &uint16_elements, &int16_elements, &uint32_elements,
5470 &int32_elements, &float32_elements, &float64_elements};
5471 int32_t elements_kinds[] = {
5472 UINT8_ELEMENTS, UINT8_CLAMPED_ELEMENTS, INT8_ELEMENTS,
5473 UINT16_ELEMENTS, INT16_ELEMENTS, UINT32_ELEMENTS,
5474 INT32_ELEMENTS, FLOAT32_ELEMENTS, FLOAT64_ELEMENTS};
5475 const size_t kTypedElementsKindCount =
5476 LAST_FIXED_TYPED_ARRAY_ELEMENTS_KIND -
5477 FIRST_FIXED_TYPED_ARRAY_ELEMENTS_KIND + 1;
5478 DCHECK_EQ(kTypedElementsKindCount, arraysize(elements_kinds));
5479 DCHECK_EQ(kTypedElementsKindCount, arraysize(elements_kind_labels));
5480 Switch(elements_kind, miss, elements_kinds, elements_kind_labels,
5481 kTypedElementsKindCount);
5482 Bind(&uint8_elements);
5483 {
5484 Comment("UINT8_ELEMENTS"); // Handles UINT8_CLAMPED_ELEMENTS too.
5485 Return(SmiTag(Load(MachineType::Uint8(), backing_store, intptr_index)));
5486 }
5487 Bind(&int8_elements);
5488 {
5489 Comment("INT8_ELEMENTS");
5490 Return(SmiTag(Load(MachineType::Int8(), backing_store, intptr_index)));
5491 }
5492 Bind(&uint16_elements);
5493 {
5494 Comment("UINT16_ELEMENTS");
5495 Node* index = WordShl(intptr_index, IntPtrConstant(1));
5496 Return(SmiTag(Load(MachineType::Uint16(), backing_store, index)));
5497 }
5498 Bind(&int16_elements);
5499 {
5500 Comment("INT16_ELEMENTS");
5501 Node* index = WordShl(intptr_index, IntPtrConstant(1));
5502 Return(SmiTag(Load(MachineType::Int16(), backing_store, index)));
5503 }
5504 Bind(&uint32_elements);
5505 {
5506 Comment("UINT32_ELEMENTS");
5507 Node* index = WordShl(intptr_index, IntPtrConstant(2));
5508 Node* element = Load(MachineType::Uint32(), backing_store, index);
5509 Return(ChangeUint32ToTagged(element));
5510 }
5511 Bind(&int32_elements);
5512 {
5513 Comment("INT32_ELEMENTS");
5514 Node* index = WordShl(intptr_index, IntPtrConstant(2));
5515 Node* element = Load(MachineType::Int32(), backing_store, index);
5516 Return(ChangeInt32ToTagged(element));
5517 }
5518 Bind(&float32_elements);
5519 {
5520 Comment("FLOAT32_ELEMENTS");
5521 Node* index = WordShl(intptr_index, IntPtrConstant(2));
5522 Node* element = Load(MachineType::Float32(), backing_store, index);
5523 var_double_value->Bind(ChangeFloat32ToFloat64(element));
5524 Goto(rebox_double);
5525 }
5526 Bind(&float64_elements);
5527 {
5528 Comment("FLOAT64_ELEMENTS");
5529 Node* index = WordShl(intptr_index, IntPtrConstant(3));
5530 Node* element = Load(MachineType::Float64(), backing_store, index);
5531 var_double_value->Bind(element);
5532 Goto(rebox_double);
5533 }
5534 }
5535 }
5536
5537 void CodeStubAssembler::HandleLoadICHandlerCase(
5538 const LoadICParameters* p, Node* handler, Label* miss,
5539 ElementSupport support_elements) {
5540 Comment("have_handler");
5541 Variable var_holder(this, MachineRepresentation::kTagged);
5542 var_holder.Bind(p->receiver);
5543 Variable var_smi_handler(this, MachineRepresentation::kTagged);
5544 var_smi_handler.Bind(handler);
5545
5546 Variable* vars[] = {&var_holder, &var_smi_handler};
5547 Label if_smi_handler(this, 2, vars);
5548 Label try_proto_handler(this), call_handler(this);
5549
5550 Branch(TaggedIsSmi(handler), &if_smi_handler, &try_proto_handler);
5551
5552 // |handler| is a Smi, encoding what to do. See SmiHandler methods
5553 // for the encoding format.
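  // A sketch of that encoding, judging from the decodes in
  // HandleLoadICSmiHandlerCase: KindBits picks element, field, or constant
  // loads, and the remaining bit fields (FieldOffsetBits, IsInobjectBits,
  // IsDoubleBits, ElementsKindBits, IsJsArrayBits, ...) parameterize the
  // chosen kind.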
5554 Bind(&if_smi_handler);
5555 {
5556 HandleLoadICSmiHandlerCase(p, var_holder.value(), var_smi_handler.value(),
5557 miss, support_elements);
5558 }
5559
5560 Bind(&try_proto_handler);
5561 {
5562 GotoIf(IsCodeMap(LoadMap(handler)), &call_handler);
5563 HandleLoadICProtoHandler(p, handler, &var_holder, &var_smi_handler,
5564 &if_smi_handler, miss);
5565 }
5566
5567 Bind(&call_handler);
5568 {
5569 typedef LoadWithVectorDescriptor Descriptor;
5570 TailCallStub(Descriptor(isolate()), handler, p->context,
5571 Arg(Descriptor::kReceiver, p->receiver),
5572 Arg(Descriptor::kName, p->name),
5573 Arg(Descriptor::kSlot, p->slot),
5574 Arg(Descriptor::kVector, p->vector));
5575 }
5576 }
5577
5578 void CodeStubAssembler::HandleLoadICSmiHandlerCase(
5579 const LoadICParameters* p, Node* holder, Node* smi_handler, Label* miss,
5580 ElementSupport support_elements) {
5581 Variable var_double_value(this, MachineRepresentation::kFloat64);
5582 Label rebox_double(this, &var_double_value);
5583
5584 Node* handler_word = SmiUntag(smi_handler);
5585 Node* handler_kind = DecodeWord<LoadHandler::KindBits>(handler_word);
5586 if (support_elements == kSupportElements) {
5587 Label property(this);
5588 GotoUnless(
5589 WordEqual(handler_kind, IntPtrConstant(LoadHandler::kForElements)),
5590 &property);
5591
5592 Comment("element_load");
5593 Node* intptr_index = TryToIntptr(p->name, miss);
5594 Node* elements = LoadElements(holder);
5595 Node* is_jsarray_condition =
5596 IsSetWord<LoadHandler::IsJsArrayBits>(handler_word);
5597 Node* elements_kind =
5598 DecodeWord<LoadHandler::ElementsKindBits>(handler_word);
5599 Label if_hole(this), unimplemented_elements_kind(this);
5600 Label* out_of_bounds = miss;
5601 EmitElementLoad(holder, elements, elements_kind, intptr_index,
5602 is_jsarray_condition, &if_hole, &rebox_double,
5603 &var_double_value, &unimplemented_elements_kind,
5604 out_of_bounds, miss);
5605
5606 Bind(&unimplemented_elements_kind);
5607 {
5608 // Smi handlers should only be installed for supported elements kinds.
5609 // Crash if we get here.
5610 DebugBreak();
5611 Goto(miss);
5612 }
5613
5614 Bind(&if_hole);
5615 {
5616 Comment("convert hole");
5617 GotoUnless(IsSetWord<LoadHandler::ConvertHoleBits>(handler_word), miss);
5618 Node* protector_cell = LoadRoot(Heap::kArrayProtectorRootIndex);
5619 DCHECK(isolate()->heap()->array_protector()->IsPropertyCell());
5620 GotoUnless(
5621 WordEqual(LoadObjectField(protector_cell, PropertyCell::kValueOffset),
5622 SmiConstant(Smi::FromInt(Isolate::kProtectorValid))),
5623 miss);
5624 Return(UndefinedConstant());
5625 }
5626
5627 Bind(&property);
5628 Comment("property_load");
5629 }
5630
5631 Label constant(this), field(this);
5632 Branch(WordEqual(handler_kind, IntPtrConstant(LoadHandler::kForFields)),
5633 &field, &constant);
5634
5635 Bind(&field);
5636 {
5637 Comment("field_load");
5638 Node* offset = DecodeWord<LoadHandler::FieldOffsetBits>(handler_word);
5639
5640 Label inobject(this), out_of_object(this);
5641 Branch(IsSetWord<LoadHandler::IsInobjectBits>(handler_word), &inobject,
5642 &out_of_object);
5643
5644 Bind(&inobject);
5645 {
5646 Label is_double(this);
5647 GotoIf(IsSetWord<LoadHandler::IsDoubleBits>(handler_word), &is_double);
5648 Return(LoadObjectField(holder, offset));
5649
5650 Bind(&is_double);
5651 if (FLAG_unbox_double_fields) {
5652 var_double_value.Bind(
5653 LoadObjectField(holder, offset, MachineType::Float64()));
5654 } else {
5655 Node* mutable_heap_number = LoadObjectField(holder, offset);
5656 var_double_value.Bind(LoadHeapNumberValue(mutable_heap_number));
5657 }
5658 Goto(&rebox_double);
5659 }
5660
5661 Bind(&out_of_object);
5662 {
5663 Label is_double(this);
5664 Node* properties = LoadProperties(holder);
5665 Node* value = LoadObjectField(properties, offset);
5666 GotoIf(IsSetWord<LoadHandler::IsDoubleBits>(handler_word), &is_double);
5667 Return(value);
5668
5669 Bind(&is_double);
5670 var_double_value.Bind(LoadHeapNumberValue(value));
5671 Goto(&rebox_double);
5672 }
5673
5674 Bind(&rebox_double);
5675 Return(AllocateHeapNumberWithValue(var_double_value.value()));
5676 }
5677
5678 Bind(&constant);
5679 {
5680 Comment("constant_load");
5681 Node* descriptors = LoadMapDescriptors(LoadMap(holder));
5682 Node* descriptor =
5683 DecodeWord<LoadHandler::DescriptorValueIndexBits>(handler_word);
5684 CSA_ASSERT(this,
5685 UintPtrLessThan(descriptor,
5686 LoadAndUntagFixedArrayBaseLength(descriptors)));
5687 Node* value =
5688 LoadFixedArrayElement(descriptors, descriptor, 0, INTPTR_PARAMETERS);
5689
5690 Label if_accessor_info(this);
5691 GotoIf(IsSetWord<LoadHandler::IsAccessorInfoBits>(handler_word),
5692 &if_accessor_info);
5693 Return(value);
5694
5695 Bind(&if_accessor_info);
5696 Callable callable = CodeFactory::ApiGetter(isolate());
5697 TailCallStub(callable, p->context, p->receiver, holder, value);
5698 }
5699 }
5700
5701 void CodeStubAssembler::HandleLoadICProtoHandler(
5702 const LoadICParameters* p, Node* handler, Variable* var_holder,
5703 Variable* var_smi_handler, Label* if_smi_handler, Label* miss) {
5704 DCHECK_EQ(MachineRepresentation::kTagged, var_holder->rep());
5705 DCHECK_EQ(MachineRepresentation::kTagged, var_smi_handler->rep());
5706
5707   // IC dispatchers rely on these assumptions holding.
5708 STATIC_ASSERT(FixedArray::kLengthOffset == LoadHandler::kHolderCellOffset);
5709 DCHECK_EQ(FixedArray::OffsetOfElementAt(LoadHandler::kSmiHandlerIndex),
5710 LoadHandler::kSmiHandlerOffset);
5711 DCHECK_EQ(FixedArray::OffsetOfElementAt(LoadHandler::kValidityCellIndex),
5712 LoadHandler::kValidityCellOffset);
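  // Note in particular that the holder-cell slot of a Tuple3 handler lines
  // up with the length field of a FixedArray handler; loading that slot and
  // checking for a Smi (a length) below is what distinguishes the two forms.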
5713
5714   // Both FixedArray and Tuple3 handlers store the validity cell at the same offset.
5715 Label validity_cell_check_done(this);
5716 Node* validity_cell =
5717 LoadObjectField(handler, LoadHandler::kValidityCellOffset);
5718 GotoIf(WordEqual(validity_cell, IntPtrConstant(0)),
5719 &validity_cell_check_done);
5720 Node* cell_value = LoadObjectField(validity_cell, Cell::kValueOffset);
5721 GotoIf(WordNotEqual(cell_value,
5722 SmiConstant(Smi::FromInt(Map::kPrototypeChainValid))),
5723 miss);
5724 Goto(&validity_cell_check_done);
5725
5726 Bind(&validity_cell_check_done);
5727 Node* smi_handler = LoadObjectField(handler, LoadHandler::kSmiHandlerOffset);
5728 CSA_ASSERT(this, TaggedIsSmi(smi_handler));
5729 Node* handler_flags = SmiUntag(smi_handler);
5730
5731 Label check_prototypes(this);
5732 GotoUnless(
5733 IsSetWord<LoadHandler::DoNegativeLookupOnReceiverBits>(handler_flags),
5734 &check_prototypes);
5735 {
5736 CSA_ASSERT(this, Word32BinaryNot(
5737 HasInstanceType(p->receiver, JS_GLOBAL_OBJECT_TYPE)));
5738     // We have a dictionary receiver; do a negative lookup check.
5739 NameDictionaryNegativeLookup(p->receiver, p->name, miss);
5740 Goto(&check_prototypes);
5741 }
5742
5743 Bind(&check_prototypes);
5744 Node* maybe_holder_cell =
5745 LoadObjectField(handler, LoadHandler::kHolderCellOffset);
5746 Label array_handler(this), tuple_handler(this);
5747 Branch(TaggedIsSmi(maybe_holder_cell), &array_handler, &tuple_handler);
5748
5749 Bind(&tuple_handler);
5750 {
5751 Label load_existent(this);
5752 GotoIf(WordNotEqual(maybe_holder_cell, NullConstant()), &load_existent);
5753 // This is a handler for a load of a non-existent value.
5754 Return(UndefinedConstant());
5755
5756 Bind(&load_existent);
5757 Node* holder = LoadWeakCellValue(maybe_holder_cell);
5758 // The |holder| is guaranteed to be alive at this point since we passed
5759 // both the receiver map check and the validity cell check.
5760 CSA_ASSERT(this, WordNotEqual(holder, IntPtrConstant(0)));
5761
5762 var_holder->Bind(holder);
5763 var_smi_handler->Bind(smi_handler);
5764 Goto(if_smi_handler);
5765 }
5766
5767 Bind(&array_handler);
5768 {
5769 typedef LoadICProtoArrayDescriptor Descriptor;
5770 LoadICProtoArrayStub stub(isolate());
5771 Node* target = HeapConstant(stub.GetCode());
5772 TailCallStub(Descriptor(isolate()), target, p->context,
5773 Arg(Descriptor::kReceiver, p->receiver),
5774 Arg(Descriptor::kName, p->name),
5775 Arg(Descriptor::kSlot, p->slot),
5776 Arg(Descriptor::kVector, p->vector),
5777 Arg(Descriptor::kHandler, handler));
5778 }
5779 }
5780
5781 void CodeStubAssembler::LoadICProtoArray(const LoadICParameters* p,
5782 Node* handler) {
5783 Label miss(this);
5784 CSA_ASSERT(this, Word32BinaryNot(TaggedIsSmi(handler)));
5785 CSA_ASSERT(this, IsFixedArrayMap(LoadMap(handler)));
5786
5787 Node* smi_handler = LoadObjectField(handler, LoadHandler::kSmiHandlerOffset);
5788 Node* handler_flags = SmiUntag(smi_handler);
5789
5790 Node* handler_length = LoadAndUntagFixedArrayBaseLength(handler);
5791
5792 Node* holder = EmitLoadICProtoArrayCheck(p, handler, handler_length,
5793 handler_flags, &miss);
5794
5795 HandleLoadICSmiHandlerCase(p, holder, smi_handler, &miss, kOnlyProperties);
5796
5797 Bind(&miss);
5798 {
5799 TailCallRuntime(Runtime::kLoadIC_Miss, p->context, p->receiver, p->name,
5800 p->slot, p->vector);
5801 }
5802 }
5803
5804 Node* CodeStubAssembler::EmitLoadICProtoArrayCheck(const LoadICParameters* p,
5805 Node* handler,
5806 Node* handler_length,
5807 Node* handler_flags,
5808 Label* miss) {
5809 Variable start_index(this, MachineType::PointerRepresentation());
5810 start_index.Bind(IntPtrConstant(LoadHandler::kFirstPrototypeIndex));
5811
5812 Label can_access(this);
5813 GotoUnless(IsSetWord<LoadHandler::DoAccessCheckOnReceiverBits>(handler_flags),
5814 &can_access);
5815 {
5816     // Skip the first prototype entry; it holds the expected native context.
5817 start_index.Bind(IntPtrConstant(LoadHandler::kFirstPrototypeIndex + 1));
5818
5819 int offset =
5820 FixedArray::OffsetOfElementAt(LoadHandler::kFirstPrototypeIndex);
5821 Node* expected_native_context =
5822 LoadWeakCellValue(LoadObjectField(handler, offset), miss);
5823 CSA_ASSERT(this, IsNativeContext(expected_native_context));
5824
5825 Node* native_context = LoadNativeContext(p->context);
5826 GotoIf(WordEqual(expected_native_context, native_context), &can_access);
5827 // If the receiver is not a JSGlobalProxy then we miss.
5828 GotoUnless(IsJSGlobalProxy(p->receiver), miss);
5829 // For a JSGlobalProxy receiver, compare the security tokens of the
5830 // current and expected native contexts.
5831 Node* expected_token = LoadContextElement(expected_native_context,
5832 Context::SECURITY_TOKEN_INDEX);
5833 Node* current_token =
5834 LoadContextElement(native_context, Context::SECURITY_TOKEN_INDEX);
5835 Branch(WordEqual(expected_token, current_token), &can_access, miss);
5836 }
5837 Bind(&can_access);
5838
5839 BuildFastLoop(
5840 MachineType::PointerRepresentation(), start_index.value(), handler_length,
5841 [this, p, handler, miss](CodeStubAssembler*, Node* current) {
5842 Node* prototype_cell =
5843 LoadFixedArrayElement(handler, current, 0, INTPTR_PARAMETERS);
5844 CheckPrototype(prototype_cell, p->name, miss);
5845 },
5846 1, IndexAdvanceMode::kPost);
5847
5848 Node* maybe_holder_cell = LoadFixedArrayElement(
5849 handler, IntPtrConstant(LoadHandler::kHolderCellIndex), 0,
5850 INTPTR_PARAMETERS);
5851 Label load_existent(this);
5852 GotoIf(WordNotEqual(maybe_holder_cell, NullConstant()), &load_existent);
5853 // This is a handler for a load of a non-existent value.
5854 Return(UndefinedConstant());
5855
5856 Bind(&load_existent);
5857 Node* holder = LoadWeakCellValue(maybe_holder_cell);
5858 // The |holder| is guaranteed to be alive at this point since we passed
5859 // the receiver map check, the validity cell check and the prototype chain
5860 // check.
5861 CSA_ASSERT(this, WordNotEqual(holder, IntPtrConstant(0)));
5862 return holder;
5863 }
5864
5865 void CodeStubAssembler::CheckPrototype(Node* prototype_cell, Node* name,
5866 Label* miss) {
5867 Node* maybe_prototype = LoadWeakCellValue(prototype_cell, miss);
5868
5869 Label done(this);
5870 Label if_property_cell(this), if_dictionary_object(this);
5871
5872 // |maybe_prototype| is either a PropertyCell or a slow-mode prototype.
5873 Branch(WordEqual(LoadMap(maybe_prototype),
5874 LoadRoot(Heap::kGlobalPropertyCellMapRootIndex)),
5875 &if_property_cell, &if_dictionary_object);
5876
5877 Bind(&if_dictionary_object);
5878 {
5879 CSA_ASSERT(this, IsDictionaryMap(LoadMap(maybe_prototype)));
5880 NameDictionaryNegativeLookup(maybe_prototype, name, miss);
5881 Goto(&done);
5882 }
5883
5884 Bind(&if_property_cell);
5885 {
5886 // Ensure the property cell still contains the hole.
5887 Node* value = LoadObjectField(maybe_prototype, PropertyCell::kValueOffset);
5888 GotoIf(WordNotEqual(value, LoadRoot(Heap::kTheHoleValueRootIndex)), miss);
5889 Goto(&done);
5890 }
5891
5892 Bind(&done);
5893 }
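// Informally, each prototype cell checked above guards one of two
// invariants:
//   PropertyCell         -> the name is still absent from a global object
//                           on the chain (its cell still holds the hole);
//   dictionary prototype -> re-verified with a full negative lookup, since
//                           slow-mode maps have no transition-based
//                           invalidation to rely on.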
5894
5895 void CodeStubAssembler::NameDictionaryNegativeLookup(Node* object, Node* name,
5896 Label* miss) {
5897 CSA_ASSERT(this, IsDictionaryMap(LoadMap(object)));
5898 Node* properties = LoadProperties(object);
5899 // Ensure the property does not exist in a dictionary-mode object.
5900 Variable var_name_index(this, MachineType::PointerRepresentation());
5901 Label done(this);
5902 NameDictionaryLookup<NameDictionary>(properties, name, miss, &var_name_index,
5903 &done);
5904 Bind(&done);
5905 }
5906
5907 void CodeStubAssembler::LoadIC(const LoadICParameters* p) {
5908 Variable var_handler(this, MachineRepresentation::kTagged);
5909 // TODO(ishell): defer blocks when it works.
5910 Label if_handler(this, &var_handler), try_polymorphic(this),
5911 try_megamorphic(this /*, Label::kDeferred*/),
5912 miss(this /*, Label::kDeferred*/);
5913
5914 Node* receiver_map = LoadReceiverMap(p->receiver);
5915
5916 // Check monomorphic case.
5917 Node* feedback =
5918 TryMonomorphicCase(p->slot, p->vector, receiver_map, &if_handler,
5919 &var_handler, &try_polymorphic);
5920 Bind(&if_handler);
5921 {
5922 HandleLoadICHandlerCase(p, var_handler.value(), &miss);
5923 }
5924
5925 Bind(&try_polymorphic);
5926 {
5927 // Check polymorphic case.
5928 Comment("LoadIC_try_polymorphic");
5929 GotoUnless(WordEqual(LoadMap(feedback), FixedArrayMapConstant()),
5930 &try_megamorphic);
5931 HandlePolymorphicCase(receiver_map, feedback, &if_handler, &var_handler,
5932 &miss, 2);
5933 }
5934
5935 Bind(&try_megamorphic);
5936 {
5937 // Check megamorphic case.
5938 GotoUnless(
5939 WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
5940 &miss);
5941
5942 TryProbeStubCache(isolate()->load_stub_cache(), p->receiver, p->name,
5943 &if_handler, &var_handler, &miss);
5944 }
5945 Bind(&miss);
5946 {
5947 TailCallRuntime(Runtime::kLoadIC_Miss, p->context, p->receiver, p->name,
5948 p->slot, p->vector);
5949 }
5950 }
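// Informally, a LoadIC feedback slot only ever moves forward through
//   uninitialized -> monomorphic (a WeakCell holding one map)
//                 -> polymorphic (a FixedArray of <map, handler> pairs)
//                 -> megamorphic (the megamorphic_symbol sentinel),
// which is why the checks above can be tried strictly in that order; in the
// megamorphic state handlers are found only via the load stub cache.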
5951
5952 void CodeStubAssembler::KeyedLoadIC(const LoadICParameters* p) {
5953 Variable var_handler(this, MachineRepresentation::kTagged);
5954 // TODO(ishell): defer blocks when it works.
5955 Label if_handler(this, &var_handler), try_polymorphic(this),
5956 try_megamorphic(this /*, Label::kDeferred*/),
5957 try_polymorphic_name(this /*, Label::kDeferred*/),
5958 miss(this /*, Label::kDeferred*/);
5959
5960 Node* receiver_map = LoadReceiverMap(p->receiver);
5961
5962 // Check monomorphic case.
5963 Node* feedback =
5964 TryMonomorphicCase(p->slot, p->vector, receiver_map, &if_handler,
5965 &var_handler, &try_polymorphic);
5966 Bind(&if_handler);
5967 {
5968 HandleLoadICHandlerCase(p, var_handler.value(), &miss, kSupportElements);
5969 }
5970
5971 Bind(&try_polymorphic);
5972 {
5973 // Check polymorphic case.
5974 Comment("KeyedLoadIC_try_polymorphic");
5975 GotoUnless(WordEqual(LoadMap(feedback), FixedArrayMapConstant()),
5976 &try_megamorphic);
5977 HandlePolymorphicCase(receiver_map, feedback, &if_handler, &var_handler,
5978 &miss, 2);
5979 }
5980
5981 Bind(&try_megamorphic);
5982 {
5983 // Check megamorphic case.
5984 Comment("KeyedLoadIC_try_megamorphic");
5985 GotoUnless(
5986 WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
5987 &try_polymorphic_name);
5988 // TODO(jkummerow): Inline this? Or some of it?
5989 TailCallStub(CodeFactory::KeyedLoadIC_Megamorphic(isolate()), p->context,
5990 p->receiver, p->name, p->slot, p->vector);
5991 }
5992 Bind(&try_polymorphic_name);
5993 {
5994 // We might have a name in feedback, and a fixed array in the next slot.
5995 Comment("KeyedLoadIC_try_polymorphic_name");
5996 GotoUnless(WordEqual(feedback, p->name), &miss);
5997 // If the name comparison succeeded, we know we have a fixed array with
5998 // at least one map/handler pair.
5999 Node* offset = ElementOffsetFromIndex(
6000 p->slot, FAST_HOLEY_ELEMENTS, SMI_PARAMETERS,
6001 FixedArray::kHeaderSize + kPointerSize - kHeapObjectTag);
6002 Node* array = Load(MachineType::AnyTagged(), p->vector, offset);
6003 HandlePolymorphicCase(receiver_map, array, &if_handler, &var_handler, &miss,
6004 1);
6005 }
6006 Bind(&miss);
6007 {
6008 Comment("KeyedLoadIC_miss");
6009 TailCallRuntime(Runtime::kKeyedLoadIC_Miss, p->context, p->receiver,
6010 p->name, p->slot, p->vector);
6011 }
6012 }
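// Sketch of the offset computed in try_polymorphic_name above (the vector
// is a tagged pointer, hence the -kHeapObjectTag):
//   offset = untagged_slot * kPointerSize
//            + FixedArray::kHeaderSize + kPointerSize - kHeapObjectTag
// i.e. the address of vector element [slot + 1], which holds the
// <map, handler> array whenever element [slot] holds the name itself.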
6013
6014 void CodeStubAssembler::KeyedLoadICGeneric(const LoadICParameters* p) {
6015 Variable var_index(this, MachineType::PointerRepresentation());
6016 Variable var_details(this, MachineRepresentation::kWord32);
6017 Variable var_value(this, MachineRepresentation::kTagged);
6018 Label if_index(this), if_unique_name(this), if_element_hole(this),
6019 if_oob(this), slow(this), stub_cache_miss(this),
6020 if_property_dictionary(this), if_found_on_receiver(this);
6021
6022 Node* receiver = p->receiver;
6023 GotoIf(TaggedIsSmi(receiver), &slow);
6024 Node* receiver_map = LoadMap(receiver);
6025 Node* instance_type = LoadMapInstanceType(receiver_map);
6026 // Receivers requiring non-standard element accesses (interceptors, access
6027 // checks, strings and string wrappers, proxies) are handled in the runtime.
6028 GotoIf(Int32LessThanOrEqual(instance_type,
6029 Int32Constant(LAST_CUSTOM_ELEMENTS_RECEIVER)),
6030 &slow);
6031
6032 Node* key = p->name;
6033 TryToName(key, &if_index, &var_index, &if_unique_name, &slow);
6034
6035 Bind(&if_index);
6036 {
6037 Comment("integer index");
6038 Node* index = var_index.value();
6039 Node* elements = LoadElements(receiver);
6040 Node* elements_kind = LoadMapElementsKind(receiver_map);
6041 Node* is_jsarray_condition =
6042 Word32Equal(instance_type, Int32Constant(JS_ARRAY_TYPE));
6043 Variable var_double_value(this, MachineRepresentation::kFloat64);
6044 Label rebox_double(this, &var_double_value);
6045
6046 // Unimplemented elements kinds fall back to a runtime call.
6047 Label* unimplemented_elements_kind = &slow;
6048 IncrementCounter(isolate()->counters()->ic_keyed_load_generic_smi(), 1);
6049 EmitElementLoad(receiver, elements, elements_kind, index,
6050 is_jsarray_condition, &if_element_hole, &rebox_double,
6051 &var_double_value, unimplemented_elements_kind, &if_oob,
6052 &slow);
6053
6054 Bind(&rebox_double);
6055 Return(AllocateHeapNumberWithValue(var_double_value.value()));
6056 }
6057
6058 Bind(&if_oob);
6059 {
6060 Comment("out of bounds");
6061 Node* index = var_index.value();
6062 // Negative keys can't take the fast OOB path.
6063 GotoIf(IntPtrLessThan(index, IntPtrConstant(0)), &slow);
6064 // Positive OOB indices are effectively the same as hole loads.
6065 Goto(&if_element_hole);
6066 }
6067
6068 Bind(&if_element_hole);
6069 {
6070 Comment("found the hole");
6071 Label return_undefined(this);
6072 BranchIfPrototypesHaveNoElements(receiver_map, &return_undefined, &slow);
6073
6074 Bind(&return_undefined);
6075 Return(UndefinedConstant());
6076 }
6077
6078 Node* properties = nullptr;
6079 Bind(&if_unique_name);
6080 {
6081 Comment("key is unique name");
6082 // Check if the receiver has fast or slow properties.
6083 properties = LoadProperties(receiver);
6084 Node* properties_map = LoadMap(properties);
6085 GotoIf(WordEqual(properties_map, LoadRoot(Heap::kHashTableMapRootIndex)),
6086 &if_property_dictionary);
6087
6088 // Try looking up the property on the receiver; if unsuccessful, look
6089 // for a handler in the stub cache.
6090 Comment("DescriptorArray lookup");
6091
6092 // Skip linear search if there are too many descriptors.
6093 // TODO(jkummerow): Consider implementing binary search.
6094 // See also TryLookupProperty() which has the same limitation.
6095 const int32_t kMaxLinear = 210;
6096 Label stub_cache(this);
6097 Node* bitfield3 = LoadMapBitField3(receiver_map);
6098 Node* nof =
6099 DecodeWordFromWord32<Map::NumberOfOwnDescriptorsBits>(bitfield3);
6100 GotoIf(UintPtrGreaterThan(nof, IntPtrConstant(kMaxLinear)), &stub_cache);
6101 Node* descriptors = LoadMapDescriptors(receiver_map);
6102 Variable var_name_index(this, MachineType::PointerRepresentation());
6103 Label if_descriptor_found(this);
6104 DescriptorLookupLinear(key, descriptors, nof, &if_descriptor_found,
6105 &var_name_index, &stub_cache);
6106
6107 Bind(&if_descriptor_found);
6108 {
6109 LoadPropertyFromFastObject(receiver, receiver_map, descriptors,
6110 var_name_index.value(), &var_details,
6111 &var_value);
6112 Goto(&if_found_on_receiver);
6113 }
6114
6115 Bind(&stub_cache);
6116 {
6117 Comment("stub cache probe for fast property load");
6118 Variable var_handler(this, MachineRepresentation::kTagged);
6119 Label found_handler(this, &var_handler), stub_cache_miss(this);
6120 TryProbeStubCache(isolate()->load_stub_cache(), receiver, key,
6121 &found_handler, &var_handler, &stub_cache_miss);
6122 Bind(&found_handler);
6123 { HandleLoadICHandlerCase(p, var_handler.value(), &slow); }
6124
6125 Bind(&stub_cache_miss);
6126 {
6127 Comment("KeyedLoadGeneric_miss");
6128 TailCallRuntime(Runtime::kKeyedLoadIC_Miss, p->context, p->receiver,
6129 p->name, p->slot, p->vector);
6130 }
6131 }
6132 }
6133
6134 Bind(&if_property_dictionary);
6135 {
6136 Comment("dictionary property load");
6137 // We checked for LAST_CUSTOM_ELEMENTS_RECEIVER before, which rules out
6138 // seeing global objects here (which would need special handling).
6139
6140 Variable var_name_index(this, MachineType::PointerRepresentation());
6141 Label dictionary_found(this, &var_name_index);
6142 NameDictionaryLookup<NameDictionary>(properties, key, &dictionary_found,
6143 &var_name_index, &slow);
6144 Bind(&dictionary_found);
6145 {
6146 LoadPropertyFromNameDictionary(properties, var_name_index.value(),
6147 &var_details, &var_value);
6148 Goto(&if_found_on_receiver);
6149 }
6150 }
6151
6152 Bind(&if_found_on_receiver);
6153 {
6154 Node* value = CallGetterIfAccessor(var_value.value(), var_details.value(),
6155 p->context, receiver, &slow);
6156 IncrementCounter(isolate()->counters()->ic_keyed_load_generic_symbol(), 1);
6157 Return(value);
6158 }
6159
6160 Bind(&slow);
6161 {
6162 Comment("KeyedLoadGeneric_slow");
6163 IncrementCounter(isolate()->counters()->ic_keyed_load_generic_slow(), 1);
6164 // TODO(jkummerow): Should we use the GetProperty TF stub instead?
6165 TailCallRuntime(Runtime::kKeyedGetProperty, p->context, p->receiver,
6166 p->name);
6167 }
6168 }
6169
6170 void CodeStubAssembler::HandleStoreFieldAndReturn(Node* handler_word,
6171 Node* holder,
6172 Representation representation,
6173 Node* value, Node* transition,
6174 Label* miss) {
6175 bool transition_to_field = transition != nullptr;
6176 Node* prepared_value = PrepareValueForWrite(value, representation, miss);
6177
6178 if (transition_to_field) {
6179 Label storage_extended(this);
6180 GotoUnless(IsSetWord<StoreHandler::ExtendStorageBits>(handler_word),
6181 &storage_extended);
6182 Comment("[ Extend storage");
6183 ExtendPropertiesBackingStore(holder);
6184 Comment("] Extend storage");
6185 Goto(&storage_extended);
6186
6187 Bind(&storage_extended);
6188 }
6189
6190 Node* offset = DecodeWord<StoreHandler::FieldOffsetBits>(handler_word);
6191 Label if_inobject(this), if_out_of_object(this);
6192 Branch(IsSetWord<StoreHandler::IsInobjectBits>(handler_word), &if_inobject,
6193 &if_out_of_object);
6194
6195 Bind(&if_inobject);
6196 {
6197 StoreNamedField(holder, offset, true, representation, prepared_value,
6198 transition_to_field);
6199 if (transition_to_field) {
6200 StoreObjectField(holder, JSObject::kMapOffset, transition);
6201 }
6202 Return(value);
6203 }
6204
6205 Bind(&if_out_of_object);
6206 {
6207 StoreNamedField(holder, offset, false, representation, prepared_value,
6208 transition_to_field);
6209 if (transition_to_field) {
6210 StoreObjectField(holder, JSObject::kMapOffset, transition);
6211 }
6212 Return(value);
6213 }
6214 }
6215
6216 void CodeStubAssembler::HandleStoreICSmiHandlerCase(Node* handler_word,
6217 Node* holder, Node* value,
6218 Node* transition,
6219 Label* miss) {
6220 Comment(transition ? "transitioning field store" : "field store");
6221
6222 #ifdef DEBUG
6223 Node* handler_kind = DecodeWord<StoreHandler::KindBits>(handler_word);
6224 if (transition) {
6225 CSA_ASSERT(
6226 this,
6227 WordOr(WordEqual(handler_kind,
6228 IntPtrConstant(StoreHandler::kTransitionToField)),
6229 WordEqual(handler_kind,
6230 IntPtrConstant(StoreHandler::kTransitionToConstant))));
6231 } else {
6232 CSA_ASSERT(this, WordEqual(handler_kind,
6233 IntPtrConstant(StoreHandler::kStoreField)));
6234 }
6235 #endif
6236
6237 Node* field_representation =
6238 DecodeWord<StoreHandler::FieldRepresentationBits>(handler_word);
6239
6240 Label if_smi_field(this), if_double_field(this), if_heap_object_field(this),
6241 if_tagged_field(this);
6242
6243 GotoIf(WordEqual(field_representation, IntPtrConstant(StoreHandler::kTagged)),
6244 &if_tagged_field);
6245 GotoIf(WordEqual(field_representation,
6246 IntPtrConstant(StoreHandler::kHeapObject)),
6247 &if_heap_object_field);
6248 GotoIf(WordEqual(field_representation, IntPtrConstant(StoreHandler::kDouble)),
6249 &if_double_field);
6250 CSA_ASSERT(this, WordEqual(field_representation,
6251 IntPtrConstant(StoreHandler::kSmi)));
6252 Goto(&if_smi_field);
6253
6254 Bind(&if_tagged_field);
6255 {
6256 Comment("store tagged field");
6257 HandleStoreFieldAndReturn(handler_word, holder, Representation::Tagged(),
6258 value, transition, miss);
6259 }
6260
6261 Bind(&if_double_field);
6262 {
6263 Comment("store double field");
6264 HandleStoreFieldAndReturn(handler_word, holder, Representation::Double(),
6265 value, transition, miss);
6266 }
6267
6268 Bind(&if_heap_object_field);
6269 {
6270 Comment("store heap object field");
6271 // Generate full field type check here and then store value as Tagged.
6272 Node* prepared_value =
6273 PrepareValueForWrite(value, Representation::HeapObject(), miss);
6274 Node* value_index_in_descriptor =
6275 DecodeWord<StoreHandler::DescriptorValueIndexBits>(handler_word);
6276 Node* descriptors =
6277 LoadMapDescriptors(transition ? transition : LoadMap(holder));
6278 Node* maybe_field_type = LoadFixedArrayElement(
6279 descriptors, value_index_in_descriptor, 0, INTPTR_PARAMETERS);
6280 Label do_store(this);
6281 GotoIf(TaggedIsSmi(maybe_field_type), &do_store);
6282 // Check that value type matches the field type.
6283 {
6284 Node* field_type = LoadWeakCellValue(maybe_field_type, miss);
6285 Branch(WordEqual(LoadMap(prepared_value), field_type), &do_store, miss);
6286 }
6287 Bind(&do_store);
6288 HandleStoreFieldAndReturn(handler_word, holder, Representation::Tagged(),
6289 prepared_value, transition, miss);
6290 }
6291
6292 Bind(&if_smi_field);
6293 {
6294 Comment("store smi field");
6295 HandleStoreFieldAndReturn(handler_word, holder, Representation::Smi(),
6296 value, transition, miss);
6297 }
6298 }
6299
6300 void CodeStubAssembler::HandleStoreICHandlerCase(const StoreICParameters* p,
6301 Node* handler, Label* miss) {
6302 Label if_smi_handler(this);
6303 Label try_proto_handler(this), call_handler(this);
6304
6305 Branch(TaggedIsSmi(handler), &if_smi_handler, &try_proto_handler);
6306
6307 // |handler| is a Smi, encoding what to do. See SmiHandler methods
6308 // for the encoding format.
6309 Bind(&if_smi_handler);
6310 {
6311 Node* holder = p->receiver;
6312 Node* handler_word = SmiUntag(handler);
6313
6314 // Handle non-transitioning field stores.
6315 HandleStoreICSmiHandlerCase(handler_word, holder, p->value, nullptr, miss);
6316 }
6317
6318 Bind(&try_proto_handler);
6319 {
6320 GotoIf(IsCodeMap(LoadMap(handler)), &call_handler);
6321 HandleStoreICProtoHandler(p, handler, miss);
6322 }
6323
6324 // |handler| is a heap object. Must be code, call it.
6325 Bind(&call_handler);
6326 {
6327 StoreWithVectorDescriptor descriptor(isolate());
6328 TailCallStub(descriptor, handler, p->context, p->receiver, p->name,
6329 p->value, p->slot, p->vector);
6330 }
6331 }
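// In outline, the dispatch above recognizes three handler shapes:
//   Smi                 -> bit-encoded field store, handled in
//                          HandleStoreICSmiHandlerCase;
//   Tuple3 / FixedArray -> prototype chain handler, handled in
//                          HandleStoreICProtoHandler;
//   Code                -> out-of-line handler, tail-called via
//                          StoreWithVectorDescriptor.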
6332
6333 void CodeStubAssembler::HandleStoreICProtoHandler(const StoreICParameters* p,
6334 Node* handler, Label* miss) {
6335 // IC dispatchers rely on these assumptions to be held.
6336 STATIC_ASSERT(FixedArray::kLengthOffset ==
6337 StoreHandler::kTransitionCellOffset);
6338 DCHECK_EQ(FixedArray::OffsetOfElementAt(StoreHandler::kSmiHandlerIndex),
6339 StoreHandler::kSmiHandlerOffset);
6340 DCHECK_EQ(FixedArray::OffsetOfElementAt(StoreHandler::kValidityCellIndex),
6341 StoreHandler::kValidityCellOffset);
6342
6343 // Both FixedArray and Tuple3 handlers have validity cell at the same offset.
6344 Label validity_cell_check_done(this);
6345 Node* validity_cell =
6346 LoadObjectField(handler, StoreHandler::kValidityCellOffset);
6347 GotoIf(WordEqual(validity_cell, IntPtrConstant(0)),
6348 &validity_cell_check_done);
6349 Node* cell_value = LoadObjectField(validity_cell, Cell::kValueOffset);
6350 GotoIf(WordNotEqual(cell_value,
6351 SmiConstant(Smi::FromInt(Map::kPrototypeChainValid))),
6352 miss);
6353 Goto(&validity_cell_check_done);
6354
6355 Bind(&validity_cell_check_done);
6356 Node* smi_handler = LoadObjectField(handler, StoreHandler::kSmiHandlerOffset);
6357 CSA_ASSERT(this, TaggedIsSmi(smi_handler));
6358
6359 Node* maybe_transition_cell =
6360 LoadObjectField(handler, StoreHandler::kTransitionCellOffset);
6361 Label array_handler(this), tuple_handler(this);
6362 Branch(TaggedIsSmi(maybe_transition_cell), &array_handler, &tuple_handler);
6363
6364 Variable var_transition(this, MachineRepresentation::kTagged);
6365 Label if_transition(this), if_transition_to_constant(this);
6366 Bind(&tuple_handler);
6367 {
6368 Node* transition = LoadWeakCellValue(maybe_transition_cell, miss);
6369 var_transition.Bind(transition);
6370 Goto(&if_transition);
6371 }
6372
6373 Bind(&array_handler);
6374 {
6375 Node* length = SmiUntag(maybe_transition_cell);
6376 BuildFastLoop(MachineType::PointerRepresentation(),
6377 IntPtrConstant(StoreHandler::kFirstPrototypeIndex), length,
6378 [this, p, handler, miss](CodeStubAssembler*, Node* current) {
6379 Node* prototype_cell = LoadFixedArrayElement(
6380 handler, current, 0, INTPTR_PARAMETERS);
6381 CheckPrototype(prototype_cell, p->name, miss);
6382 },
6383 1, IndexAdvanceMode::kPost);
6384
6385 Node* maybe_transition_cell = LoadFixedArrayElement(
6386 handler, IntPtrConstant(StoreHandler::kTransitionCellIndex), 0,
6387 INTPTR_PARAMETERS);
6388 Node* transition = LoadWeakCellValue(maybe_transition_cell, miss);
6389 var_transition.Bind(transition);
6390 Goto(&if_transition);
6391 }
6392
6393 Bind(&if_transition);
6394 {
6395 Node* holder = p->receiver;
6396 Node* transition = var_transition.value();
6397 Node* handler_word = SmiUntag(smi_handler);
6398
6399 GotoIf(IsSetWord32<Map::Deprecated>(LoadMapBitField3(transition)), miss);
6400
6401 Node* handler_kind = DecodeWord<StoreHandler::KindBits>(handler_word);
6402 GotoIf(WordEqual(handler_kind,
6403 IntPtrConstant(StoreHandler::kTransitionToConstant)),
6404 &if_transition_to_constant);
6405
6406 // Handle transitioning field stores.
6407 HandleStoreICSmiHandlerCase(handler_word, holder, p->value, transition,
6408 miss);
6409
6410 Bind(&if_transition_to_constant);
6411 {
6412 // Check that constant matches value.
6413 Node* value_index_in_descriptor =
6414 DecodeWord<StoreHandler::DescriptorValueIndexBits>(handler_word);
6415 Node* descriptors = LoadMapDescriptors(transition);
6416 Node* constant = LoadFixedArrayElement(
6417 descriptors, value_index_in_descriptor, 0, INTPTR_PARAMETERS);
6418 GotoIf(WordNotEqual(p->value, constant), miss);
6419
6420 StoreObjectField(p->receiver, JSObject::kMapOffset, transition);
6421 Return(p->value);
6422 }
6423 }
6424 }
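// Roughly, the two handler shapes dispatched on above are (per the
// STATIC_ASSERT/DCHECK_EQs at the top of this function):
//   Tuple3:     { transition cell, smi handler, validity cell }
//   FixedArray: [ smi handler, validity cell, transition cell,
//                 prototype cell 0, ..., prototype cell N ]
// The tuple's transition-cell offset aliases FixedArray::kLengthOffset, so
// TaggedIsSmi(maybe_transition_cell) reads the length in the array case and
// a WeakCell in the tuple case.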
6425
6426 void CodeStubAssembler::StoreIC(const StoreICParameters* p) {
6427 Variable var_handler(this, MachineRepresentation::kTagged);
6428 // TODO(ishell): defer blocks when it works.
6429 Label if_handler(this, &var_handler), try_polymorphic(this),
6430 try_megamorphic(this /*, Label::kDeferred*/),
6431 miss(this /*, Label::kDeferred*/);
6432
6433 Node* receiver_map = LoadReceiverMap(p->receiver);
6434
6435 // Check monomorphic case.
6436 Node* feedback =
6437 TryMonomorphicCase(p->slot, p->vector, receiver_map, &if_handler,
6438 &var_handler, &try_polymorphic);
6439 Bind(&if_handler);
6440 {
6441 Comment("StoreIC_if_handler");
6442 HandleStoreICHandlerCase(p, var_handler.value(), &miss);
6443 }
6444
6445 Bind(&try_polymorphic);
6446 {
6447 // Check polymorphic case.
6448 Comment("StoreIC_try_polymorphic");
6449 GotoUnless(
6450 WordEqual(LoadMap(feedback), LoadRoot(Heap::kFixedArrayMapRootIndex)),
6451 &try_megamorphic);
6452 HandlePolymorphicCase(receiver_map, feedback, &if_handler, &var_handler,
6453 &miss, 2);
6454 }
6455
6456 Bind(&try_megamorphic);
6457 {
6458 // Check megamorphic case.
6459 GotoUnless(
6460 WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
6461 &miss);
6462
6463 TryProbeStubCache(isolate()->store_stub_cache(), p->receiver, p->name,
6464 &if_handler, &var_handler, &miss);
6465 }
6466 Bind(&miss);
6467 {
6468 TailCallRuntime(Runtime::kStoreIC_Miss, p->context, p->value, p->slot,
6469 p->vector, p->receiver, p->name);
6470 }
6471 }
6472
6473 void CodeStubAssembler::KeyedStoreIC(const StoreICParameters* p,
6474 LanguageMode language_mode) {
6475 Variable var_handler(this, MachineRepresentation::kTagged);
6476 // This is to make |miss| label see the var_handler bound on all paths.
6477 var_handler.Bind(IntPtrConstant(0));
6478
6479 // TODO(ishell): defer blocks when it works.
6480 Label if_handler(this, &var_handler), try_polymorphic(this),
6481 try_megamorphic(this /*, Label::kDeferred*/),
6482 try_polymorphic_name(this /*, Label::kDeferred*/),
6483 miss(this /*, Label::kDeferred*/);
6484
6485 Node* receiver_map = LoadReceiverMap(p->receiver);
6486
6487 // Check monomorphic case.
6488 Node* feedback =
6489 TryMonomorphicCase(p->slot, p->vector, receiver_map, &if_handler,
6490 &var_handler, &try_polymorphic);
6491 Bind(&if_handler);
6492 {
6493 Comment("KeyedStoreIC_if_handler");
6494 HandleStoreICHandlerCase(p, var_handler.value(), &miss);
6495 }
6496
6497 Bind(&try_polymorphic);
6498 {
6499 // Check polymorphic case.
6500 Comment("KeyedStoreIC_try_polymorphic");
6501 GotoUnless(
6502 WordEqual(LoadMap(feedback), LoadRoot(Heap::kFixedArrayMapRootIndex)),
6503 &try_megamorphic);
6504 Label if_transition_handler(this);
6505 Variable var_transition_map_cell(this, MachineRepresentation::kTagged);
6506 HandleKeyedStorePolymorphicCase(receiver_map, feedback, &if_handler,
6507 &var_handler, &if_transition_handler,
6508 &var_transition_map_cell, &miss);
6509 Bind(&if_transition_handler);
6510 Comment("KeyedStoreIC_polymorphic_transition");
6511 Node* transition_map =
6512 LoadWeakCellValue(var_transition_map_cell.value(), &miss);
6513 StoreTransitionDescriptor descriptor(isolate());
6514 TailCallStub(descriptor, var_handler.value(), p->context, p->receiver,
6515 p->name, transition_map, p->value, p->slot, p->vector);
6516 }
6517
6518 Bind(&try_megamorphic);
6519 {
6520 // Check megamorphic case.
6521 Comment("KeyedStoreIC_try_megamorphic");
6522 GotoUnless(
6523 WordEqual(feedback, LoadRoot(Heap::kmegamorphic_symbolRootIndex)),
6524 &try_polymorphic_name);
6525 TailCallStub(
6526 CodeFactory::KeyedStoreIC_Megamorphic(isolate(), language_mode),
6527 p->context, p->receiver, p->name, p->value, p->slot, p->vector);
6528 }
6529
6530 Bind(&try_polymorphic_name);
6531 {
6532 // We might have a name in feedback, and a fixed array in the next slot.
6533 Comment("KeyedStoreIC_try_polymorphic_name");
6534 GotoUnless(WordEqual(feedback, p->name), &miss);
6535 // If the name comparison succeeded, we know we have a FixedArray with
6536 // at least one map/handler pair.
6537 Node* offset = ElementOffsetFromIndex(
6538 p->slot, FAST_HOLEY_ELEMENTS, SMI_PARAMETERS,
6539 FixedArray::kHeaderSize + kPointerSize - kHeapObjectTag);
6540 Node* array = Load(MachineType::AnyTagged(), p->vector, offset);
6541 HandlePolymorphicCase(receiver_map, array, &if_handler, &var_handler, &miss,
6542 1);
6543 }
6544
6545 Bind(&miss);
6546 {
6547 Comment("KeyedStoreIC_miss");
6548 TailCallRuntime(Runtime::kKeyedStoreIC_Miss, p->context, p->value, p->slot,
6549 p->vector, p->receiver, p->name);
6550 }
6551 }
6552
6553 void CodeStubAssembler::LoadGlobalIC(const LoadICParameters* p) {
6554 Label try_handler(this), miss(this);
6555 Node* weak_cell =
6556 LoadFixedArrayElement(p->vector, p->slot, 0, SMI_PARAMETERS);
6557 CSA_ASSERT(this, HasInstanceType(weak_cell, WEAK_CELL_TYPE));
6558
6559 // Load value or try handler case if the {weak_cell} is cleared.
6560 Node* property_cell = LoadWeakCellValue(weak_cell, &try_handler);
6561 CSA_ASSERT(this, HasInstanceType(property_cell, PROPERTY_CELL_TYPE));
6562
6563 Node* value = LoadObjectField(property_cell, PropertyCell::kValueOffset);
6564 GotoIf(WordEqual(value, TheHoleConstant()), &miss);
6565 Return(value);
6566
6567 Bind(&try_handler);
6568 {
6569 Node* handler =
6570 LoadFixedArrayElement(p->vector, p->slot, kPointerSize, SMI_PARAMETERS);
6571 GotoIf(WordEqual(handler, LoadRoot(Heap::kuninitialized_symbolRootIndex)),
6572 &miss);
6573
6574 // In this case {handler} must be a Code object.
6575 CSA_ASSERT(this, HasInstanceType(handler, CODE_TYPE));
6576 LoadWithVectorDescriptor descriptor(isolate());
6577 Node* native_context = LoadNativeContext(p->context);
6578 Node* receiver =
6579 LoadContextElement(native_context, Context::EXTENSION_INDEX);
6580 Node* fake_name = IntPtrConstant(0);
6581 TailCallStub(descriptor, handler, p->context, receiver, fake_name, p->slot,
6582 p->vector);
6583 }
6584 Bind(&miss);
6585 {
6586 TailCallRuntime(Runtime::kLoadGlobalIC_Miss, p->context, p->slot,
6587 p->vector);
6588 }
6589 }
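// Feedback layout assumed by the code above for a LoadGlobalIC slot:
//   vector[slot]     : WeakCell pointing at the global's PropertyCell
//   vector[slot + 1] : handler Code object (or uninitialized_symbol)
// The fast path returns the PropertyCell's value directly; the handler path
// synthesizes the receiver from Context::EXTENSION_INDEX, since
// LoadGlobalIC itself is not passed a receiver.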
6590
6591 void CodeStubAssembler::ExtendPropertiesBackingStore(compiler::Node* object) {
6592 Node* properties = LoadProperties(object);
6593 Node* length = LoadFixedArrayBaseLength(properties);
6594
6595 ParameterMode mode = OptimalParameterMode();
6596 length = UntagParameter(length, mode);
6597
6598 Node* delta = IntPtrOrSmiConstant(JSObject::kFieldsAdded, mode);
6599 Node* new_capacity = IntPtrAdd(length, delta);
6600
6601 // Grow properties array.
6602 ElementsKind kind = FAST_ELEMENTS;
6603 DCHECK(kMaxNumberOfDescriptors + JSObject::kFieldsAdded <
6604 FixedArrayBase::GetMaxLengthForNewSpaceAllocation(kind));
6605 // The size of a new properties backing store is guaranteed to be small
6606 // enough that the new backing store will be allocated in new space.
6607 CSA_ASSERT(this, UintPtrLessThan(new_capacity,
6608 IntPtrConstant(kMaxNumberOfDescriptors +
6609 JSObject::kFieldsAdded)));
6610
6611 Node* new_properties = AllocateFixedArray(kind, new_capacity, mode);
6612
6613 FillFixedArrayWithValue(kind, new_properties, length, new_capacity,
6614 Heap::kUndefinedValueRootIndex, mode);
6615
6616 // |new_properties| is guaranteed to be in new space, so we can skip
6617 // the write barrier.
6618 CopyFixedArrayElements(kind, properties, new_properties, length,
6619 SKIP_WRITE_BARRIER, mode);
6620
6621 StoreObjectField(object, JSObject::kPropertiesOffset, new_properties);
6622 }
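// Worked example, assuming JSObject::kFieldsAdded is 3 (its value at the
// time of writing): an object whose out-of-object properties array is full
// at length 4 gets a new length-7 array; entries 4..6 start out undefined,
// the old 4 entries are copied with SKIP_WRITE_BARRIER (legal because the
// new array is in new space), and the properties pointer is updated last.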
6623
6624 Node* CodeStubAssembler::PrepareValueForWrite(Node* value,
6625 Representation representation,
6626 Label* bailout) {
6627 if (representation.IsDouble()) {
6628 value = TryTaggedToFloat64(value, bailout);
6629 } else if (representation.IsHeapObject()) {
6630 // Field type is checked by the handler, here we only check if the value
6631 // is a heap object.
6632 GotoIf(TaggedIsSmi(value), bailout);
6633 } else if (representation.IsSmi()) {
6634 GotoUnless(TaggedIsSmi(value), bailout);
6635 } else {
6636 DCHECK(representation.IsTagged());
6637 }
6638 return value;
6639 }
6640
6641 void CodeStubAssembler::StoreNamedField(Node* object, FieldIndex index,
6642 Representation representation,
6643 Node* value, bool transition_to_field) {
6644 DCHECK_EQ(index.is_double(), representation.IsDouble());
6645
6646 StoreNamedField(object, IntPtrConstant(index.offset()), index.is_inobject(),
6647 representation, value, transition_to_field);
6648 }
6649
6650 void CodeStubAssembler::StoreNamedField(Node* object, Node* offset,
6651 bool is_inobject,
6652 Representation representation,
6653 Node* value, bool transition_to_field) {
6654 bool store_value_as_double = representation.IsDouble();
6655 Node* property_storage = object;
6656 if (!is_inobject) {
6657 property_storage = LoadProperties(object);
6658 }
6659
6660 if (representation.IsDouble()) {
6661 if (!FLAG_unbox_double_fields || !is_inobject) {
6662 if (transition_to_field) {
6663 Node* heap_number = AllocateHeapNumberWithValue(value, MUTABLE);
6664 // Store the new mutable heap number into the object.
6665 value = heap_number;
6666 store_value_as_double = false;
6667 } else {
6668 // Load the heap number.
6669 property_storage = LoadObjectField(property_storage, offset);
6670 // Store the double value into it.
6671 offset = IntPtrConstant(HeapNumber::kValueOffset);
6672 }
6673 }
6674 }
6675
6676 if (store_value_as_double) {
6677 StoreObjectFieldNoWriteBarrier(property_storage, offset, value,
6678 MachineRepresentation::kFloat64);
6679 } else if (representation.IsSmi()) {
6680 StoreObjectFieldNoWriteBarrier(property_storage, offset, value);
6681 } else {
6682 StoreObjectField(property_storage, offset, value);
6683 }
6684 }
6685
6686 Node* CodeStubAssembler::EmitKeyedSloppyArguments(Node* receiver, Node* key,
6687 Node* value, Label* bailout) {
6688 // Mapped arguments are actual arguments. Unmapped arguments are values added
6689 // to the arguments object after it was created for the call. Mapped arguments
6690 // are stored in the context at indexes given by elements[key + 2]. Unmapped
6691 // arguments are stored as regular indexed properties in the arguments array,
6692 // held at elements[1]. See NewSloppyArguments() in runtime.cc for a detailed
6693 // look at argument object construction.
6694 //
6695 // The sloppy arguments elements array has a special format:
6696 //
6697 // 0: context
6698 // 1: unmapped arguments array
6699 // 2: mapped_index0,
6700 // 3: mapped_index1,
6701 // ...
6702 //
6703 // length is 2 + min(number_of_actual_arguments, number_of_formal_arguments).
6704 // If key + 2 >= elements.length then attempt to look in the unmapped
6705 // arguments array (given by elements[1]) and return the value at key, missing
6706 // to the runtime if the unmapped arguments array is not a fixed array or if
6707 // key >= unmapped_arguments_array.length.
6708 //
6709 // Otherwise, t = elements[key + 2]. If t is the hole, then look up the value
6710 // in the unmapped arguments array, as described above. Otherwise, t is a Smi
6711 // index into the context array given at elements[0]. Return the value at
6712 // context[t].
6713
6714 bool is_load = value == nullptr;
6715
6716 GotoUnless(TaggedIsSmi(key), bailout);
6717 key = SmiUntag(key);
6718 GotoIf(IntPtrLessThan(key, IntPtrConstant(0)), bailout);
6719
6720 Node* elements = LoadElements(receiver);
6721 Node* elements_length = LoadAndUntagFixedArrayBaseLength(elements);
6722
6723 Variable var_result(this, MachineRepresentation::kTagged);
6724 if (!is_load) {
6725 var_result.Bind(value);
6726 }
6727 Label if_mapped(this), if_unmapped(this), end(this, &var_result);
6728 Node* intptr_two = IntPtrConstant(2);
6729 Node* adjusted_length = IntPtrSub(elements_length, intptr_two);
6730
6731 GotoIf(UintPtrGreaterThanOrEqual(key, adjusted_length), &if_unmapped);
6732
6733 Node* mapped_index = LoadFixedArrayElement(
6734 elements, IntPtrAdd(key, intptr_two), 0, INTPTR_PARAMETERS);
6735 Branch(WordEqual(mapped_index, TheHoleConstant()), &if_unmapped, &if_mapped);
6736
6737 Bind(&if_mapped);
6738 {
6739 CSA_ASSERT(this, TaggedIsSmi(mapped_index));
6740 mapped_index = SmiUntag(mapped_index);
6741 Node* the_context = LoadFixedArrayElement(elements, IntPtrConstant(0), 0,
6742 INTPTR_PARAMETERS);
6743 // Assert that we can use LoadFixedArrayElement/StoreFixedArrayElement
6744 // methods for accessing Context.
6745 STATIC_ASSERT(Context::kHeaderSize == FixedArray::kHeaderSize);
6746 DCHECK_EQ(Context::SlotOffset(0) + kHeapObjectTag,
6747 FixedArray::OffsetOfElementAt(0));
6748 if (is_load) {
6749 Node* result = LoadFixedArrayElement(the_context, mapped_index, 0,
6750 INTPTR_PARAMETERS);
6751 CSA_ASSERT(this, WordNotEqual(result, TheHoleConstant()));
6752 var_result.Bind(result);
6753 } else {
6754 StoreFixedArrayElement(the_context, mapped_index, value,
6755 UPDATE_WRITE_BARRIER, INTPTR_PARAMETERS);
6756 }
6757 Goto(&end);
6758 }
6759
6760 Bind(&if_unmapped);
6761 {
6762 Node* backing_store = LoadFixedArrayElement(elements, IntPtrConstant(1), 0,
6763 INTPTR_PARAMETERS);
6764 GotoIf(WordNotEqual(LoadMap(backing_store), FixedArrayMapConstant()),
6765 bailout);
6766
6767 Node* backing_store_length =
6768 LoadAndUntagFixedArrayBaseLength(backing_store);
6769 GotoIf(UintPtrGreaterThanOrEqual(key, backing_store_length), bailout);
6770
6771 // The key falls into unmapped range.
6772 if (is_load) {
6773 Node* result =
6774 LoadFixedArrayElement(backing_store, key, 0, INTPTR_PARAMETERS);
6775 GotoIf(WordEqual(result, TheHoleConstant()), bailout);
6776 var_result.Bind(result);
6777 } else {
6778 StoreFixedArrayElement(backing_store, key, value, UPDATE_WRITE_BARRIER,
6779 INTPTR_PARAMETERS);
6780 }
6781 Goto(&end);
6782 }
6783
6784 Bind(&end);
6785 return var_result.value();
6786 }
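// JS-level illustration of the mapped/unmapped split described above:
//   function f(a) { arguments[0] = 42; return a; }  // f(1) returns 42
// Key 0 is "mapped": elements[0 + 2] holds a Smi index into the stored
// context, so the store aliases the parameter 'a'. After
// 'delete arguments[0]' that slot holds the hole, and accesses fall through
// to the unmapped backing store at elements[1].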
6787
6788 Node* CodeStubAssembler::LoadScriptContext(Node* context, int context_index) {
6789 Node* native_context = LoadNativeContext(context);
6790 Node* script_context_table =
6791 LoadContextElement(native_context, Context::SCRIPT_CONTEXT_TABLE_INDEX);
6792
6793 int offset =
6794 ScriptContextTable::GetContextOffset(context_index) - kHeapObjectTag;
6795 return Load(MachineType::AnyTagged(), script_context_table,
6796 IntPtrConstant(offset));
6797 }
6798
6799 namespace {
6800
6801 // Converts a typed array elements kind to a machine representation.
6802 MachineRepresentation ElementsKindToMachineRepresentation(ElementsKind kind) {
6803 switch (kind) {
6804 case UINT8_CLAMPED_ELEMENTS:
6805 case UINT8_ELEMENTS:
6806 case INT8_ELEMENTS:
6807 return MachineRepresentation::kWord8;
6808 case UINT16_ELEMENTS:
6809 case INT16_ELEMENTS:
6810 return MachineRepresentation::kWord16;
6811 case UINT32_ELEMENTS:
6812 case INT32_ELEMENTS:
6813 return MachineRepresentation::kWord32;
6814 case FLOAT32_ELEMENTS:
6815 return MachineRepresentation::kFloat32;
6816 case FLOAT64_ELEMENTS:
6817 return MachineRepresentation::kFloat64;
6818 default:
6819 UNREACHABLE();
6820 return MachineRepresentation::kNone;
6821 }
6822 }
6823
6824 } // namespace
6825
6826 void CodeStubAssembler::StoreElement(Node* elements, ElementsKind kind,
6827 Node* index, Node* value,
6828 ParameterMode mode) {
6829 if (IsFixedTypedArrayElementsKind(kind)) {
6830 if (kind == UINT8_CLAMPED_ELEMENTS) {
6831 CSA_ASSERT(this,
6832 Word32Equal(value, Word32And(Int32Constant(0xff), value)));
6833 }
6834 Node* offset = ElementOffsetFromIndex(index, kind, mode, 0);
6835 MachineRepresentation rep = ElementsKindToMachineRepresentation(kind);
6836 StoreNoWriteBarrier(rep, elements, offset, value);
6837 return;
6838 }
6839
6840 WriteBarrierMode barrier_mode =
6841 IsFastSmiElementsKind(kind) ? SKIP_WRITE_BARRIER : UPDATE_WRITE_BARRIER;
6842 if (IsFastDoubleElementsKind(kind)) {
6843 // Make sure we do not store signalling NaNs into double arrays.
6844 value = Float64SilenceNaN(value);
6845 StoreFixedDoubleArrayElement(elements, index, value, mode);
6846 } else {
6847 StoreFixedArrayElement(elements, index, value, barrier_mode, mode);
6848 }
6849 }
6850
6851 Node* CodeStubAssembler::Int32ToUint8Clamped(Node* int32_value) {
6852 Label done(this);
6853 Node* int32_zero = Int32Constant(0);
6854 Node* int32_255 = Int32Constant(255);
6855 Variable var_value(this, MachineRepresentation::kWord32);
6856 var_value.Bind(int32_value);
6857 GotoIf(Uint32LessThanOrEqual(int32_value, int32_255), &done);
6858 var_value.Bind(int32_zero);
6859 GotoIf(Int32LessThan(int32_value, int32_zero), &done);
6860 var_value.Bind(int32_255);
6861 Goto(&done);
6862 Bind(&done);
6863 return var_value.value();
6864 }
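// Equivalent C for the control flow above (a sketch, not used by the
// generated code):
//   uint8_t Clamp(int32_t v) {
//     if ((uint32_t)v <= 255) return (uint8_t)v;  // covers 0..255
//     return v < 0 ? 0 : 255;
//   }
// The single unsigned comparison folds both range checks on the fast path.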
6865
6866 Node* CodeStubAssembler::Float64ToUint8Clamped(Node* float64_value) {
6867 Label done(this);
6868 Variable var_value(this, MachineRepresentation::kWord32);
6869 var_value.Bind(Int32Constant(0));
6870 GotoIf(Float64LessThanOrEqual(float64_value, Float64Constant(0.0)), &done);
6871 var_value.Bind(Int32Constant(255));
6872 GotoIf(Float64LessThanOrEqual(Float64Constant(255.0), float64_value), &done);
6873 {
6874 Node* rounded_value = Float64RoundToEven(float64_value);
6875 var_value.Bind(TruncateFloat64ToWord32(rounded_value));
6876 Goto(&done);
6877 }
6878 Bind(&done);
6879 return var_value.value();
6880 }
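// Note that ties round to even via Float64RoundToEven, as required for
// Uint8ClampedArray stores. For example:
//   -0.1 -> 0 (saturated low),  255.5 -> 255 (saturated high),
//    2.5 -> 2,                    3.5 -> 4   (round half to even).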
6881
6882 Node* CodeStubAssembler::PrepareValueForWriteToTypedArray(
6883 Node* input, ElementsKind elements_kind, Label* bailout) {
6884 DCHECK(IsFixedTypedArrayElementsKind(elements_kind));
6885
6886 MachineRepresentation rep;
6887 switch (elements_kind) {
6888 case UINT8_ELEMENTS:
6889 case INT8_ELEMENTS:
6890 case UINT16_ELEMENTS:
6891 case INT16_ELEMENTS:
6892 case UINT32_ELEMENTS:
6893 case INT32_ELEMENTS:
6894 case UINT8_CLAMPED_ELEMENTS:
6895 rep = MachineRepresentation::kWord32;
6896 break;
6897 case FLOAT32_ELEMENTS:
6898 rep = MachineRepresentation::kFloat32;
6899 break;
6900 case FLOAT64_ELEMENTS:
6901 rep = MachineRepresentation::kFloat64;
6902 break;
6903 default:
6904 UNREACHABLE();
6905 return nullptr;
6906 }
6907
6908 Variable var_result(this, rep);
6909 Label done(this, &var_result), if_smi(this);
6910 GotoIf(TaggedIsSmi(input), &if_smi);
6911 // Try to convert a heap number to a Smi.
6912 GotoUnless(IsHeapNumberMap(LoadMap(input)), bailout);
6913 {
6914 Node* value = LoadHeapNumberValue(input);
6915 if (rep == MachineRepresentation::kWord32) {
6916 if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
6917 value = Float64ToUint8Clamped(value);
6918 } else {
6919 value = TruncateFloat64ToWord32(value);
6920 }
6921 } else if (rep == MachineRepresentation::kFloat32) {
6922 value = TruncateFloat64ToFloat32(value);
6923 } else {
6924 DCHECK_EQ(MachineRepresentation::kFloat64, rep);
6925 }
6926 var_result.Bind(value);
6927 Goto(&done);
6928 }
6929
6930 Bind(&if_smi);
6931 {
6932 Node* value = SmiToWord32(input);
6933 if (rep == MachineRepresentation::kFloat32) {
6934 value = RoundInt32ToFloat32(value);
6935 } else if (rep == MachineRepresentation::kFloat64) {
6936 value = ChangeInt32ToFloat64(value);
6937 } else {
6938 DCHECK_EQ(MachineRepresentation::kWord32, rep);
6939 if (elements_kind == UINT8_CLAMPED_ELEMENTS) {
6940 value = Int32ToUint8Clamped(value);
6941 }
6942 }
6943 var_result.Bind(value);
6944 Goto(&done);
6945 }
6946
6947 Bind(&done);
6948 return var_result.value();
6949 }
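// Informal summary of the conversions above: Smis go through SmiToWord32,
// RoundInt32ToFloat32 or ChangeInt32ToFloat64 depending on |rep|;
// HeapNumbers go through TruncateFloat64ToWord32 (Float64ToUint8Clamped for
// UINT8_CLAMPED), TruncateFloat64ToFloat32, or are stored as is. Any other
// input (strings, undefined, objects) hits |bailout|, where the caller is
// expected to fall back to the runtime.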
6950
6951 void CodeStubAssembler::EmitElementStore(Node* object, Node* key, Node* value,
6952 bool is_jsarray,
6953 ElementsKind elements_kind,
6954 KeyedAccessStoreMode store_mode,
6955 Label* bailout) {
6956 Node* elements = LoadElements(object);
6957 if (IsFastSmiOrObjectElementsKind(elements_kind) &&
6958 store_mode != STORE_NO_TRANSITION_HANDLE_COW) {
6959 // Bailout in case of COW elements.
6960 GotoIf(WordNotEqual(LoadMap(elements),
6961 LoadRoot(Heap::kFixedArrayMapRootIndex)),
6962 bailout);
6963 }
6964 // TODO(ishell): introduce TryToIntPtrOrSmi() and use OptimalParameterMode().
6965 ParameterMode parameter_mode = INTPTR_PARAMETERS;
6966 key = TryToIntptr(key, bailout);
6967
6968 if (IsFixedTypedArrayElementsKind(elements_kind)) {
6969 Label done(this);
6970 // TODO(ishell): call ToNumber() on value and don't bailout but be careful
6971 // to call it only once if we decide to bailout because of bounds checks.
6972
6973 value = PrepareValueForWriteToTypedArray(value, elements_kind, bailout);
6974
6975 // There must be no allocations between the buffer load and the actual
6976 // store to the backing store, because the GC may decide that the buffer
6977 // is not alive, or may move the elements.
6978 // TODO(ishell): introduce DisallowHeapAllocationCode scope here.
6979
6980 // Check if buffer has been neutered.
6981 Node* buffer = LoadObjectField(object, JSArrayBufferView::kBufferOffset);
6982 Node* bitfield = LoadObjectField(buffer, JSArrayBuffer::kBitFieldOffset,
6983 MachineType::Uint32());
6984 Node* neutered_bit =
6985 Word32And(bitfield, Int32Constant(JSArrayBuffer::WasNeutered::kMask));
6986 GotoUnless(Word32Equal(neutered_bit, Int32Constant(0)), bailout);
6987
6988 // Bounds check.
6989 Node* length = UntagParameter(
6990 LoadObjectField(object, JSTypedArray::kLengthOffset), parameter_mode);
6991
6992 if (store_mode == STORE_NO_TRANSITION_IGNORE_OUT_OF_BOUNDS) {
6993 // Skip the store if we write beyond the length.
6994 GotoUnless(IntPtrLessThan(key, length), &done);
6995 // ... but bailout if the key is negative.
6996 } else {
6997 DCHECK_EQ(STANDARD_STORE, store_mode);
6998 }
6999 GotoUnless(UintPtrLessThan(key, length), bailout);
7000
7001 // Backing store = external_pointer + base_pointer.
7002 Node* external_pointer =
7003 LoadObjectField(elements, FixedTypedArrayBase::kExternalPointerOffset,
7004 MachineType::Pointer());
7005 Node* base_pointer =
7006 LoadObjectField(elements, FixedTypedArrayBase::kBasePointerOffset);
7007 Node* backing_store = IntPtrAdd(external_pointer, base_pointer);
7008 StoreElement(backing_store, elements_kind, key, value, parameter_mode);
7009 Goto(&done);
7010
7011 Bind(&done);
7012 return;
7013 }
7014 DCHECK(IsFastSmiOrObjectElementsKind(elements_kind) ||
7015 IsFastDoubleElementsKind(elements_kind));
7016
7017 Node* length = is_jsarray ? LoadObjectField(object, JSArray::kLengthOffset)
7018 : LoadFixedArrayBaseLength(elements);
7019 length = UntagParameter(length, parameter_mode);
7020
7021 // In case the value is stored into a fast smi array, ensure that the value
7022 // is a smi before manipulating the backing store. Otherwise the backing
7023 // store may be left in an invalid state.
7024 if (IsFastSmiElementsKind(elements_kind)) {
7025 GotoUnless(TaggedIsSmi(value), bailout);
7026 } else if (IsFastDoubleElementsKind(elements_kind)) {
7027 value = TryTaggedToFloat64(value, bailout);
7028 }
7029
7030 if (IsGrowStoreMode(store_mode)) {
7031 elements = CheckForCapacityGrow(object, elements, elements_kind, length,
7032 key, parameter_mode, is_jsarray, bailout);
7033 } else {
7034 GotoUnless(UintPtrLessThan(key, length), bailout);
7035
7036 if ((store_mode == STORE_NO_TRANSITION_HANDLE_COW) &&
7037 IsFastSmiOrObjectElementsKind(elements_kind)) {
7038 elements = CopyElementsOnWrite(object, elements, elements_kind, length,
7039 parameter_mode, bailout);
7040 }
7041 }
7042 StoreElement(elements, elements_kind, key, value, parameter_mode);
7043 }
7044
7045 Node* CodeStubAssembler::CheckForCapacityGrow(Node* object, Node* elements,
7046 ElementsKind kind, Node* length,
7047 Node* key, ParameterMode mode,
7048 bool is_js_array,
7049 Label* bailout) {
7050 Variable checked_elements(this, MachineRepresentation::kTagged);
7051 Label grow_case(this), no_grow_case(this), done(this);
7052
7053 Node* condition;
7054 if (IsHoleyElementsKind(kind)) {
7055 condition = UintPtrGreaterThanOrEqual(key, length);
7056 } else {
7057 condition = WordEqual(key, length);
7058 }
7059 Branch(condition, &grow_case, &no_grow_case);
7060
7061 Bind(&grow_case);
7062 {
7063 Node* current_capacity =
7064 UntagParameter(LoadFixedArrayBaseLength(elements), mode);
7065
7066 checked_elements.Bind(elements);
7067
7068 Label fits_capacity(this);
7069 GotoIf(UintPtrLessThan(key, current_capacity), &fits_capacity);
7070 {
7071 Node* new_elements = TryGrowElementsCapacity(
7072 object, elements, kind, key, current_capacity, mode, bailout);
7073
7074 checked_elements.Bind(new_elements);
7075 Goto(&fits_capacity);
7076 }
7077 Bind(&fits_capacity);
7078
7079 if (is_js_array) {
7080 Node* new_length = IntPtrAdd(key, IntPtrOrSmiConstant(1, mode));
7081 StoreObjectFieldNoWriteBarrier(object, JSArray::kLengthOffset,
7082 TagParameter(new_length, mode));
7083 }
7084 Goto(&done);
7085 }
7086
7087 Bind(&no_grow_case);
7088 {
7089 GotoUnless(UintPtrLessThan(key, length), bailout);
7090 checked_elements.Bind(elements);
7091 Goto(&done);
7092 }
7093
7094 Bind(&done);
7095 return checked_elements.value();
7096 }
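// The grow condition above is deliberately kind-dependent (sketch):
//   holey kinds  (e.g. a = []; a[10] = 1): any key >= length may grow,
//                leaving holes between the old length and the new key;
//   packed kinds: only key == length (a pure append) may grow; any larger
//                key would introduce a hole, so it falls through to bailout.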
7097
7098 Node* CodeStubAssembler::CopyElementsOnWrite(Node* object, Node* elements,
7099 ElementsKind kind, Node* length,
7100 ParameterMode mode,
7101 Label* bailout) {
7102 Variable new_elements_var(this, MachineRepresentation::kTagged);
7103 Label done(this);
7104
7105 new_elements_var.Bind(elements);
7106 GotoUnless(
7107 WordEqual(LoadMap(elements), LoadRoot(Heap::kFixedCOWArrayMapRootIndex)),
7108 &done);
7109 {
7110 Node* capacity = UntagParameter(LoadFixedArrayBaseLength(elements), mode);
7111 Node* new_elements = GrowElementsCapacity(object, elements, kind, kind,
7112 length, capacity, mode, bailout);
7113
7114 new_elements_var.Bind(new_elements);
7115 Goto(&done);
7116 }
7117
7118 Bind(&done);
7119 return new_elements_var.value();
7120 }
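// Copy-on-write background: arrays cloned from boilerplate literals (e.g.
// 'var a = [1, 2, 3];') can share a backing store whose map is the COW
// fixed array map. The first mutating store must copy it first; since
// from/to kinds and capacity are equal here, GrowElementsCapacity acts as a
// same-size copy that also installs the new store on |object|.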
7121
7122 void CodeStubAssembler::TransitionElementsKind(
7123 compiler::Node* object, compiler::Node* map, ElementsKind from_kind,
7124 ElementsKind to_kind, bool is_jsarray, Label* bailout) {
7125 DCHECK(!IsFastHoleyElementsKind(from_kind) ||
7126 IsFastHoleyElementsKind(to_kind));
7127 if (AllocationSite::GetMode(from_kind, to_kind) == TRACK_ALLOCATION_SITE) {
7128 TrapAllocationMemento(object, bailout);
7129 }
7130
7131 if (!IsSimpleMapChangeTransition(from_kind, to_kind)) {
7132 Comment("Non-simple map transition");
7133 Node* elements = LoadElements(object);
7134
7135 Node* empty_fixed_array =
7136 HeapConstant(isolate()->factory()->empty_fixed_array());
7137
7138 Label done(this);
7139 GotoIf(WordEqual(elements, empty_fixed_array), &done);
7140
7141 // TODO(ishell): Use OptimalParameterMode().
7142 ParameterMode mode = INTPTR_PARAMETERS;
7143 Node* elements_length = SmiUntag(LoadFixedArrayBaseLength(elements));
7144 Node* array_length =
7145 is_jsarray ? SmiUntag(LoadObjectField(object, JSArray::kLengthOffset))
7146 : elements_length;
7147
7148 GrowElementsCapacity(object, elements, from_kind, to_kind, array_length,
7149 elements_length, mode, bailout);
7150 Goto(&done);
7151 Bind(&done);
7152 }
7153
7154 StoreObjectField(object, JSObject::kMapOffset, map);
7155 }
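// Example of the simple/non-simple split: FAST_SMI_ELEMENTS ->
// FAST_ELEMENTS is simple (a Smi is already a valid tagged value, so only
// the map changes), while FAST_SMI_ELEMENTS -> FAST_DOUBLE_ELEMENTS is not:
// the tagged FixedArray must be rewritten as a FixedDoubleArray of unboxed
// doubles, which is what the GrowElementsCapacity call above does.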
7156
7157 void CodeStubAssembler::TrapAllocationMemento(Node* object,
7158 Label* memento_found) {
7159 Comment("[ TrapAllocationMemento");
7160 Label no_memento_found(this);
7161 Label top_check(this), map_check(this);
7162
7163 Node* new_space_top_address = ExternalConstant(
7164 ExternalReference::new_space_allocation_top_address(isolate()));
7165 const int kMementoMapOffset = JSArray::kSize;
7166 const int kMementoLastWordOffset =
7167 kMementoMapOffset + AllocationMemento::kSize - kPointerSize;
7168
7169 // Bail out if the object is not in new space.
7170 Node* object_page = PageFromAddress(object);
7171 {
7172 Node* page_flags = Load(MachineType::IntPtr(), object_page,
7173 IntPtrConstant(Page::kFlagsOffset));
7174 GotoIf(WordEqual(WordAnd(page_flags,
7175 IntPtrConstant(MemoryChunk::kIsInNewSpaceMask)),
7176 IntPtrConstant(0)),
7177 &no_memento_found);
7178 }
7179
7180 Node* memento_last_word = IntPtrAdd(
7181 object, IntPtrConstant(kMementoLastWordOffset - kHeapObjectTag));
7182 Node* memento_last_word_page = PageFromAddress(memento_last_word);
7183
7184 Node* new_space_top = Load(MachineType::Pointer(), new_space_top_address);
7185 Node* new_space_top_page = PageFromAddress(new_space_top);
7186
7187 // If the object is in new space, we need to check whether the respective
7188 // potential memento object is on the same page as the current top.
7189 GotoIf(WordEqual(memento_last_word_page, new_space_top_page), &top_check);
7190
7191 // The object is on a different page than allocation top. Bail out if the
7192 // object sits on the page boundary as no memento can follow and we cannot
7193 // touch the memory following it.
7194 Branch(WordEqual(object_page, memento_last_word_page), &map_check,
7195 &no_memento_found);
7196
7197 // If top is on the same page as the current object, we need to check whether
7198 // we are below top.
7199 Bind(&top_check);
7200 {
7201 Branch(UintPtrGreaterThanOrEqual(memento_last_word, new_space_top),
7202 &no_memento_found, &map_check);
7203 }
7204
7205 // Memento map check.
7206 Bind(&map_check);
7207 {
7208 Node* memento_map = LoadObjectField(object, kMementoMapOffset);
7209 Branch(
7210 WordEqual(memento_map, LoadRoot(Heap::kAllocationMementoMapRootIndex)),
7211 memento_found, &no_memento_found);
7212 }
7213 Bind(&no_memento_found);
7214 Comment("] TrapAllocationMemento");
7215 }
7216
7217 Node* CodeStubAssembler::PageFromAddress(Node* address) {
7218 return WordAnd(address, IntPtrConstant(~Page::kPageAlignmentMask));
7219 }
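// Worked example, assuming 512 KiB pages (kPageAlignmentMask == 0x7ffff):
//   PageFromAddress(0x2a34567af123) == 0x2a3456780000
// i.e. masking off the low 19 bits yields the owning Page's header address.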
7220
7221 Node* CodeStubAssembler::EnumLength(Node* map) {
7222 CSA_ASSERT(this, IsMap(map));
7223 Node* bitfield_3 = LoadMapBitField3(map);
7224 Node* enum_length = DecodeWordFromWord32<Map::EnumLengthBits>(bitfield_3);
7225 return SmiTag(enum_length);
7226 }
7227
7228 void CodeStubAssembler::CheckEnumCache(Node* receiver, Label* use_cache,
7229 Label* use_runtime) {
7230 Variable current_js_object(this, MachineRepresentation::kTagged);
7231 current_js_object.Bind(receiver);
7232
7233 Variable current_map(this, MachineRepresentation::kTagged);
7234 current_map.Bind(LoadMap(current_js_object.value()));
7235
7236 // These variables are updated in the loop below.
7237 Variable* loop_vars[2] = {&current_js_object, &current_map};
7238 Label loop(this, 2, loop_vars), next(this);
7239
7240 // Check if the enum length field is properly initialized, indicating that
7241 // there is an enum cache.
7242 {
7243 Node* invalid_enum_cache_sentinel =
7244 SmiConstant(Smi::FromInt(kInvalidEnumCacheSentinel));
7245 Node* enum_length = EnumLength(current_map.value());
7246 Branch(WordEqual(enum_length, invalid_enum_cache_sentinel), use_runtime,
7247 &loop);
7248 }
7249
7250 // Check that there are no elements. |current_js_object| contains
7251 // the current JS object we've reached through the prototype chain.
7252 Bind(&loop);
7253 {
7254 Label if_elements(this), if_no_elements(this);
7255 Node* elements = LoadElements(current_js_object.value());
7256 Node* empty_fixed_array = LoadRoot(Heap::kEmptyFixedArrayRootIndex);
7257 // Check that there are no elements.
7258 Branch(WordEqual(elements, empty_fixed_array), &if_no_elements,
7259 &if_elements);
7260 Bind(&if_elements);
7261 {
7262 // Second chance, the object may be using the empty slow element
7263 // dictionary.
7264 Node* slow_empty_dictionary =
7265 LoadRoot(Heap::kEmptySlowElementDictionaryRootIndex);
7266 Branch(WordNotEqual(elements, slow_empty_dictionary), use_runtime,
7267 &if_no_elements);
7268 }
7269
7270 Bind(&if_no_elements);
7271 {
7272 // Update map prototype.
7273 current_js_object.Bind(LoadMapPrototype(current_map.value()));
7274 Branch(WordEqual(current_js_object.value(), NullConstant()), use_cache,
7275 &next);
7276 }
7277 }
7278
7279 Bind(&next);
7280 {
7281 // For all objects but the receiver, check that the cache is empty.
7282 current_map.Bind(LoadMap(current_js_object.value()));
7283 Node* enum_length = EnumLength(current_map.value());
7284 Node* zero_constant = SmiConstant(Smi::kZero);
7285 Branch(WordEqual(enum_length, zero_constant), &loop, use_runtime);
7286 }
7287 }
7288
7289 Node* CodeStubAssembler::CreateAllocationSiteInFeedbackVector(
7290 Node* feedback_vector, Node* slot) {
7291 Node* size = IntPtrConstant(AllocationSite::kSize);
7292 Node* site = Allocate(size, CodeStubAssembler::kPretenured);
7293
7294 // Store the map
7295 StoreObjectFieldRoot(site, AllocationSite::kMapOffset,
7296 Heap::kAllocationSiteMapRootIndex);
7297 Node* kind = SmiConstant(Smi::FromInt(GetInitialFastElementsKind()));
7298 StoreObjectFieldNoWriteBarrier(site, AllocationSite::kTransitionInfoOffset,
7299 kind);
7300
7301 // Unlike literals, constructed arrays don't have nested sites
7302 Node* zero = IntPtrConstant(0);
7303 StoreObjectFieldNoWriteBarrier(site, AllocationSite::kNestedSiteOffset, zero);
7304
7305 // Pretenuring calculation field.
7306 StoreObjectFieldNoWriteBarrier(site, AllocationSite::kPretenureDataOffset,
7307 zero);
7308
7309 // Pretenuring memento creation count field.
7310 StoreObjectFieldNoWriteBarrier(
7311 site, AllocationSite::kPretenureCreateCountOffset, zero);
7312
7313 // Store an empty fixed array for the code dependency.
7314 StoreObjectFieldRoot(site, AllocationSite::kDependentCodeOffset,
7315 Heap::kEmptyFixedArrayRootIndex);
7316
7317 // Link the object to the allocation site list
7318 Node* site_list = ExternalConstant(
7319 ExternalReference::allocation_sites_list_address(isolate()));
7320 Node* next_site = LoadBufferObject(site_list, 0);
7321
7322 // TODO(mvstanton): This is a store to a weak pointer, which we may want to
7323 // mark as such in order to skip the write barrier, once we have a unified
7324 // system for weakness. For now we decided to keep it like this because having
7325 // an initial write barrier backed store makes this pointer strong until the
7326 // next GC, and allocation sites are designed to survive several GCs anyway.
7327 StoreObjectField(site, AllocationSite::kWeakNextOffset, next_site);
7328 StoreNoWriteBarrier(MachineRepresentation::kTagged, site_list, site);
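// (The two stores above are a plain singly-linked-list push: the new site's
// weak_next points at the old list head, and the head pointer now points at
// the new site.)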
7329
7330 StoreFixedArrayElement(feedback_vector, slot, site, UPDATE_WRITE_BARRIER,
7331 CodeStubAssembler::SMI_PARAMETERS);
7332 return site;
7333 }
7334
7335 Node* CodeStubAssembler::CreateWeakCellInFeedbackVector(Node* feedback_vector,
7336 Node* slot,
7337 Node* value) {
7338 Node* size = IntPtrConstant(WeakCell::kSize);
7339 Node* cell = Allocate(size, CodeStubAssembler::kPretenured);
7340
7341 // Initialize the WeakCell.
7342 StoreObjectFieldRoot(cell, WeakCell::kMapOffset, Heap::kWeakCellMapRootIndex);
7343 StoreObjectField(cell, WeakCell::kValueOffset, value);
7344 StoreObjectFieldRoot(cell, WeakCell::kNextOffset,
7345 Heap::kTheHoleValueRootIndex);
7346
7347 // Store the WeakCell in the feedback vector.
7348 StoreFixedArrayElement(feedback_vector, slot, cell, UPDATE_WRITE_BARRIER,
7349 CodeStubAssembler::SMI_PARAMETERS);
7350 return cell;
7351 }
7352
7353 void CodeStubAssembler::BuildFastLoop(
7354 const CodeStubAssembler::VariableList& vars,
7355 MachineRepresentation index_rep, Node* start_index, Node* end_index,
7356 std::function<void(CodeStubAssembler* assembler, Node* index)> body,
7357 int increment, IndexAdvanceMode mode) {
7358 Variable var(this, index_rep);
7359 VariableList vars_copy(vars, zone());
7360 vars_copy.Add(&var, zone());
7361 var.Bind(start_index);
7362 Label loop(this, vars_copy);
7363 Label after_loop(this);
7364 // Introduce an explicit second check of the termination condition before the
7365 // loop that helps turbofan generate better code. If there's only a single
7366 // check, then the CodeStubAssembler forces it to be at the beginning of the
7367 // loop requiring a backwards branch at the end of the loop (it's not possible
7368 // to force the loop header check at the end of the loop and branch forward to
7369 // it from the pre-header). The extra branch is slower in the case that the
7370 // loop actually iterates.
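// Roughly, the generated control flow is (a sketch, shown for kPost mode):
//
//   var = start_index;
//   if (var == end_index) goto after_loop;  // pre-header check
//   loop:
//     body(var);
//     var += increment;
//     if (var != end_index) goto loop;      // backedge check
//   after_loop: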
7371 Branch(WordEqual(var.value(), end_index), &after_loop, &loop);
7372 Bind(&loop);
7373 {
7374 if (mode == IndexAdvanceMode::kPre) {
7375 var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment)));
7376 }
7377 body(this, var.value());
7378 if (mode == IndexAdvanceMode::kPost) {
7379 var.Bind(IntPtrAdd(var.value(), IntPtrConstant(increment)));
7380 }
7381 Branch(WordNotEqual(var.value(), end_index), &loop, &after_loop);
7382 }
7383 Bind(&after_loop);
7384 }
7385
7386 void CodeStubAssembler::BuildFastFixedArrayForEach(
7387 compiler::Node* fixed_array, ElementsKind kind,
7388 compiler::Node* first_element_inclusive,
7389 compiler::Node* last_element_exclusive,
7390 std::function<void(CodeStubAssembler* assembler,
7391 compiler::Node* fixed_array, compiler::Node* offset)>
7392 body,
7393 ParameterMode mode, ForEachDirection direction) {
7394 STATIC_ASSERT(FixedArray::kHeaderSize == FixedDoubleArray::kHeaderSize);
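// When both bounds are compile-time constants and the trip count is at most
// kElementLoopUnrollThreshold, the loop is fully unrolled below: e.g. a
// four-element traversal becomes four straight-line body invocations with no
// branches at all.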
7395 int32_t first_val;
7396 bool constant_first = ToInt32Constant(first_element_inclusive, first_val);
7397 int32_t last_val;
7398 bool constant_last = ToInt32Constant(last_element_exclusive, last_val);
7399 if (constant_first && constant_last) {
7400 int delta = last_val - first_val;
7401 DCHECK(delta >= 0);
7402 if (delta <= kElementLoopUnrollThreshold) {
7403 if (direction == ForEachDirection::kForward) {
7404 for (int i = first_val; i < last_val; ++i) {
7405 Node* index = IntPtrConstant(i);
7406 Node* offset =
7407 ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
7408 FixedArray::kHeaderSize - kHeapObjectTag);
7409 body(this, fixed_array, offset);
7410 }
7411 } else {
7412 for (int i = last_val - 1; i >= first_val; --i) {
7413 Node* index = IntPtrConstant(i);
7414 Node* offset =
7415 ElementOffsetFromIndex(index, kind, INTPTR_PARAMETERS,
7416 FixedArray::kHeaderSize - kHeapObjectTag);
7417 body(this, fixed_array, offset);
7418 }
7419 }
7420 return;
7421 }
7422 }
7423
7424 Node* start =
7425 ElementOffsetFromIndex(first_element_inclusive, kind, mode,
7426 FixedArray::kHeaderSize - kHeapObjectTag);
7427 Node* limit =
7428 ElementOffsetFromIndex(last_element_exclusive, kind, mode,
7429 FixedArray::kHeaderSize - kHeapObjectTag);
7430 if (direction == ForEachDirection::kReverse) std::swap(start, limit);
7431
7432 int increment = IsFastDoubleElementsKind(kind) ? kDoubleSize : kPointerSize;
7433 BuildFastLoop(
7434 MachineType::PointerRepresentation(), start, limit,
7435 [fixed_array, body](CodeStubAssembler* assembler, Node* offset) {
7436 body(assembler, fixed_array, offset);
7437 },
7438 direction == ForEachDirection::kReverse ? -increment : increment,
7439 direction == ForEachDirection::kReverse ? IndexAdvanceMode::kPre
7440 : IndexAdvanceMode::kPost);
7441 }
7442
7443 void CodeStubAssembler::BranchIfNumericRelationalComparison(
7444 RelationalComparisonMode mode, compiler::Node* lhs, compiler::Node* rhs,
7445 Label* if_true, Label* if_false) {
7446 typedef compiler::Node Node;
7447
7448 Label end(this);
7449 Variable result(this, MachineRepresentation::kTagged);
7450
7451 // Shared entry for floating point comparison.
7452 Label do_fcmp(this);
7453 Variable var_fcmp_lhs(this, MachineRepresentation::kFloat64),
7454 var_fcmp_rhs(this, MachineRepresentation::kFloat64);
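// Overview (a sketch): Smi vs. Smi operands are compared directly on their
// tagged values; as soon as either side is a HeapNumber, both sides are
// converted to float64 and meet at the shared do_fcmp block below. Unlike
// RelationalComparison, no ToNumber conversion happens here; the CSA_ASSERTs
// below document that callers must pass numbers.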
7455
7456 // Check if the {lhs} is a Smi or a HeapObject.
7457 Label if_lhsissmi(this), if_lhsisnotsmi(this);
7458 Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
7459
7460 Bind(&if_lhsissmi);
7461 {
7462 // Check if {rhs} is a Smi or a HeapObject.
7463 Label if_rhsissmi(this), if_rhsisnotsmi(this);
7464 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
7465
7466 Bind(&if_rhsissmi);
7467 {
7468 // Both {lhs} and {rhs} are Smi, so just perform a fast Smi comparison.
7469 switch (mode) {
7470 case kLessThan:
7471 BranchIfSmiLessThan(lhs, rhs, if_true, if_false);
7472 break;
7473 case kLessThanOrEqual:
7474 BranchIfSmiLessThanOrEqual(lhs, rhs, if_true, if_false);
7475 break;
7476 case kGreaterThan:
7477 BranchIfSmiLessThan(rhs, lhs, if_true, if_false);
7478 break;
7479 case kGreaterThanOrEqual:
7480 BranchIfSmiLessThanOrEqual(rhs, lhs, if_true, if_false);
7481 break;
7482 }
7483 }
7484
7485 Bind(&if_rhsisnotsmi);
7486 {
7487 CSA_ASSERT(this, WordEqual(LoadMap(rhs), HeapNumberMapConstant()));
7488 // Convert the {lhs} and {rhs} to floating point values, and
7489 // perform a floating point comparison.
7490 var_fcmp_lhs.Bind(SmiToFloat64(lhs));
7491 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
7492 Goto(&do_fcmp);
7493 }
7494 }
7495
7496 Bind(&if_lhsisnotsmi);
7497 {
7498 CSA_ASSERT(this, WordEqual(LoadMap(lhs), HeapNumberMapConstant()));
7499
7500 // Check if {rhs} is a Smi or a HeapObject.
7501 Label if_rhsissmi(this), if_rhsisnotsmi(this);
7502 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
7503
7504 Bind(&if_rhsissmi);
7505 {
7506 // Convert the {lhs} and {rhs} to floating point values, and
7507 // perform a floating point comparison.
7508 var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
7509 var_fcmp_rhs.Bind(SmiToFloat64(rhs));
7510 Goto(&do_fcmp);
7511 }
7512
7513 Bind(&if_rhsisnotsmi);
7514 {
7515 CSA_ASSERT(this, WordEqual(LoadMap(rhs), HeapNumberMapConstant()));
7516
7517 // Convert the {lhs} and {rhs} to floating point values, and
7518 // perform a floating point comparison.
7519 var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
7520 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
7521 Goto(&do_fcmp);
7522 }
7523 }
7524
7525 Bind(&do_fcmp);
7526 {
7527 // Load the {lhs} and {rhs} floating point values.
7528 Node* lhs = var_fcmp_lhs.value();
7529 Node* rhs = var_fcmp_rhs.value();
7530
7531 // Perform a fast floating point comparison.
7532 switch (mode) {
7533 case kLessThan:
7534 Branch(Float64LessThan(lhs, rhs), if_true, if_false);
7535 break;
7536 case kLessThanOrEqual:
7537 Branch(Float64LessThanOrEqual(lhs, rhs), if_true, if_false);
7538 break;
7539 case kGreaterThan:
7540 Branch(Float64GreaterThan(lhs, rhs), if_true, if_false);
7541 break;
7542 case kGreaterThanOrEqual:
7543 Branch(Float64GreaterThanOrEqual(lhs, rhs), if_true, if_false);
7544 break;
7545 }
7546 }
7547 }
7548
7549 void CodeStubAssembler::GotoUnlessNumberLessThan(compiler::Node* lhs,
7550 compiler::Node* rhs,
7551 Label* if_false) {
7552 Label if_true(this);
7553 BranchIfNumericRelationalComparison(kLessThan, lhs, rhs, &if_true, if_false);
7554 Bind(&if_true);
7555 }
7556
7557 compiler::Node* CodeStubAssembler::RelationalComparison(
7558 RelationalComparisonMode mode, compiler::Node* lhs, compiler::Node* rhs,
7559 compiler::Node* context) {
7560 typedef compiler::Node Node;
7561
7562 Label return_true(this), return_false(this), end(this);
7563 Variable result(this, MachineRepresentation::kTagged);
7564
7565 // Shared entry for floating point comparison.
7566 Label do_fcmp(this);
7567 Variable var_fcmp_lhs(this, MachineRepresentation::kFloat64),
7568 var_fcmp_rhs(this, MachineRepresentation::kFloat64);
7569
7570 // We might need to loop several times due to ToPrimitive and/or ToNumber
7571 // conversions.
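// For example, evaluating '1' < 2 takes the String/Smi path below, rebinds
// {lhs} to ToNumber('1') and re-enters the loop, so after at most a few
// iterations both operands are numbers and reach the shared do_fcmp block.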
7572 Variable var_lhs(this, MachineRepresentation::kTagged),
7573 var_rhs(this, MachineRepresentation::kTagged);
7574 Variable* loop_vars[2] = {&var_lhs, &var_rhs};
7575 Label loop(this, 2, loop_vars);
7576 var_lhs.Bind(lhs);
7577 var_rhs.Bind(rhs);
7578 Goto(&loop);
7579 Bind(&loop);
7580 {
7581 // Load the current {lhs} and {rhs} values.
7582 lhs = var_lhs.value();
7583 rhs = var_rhs.value();
7584
7585 // Check if the {lhs} is a Smi or a HeapObject.
7586 Label if_lhsissmi(this), if_lhsisnotsmi(this);
7587 Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
7588
7589 Bind(&if_lhsissmi);
7590 {
7591 // Check if {rhs} is a Smi or a HeapObject.
7592 Label if_rhsissmi(this), if_rhsisnotsmi(this);
7593 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
7594
7595 Bind(&if_rhsissmi);
7596 {
7597 // Both {lhs} and {rhs} are Smi, so just perform a fast Smi comparison.
7598 switch (mode) {
7599 case kLessThan:
7600 BranchIfSmiLessThan(lhs, rhs, &return_true, &return_false);
7601 break;
7602 case kLessThanOrEqual:
7603 BranchIfSmiLessThanOrEqual(lhs, rhs, &return_true, &return_false);
7604 break;
7605 case kGreaterThan:
7606 BranchIfSmiLessThan(rhs, lhs, &return_true, &return_false);
7607 break;
7608 case kGreaterThanOrEqual:
7609 BranchIfSmiLessThanOrEqual(rhs, lhs, &return_true, &return_false);
7610 break;
7611 }
7612 }
7613
7614 Bind(&if_rhsisnotsmi);
7615 {
7616 // Load the map of {rhs}.
7617 Node* rhs_map = LoadMap(rhs);
7618
7619 // Check if the {rhs} is a HeapNumber.
7620 Label if_rhsisnumber(this), if_rhsisnotnumber(this, Label::kDeferred);
7621 Branch(IsHeapNumberMap(rhs_map), &if_rhsisnumber, &if_rhsisnotnumber);
7622
7623 Bind(&if_rhsisnumber);
7624 {
7625 // Convert the {lhs} and {rhs} to floating point values, and
7626 // perform a floating point comparison.
7627 var_fcmp_lhs.Bind(SmiToFloat64(lhs));
7628 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
7629 Goto(&do_fcmp);
7630 }
7631
7632 Bind(&if_rhsisnotnumber);
7633 {
7634 // Convert the {rhs} to a Number; we don't need to perform the
7635 // dedicated ToPrimitive(rhs, hint Number) operation, as the
7636 // ToNumber(rhs) will by itself already invoke ToPrimitive with
7637 // a Number hint.
7638 Callable callable = CodeFactory::NonNumberToNumber(isolate());
7639 var_rhs.Bind(CallStub(callable, context, rhs));
7640 Goto(&loop);
7641 }
7642 }
7643 }
7644
7645 Bind(&if_lhsisnotsmi);
7646 {
7647 // Load the HeapNumber map for later comparisons.
7648 Node* number_map = HeapNumberMapConstant();
7649
7650 // Load the map of {lhs}.
7651 Node* lhs_map = LoadMap(lhs);
7652
7653 // Check if {rhs} is a Smi or a HeapObject.
7654 Label if_rhsissmi(this), if_rhsisnotsmi(this);
7655 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
7656
7657 Bind(&if_rhsissmi);
7658 {
7659 // Check if the {lhs} is a HeapNumber.
7660 Label if_lhsisnumber(this), if_lhsisnotnumber(this, Label::kDeferred);
7661 Branch(WordEqual(lhs_map, number_map), &if_lhsisnumber,
7662 &if_lhsisnotnumber);
7663
7664 Bind(&if_lhsisnumber);
7665 {
7666 // Convert the {lhs} and {rhs} to floating point values, and
7667 // perform a floating point comparison.
7668 var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
7669 var_fcmp_rhs.Bind(SmiToFloat64(rhs));
7670 Goto(&do_fcmp);
7671 }
7672
7673 Bind(&if_lhsisnotnumber);
7674 {
7675 // Convert the {lhs} to a Number; we don't need to perform the
7676 // dedicated ToPrimitive(lhs, hint Number) operation, as the
7677 // ToNumber(lhs) will by itself already invoke ToPrimitive with
7678 // a Number hint.
7679 Callable callable = CodeFactory::NonNumberToNumber(isolate());
7680 var_lhs.Bind(CallStub(callable, context, lhs));
7681 Goto(&loop);
7682 }
7683 }
7684
7685 Bind(&if_rhsisnotsmi);
7686 {
7687 // Load the map of {rhs}.
7688 Node* rhs_map = LoadMap(rhs);
7689
7690 // Check if {lhs} is a HeapNumber.
7691 Label if_lhsisnumber(this), if_lhsisnotnumber(this);
7692 Branch(WordEqual(lhs_map, number_map), &if_lhsisnumber,
7693 &if_lhsisnotnumber);
7694
7695 Bind(&if_lhsisnumber);
7696 {
7697 // Check if {rhs} is also a HeapNumber.
7698 Label if_rhsisnumber(this), if_rhsisnotnumber(this, Label::kDeferred);
7699 Branch(WordEqual(lhs_map, rhs_map), &if_rhsisnumber,
7700 &if_rhsisnotnumber);
7701
7702 Bind(&if_rhsisnumber);
7703 {
7704 // Convert the {lhs} and {rhs} to floating point values, and
7705 // perform a floating point comparison.
7706 var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
7707 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
7708 Goto(&do_fcmp);
7709 }
7710
7711 Bind(&if_rhsisnotnumber);
7712 {
7713 // Convert the {rhs} to a Number; we don't need to perform
7714 // dedicated ToPrimitive(rhs, hint Number) operation, as the
7715 // ToNumber(rhs) will by itself already invoke ToPrimitive with
7716 // a Number hint.
7717 Callable callable = CodeFactory::NonNumberToNumber(isolate());
7718 var_rhs.Bind(CallStub(callable, context, rhs));
7719 Goto(&loop);
7720 }
7721 }
7722
7723 Bind(&if_lhsisnotnumber);
7724 {
7725 // Load the instance type of {lhs}.
7726 Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
7727
7728 // Check if {lhs} is a String.
7729 Label if_lhsisstring(this), if_lhsisnotstring(this, Label::kDeferred);
7730 Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
7731 &if_lhsisnotstring);
7732
7733 Bind(&if_lhsisstring);
7734 {
7735 // Load the instance type of {rhs}.
7736 Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
7737
7738 // Check if {rhs} is also a String.
7739 Label if_rhsisstring(this, Label::kDeferred),
7740 if_rhsisnotstring(this, Label::kDeferred);
7741 Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
7742 &if_rhsisnotstring);
7743
7744 Bind(&if_rhsisstring);
7745 {
7746 // Both {lhs} and {rhs} are strings.
7747 switch (mode) {
7748 case kLessThan:
7749 result.Bind(CallStub(CodeFactory::StringLessThan(isolate()),
7750 context, lhs, rhs));
7751 Goto(&end);
7752 break;
7753 case kLessThanOrEqual:
7754 result.Bind(
7755 CallStub(CodeFactory::StringLessThanOrEqual(isolate()),
7756 context, lhs, rhs));
7757 Goto(&end);
7758 break;
7759 case kGreaterThan:
7760 result.Bind(
7761 CallStub(CodeFactory::StringGreaterThan(isolate()),
7762 context, lhs, rhs));
7763 Goto(&end);
7764 break;
7765 case kGreaterThanOrEqual:
7766 result.Bind(
7767 CallStub(CodeFactory::StringGreaterThanOrEqual(isolate()),
7768 context, lhs, rhs));
7769 Goto(&end);
7770 break;
7771 }
7772 }
7773
7774 Bind(&if_rhsisnotstring);
7775 {
7776 // The {lhs} is a String, while {rhs} is neither a Number nor a
7777 // String, so we need to call ToPrimitive(rhs, hint Number) if
7778 // {rhs} is a receiver or ToNumber(lhs) and ToNumber(rhs) in the
7779 // other cases.
7780 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
7781 Label if_rhsisreceiver(this, Label::kDeferred),
7782 if_rhsisnotreceiver(this, Label::kDeferred);
7783 Branch(IsJSReceiverInstanceType(rhs_instance_type),
7784 &if_rhsisreceiver, &if_rhsisnotreceiver);
7785
7786 Bind(&if_rhsisreceiver);
7787 {
7788 // Convert {rhs} to a primitive first passing Number hint.
7789 Callable callable = CodeFactory::NonPrimitiveToPrimitive(
7790 isolate(), ToPrimitiveHint::kNumber);
7791 var_rhs.Bind(CallStub(callable, context, rhs));
7792 Goto(&loop);
7793 }
7794
7795 Bind(&if_rhsisnotreceiver);
7796 {
7797 // Convert both {lhs} and {rhs} to Number.
7798 Callable callable = CodeFactory::ToNumber(isolate());
7799 var_lhs.Bind(CallStub(callable, context, lhs));
7800 var_rhs.Bind(CallStub(callable, context, rhs));
7801 Goto(&loop);
7802 }
7803 }
7804 }
7805
7806 Bind(&if_lhsisnotstring);
7807 {
7808 // The {lhs} is neither a Number nor a String, so we need to call
7809 // ToPrimitive(lhs, hint Number) if {lhs} is a receiver or
7810 // ToNumber(lhs) and ToNumber(rhs) in the other cases.
7811 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
7812 Label if_lhsisreceiver(this, Label::kDeferred),
7813 if_lhsisnotreceiver(this, Label::kDeferred);
7814 Branch(IsJSReceiverInstanceType(lhs_instance_type),
7815 &if_lhsisreceiver, &if_lhsisnotreceiver);
7816
7817 Bind(&if_lhsisreceiver);
7818 {
7819 // Convert {lhs} to a primitive first passing Number hint.
7820 Callable callable = CodeFactory::NonPrimitiveToPrimitive(
7821 isolate(), ToPrimitiveHint::kNumber);
7822 var_lhs.Bind(CallStub(callable, context, lhs));
7823 Goto(&loop);
7824 }
7825
7826 Bind(&if_lhsisnotreceiver);
7827 {
7828 // Convert both {lhs} and {rhs} to Number.
7829 Callable callable = CodeFactory::ToNumber(isolate());
7830 var_lhs.Bind(CallStub(callable, context, lhs));
7831 var_rhs.Bind(CallStub(callable, context, rhs));
7832 Goto(&loop);
7833 }
7834 }
7835 }
7836 }
7837 }
7838 }
7839
7840 Bind(&do_fcmp);
7841 {
7842 // Load the {lhs} and {rhs} floating point values.
7843 Node* lhs = var_fcmp_lhs.value();
7844 Node* rhs = var_fcmp_rhs.value();
7845
7846 // Perform a fast floating point comparison.
7847 switch (mode) {
7848 case kLessThan:
7849 Branch(Float64LessThan(lhs, rhs), &return_true, &return_false);
7850 break;
7851 case kLessThanOrEqual:
7852 Branch(Float64LessThanOrEqual(lhs, rhs), &return_true, &return_false);
7853 break;
7854 case kGreaterThan:
7855 Branch(Float64GreaterThan(lhs, rhs), &return_true, &return_false);
7856 break;
7857 case kGreaterThanOrEqual:
7858 Branch(Float64GreaterThanOrEqual(lhs, rhs), &return_true,
7859 &return_false);
7860 break;
7861 }
7862 }
7863
7864 Bind(&return_true);
7865 {
7866 result.Bind(BooleanConstant(true));
7867 Goto(&end);
7868 }
7869
7870 Bind(&return_false);
7871 {
7872 result.Bind(BooleanConstant(false));
7873 Goto(&end);
7874 }
7875
7876 Bind(&end);
7877 return result.value();
7878 }
7879
7880 namespace {
7881
7882 void GenerateEqual_Same(CodeStubAssembler* assembler, compiler::Node* value,
7883 CodeStubAssembler::Label* if_equal,
7884 CodeStubAssembler::Label* if_notequal) {
7885 // In case of abstract or strict equality checks, we need additional checks
7886 // for NaN values because they are not considered equal, even if both the
7887 // left and the right hand side reference exactly the same value.
7888 // TODO(bmeurer): This seems to violate the SIMD.js specification, but it
7889 // seems to be what is tested in the current SIMD.js testsuite.
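// (Put differently: NaN is the only JavaScript value for which v === v is
// false, so identical references are equal unless they are a HeapNumber
// holding NaN.)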
7890
7891 typedef CodeStubAssembler::Label Label;
7892 typedef compiler::Node Node;
7893
7894 // Check if {value} is a Smi or a HeapObject.
7895 Label if_valueissmi(assembler), if_valueisnotsmi(assembler);
7896 assembler->Branch(assembler->TaggedIsSmi(value), &if_valueissmi,
7897 &if_valueisnotsmi);
7898
7899 assembler->Bind(&if_valueisnotsmi);
7900 {
7901 // Load the map of {value}.
7902 Node* value_map = assembler->LoadMap(value);
7903
7904 // Check if {value} (and therefore {rhs}) is a HeapNumber.
7905 Label if_valueisnumber(assembler), if_valueisnotnumber(assembler);
7906 assembler->Branch(assembler->IsHeapNumberMap(value_map), &if_valueisnumber,
7907 &if_valueisnotnumber);
7908
7909 assembler->Bind(&if_valueisnumber);
7910 {
7911 // Convert {value} (and therefore {rhs}) to floating point value.
7912 Node* value_value = assembler->LoadHeapNumberValue(value);
7913
7914 // Check if the HeapNumber value is a NaN.
7915 assembler->BranchIfFloat64IsNaN(value_value, if_notequal, if_equal);
7916 }
7917
7918 assembler->Bind(&if_valueisnotnumber);
7919 assembler->Goto(if_equal);
7920 }
7921
7922 assembler->Bind(&if_valueissmi);
7923 assembler->Goto(if_equal);
7924 }
7925
7926 void GenerateEqual_Simd128Value_HeapObject(
7927 CodeStubAssembler* assembler, compiler::Node* lhs, compiler::Node* lhs_map,
7928 compiler::Node* rhs, compiler::Node* rhs_map,
7929 CodeStubAssembler::Label* if_equal, CodeStubAssembler::Label* if_notequal) {
7930 assembler->BranchIfSimd128Equal(lhs, lhs_map, rhs, rhs_map, if_equal,
7931 if_notequal);
7932 }
7933
7934 } // namespace
7935
7936 // ES6 section 7.2.12 Abstract Equality Comparison
7937 compiler::Node* CodeStubAssembler::Equal(ResultMode mode, compiler::Node* lhs,
7938 compiler::Node* rhs,
7939 compiler::Node* context) {
7940 // This is a slightly optimized version of Object::Equals represented as a
7941 // scheduled TurboFan graph utilizing the CodeStubAssembler. Whenever you
7942 // change something functionality-wise in here, remember to update the
7943 // Object::Equals method as well.
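// As a concrete example of the conversion loop below: for 1 == '1' the
// Smi/String case jumps to do_rhsstringtonumber, rebinds {rhs} to the
// Number 1, and re-enters the loop, where the reference-equality fast path
// then decides the result.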
7944 typedef compiler::Node Node;
7945
7946 Label if_equal(this), if_notequal(this),
7947 do_rhsstringtonumber(this, Label::kDeferred), end(this);
7948 Variable result(this, MachineRepresentation::kTagged);
7949
7950 // Shared entry for floating point comparison.
7951 Label do_fcmp(this);
7952 Variable var_fcmp_lhs(this, MachineRepresentation::kFloat64),
7953 var_fcmp_rhs(this, MachineRepresentation::kFloat64);
7954
7955 // We might need to loop several times due to ToPrimitive and/or ToNumber
7956 // conversions.
7957 Variable var_lhs(this, MachineRepresentation::kTagged),
7958 var_rhs(this, MachineRepresentation::kTagged);
7959 Variable* loop_vars[2] = {&var_lhs, &var_rhs};
7960 Label loop(this, 2, loop_vars);
7961 var_lhs.Bind(lhs);
7962 var_rhs.Bind(rhs);
7963 Goto(&loop);
7964 Bind(&loop);
7965 {
7966 // Load the current {lhs} and {rhs} values.
7967 lhs = var_lhs.value();
7968 rhs = var_rhs.value();
7969
7970 // Check if {lhs} and {rhs} refer to the same object.
7971 Label if_same(this), if_notsame(this);
7972 Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);
7973
7974 Bind(&if_same);
7975 {
7976 // The {lhs} and {rhs} reference the exact same value, yet we need special
7977 // treatment for HeapNumber, as NaN is not equal to NaN.
7978 GenerateEqual_Same(this, lhs, &if_equal, &if_notequal);
7979 }
7980
7981 Bind(&if_notsame);
7982 {
7983 // Check if {lhs} is a Smi or a HeapObject.
7984 Label if_lhsissmi(this), if_lhsisnotsmi(this);
7985 Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
7986
7987 Bind(&if_lhsissmi);
7988 {
7989 // Check if {rhs} is a Smi or a HeapObject.
7990 Label if_rhsissmi(this), if_rhsisnotsmi(this);
7991 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
7992
7993 Bind(&if_rhsissmi);
7994 // We have already checked for {lhs} and {rhs} being the same value, so
7995 // if both are Smis when we get here they must not be equal.
7996 Goto(&if_notequal);
7997
7998 Bind(&if_rhsisnotsmi);
7999 {
8000 // Load the map of {rhs}.
8001 Node* rhs_map = LoadMap(rhs);
8002
8003 // Check if {rhs} is a HeapNumber.
8004 Node* number_map = HeapNumberMapConstant();
8005 Label if_rhsisnumber(this), if_rhsisnotnumber(this);
8006 Branch(WordEqual(rhs_map, number_map), &if_rhsisnumber,
8007 &if_rhsisnotnumber);
8008
8009 Bind(&if_rhsisnumber);
8010 {
8011 // Convert {lhs} and {rhs} to floating point values, and
8012 // perform a floating point comparison.
8013 var_fcmp_lhs.Bind(SmiToFloat64(lhs));
8014 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
8015 Goto(&do_fcmp);
8016 }
8017
8018 Bind(&if_rhsisnotnumber);
8019 {
8020 // Load the instance type of the {rhs}.
8021 Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
8022
8023 // Check if the {rhs} is a String.
8024 Label if_rhsisstring(this, Label::kDeferred),
8025 if_rhsisnotstring(this);
8026 Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
8027 &if_rhsisnotstring);
8028
8029 Bind(&if_rhsisstring);
8030 {
8031 // The {rhs} is a String and the {lhs} is a Smi; we need
8032 // to convert the {rhs} to a Number and compare the output to
8033 // the Number on the {lhs}.
8034 Goto(&do_rhsstringtonumber);
8035 }
8036
8037 Bind(&if_rhsisnotstring);
8038 {
8039 // Check if the {rhs} is a Boolean.
8040 Label if_rhsisboolean(this), if_rhsisnotboolean(this);
8041 Branch(IsBooleanMap(rhs_map), &if_rhsisboolean,
8042 &if_rhsisnotboolean);
8043
8044 Bind(&if_rhsisboolean);
8045 {
8046 // The {rhs} is a Boolean, load its number value.
8047 var_rhs.Bind(LoadObjectField(rhs, Oddball::kToNumberOffset));
8048 Goto(&loop);
8049 }
8050
8051 Bind(&if_rhsisnotboolean);
8052 {
8053 // Check if the {rhs} is a Receiver.
8054 STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
8055 Label if_rhsisreceiver(this, Label::kDeferred),
8056 if_rhsisnotreceiver(this);
8057 Branch(IsJSReceiverInstanceType(rhs_instance_type),
8058 &if_rhsisreceiver, &if_rhsisnotreceiver);
8059
8060 Bind(&if_rhsisreceiver);
8061 {
8062 // Convert {rhs} to a primitive first (passing no hint).
8063 Callable callable =
8064 CodeFactory::NonPrimitiveToPrimitive(isolate());
8065 var_rhs.Bind(CallStub(callable, context, rhs));
8066 Goto(&loop);
8067 }
8068
8069 Bind(&if_rhsisnotreceiver);
8070 Goto(&if_notequal);
8071 }
8072 }
8073 }
8074 }
8075 }
8076
8077 Bind(&if_lhsisnotsmi);
8078 {
8079 // Check if {rhs} is a Smi or a HeapObject.
8080 Label if_rhsissmi(this), if_rhsisnotsmi(this);
8081 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
8082
8083 Bind(&if_rhsissmi);
8084 {
8085 // The {lhs} is a HeapObject and the {rhs} is a Smi; swapping {lhs}
8086 // and {rhs} is not observable and doesn't matter for the result, so
8087 // we can just swap them and use the Smi handling above (for {lhs}
8088 // being a Smi).
8089 var_lhs.Bind(rhs);
8090 var_rhs.Bind(lhs);
8091 Goto(&loop);
8092 }
8093
8094 Bind(&if_rhsisnotsmi);
8095 {
8096 Label if_lhsisstring(this), if_lhsisnumber(this),
8097 if_lhsissymbol(this), if_lhsissimd128value(this),
8098 if_lhsisoddball(this), if_lhsisreceiver(this);
8099
8100 // Both {lhs} and {rhs} are HeapObjects, load their maps
8101 // and their instance types.
8102 Node* lhs_map = LoadMap(lhs);
8103 Node* rhs_map = LoadMap(rhs);
8104
8105 // Load the instance types of {lhs} and {rhs}.
8106 Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
8107 Node* rhs_instance_type = LoadMapInstanceType(rhs_map);
8108
8109 // Dispatch based on the instance type of {lhs}.
8110 size_t const kNumCases = FIRST_NONSTRING_TYPE + 4;
8111 Label* case_labels[kNumCases];
8112 int32_t case_values[kNumCases];
8113 for (int32_t i = 0; i < FIRST_NONSTRING_TYPE; ++i) {
8114 case_labels[i] = new Label(this);
8115 case_values[i] = i;
8116 }
8117 case_labels[FIRST_NONSTRING_TYPE + 0] = &if_lhsisnumber;
8118 case_values[FIRST_NONSTRING_TYPE + 0] = HEAP_NUMBER_TYPE;
8119 case_labels[FIRST_NONSTRING_TYPE + 1] = &if_lhsissymbol;
8120 case_values[FIRST_NONSTRING_TYPE + 1] = SYMBOL_TYPE;
8121 case_labels[FIRST_NONSTRING_TYPE + 2] = &if_lhsissimd128value;
8122 case_values[FIRST_NONSTRING_TYPE + 2] = SIMD128_VALUE_TYPE;
8123 case_labels[FIRST_NONSTRING_TYPE + 3] = &if_lhsisoddball;
8124 case_values[FIRST_NONSTRING_TYPE + 3] = ODDBALL_TYPE;
8125 Switch(lhs_instance_type, &if_lhsisreceiver, case_values, case_labels,
8126 arraysize(case_values));
8127 for (int32_t i = 0; i < FIRST_NONSTRING_TYPE; ++i) {
8128 Bind(case_labels[i]);
8129 Goto(&if_lhsisstring);
8130 delete case_labels[i];
8131 }
8132
8133 Bind(&if_lhsisstring);
8134 {
8135 // Check if {rhs} is also a String.
8136 Label if_rhsisstring(this, Label::kDeferred),
8137 if_rhsisnotstring(this);
8138 Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
8139 &if_rhsisnotstring);
8140
8141 Bind(&if_rhsisstring);
8142 {
8143 // Both {lhs} and {rhs} are of type String, just do the
8144 // string comparison then.
8145 Callable callable = (mode == kDontNegateResult)
8146 ? CodeFactory::StringEqual(isolate())
8147 : CodeFactory::StringNotEqual(isolate());
8148 result.Bind(CallStub(callable, context, lhs, rhs));
8149 Goto(&end);
8150 }
8151
8152 Bind(&if_rhsisnotstring);
8153 {
8154 // The {lhs} is a String and the {rhs} is some other HeapObject.
8155 // Swapping {lhs} and {rhs} is not observable and doesn't matter
8156 // for the result, so we can just swap them and use the String
8157 // handling below (for {rhs} being a String).
8158 var_lhs.Bind(rhs);
8159 var_rhs.Bind(lhs);
8160 Goto(&loop);
8161 }
8162 }
8163
8164 Bind(&if_lhsisnumber);
8165 {
8166 // Check if {rhs} is also a HeapNumber.
8167 Label if_rhsisnumber(this), if_rhsisnotnumber(this);
8168 Branch(Word32Equal(lhs_instance_type, rhs_instance_type),
8169 &if_rhsisnumber, &if_rhsisnotnumber);
8170
8171 Bind(&if_rhsisnumber);
8172 {
8173 // Convert {lhs} and {rhs} to floating point values, and
8174 // perform a floating point comparison.
8175 var_fcmp_lhs.Bind(LoadHeapNumberValue(lhs));
8176 var_fcmp_rhs.Bind(LoadHeapNumberValue(rhs));
8177 Goto(&do_fcmp);
8178 }
8179
8180 Bind(&if_rhsisnotnumber);
8181 {
8182 // The {lhs} is a Number, the {rhs} is some other HeapObject.
8183 Label if_rhsisstring(this, Label::kDeferred),
8184 if_rhsisnotstring(this);
8185 Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
8186 &if_rhsisnotstring);
8187
8188 Bind(&if_rhsisstring);
8189 {
8190 // The {rhs} is a String and the {lhs} is a HeapNumber; we need
8191 // to convert the {rhs} to a Number and compare the output to
8192 // the Number on the {lhs}.
8193 Goto(&do_rhsstringtonumber);
8194 }
8195
8196 Bind(&if_rhsisnotstring);
8197 {
8198 // Check if the {rhs} is a JSReceiver.
8199 Label if_rhsisreceiver(this), if_rhsisnotreceiver(this);
8200 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
8201 Branch(IsJSReceiverInstanceType(rhs_instance_type),
8202 &if_rhsisreceiver, &if_rhsisnotreceiver);
8203
8204 Bind(&if_rhsisreceiver);
8205 {
8206 // The {lhs} is a Primitive and the {rhs} is a JSReceiver.
8207 // Swapping {lhs} and {rhs} is not observable and doesn't
8208 // matter for the result, so we can just swap them and use
8209 // the JSReceiver handling below (for {lhs} being a
8210 // JSReceiver).
8211 var_lhs.Bind(rhs);
8212 var_rhs.Bind(lhs);
8213 Goto(&loop);
8214 }
8215
8216 Bind(&if_rhsisnotreceiver);
8217 {
8218 // Check if {rhs} is a Boolean.
8219 Label if_rhsisboolean(this), if_rhsisnotboolean(this);
8220 Branch(IsBooleanMap(rhs_map), &if_rhsisboolean,
8221 &if_rhsisnotboolean);
8222
8223 Bind(&if_rhsisboolean);
8224 {
8225 // The {rhs} is a Boolean, convert it to a Smi first.
8226 var_rhs.Bind(
8227 LoadObjectField(rhs, Oddball::kToNumberOffset));
8228 Goto(&loop);
8229 }
8230
8231 Bind(&if_rhsisnotboolean);
8232 Goto(&if_notequal);
8233 }
8234 }
8235 }
8236 }
8237
8238 Bind(&if_lhsisoddball);
8239 {
8240 // The {lhs} is an Oddball and {rhs} is some other HeapObject.
8241 Label if_lhsisboolean(this), if_lhsisnotboolean(this);
8242 Node* boolean_map = BooleanMapConstant();
8243 Branch(WordEqual(lhs_map, boolean_map), &if_lhsisboolean,
8244 &if_lhsisnotboolean);
8245
8246 Bind(&if_lhsisboolean);
8247 {
8248 // The {lhs} is a Boolean, check if {rhs} is also a Boolean.
8249 Label if_rhsisboolean(this), if_rhsisnotboolean(this);
8250 Branch(WordEqual(rhs_map, boolean_map), &if_rhsisboolean,
8251 &if_rhsisnotboolean);
8252
8253 Bind(&if_rhsisboolean);
8254 {
8255 // Both {lhs} and {rhs} are distinct Boolean values.
8256 Goto(&if_notequal);
8257 }
8258
8259 Bind(&if_rhsisnotboolean);
8260 {
8261 // Convert the {lhs} to a Number first.
8262 var_lhs.Bind(LoadObjectField(lhs, Oddball::kToNumberOffset));
8263 Goto(&loop);
8264 }
8265 }
8266
8267 Bind(&if_lhsisnotboolean);
8268 {
8269 // The {lhs} is either Null or Undefined; check if the {rhs} is
8270 // undetectable (i.e. either also Null or Undefined or some
8271 // undetectable JSReceiver).
8272 Node* rhs_bitfield = LoadMapBitField(rhs_map);
8273 Branch(Word32Equal(
8274 Word32And(rhs_bitfield,
8275 Int32Constant(1 << Map::kIsUndetectable)),
8276 Int32Constant(0)),
8277 &if_notequal, &if_equal);
8278 }
8279 }
8280
8281 Bind(&if_lhsissymbol);
8282 {
8283 // Check if the {rhs} is a JSReceiver.
8284 Label if_rhsisreceiver(this), if_rhsisnotreceiver(this);
8285 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
8286 Branch(IsJSReceiverInstanceType(rhs_instance_type),
8287 &if_rhsisreceiver, &if_rhsisnotreceiver);
8288
8289 Bind(&if_rhsisreceiver);
8290 {
8291 // The {lhs} is a Primitive and the {rhs} is a JSReceiver.
8292 // Swapping {lhs} and {rhs} is not observable and doesn't
8293 // matter for the result, so we can just swap them and use
8294 // the JSReceiver handling below (for {lhs} being a JSReceiver).
8295 var_lhs.Bind(rhs);
8296 var_rhs.Bind(lhs);
8297 Goto(&loop);
8298 }
8299
8300 Bind(&if_rhsisnotreceiver);
8301 {
8302 // The {rhs} is not a JSReceiver and also not the same Symbol
8303 // as the {lhs}, so this equality check is considered false.
8304 Goto(&if_notequal);
8305 }
8306 }
8307
8308 Bind(&if_lhsissimd128value);
8309 {
8310 // Check if the {rhs} is also a Simd128Value.
8311 Label if_rhsissimd128value(this), if_rhsisnotsimd128value(this);
8312 Branch(Word32Equal(lhs_instance_type, rhs_instance_type),
8313 &if_rhsissimd128value, &if_rhsisnotsimd128value);
8314
8315 Bind(&if_rhsissimd128value);
8316 {
8317 // Both {lhs} and {rhs} are Simd128Values.
8318 GenerateEqual_Simd128Value_HeapObject(
8319 this, lhs, lhs_map, rhs, rhs_map, &if_equal, &if_notequal);
8320 }
8321
8322 Bind(&if_rhsisnotsimd128value);
8323 {
8324 // Check if the {rhs} is a JSReceiver.
8325 Label if_rhsisreceiver(this), if_rhsisnotreceiver(this);
8326 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
8327 Branch(IsJSReceiverInstanceType(rhs_instance_type),
8328 &if_rhsisreceiver, &if_rhsisnotreceiver);
8329
8330 Bind(&if_rhsisreceiver);
8331 {
8332 // The {lhs} is a Primitive and the {rhs} is a JSReceiver.
8333 // Swapping {lhs} and {rhs} is not observable and doesn't
8334 // matter for the result, so we can just swap them and use
8335 // the JSReceiver handling below (for {lhs} being a JSReceiver).
8336 var_lhs.Bind(rhs);
8337 var_rhs.Bind(lhs);
8338 Goto(&loop);
8339 }
8340
8341 Bind(&if_rhsisnotreceiver);
8342 {
8343 // The {rhs} is some other Primitive.
8344 Goto(&if_notequal);
8345 }
8346 }
8347 }
8348
8349 Bind(&if_lhsisreceiver);
8350 {
8351 // Check if the {rhs} is also a JSReceiver.
8352 Label if_rhsisreceiver(this), if_rhsisnotreceiver(this);
8353 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
8354 Branch(IsJSReceiverInstanceType(rhs_instance_type),
8355 &if_rhsisreceiver, &if_rhsisnotreceiver);
8356
8357 Bind(&if_rhsisreceiver);
8358 {
8359 // Both {lhs} and {rhs} are different JSReceiver references, so
8360 // this cannot be considered equal.
8361 Goto(&if_notequal);
8362 }
8363
8364 Bind(&if_rhsisnotreceiver);
8365 {
8366 // Check if {rhs} is Null or Undefined (an undetectable check
8367 // is sufficient here, since we already know that {rhs} is not
8368 // a JSReceiver).
8369 Label if_rhsisundetectable(this),
8370 if_rhsisnotundetectable(this, Label::kDeferred);
8371 Node* rhs_bitfield = LoadMapBitField(rhs_map);
8372 Branch(Word32Equal(
8373 Word32And(rhs_bitfield,
8374 Int32Constant(1 << Map::kIsUndetectable)),
8375 Int32Constant(0)),
8376 &if_rhsisnotundetectable, &if_rhsisundetectable);
8377
8378 Bind(&if_rhsisundetectable);
8379 {
8380 // Check if {lhs} is an undetectable JSReceiver.
8381 Node* lhs_bitfield = LoadMapBitField(lhs_map);
8382 Branch(Word32Equal(
8383 Word32And(lhs_bitfield,
8384 Int32Constant(1 << Map::kIsUndetectable)),
8385 Int32Constant(0)),
8386 &if_notequal, &if_equal);
8387 }
8388
8389 Bind(&if_rhsisnotundetectable);
8390 {
8391 // The {rhs} is some Primitive different from Null and
8392 // Undefined, need to convert {lhs} to Primitive first.
8393 Callable callable =
8394 CodeFactory::NonPrimitiveToPrimitive(isolate());
8395 var_lhs.Bind(CallStub(callable, context, lhs));
8396 Goto(&loop);
8397 }
8398 }
8399 }
8400 }
8401 }
8402 }
8403
8404 Bind(&do_rhsstringtonumber);
8405 {
8406 Callable callable = CodeFactory::StringToNumber(isolate());
8407 var_rhs.Bind(CallStub(callable, context, rhs));
8408 Goto(&loop);
8409 }
8410 }
8411
8412 Bind(&do_fcmp);
8413 {
8414 // Load the {lhs} and {rhs} floating point values.
8415 Node* lhs = var_fcmp_lhs.value();
8416 Node* rhs = var_fcmp_rhs.value();
8417
8418 // Perform a fast floating point comparison.
8419 Branch(Float64Equal(lhs, rhs), &if_equal, &if_notequal);
8420 }
8421
8422 Bind(&if_equal);
8423 {
8424 result.Bind(BooleanConstant(mode == kDontNegateResult));
8425 Goto(&end);
8426 }
8427
8428 Bind(&if_notequal);
8429 {
8430 result.Bind(BooleanConstant(mode == kNegateResult));
8431 Goto(&end);
8432 }
8433
8434 Bind(&end);
8435 return result.value();
8436 }
8437
8438 compiler::Node* CodeStubAssembler::StrictEqual(ResultMode mode,
8439 compiler::Node* lhs,
8440 compiler::Node* rhs,
8441 compiler::Node* context) {
8442 // Here's pseudo-code for the algorithm below in case of kDontNegateResult
8443 // mode; for kNegateResult mode we properly negate the result.
8444 //
8445 // if (lhs == rhs) {
8446 // if (lhs->IsHeapNumber()) return !std::isnan(HeapNumber::cast(lhs)->value());
8447 // return true;
8448 // }
8449 // if (!lhs->IsSmi()) {
8450 // if (lhs->IsHeapNumber()) {
8451 // if (rhs->IsSmi()) {
8452 // return Smi::cast(rhs)->value() == HeapNumber::cast(lhs)->value();
8453 // } else if (rhs->IsHeapNumber()) {
8454 // return HeapNumber::cast(rhs)->value() ==
8455 // HeapNumber::cast(lhs)->value();
8456 // } else {
8457 // return false;
8458 // }
8459 // } else {
8460 // if (rhs->IsSmi()) {
8461 // return false;
8462 // } else {
8463 // if (lhs->IsString()) {
8464 // if (rhs->IsString()) {
8465 // return %StringEqual(lhs, rhs);
8466 // } else {
8467 // return false;
8468 // }
8469 // } else if (lhs->IsSimd128()) {
8470 // if (rhs->IsSimd128()) {
8471 // return %StrictEqual(lhs, rhs);
8472 // } else { return false; }
8473 // } else {
8474 // return false;
8475 // }
8476 // }
8477 // }
8478 // } else {
8479 // if (rhs->IsSmi()) {
8480 // return false;
8481 // } else {
8482 // if (rhs->IsHeapNumber()) {
8483 // return Smi::cast(lhs)->value() == HeapNumber::cast(rhs)->value();
8484 // } else {
8485 // return false;
8486 // }
8487 // }
8488 // }
8489
8490 typedef compiler::Node Node;
8491
8492 Label if_equal(this), if_notequal(this), end(this);
8493 Variable result(this, MachineRepresentation::kTagged);
8494
8495 // Check if {lhs} and {rhs} refer to the same object.
8496 Label if_same(this), if_notsame(this);
8497 Branch(WordEqual(lhs, rhs), &if_same, &if_notsame);
8498
8499 Bind(&if_same);
8500 {
8501 // The {lhs} and {rhs} reference the exact same value, yet we need special
8502 // treatment for HeapNumber, as NaN is not equal to NaN.
8503 GenerateEqual_Same(this, lhs, &if_equal, &if_notequal);
8504 }
8505
8506 Bind(&if_notsame);
8507 {
8508 // The {lhs} and {rhs} reference different objects, yet for Smi, HeapNumber,
8509 // String and Simd128Value they can still be considered equal.
8510 Node* number_map = HeapNumberMapConstant();
8511
8512 // Check if {lhs} is a Smi or a HeapObject.
8513 Label if_lhsissmi(this), if_lhsisnotsmi(this);
8514 Branch(TaggedIsSmi(lhs), &if_lhsissmi, &if_lhsisnotsmi);
8515
8516 Bind(&if_lhsisnotsmi);
8517 {
8518 // Load the map of {lhs}.
8519 Node* lhs_map = LoadMap(lhs);
8520
8521 // Check if {lhs} is a HeapNumber.
8522 Label if_lhsisnumber(this), if_lhsisnotnumber(this);
8523 Branch(WordEqual(lhs_map, number_map), &if_lhsisnumber,
8524 &if_lhsisnotnumber);
8525
8526 Bind(&if_lhsisnumber);
8527 {
8528 // Check if {rhs} is a Smi or a HeapObject.
8529 Label if_rhsissmi(this), if_rhsisnotsmi(this);
8530 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
8531
8532 Bind(&if_rhsissmi);
8533 {
8534 // Convert {lhs} and {rhs} to floating point values.
8535 Node* lhs_value = LoadHeapNumberValue(lhs);
8536 Node* rhs_value = SmiToFloat64(rhs);
8537
8538 // Perform a floating point comparison of {lhs} and {rhs}.
8539 Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
8540 }
8541
8542 Bind(&if_rhsisnotsmi);
8543 {
8544 // Load the map of {rhs}.
8545 Node* rhs_map = LoadMap(rhs);
8546
8547 // Check if {rhs} is also a HeapNumber.
8548 Label if_rhsisnumber(this), if_rhsisnotnumber(this);
8549 Branch(WordEqual(rhs_map, number_map), &if_rhsisnumber,
8550 &if_rhsisnotnumber);
8551
8552 Bind(&if_rhsisnumber);
8553 {
8554 // Convert {lhs} and {rhs} to floating point values.
8555 Node* lhs_value = LoadHeapNumberValue(lhs);
8556 Node* rhs_value = LoadHeapNumberValue(rhs);
8557
8558 // Perform a floating point comparison of {lhs} and {rhs}.
8559 Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
8560 }
8561
8562 Bind(&if_rhsisnotnumber);
8563 Goto(&if_notequal);
8564 }
8565 }
8566
8567 Bind(&if_lhsisnotnumber);
8568 {
8569 // Check if {rhs} is a Smi or a HeapObject.
8570 Label if_rhsissmi(this), if_rhsisnotsmi(this);
8571 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
8572
8573 Bind(&if_rhsissmi);
8574 Goto(&if_notequal);
8575
8576 Bind(&if_rhsisnotsmi);
8577 {
8578 // Load the instance type of {lhs}.
8579 Node* lhs_instance_type = LoadMapInstanceType(lhs_map);
8580
8581 // Check if {lhs} is a String.
8582 Label if_lhsisstring(this), if_lhsisnotstring(this);
8583 Branch(IsStringInstanceType(lhs_instance_type), &if_lhsisstring,
8584 &if_lhsisnotstring);
8585
8586 Bind(&if_lhsisstring);
8587 {
8588 // Load the instance type of {rhs}.
8589 Node* rhs_instance_type = LoadInstanceType(rhs);
8590
8591 // Check if {rhs} is also a String.
8592 Label if_rhsisstring(this, Label::kDeferred),
8593 if_rhsisnotstring(this);
8594 Branch(IsStringInstanceType(rhs_instance_type), &if_rhsisstring,
8595 &if_rhsisnotstring);
8596
8597 Bind(&if_rhsisstring);
8598 {
8599 Callable callable = (mode == kDontNegateResult)
8600 ? CodeFactory::StringEqual(isolate())
8601 : CodeFactory::StringNotEqual(isolate());
8602 result.Bind(CallStub(callable, context, lhs, rhs));
8603 Goto(&end);
8604 }
8605
8606 Bind(&if_rhsisnotstring);
8607 Goto(&if_notequal);
8608 }
8609
8610 Bind(&if_lhsisnotstring);
8611 {
8612 // Check if {lhs} is a Simd128Value.
8613 Label if_lhsissimd128value(this), if_lhsisnotsimd128value(this);
8614 Branch(Word32Equal(lhs_instance_type,
8615 Int32Constant(SIMD128_VALUE_TYPE)),
8616 &if_lhsissimd128value, &if_lhsisnotsimd128value);
8617
8618 Bind(&if_lhsissimd128value);
8619 {
8620 // Load the map of {rhs}.
8621 Node* rhs_map = LoadMap(rhs);
8622
8623 // Check if {rhs} is also a Simd128Value that is equal to {lhs}.
8624 GenerateEqual_Simd128Value_HeapObject(
8625 this, lhs, lhs_map, rhs, rhs_map, &if_equal, &if_notequal);
8626 }
8627
8628 Bind(&if_lhsisnotsimd128value);
8629 Goto(&if_notequal);
8630 }
8631 }
8632 }
8633 }
8634
8635 Bind(&if_lhsissmi);
8636 {
8637 // We already know that {lhs} and {rhs} are not reference equal, and {lhs}
8638 // is a Smi; so {lhs} and {rhs} can only be strictly equal if {rhs} is a
8639 // HeapNumber with an equal floating point value.
8640
8641 // Check if {rhs} is a Smi or a HeapObject.
8642 Label if_rhsissmi(this), if_rhsisnotsmi(this);
8643 Branch(TaggedIsSmi(rhs), &if_rhsissmi, &if_rhsisnotsmi);
8644
8645 Bind(&if_rhsissmi);
8646 Goto(&if_notequal);
8647
8648 Bind(&if_rhsisnotsmi);
8649 {
8650 // Load the map of the {rhs}.
8651 Node* rhs_map = LoadMap(rhs);
8652
8653 // The {rhs} could be a HeapNumber with the same value as {lhs}.
8654 Label if_rhsisnumber(this), if_rhsisnotnumber(this);
8655 Branch(WordEqual(rhs_map, number_map), &if_rhsisnumber,
8656 &if_rhsisnotnumber);
8657
8658 Bind(&if_rhsisnumber);
8659 {
8660 // Convert {lhs} and {rhs} to floating point values.
8661 Node* lhs_value = SmiToFloat64(lhs);
8662 Node* rhs_value = LoadHeapNumberValue(rhs);
8663
8664 // Perform a floating point comparison of {lhs} and {rhs}.
8665 Branch(Float64Equal(lhs_value, rhs_value), &if_equal, &if_notequal);
8666 }
8667
8668 Bind(&if_rhsisnotnumber);
8669 Goto(&if_notequal);
8670 }
8671 }
8672 }
8673
8674 Bind(&if_equal);
8675 {
8676 result.Bind(BooleanConstant(mode == kDontNegateResult));
8677 Goto(&end);
8678 }
8679
8680 Bind(&if_notequal);
8681 {
8682 result.Bind(BooleanConstant(mode == kNegateResult));
8683 Goto(&end);
8684 }
8685
8686 Bind(&end);
8687 return result.value();
8688 }
8689
8690 // ECMA#sec-samevalue
8691 // This algorithm differs from the Strict Equality Comparison Algorithm in its
8692 // treatment of signed zeroes and NaNs.
8693 compiler::Node* CodeStubAssembler::SameValue(compiler::Node* lhs,
8694 compiler::Node* rhs,
8695 compiler::Node* context) {
8696 Variable var_result(this, MachineType::PointerRepresentation());
8697 Label strict_equal(this), out(this);
8698
8699 Node* const int_false = IntPtrConstant(0);
8700 Node* const int_true = IntPtrConstant(1);
8701
8702 Label if_equal(this), if_notequal(this);
8703 Branch(WordEqual(lhs, rhs), &if_equal, &if_notequal);
8704
8705 Bind(&if_equal);
8706 {
8707 // This covers the case when {lhs} == {rhs}. We can simply return true
8708 // because SameValue considers two NaNs to be equal.
8709
8710 var_result.Bind(int_true);
8711 Goto(&out);
8712 }
8713
8714 Bind(&if_notequal);
8715 {
8716 // This covers the case when {lhs} != {rhs}. We only handle numbers here
8717 // and defer to StrictEqual for the rest.
8718
8719 Node* const lhs_float = TryTaggedToFloat64(lhs, &strict_equal);
8720 Node* const rhs_float = TryTaggedToFloat64(rhs, &strict_equal);
8721
8722 Label if_lhsisnan(this), if_lhsnotnan(this);
8723 BranchIfFloat64IsNaN(lhs_float, &if_lhsisnan, &if_lhsnotnan);
8724
8725 Bind(&if_lhsisnan);
8726 {
8727 // Return true iff {rhs} is NaN.
8728
8729 Node* const result =
8730 Select(Float64Equal(rhs_float, rhs_float), int_false, int_true,
8731 MachineType::PointerRepresentation());
8732 var_result.Bind(result);
8733 Goto(&out);
8734 }
8735
8736 Bind(&if_lhsnotnan);
8737 {
8738 Label if_floatisequal(this), if_floatnotequal(this);
8739 Branch(Float64Equal(lhs_float, rhs_float), &if_floatisequal,
8740 &if_floatnotequal);
8741
8742 Bind(&if_floatisequal);
8743 {
8744 // We still need to handle the case when {lhs} and {rhs} are -0.0 and
8745 // 0.0 (or vice versa). Compare the high word to
8746 // distinguish between the two.
8747
8748 Node* const lhs_hi_word = Float64ExtractHighWord32(lhs_float);
8749 Node* const rhs_hi_word = Float64ExtractHighWord32(rhs_float);
8750
8751 // If x is +0 and y is -0, return false.
8752 // If x is -0 and y is +0, return false.
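// (For float64 values that already compare equal, only the zeros can differ
// in representation: +0.0 has high word 0x00000000 while -0.0 has
// 0x80000000, so comparing the high words separates exactly that pair.)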
8753
8754 Node* const result = Word32Equal(lhs_hi_word, rhs_hi_word);
8755 var_result.Bind(result);
8756 Goto(&out);
8757 }
8758
8759 Bind(&if_floatnotequal);
8760 {
8761 var_result.Bind(int_false);
8762 Goto(&out);
8763 }
8764 }
8765 }
8766
8767 Bind(&strict_equal);
8768 {
8769 Node* const is_equal = StrictEqual(kDontNegateResult, lhs, rhs, context);
8770 Node* const result = WordEqual(is_equal, TrueConstant());
8771 var_result.Bind(result);
8772 Goto(&out);
8773 }
8774
8775 Bind(&out);
8776 return var_result.value();
8777 }
8778
8779 compiler::Node* CodeStubAssembler::ForInFilter(compiler::Node* key,
8780 compiler::Node* object,
8781 compiler::Node* context) {
8782 Label return_undefined(this, Label::kDeferred), return_to_name(this),
8783 end(this);
8784
8785 Variable var_result(this, MachineRepresentation::kTagged);
8786
8787 Node* has_property =
8788 HasProperty(object, key, context, Runtime::kForInHasProperty);
8789
8790 Branch(WordEqual(has_property, BooleanConstant(true)), &return_to_name,
8791 &return_undefined);
8792
8793 Bind(&return_to_name);
8794 {
8795 var_result.Bind(ToName(context, key));
8796 Goto(&end);
8797 }
8798
8799 Bind(&return_undefined);
8800 {
8801 var_result.Bind(UndefinedConstant());
8802 Goto(&end);
8803 }
8804
8805 Bind(&end);
8806 return var_result.value();
8807 }
8808
8809 compiler::Node* CodeStubAssembler::HasProperty(
8810 compiler::Node* object, compiler::Node* key, compiler::Node* context,
8811 Runtime::FunctionId fallback_runtime_function_id) {
8812 typedef compiler::Node Node;
8813 typedef CodeStubAssembler::Label Label;
8814 typedef CodeStubAssembler::Variable Variable;
8815
8816 Label call_runtime(this, Label::kDeferred), return_true(this),
8817 return_false(this), end(this);
8818
8819 CodeStubAssembler::LookupInHolder lookup_property_in_holder =
8820 [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
8821 Node* holder_instance_type, Node* unique_name,
8822 Label* next_holder, Label* if_bailout) {
8823 TryHasOwnProperty(holder, holder_map, holder_instance_type, unique_name,
8824 &return_true, next_holder, if_bailout);
8825 };
8826
8827 CodeStubAssembler::LookupInHolder lookup_element_in_holder =
8828 [this, &return_true](Node* receiver, Node* holder, Node* holder_map,
8829 Node* holder_instance_type, Node* index,
8830 Label* next_holder, Label* if_bailout) {
8831 TryLookupElement(holder, holder_map, holder_instance_type, index,
8832 &return_true, next_holder, if_bailout);
8833 };
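// (A sketch of the contract: TryPrototypeChainLookup walks {object}, its
// prototype, and so on, invoking the property lambda for unique-name keys
// and the element lambda for index keys on each holder; falling off the end
// of the chain lands in return_false, and anything unsupported bails out to
// call_runtime.)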
8834
8835 TryPrototypeChainLookup(object, key, lookup_property_in_holder,
8836 lookup_element_in_holder, &return_false,
8837 &call_runtime);
8838
8839 Variable result(this, MachineRepresentation::kTagged);
8840 Bind(&return_true);
8841 {
8842 result.Bind(BooleanConstant(true));
8843 Goto(&end);
8844 }
8845
8846 Bind(&return_false);
8847 {
8848 result.Bind(BooleanConstant(false));
8849 Goto(&end);
8850 }
8851
8852 Bind(&call_runtime);
8853 {
8854 result.Bind(
8855 CallRuntime(fallback_runtime_function_id, context, object, key));
8856 Goto(&end);
8857 }
8858
8859 Bind(&end);
8860 return result.value();
8861 }
8862
8863 compiler::Node* CodeStubAssembler::Typeof(compiler::Node* value,
8864 compiler::Node* context) {
8865 Variable result_var(this, MachineRepresentation::kTagged);
8866
8867 Label return_number(this, Label::kDeferred), if_oddball(this),
8868 return_function(this), return_undefined(this), return_object(this),
8869 return_string(this), return_result(this);
8870
8871 GotoIf(TaggedIsSmi(value), &return_number);
8872
8873 Node* map = LoadMap(value);
8874
8875 GotoIf(IsHeapNumberMap(map), &return_number);
8876
8877 Node* instance_type = LoadMapInstanceType(map);
8878
8879 GotoIf(Word32Equal(instance_type, Int32Constant(ODDBALL_TYPE)), &if_oddball);
8880
8881 Node* callable_or_undetectable_mask = Word32And(
8882 LoadMapBitField(map),
8883 Int32Constant(1 << Map::kIsCallable | 1 << Map::kIsUndetectable));
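// Dispatch on the two extracted bits (a sketch):
//   only kIsCallable set                  -> "function"
//   kIsUndetectable set (any combination) -> "undefined" (undetectable
//                                            objects masquerade as undefined)
//   neither bit set                       -> fall through to the checks below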
8884
8885 GotoIf(Word32Equal(callable_or_undetectable_mask,
8886 Int32Constant(1 << Map::kIsCallable)),
8887 &return_function);
8888
8889 GotoUnless(Word32Equal(callable_or_undetectable_mask, Int32Constant(0)),
8890 &return_undefined);
8891
8892 GotoIf(IsJSReceiverInstanceType(instance_type), &return_object);
8893
8894 GotoIf(IsStringInstanceType(instance_type), &return_string);
8895
8896 #define SIMD128_BRANCH(TYPE, Type, type, lane_count, lane_type) \
8897 Label return_##type(this); \
8898 Node* type##_map = HeapConstant(factory()->type##_map()); \
8899 GotoIf(WordEqual(map, type##_map), &return_##type);
8900 SIMD128_TYPES(SIMD128_BRANCH)
8901 #undef SIMD128_BRANCH
8902
8903 CSA_ASSERT(this, Word32Equal(instance_type, Int32Constant(SYMBOL_TYPE)));
8904 result_var.Bind(HeapConstant(isolate()->factory()->symbol_string()));
8905 Goto(&return_result);
8906
8907 Bind(&return_number);
8908 {
8909 result_var.Bind(HeapConstant(isolate()->factory()->number_string()));
8910 Goto(&return_result);
8911 }
8912
8913 Bind(&if_oddball);
8914 {
8915 Node* type = LoadObjectField(value, Oddball::kTypeOfOffset);
8916 result_var.Bind(type);
8917 Goto(&return_result);
8918 }
8919
8920 Bind(&return_function);
8921 {
8922 result_var.Bind(HeapConstant(isolate()->factory()->function_string()));
8923 Goto(&return_result);
8924 }
8925
8926 Bind(&return_undefined);
8927 {
8928 result_var.Bind(HeapConstant(isolate()->factory()->undefined_string()));
8929 Goto(&return_result);
8930 }
8931
8932 Bind(&return_object);
8933 {
8934 result_var.Bind(HeapConstant(isolate()->factory()->object_string()));
8935 Goto(&return_result);
8936 }
8937
8938 Bind(&return_string);
8939 {
8940 result_var.Bind(HeapConstant(isolate()->factory()->string_string()));
8941 Goto(&return_result);
8942 }
8943
8944 #define SIMD128_BIND_RETURN(TYPE, Type, type, lane_count, lane_type) \
8945 Bind(&return_##type); \
8946 { \
8947 result_var.Bind(HeapConstant(isolate()->factory()->type##_string())); \
8948 Goto(&return_result); \
8949 }
8950 SIMD128_TYPES(SIMD128_BIND_RETURN)
8951 #undef SIMD128_BIND_RETURN
8952
8953 Bind(&return_result);
8954 return result_var.value();
8955 }
8956
8957 compiler::Node* CodeStubAssembler::InstanceOf(compiler::Node* object,
8958 compiler::Node* callable,
8959 compiler::Node* context) {
8960 Label return_runtime(this, Label::kDeferred), end(this);
8961 Variable result(this, MachineRepresentation::kTagged);
8962
8963 // Check if no one installed @@hasInstance somewhere.
8964 GotoUnless(
8965 WordEqual(LoadObjectField(LoadRoot(Heap::kHasInstanceProtectorRootIndex),
8966 PropertyCell::kValueOffset),
8967 SmiConstant(Smi::FromInt(Isolate::kProtectorValid))),
8968 &return_runtime);
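// (The protector is a one-way switch: it is invalidated the first time
// anyone installs a custom @@hasInstance, after which every instanceof
// takes the runtime path below.)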

  // Check if {callable} is a valid receiver.
  GotoIf(TaggedIsSmi(callable), &return_runtime);
  GotoUnless(IsCallableMap(LoadMap(callable)), &return_runtime);

  // Use the inline OrdinaryHasInstance directly.
  result.Bind(OrdinaryHasInstance(context, callable, object));
  Goto(&end);

  // TODO(bmeurer): Use GetPropertyStub here once available.
  Bind(&return_runtime);
  {
    result.Bind(CallRuntime(Runtime::kInstanceOf, context, object, callable));
    Goto(&end);
  }

  Bind(&end);
  return result.value();
}

compiler::Node* CodeStubAssembler::NumberInc(compiler::Node* value) {
  Variable var_result(this, MachineRepresentation::kTagged),
      var_finc_value(this, MachineRepresentation::kFloat64);
  Label if_issmi(this), if_isnotsmi(this), do_finc(this), end(this);
  Branch(TaggedIsSmi(value), &if_issmi, &if_isnotsmi);

  Bind(&if_issmi);
  {
    // Try fast Smi addition first.
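    // Adding the raw tagged words is sound: a Smi is its integer value
    // shifted left by the tag size with zero tag bits, so a plain word add
    // of two Smis produces the Smi for the sum (modulo the overflow check
    // below).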
    Node* one = SmiConstant(Smi::FromInt(1));
    Node* pair = IntPtrAddWithOverflow(BitcastTaggedToWord(value),
                                       BitcastTaggedToWord(one));
    Node* overflow = Projection(1, pair);

    // Check if the Smi addition overflowed.
    Label if_overflow(this), if_notoverflow(this);
    Branch(overflow, &if_overflow, &if_notoverflow);

    Bind(&if_notoverflow);
    var_result.Bind(Projection(0, pair));
    Goto(&end);

    Bind(&if_overflow);
    {
      var_finc_value.Bind(SmiToFloat64(value));
      Goto(&do_finc);
    }
  }

  Bind(&if_isnotsmi);
  {
    // Check if the value is a HeapNumber.
    CSA_ASSERT(this, IsHeapNumberMap(LoadMap(value)));

    // Load the HeapNumber value.
    var_finc_value.Bind(LoadHeapNumberValue(value));
    Goto(&do_finc);
  }

  Bind(&do_finc);
  {
    Node* finc_value = var_finc_value.value();
    Node* one = Float64Constant(1.0);
    Node* finc_result = Float64Add(finc_value, one);
    var_result.Bind(AllocateHeapNumberWithValue(finc_result));
    Goto(&end);
  }

  Bind(&end);
  return var_result.value();
}

compiler::Node* CodeStubAssembler::CreateArrayIterator(
    compiler::Node* array, compiler::Node* array_map,
    compiler::Node* array_type, compiler::Node* context, IterationKind mode) {
  int kBaseMapIndex = 0;
  switch (mode) {
    case IterationKind::kKeys:
      kBaseMapIndex = Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX;
      break;
    case IterationKind::kValues:
      kBaseMapIndex = Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
      break;
    case IterationKind::kEntries:
      kBaseMapIndex = Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX;
      break;
  }

  // Fast Array iterator map index:
  // (kBaseIndex + kFastIteratorOffset) + ElementsKind (for JSArrays)
  // kBaseIndex + (ElementsKind - UINT8_ELEMENTS) (for JSTypedArrays)
  const int kFastIteratorOffset =
      Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX -
      Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
  STATIC_ASSERT(kFastIteratorOffset ==
                (Context::FAST_SMI_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX -
                 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX));

  // Slow Array iterator map index: (kBaseIndex + kSlowIteratorOffset)
  const int kSlowIteratorOffset =
      Context::GENERIC_ARRAY_VALUE_ITERATOR_MAP_INDEX -
      Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX;
  STATIC_ASSERT(kSlowIteratorOffset ==
                (Context::GENERIC_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX -
                 Context::UINT8_ARRAY_KEY_VALUE_ITERATOR_MAP_INDEX));
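  // For example, in kValues mode a packed FAST_SMI_ELEMENTS JSArray maps to
  // kBaseMapIndex + kFastIteratorOffset + FAST_SMI_ELEMENTS ==
  // Context::FAST_SMI_ARRAY_VALUE_ITERATOR_MAP_INDEX, and a Uint8Array maps
  // to kBaseMapIndex + (UINT8_ELEMENTS - UINT8_ELEMENTS) ==
  // Context::UINT8_ARRAY_VALUE_ITERATOR_MAP_INDEX.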

  // Assert: Type(array) is Object
  CSA_ASSERT(this, IsJSReceiverInstanceType(array_type));

  Variable var_result(this, MachineRepresentation::kTagged);
  Variable var_map_index(this, MachineType::PointerRepresentation());
  Variable var_array_map(this, MachineRepresentation::kTagged);

  Label return_result(this);
  Label allocate_iterator(this);

  if (mode == IterationKind::kKeys) {
    // There are only two key iterator maps, so branch only on whether the
    // receiver is a TypedArray.

    Label if_istypedarray(this), if_isgeneric(this);

    Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
           &if_istypedarray, &if_isgeneric);

    Bind(&if_isgeneric);
    {
      Label if_isfast(this), if_isslow(this);
      BranchIfFastJSArray(array, context, &if_isfast, &if_isslow);

      Bind(&if_isfast);
      {
        var_map_index.Bind(
            IntPtrConstant(Context::FAST_ARRAY_KEY_ITERATOR_MAP_INDEX));
        var_array_map.Bind(array_map);
        Goto(&allocate_iterator);
      }

      Bind(&if_isslow);
      {
        var_map_index.Bind(
            IntPtrConstant(Context::GENERIC_ARRAY_KEY_ITERATOR_MAP_INDEX));
        var_array_map.Bind(UndefinedConstant());
        Goto(&allocate_iterator);
      }
    }

    Bind(&if_istypedarray);
    {
      var_map_index.Bind(
          IntPtrConstant(Context::TYPED_ARRAY_KEY_ITERATOR_MAP_INDEX));
      var_array_map.Bind(UndefinedConstant());
      Goto(&allocate_iterator);
    }
  } else {
    Label if_istypedarray(this), if_isgeneric(this);
    Branch(Word32Equal(array_type, Int32Constant(JS_TYPED_ARRAY_TYPE)),
           &if_istypedarray, &if_isgeneric);

    Bind(&if_isgeneric);
    {
      Label if_isfast(this), if_isslow(this);
      BranchIfFastJSArray(array, context, &if_isfast, &if_isslow);

      Bind(&if_isfast);
      {
        Label if_ispacked(this), if_isholey(this);
        Node* elements_kind = LoadMapElementsKind(array_map);
        Branch(IsHoleyFastElementsKind(elements_kind), &if_isholey,
               &if_ispacked);

        Bind(&if_isholey);
        {
          // Fast holey JSArrays can treat the hole as undefined if the
          // protector cell is valid, and the prototype chain is unchanged
          // from its initial state (because the protector cell is only
          // tracked for the initial Array and Object prototypes). Check
          // these conditions here, and take the slow path if any fail.
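          // Concretely, the chain must be exactly
          // array -> %ArrayPrototype% -> %ObjectPrototype% -> null.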
          Node* protector_cell = LoadRoot(Heap::kArrayProtectorRootIndex);
          DCHECK(isolate()->heap()->array_protector()->IsPropertyCell());
          GotoUnless(
              WordEqual(
                  LoadObjectField(protector_cell, PropertyCell::kValueOffset),
                  SmiConstant(Smi::FromInt(Isolate::kProtectorValid))),
              &if_isslow);

          Node* native_context = LoadNativeContext(context);

          Node* prototype = LoadMapPrototype(array_map);
          Node* array_prototype = LoadContextElement(
              native_context, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
          GotoUnless(WordEqual(prototype, array_prototype), &if_isslow);

          Node* map = LoadMap(prototype);
          prototype = LoadMapPrototype(map);
          Node* object_prototype = LoadContextElement(
              native_context, Context::INITIAL_OBJECT_PROTOTYPE_INDEX);
          GotoUnless(WordEqual(prototype, object_prototype), &if_isslow);

          map = LoadMap(prototype);
          prototype = LoadMapPrototype(map);
          Branch(IsNull(prototype), &if_ispacked, &if_isslow);
        }
        Bind(&if_ispacked);
        {
          Node* map_index =
              IntPtrAdd(IntPtrConstant(kBaseMapIndex + kFastIteratorOffset),
                        LoadMapElementsKind(array_map));
          CSA_ASSERT(this,
                     IntPtrGreaterThanOrEqual(
                         map_index,
                         IntPtrConstant(kBaseMapIndex + kFastIteratorOffset)));
          CSA_ASSERT(this,
                     IntPtrLessThan(
                         map_index,
                         IntPtrConstant(kBaseMapIndex + kSlowIteratorOffset)));

          var_map_index.Bind(map_index);
          var_array_map.Bind(array_map);
          Goto(&allocate_iterator);
        }
      }

      Bind(&if_isslow);
      {
        Node* map_index = IntPtrAdd(IntPtrConstant(kBaseMapIndex),
                                    IntPtrConstant(kSlowIteratorOffset));
        var_map_index.Bind(map_index);
        var_array_map.Bind(UndefinedConstant());
        Goto(&allocate_iterator);
      }
    }

    Bind(&if_istypedarray);
    {
      Node* map_index =
          IntPtrAdd(IntPtrConstant(kBaseMapIndex - UINT8_ELEMENTS),
                    LoadMapElementsKind(array_map));
      CSA_ASSERT(this,
                 IntPtrLessThan(
                     map_index,
                     IntPtrConstant(kBaseMapIndex + kFastIteratorOffset)));
      CSA_ASSERT(this, IntPtrGreaterThanOrEqual(map_index,
                                                IntPtrConstant(kBaseMapIndex)));
      var_map_index.Bind(map_index);
      var_array_map.Bind(UndefinedConstant());
      Goto(&allocate_iterator);
    }
  }

  Bind(&allocate_iterator);
  {
    Node* map =
        LoadFixedArrayElement(LoadNativeContext(context), var_map_index.value(),
                              0, CodeStubAssembler::INTPTR_PARAMETERS);
    var_result.Bind(AllocateJSArrayIterator(array, var_array_map.value(), map));
    Goto(&return_result);
  }

  Bind(&return_result);
  return var_result.value();
}

compiler::Node* CodeStubAssembler::AllocateJSArrayIterator(
    compiler::Node* array, compiler::Node* array_map, compiler::Node* map) {
  Node* iterator = Allocate(JSArrayIterator::kSize);
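  // {iterator} is freshly allocated, so initializing its fields does not
  // require write barriers.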
  StoreMapNoWriteBarrier(iterator, map);
  StoreObjectFieldRoot(iterator, JSArrayIterator::kPropertiesOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldRoot(iterator, JSArrayIterator::kElementsOffset,
                       Heap::kEmptyFixedArrayRootIndex);
  StoreObjectFieldNoWriteBarrier(iterator,
                                 JSArrayIterator::kIteratedObjectOffset, array);
  StoreObjectFieldNoWriteBarrier(iterator, JSArrayIterator::kNextIndexOffset,
                                 SmiConstant(Smi::FromInt(0)));
  StoreObjectFieldNoWriteBarrier(
      iterator, JSArrayIterator::kIteratedObjectMapOffset, array_map);
  return iterator;
}

compiler::Node* CodeStubAssembler::IsDetachedBuffer(compiler::Node* buffer) {
  CSA_ASSERT(this, HasInstanceType(buffer, JS_ARRAY_BUFFER_TYPE));

  Node* buffer_bit_field = LoadObjectField(
      buffer, JSArrayBuffer::kBitFieldOffset, MachineType::Uint32());
  Node* was_neutered_mask = Int32Constant(JSArrayBuffer::WasNeutered::kMask);

  return Word32NotEqual(Word32And(buffer_bit_field, was_neutered_mask),
                        Int32Constant(0));
}

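// Accessor for the arguments of the stub frame currently being built. The
// arguments live above the frame pointer: arguments_ points at argument 0,
// argument i is found at arguments_ - i * kPointerSize, and the receiver
// sits one slot above argument 0 (see GetReceiver and AtIndex below).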
CodeStubArguments::CodeStubArguments(CodeStubAssembler* assembler,
                                     compiler::Node* argc,
                                     CodeStubAssembler::ParameterMode mode)
    : assembler_(assembler),
      argc_(argc),
      arguments_(nullptr),
      fp_(assembler->LoadFramePointer()) {
  compiler::Node* offset = assembler->ElementOffsetFromIndex(
      argc_, FAST_ELEMENTS, mode,
      (StandardFrameConstants::kFixedSlotCountAboveFp - 1) * kPointerSize);
  arguments_ = assembler_->IntPtrAddFoldConstants(fp_, offset);
  if (mode == CodeStubAssembler::INTEGER_PARAMETERS) {
    argc_ = assembler->ChangeInt32ToIntPtr(argc_);
  } else if (mode == CodeStubAssembler::SMI_PARAMETERS) {
    argc_ = assembler->SmiUntag(argc_);
  }
}

compiler::Node* CodeStubArguments::GetReceiver() {
  return assembler_->Load(MachineType::AnyTagged(), arguments_,
                          assembler_->IntPtrConstant(kPointerSize));
}

compiler::Node* CodeStubArguments::AtIndex(
    compiler::Node* index, CodeStubAssembler::ParameterMode mode) {
  typedef compiler::Node Node;
  Node* negated_index = assembler_->IntPtrSubFoldConstants(
      assembler_->IntPtrOrSmiConstant(0, mode), index);
  Node* offset =
      assembler_->ElementOffsetFromIndex(negated_index, FAST_ELEMENTS, mode, 0);
  return assembler_->Load(MachineType::AnyTagged(), arguments_, offset);
}

compiler::Node* CodeStubArguments::AtIndex(int index) {
  return AtIndex(assembler_->IntPtrConstant(index));
}

void CodeStubArguments::ForEach(const CodeStubAssembler::VariableList& vars,
                                CodeStubArguments::ForEachBodyFunction body,
                                compiler::Node* first, compiler::Node* last,
                                CodeStubAssembler::ParameterMode mode) {
  assembler_->Comment("CodeStubArguments::ForEach");
  DCHECK_IMPLIES(first == nullptr || last == nullptr,
                 mode == CodeStubAssembler::INTPTR_PARAMETERS);
  if (first == nullptr) {
    first = assembler_->IntPtrOrSmiConstant(0, mode);
  }
  if (last == nullptr) {
    last = argc_;
  }
  compiler::Node* start = assembler_->IntPtrSubFoldConstants(
      arguments_,
      assembler_->ElementOffsetFromIndex(first, FAST_ELEMENTS, mode));
  compiler::Node* end = assembler_->IntPtrSubFoldConstants(
      arguments_,
      assembler_->ElementOffsetFromIndex(last, FAST_ELEMENTS, mode));
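  // Arguments with higher indices live at lower addresses, hence the loop
  // below advances the pointer by -kPointerSize per iteration.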
  assembler_->BuildFastLoop(
      vars, MachineType::PointerRepresentation(), start, end,
      [body](CodeStubAssembler* assembler, compiler::Node* current) {
        Node* arg = assembler->Load(MachineType::AnyTagged(), current);
        body(assembler, arg);
      },
      -kPointerSize, CodeStubAssembler::IndexAdvanceMode::kPost);
}

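// Drops all arguments together with the receiver (hence argc_ + 1) before
// returning {value} to the caller.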
void CodeStubArguments::PopAndReturn(compiler::Node* value) {
  assembler_->PopAndReturn(
      assembler_->IntPtrAddFoldConstants(argc_, assembler_->IntPtrConstant(1)),
      value);
}

compiler::Node* CodeStubAssembler::IsFastElementsKind(
    compiler::Node* elements_kind) {
  return Uint32LessThanOrEqual(elements_kind,
                               Int32Constant(LAST_FAST_ELEMENTS_KIND));
}

compiler::Node* CodeStubAssembler::IsHoleyFastElementsKind(
    compiler::Node* elements_kind) {
  CSA_ASSERT(this, IsFastElementsKind(elements_kind));

  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == (FAST_SMI_ELEMENTS | 1));
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == (FAST_ELEMENTS | 1));
  STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == (FAST_DOUBLE_ELEMENTS | 1));

  // Holey fast elements kinds differ from their packed counterparts only in
  // bit 0 (as asserted above), so testing that bit suffices.
  Node* holey_elements = Word32And(elements_kind, Int32Constant(1));
  return Word32Equal(holey_elements, Int32Constant(1));
}

}  // namespace internal
}  // namespace v8