// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_X87

#include "src/code-stubs.h"
#include "src/api-arguments.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/ic/handler-compiler.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"
#include "src/x87/code-stubs-x87.h"
#include "src/x87/frames-x87.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
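  // Reshuffle the stack for the Runtime::kNewArray call below: pop the return
  // address, store edi (the constructor) into the stack slot just beyond the
  // arguments, re-push edi and ebx followed by the return address, and grow
  // the argument count by the three extra slots.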
  __ pop(ecx);
  __ mov(MemOperand(esp, eax, times_4, 0), edi);
  __ push(edi);
  __ push(ebx);
  __ push(ecx);
  __ add(eax, Immediate(3));
  __ TailCallRuntime(Runtime::kNewArray);
}

void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
  descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void FastFunctionBindStub::InitializeDescriptor(
    CodeStubDescriptor* descriptor) {
  Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
  descriptor->Initialize(eax, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
}

void HydrogenCodeStub::GenerateLightweightMiss(MacroAssembler* masm,
                                               ExternalReference miss) {
  // Update the static counter each time a new code stub is generated.
  isolate()->counters()->code_stubs()->Increment();

  CallInterfaceDescriptor descriptor = GetCallInterfaceDescriptor();
  int param_count = descriptor.GetRegisterParameterCount();
  {
    // Call the runtime system in a fresh internal frame.
    FrameScope scope(masm, StackFrame::INTERNAL);
    DCHECK(param_count == 0 ||
           eax.is(descriptor.GetRegisterParameter(param_count - 1)));
    // Push arguments
    for (int i = 0; i < param_count; ++i) {
      __ push(descriptor.GetRegisterParameter(i));
    }
    __ CallExternalReference(miss, param_count);
  }

  __ ret(0);
}


void StoreBufferOverflowStub::Generate(MacroAssembler* masm) {
  // We don't allow a GC during a store buffer overflow so there is no need to
  // store the registers in any particular way, but we do have to store and
  // restore them.
  __ pushad();
  if (save_doubles()) {
    // Save the FPU state in the 108-byte m108byte area.
    __ sub(esp, Immediate(108));
    __ fnsave(Operand(esp, 0));
  }
  const int argument_count = 1;

  AllowExternalCallThatCantCauseGC scope(masm);
  __ PrepareCallCFunction(argument_count, ecx);
  __ mov(Operand(esp, 0 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ CallCFunction(
      ExternalReference::store_buffer_overflow_function(isolate()),
      argument_count);
  if (save_doubles()) {
    // Restore the FPU state from the 108-byte m108byte area.
    __ frstor(Operand(esp, 0));
    __ add(esp, Immediate(108));
  }
  __ popad();
  __ ret(0);
}


class FloatingPointHelper : public AllStatic {
 public:
  enum ArgLocation {
    ARGS_ON_STACK,
    ARGS_IN_REGISTERS
  };

  // Code pattern for loading a floating point value. The input value must
  // be either a smi or a heap number object (fp value). Requirements:
  // the operand is in the register |number|. Returns the operand as a
  // floating point number on the FPU stack.
  static void LoadFloatOperand(MacroAssembler* masm, Register number);

  // Test if operands are smis or number objects (fp). Requirements:
  // operand_1 in eax, operand_2 in edx; falls through on float
  // operands, jumps to the non_float label otherwise.
  static void CheckFloatOperands(MacroAssembler* masm,
                                 Label* non_float,
                                 Register scratch);
};


void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register input_reg = this->source();
  Register final_result_reg = this->destination();
  DCHECK(is_truncating());

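  // Truncate the double to an int32 using only integer operations: split the
  // value into its mantissa and exponent words, shift the mantissa (with the
  // hidden bit restored) by the unbiased exponent, and negate the result if
  // the sign of the original double was set.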
  Label check_negative, process_64_bits, done, done_no_stash;

  int double_offset = offset();

  // Account for return address and saved regs if input is esp.
  if (input_reg.is(esp)) double_offset += 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(input_reg, double_offset));
  MemOperand exponent_operand(MemOperand(input_reg,
                                         double_offset + kDoubleSize / 2));

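  // Pick a scratch register that is distinct from both the input register and
  // the requested result register.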
  Register scratch1;
  {
    Register scratch_candidates[3] = { ebx, edx, edi };
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (!final_result_reg.is(scratch1) && !input_reg.is(scratch1)) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg.is(ecx) ? eax : final_result_reg;
  // Save ecx if it isn't the return register (and therefore volatile), or,
  // if it is the return register, save the temp register (eax) we use in its
  // stead for the result.
  Register save_reg = final_result_reg.is(ecx) ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  bool stash_exponent_copy = !input_reg.is(esp);
  __ mov(scratch1, mantissa_operand);
  __ mov(ecx, exponent_operand);
  if (stash_exponent_copy) __ push(ecx);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  __ sub(ecx, Immediate(delta));
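  // ecx now holds the left-shift amount (unbiased exponent minus the mantissa
  // width); shifting by more than 31 moves every mantissa bit past bit 31, so
  // the truncated 32-bit result is zero.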
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  // Result must be extracted from shifted 32-bit mantissa
  __ sub(ecx, Immediate(delta));
  __ neg(ecx);
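  // ecx is now the right-shift count for the 64-bit significand held in
  // result_reg:scratch1 (high:low). shrd/shr below perform that shift; since
  // x86 shifts use the count mod 32, counts of 32 or more require taking the
  // low word from the shifted high word instead.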
  if (stash_exponent_copy) {
    __ mov(result_reg, MemOperand(esp, 0));
  } else {
    __ mov(result_reg, exponent_operand);
  }
  __ and_(result_reg,
          Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
  __ add(result_reg,
         Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
  __ shrd_cl(scratch1, result_reg);
  __ shr_cl(result_reg);
  __ test(ecx, Immediate(32));
  {
    Label skip_mov;
    __ j(equal, &skip_mov, Label::kNear);
    __ mov(scratch1, result_reg);
    __ bind(&skip_mov);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  if (stash_exponent_copy) {
    __ cmp(MemOperand(esp, 0), Immediate(0));
  } else {
    __ cmp(exponent_operand, Immediate(0));
  }
  {
    Label skip_mov;
    __ j(less_equal, &skip_mov, Label::kNear);
    __ mov(result_reg, scratch1);
    __ bind(&skip_mov);
  }

  // Restore registers
  __ bind(&done);
  if (stash_exponent_copy) {
    __ add(esp, Immediate(kDoubleSize / 2));
  }
  __ bind(&done_no_stash);
  if (!final_result_reg.is(result_reg)) {
    DCHECK(final_result_reg.is(ecx));
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}


void FloatingPointHelper::LoadFloatOperand(MacroAssembler* masm,
                                           Register number) {
  Label load_smi, done;

  __ JumpIfSmi(number, &load_smi, Label::kNear);
  __ fld_d(FieldOperand(number, HeapNumber::kValueOffset));
  __ jmp(&done, Label::kNear);

  __ bind(&load_smi);
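  // fild_s only accepts a memory operand, so bounce the untagged smi through
  // the stack.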
  __ SmiUntag(number);
  __ push(number);
  __ fild_s(Operand(esp, 0));
  __ pop(number);

  __ bind(&done);
}


void FloatingPointHelper::CheckFloatOperands(MacroAssembler* masm,
                                             Label* non_float,
                                             Register scratch) {
  Label test_other, done;
  // Test if both operands are floats or smis -> scratch = k_is_float;
  // otherwise scratch = k_not_float.
  __ JumpIfSmi(edx, &test_other, Label::kNear);
  __ mov(scratch, FieldOperand(edx, HeapObject::kMapOffset));
  Factory* factory = masm->isolate()->factory();
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // The argument in edx is not a number -> NaN.

  __ bind(&test_other);
  __ JumpIfSmi(eax, &done, Label::kNear);
  __ mov(scratch, FieldOperand(eax, HeapObject::kMapOffset));
  __ cmp(scratch, factory->heap_number_map());
  __ j(not_equal, non_float);  // The argument in eax is not a number -> NaN.

  // Fall-through: Both operands are numbers.
  __ bind(&done);
}


void MathPowStub::Generate(MacroAssembler* masm) {
  const Register scratch = ecx;

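  // Both doubles were passed on the stack; the extra 4-byte (kPointerSize)
  // offset in the loads below skips the return address.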
  // Load the double_exponent into the x87 FPU.
  __ fld_d(Operand(esp, 0 * kDoubleSize + 4));
  // Load the double_base into the x87 FPU.
  __ fld_d(Operand(esp, 1 * kDoubleSize + 4));

  // Call the ieee754 runtime directly.
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(4, scratch);
    // Put the double_base parameter on the call stack.
    __ fstp_d(Operand(esp, 0 * kDoubleSize));
    // Put the double_exponent parameter on the call stack.
    __ fstp_d(Operand(esp, 1 * kDoubleSize));
    __ CallCFunction(ExternalReference::power_double_double_function(isolate()),
                     4);
  }
  // The return value is in st(0) on ia32.
  __ ret(0);
}


void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
  Label miss;
  Register receiver = LoadDescriptor::ReceiverRegister();
  // With careful management, we won't have to save slot and vector on
  // the stack. Simply handle the possibly missing case first.
  // TODO(mvstanton): this code can be more efficient.
  __ cmp(FieldOperand(receiver, JSFunction::kPrototypeOrInitialMapOffset),
         Immediate(isolate()->factory()->the_hole_value()));
  __ j(equal, &miss);
  __ TryGetFunctionPrototype(receiver, eax, ebx, &miss);
  __ ret(0);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
}


void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
  // Return address is on the stack.
  Label miss;

  Register receiver = LoadDescriptor::ReceiverRegister();
  Register index = LoadDescriptor::NameRegister();
  Register scratch = edi;
  DCHECK(!scratch.is(receiver) && !scratch.is(index));
  Register result = eax;
  DCHECK(!result.is(scratch));
  DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()) &&
         result.is(LoadDescriptor::SlotRegister()));

  // StringCharAtGenerator doesn't use the result register until it's passed
  // the different miss possibilities. If it did, we would have a conflict
  // when FLAG_vector_ics is true.

  StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
                                          &miss,  // When not a string.
                                          &miss,  // When not a number.
                                          &miss,  // When index out of range.
                                          RECEIVER_IS_STRING);
  char_at_generator.GenerateFast(masm);
  __ ret(0);

  StubRuntimeCallHelper call_helper;
  char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);

  __ bind(&miss);
  PropertyAccessCompiler::TailCallBuiltin(
      masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
}


void RegExpExecStub::Generate(MacroAssembler* masm) {
  // Jump straight to the runtime if native RegExp is not selected at compile
  // time, or if the regexp entry in generated code has been turned off by a
  // runtime switch or at compilation.
#ifdef V8_INTERPRETED_REGEXP
  __ TailCallRuntime(Runtime::kRegExpExec);
#else  // V8_INTERPRETED_REGEXP

  // Stack frame on entry.
  //  esp[0]: return address
  //  esp[4]: last_match_info (expected JSArray)
  //  esp[8]: previous index
  //  esp[12]: subject string
  //  esp[16]: JSRegExp object

  static const int kLastMatchInfoOffset = 1 * kPointerSize;
  static const int kPreviousIndexOffset = 2 * kPointerSize;
  static const int kSubjectOffset = 3 * kPointerSize;
  static const int kJSRegExpOffset = 4 * kPointerSize;

  Label runtime;
  Factory* factory = isolate()->factory();

  // Ensure that a RegExp stack is allocated.
  ExternalReference address_of_regexp_stack_memory_address =
      ExternalReference::address_of_regexp_stack_memory_address(isolate());
  ExternalReference address_of_regexp_stack_memory_size =
      ExternalReference::address_of_regexp_stack_memory_size(isolate());
  __ mov(ebx, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ test(ebx, ebx);
  __ j(zero, &runtime);

  // Check that the first argument is a JSRegExp object.
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  STATIC_ASSERT(kSmiTag == 0);
  __ JumpIfSmi(eax, &runtime);
  __ CmpObjectType(eax, JS_REGEXP_TYPE, ecx);
  __ j(not_equal, &runtime);

  // Check that the RegExp has been compiled (data contains a fixed array).
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  if (FLAG_debug_code) {
    __ test(ecx, Immediate(kSmiTagMask));
    __ Check(not_zero, kUnexpectedTypeForRegExpDataFixedArrayExpected);
    __ CmpObjectType(ecx, FIXED_ARRAY_TYPE, ebx);
    __ Check(equal, kUnexpectedTypeForRegExpDataFixedArrayExpected);
  }

  // ecx: RegExp data (FixedArray)
  // Check the type of the RegExp. Only continue if type is JSRegExp::IRREGEXP.
  __ mov(ebx, FieldOperand(ecx, JSRegExp::kDataTagOffset));
  __ cmp(ebx, Immediate(Smi::FromInt(JSRegExp::IRREGEXP)));
  __ j(not_equal, &runtime);

  // ecx: RegExp data (FixedArray)
  // Check that the number of captures fits in the static offsets vector
  // buffer.
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Check (number_of_captures + 1) * 2 <= offsets vector size
  // Or        number_of_captures * 2 <= offsets vector size - 2
  // Multiplying by 2 comes for free since edx is smi-tagged.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  STATIC_ASSERT(Isolate::kJSRegexpStaticOffsetsVectorSize >= 2);
  __ cmp(edx, Isolate::kJSRegexpStaticOffsetsVectorSize - 2);
  __ j(above, &runtime);

  // Reset offset for possibly sliced string.
  __ Move(edi, Immediate(0));
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ JumpIfSmi(eax, &runtime);
  __ mov(edx, eax);  // Make a copy of the original subject string.

  // eax: subject string
  // edx: subject string
  // ecx: RegExp data (FixedArray)
  // Handle subject string according to its encoding and representation:
  // (1) Sequential two byte?  If yes, go to (9).
  // (2) Sequential one byte?  If yes, go to (5).
  // (3) Sequential or cons?  If not, go to (6).
  // (4) Cons string.  If the string is flat, replace subject with first string
  //     and go to (1). Otherwise bail out to runtime.
  // (5) One byte sequential.  Load regexp code for one byte.
  // (E) Carry on.
  /// [...]

  // Deferred code at the end of the stub:
  // (6) Long external string?  If not, go to (10).
  // (7) External string.  Make it, offset-wise, look like a sequential string.
  // (8) Is the external string one byte?  If yes, go to (5).
  // (9) Two byte sequential.  Load regexp code for two byte. Go to (E).
  // (10) Short external string or not a string?  If yes, bail out to runtime.
  // (11) Sliced string.  Replace subject with parent. Go to (1).

  Label seq_one_byte_string /* 5 */, seq_two_byte_string /* 9 */,
      external_string /* 7 */, check_underlying /* 1 */,
      not_seq_nor_cons /* 6 */, check_code /* E */, not_long_external /* 10 */;

  __ bind(&check_underlying);
  // (1) Sequential two byte?  If yes, go to (9).
  __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
  __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));

  __ and_(ebx, kIsNotStringMask |
               kStringRepresentationMask |
               kStringEncodingMask |
               kShortExternalStringMask);
  STATIC_ASSERT((kStringTag | kSeqStringTag | kTwoByteStringTag) == 0);
  __ j(zero, &seq_two_byte_string);  // Go to (9).

  // (2) Sequential one byte?  If yes, go to (5).
  // Any other sequential string must be one byte.
  __ and_(ebx, Immediate(kIsNotStringMask |
                         kStringRepresentationMask |
                         kShortExternalStringMask));
  __ j(zero, &seq_one_byte_string, Label::kNear);  // Go to (5).

  // (3) Sequential or cons?  If not, go to (6).
  // We check whether the subject string is a cons, since sequential strings
  // have already been covered.
  STATIC_ASSERT(kConsStringTag < kExternalStringTag);
  STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
  STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
  STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
  __ cmp(ebx, Immediate(kExternalStringTag));
  __ j(greater_equal, &not_seq_nor_cons);  // Go to (6).

  // (4) Cons string.  Check that it's flat.
  // Replace subject with first string and reload instance type.
  __ cmp(FieldOperand(eax, ConsString::kSecondOffset), factory->empty_string());
  __ j(not_equal, &runtime);
  __ mov(eax, FieldOperand(eax, ConsString::kFirstOffset));
  __ jmp(&check_underlying);

  // eax: sequential subject string (or look-alike, external string)
  // edx: original subject string
  // ecx: RegExp data (FixedArray)
  // (5) One byte sequential.  Load regexp code for one byte.
  __ bind(&seq_one_byte_string);
  // Load previous index and check range before edx is overwritten.  We have
  // to use edx instead of eax here because it might have been only made to
  // look like a sequential string when it actually is an external string.
  __ mov(ebx, Operand(esp, kPreviousIndexOffset));
  __ JumpIfNotSmi(ebx, &runtime);
  __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
  __ j(above_equal, &runtime);
  __ mov(edx, FieldOperand(ecx, JSRegExp::kDataOneByteCodeOffset));
  __ Move(ecx, Immediate(1));  // Type is one byte.

  // (E) Carry on.  String handling is done.
  __ bind(&check_code);
  // edx: irregexp code
  // Check that the irregexp code has been generated for the actual string
  // encoding. If it has, the field contains a code object; otherwise it
  // contains a smi (code flushing support).
  __ JumpIfSmi(edx, &runtime);

  // eax: subject string
  // ebx: previous index (smi)
  // edx: code
  // ecx: encoding of subject string (1 if one_byte, 0 if two_byte);
  // All checks done. Now push arguments for native regexp code.
  Counters* counters = isolate()->counters();
  __ IncrementCounter(counters->regexp_entry_native(), 1);

  // Isolates: note we add an additional parameter here (isolate pointer).
  static const int kRegExpExecuteArguments = 9;
  __ EnterApiExitFrame(kRegExpExecuteArguments);

  // Argument 9: Pass current isolate address.
  __ mov(Operand(esp, 8 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));

  // Argument 8: Indicate that this is a direct call from JavaScript.
  __ mov(Operand(esp, 7 * kPointerSize), Immediate(1));

  // Argument 7: Start (high end) of backtracking stack memory area.
  __ mov(esi, Operand::StaticVariable(address_of_regexp_stack_memory_address));
  __ add(esi, Operand::StaticVariable(address_of_regexp_stack_memory_size));
  __ mov(Operand(esp, 6 * kPointerSize), esi);

  // Argument 6: Set the number of capture registers to zero to force global
  // regexps to behave as non-global. This does not affect non-global regexps.
  __ mov(Operand(esp, 5 * kPointerSize), Immediate(0));

  // Argument 5: static offsets vector buffer.
  __ mov(Operand(esp, 4 * kPointerSize),
         Immediate(ExternalReference::address_of_static_offsets_vector(
             isolate())));

  // Argument 2: Previous index.
  __ SmiUntag(ebx);
  __ mov(Operand(esp, 1 * kPointerSize), ebx);

  // Argument 1: Original subject string.
  // The original subject is in the previous stack frame. Therefore we have to
  // use ebp, which points exactly to one pointer size below the previous esp.
  // (Because creating a new stack frame pushes the previous ebp onto the stack
  // and thereby moves up esp by one kPointerSize.)
  __ mov(esi, Operand(ebp, kSubjectOffset + kPointerSize));
  __ mov(Operand(esp, 0 * kPointerSize), esi);

  // esi: original subject string
  // eax: underlying subject string
  // ebx: previous index
  // ecx: encoding of subject string (1 if one_byte 0 if two_byte);
  // edx: code
  // Argument 4: End of string data
  // Argument 3: Start of string data
  // Prepare start and end index of the input.
  // Load the length from the original sliced string if that is the case.
  __ mov(esi, FieldOperand(esi, String::kLengthOffset));
  __ add(esi, edi);  // Calculate input end wrt offset.
  __ SmiUntag(edi);
  __ add(ebx, edi);  // Calculate input start wrt offset.

  // ebx: start index of the input string
  // esi: end index of the input string
  Label setup_two_byte, setup_rest;
  __ test(ecx, ecx);
  __ j(zero, &setup_two_byte, Label::kNear);
  __ SmiUntag(esi);
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_1, SeqOneByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.
  __ jmp(&setup_rest, Label::kNear);

  __ bind(&setup_two_byte);
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize == 1);  // esi is smi (powered by 2).
  __ lea(ecx, FieldOperand(eax, esi, times_1, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 3 * kPointerSize), ecx);  // Argument 4.
  __ lea(ecx, FieldOperand(eax, ebx, times_2, SeqTwoByteString::kHeaderSize));
  __ mov(Operand(esp, 2 * kPointerSize), ecx);  // Argument 3.

  __ bind(&setup_rest);

  // Locate the code entry and call it.
  __ add(edx, Immediate(Code::kHeaderSize - kHeapObjectTag));
  __ call(edx);

  // Drop arguments and come back to JS mode.
  __ LeaveApiExitFrame(true);

  // Check the result.
  Label success;
  __ cmp(eax, 1);
  // We expect exactly one result since we force the called regexp to behave
  // as non-global.
  __ j(equal, &success);
  Label failure;
  __ cmp(eax, NativeRegExpMacroAssembler::FAILURE);
  __ j(equal, &failure);
  __ cmp(eax, NativeRegExpMacroAssembler::EXCEPTION);
  // If it is not an exception, it can only be a retry. Handle that in the
  // runtime system.
  __ j(not_equal, &runtime);
  // The result must now be an exception. If there is no pending exception
  // already, a stack overflow (on the backtrack stack) was detected in RegExp
  // code, but the exception has not been created yet. Handle that in the
  // runtime system.
  // TODO(592): Rerunning the RegExp to get the stack overflow exception.
  ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
                                      isolate());
  __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
  __ mov(eax, Operand::StaticVariable(pending_exception));
  __ cmp(edx, eax);
  __ j(equal, &runtime);

  // For exception, throw the exception again.
  __ TailCallRuntime(Runtime::kRegExpExecReThrow);

  __ bind(&failure);
  // For failure to match, return null.
  __ mov(eax, factory->null_value());
  __ ret(4 * kPointerSize);

  // Load RegExp data.
  __ bind(&success);
  __ mov(eax, Operand(esp, kJSRegExpOffset));
  __ mov(ecx, FieldOperand(eax, JSRegExp::kDataOffset));
  __ mov(edx, FieldOperand(ecx, JSRegExp::kIrregexpCaptureCountOffset));
  // Calculate number of capture registers (number_of_captures + 1) * 2.
  STATIC_ASSERT(kSmiTag == 0);
  STATIC_ASSERT(kSmiTagSize + kSmiShiftSize == 1);
  __ add(edx, Immediate(2));  // edx was a smi.

  // edx: Number of capture registers
  // Check that the last match info is a FixedArray.
  __ mov(ebx, Operand(esp, kLastMatchInfoOffset));
  __ JumpIfSmi(ebx, &runtime);
  // Check that the object has fast elements.
  __ mov(eax, FieldOperand(ebx, HeapObject::kMapOffset));
  __ cmp(eax, factory->fixed_array_map());
  __ j(not_equal, &runtime);
  // Check that the last match info has space for the capture registers and the
  // additional information.
  __ mov(eax, FieldOperand(ebx, FixedArray::kLengthOffset));
  __ SmiUntag(eax);
  __ sub(eax, Immediate(RegExpMatchInfo::kLastMatchOverhead));
  __ cmp(edx, eax);
  __ j(greater, &runtime);

  // ebx: last_match_info backing store (FixedArray)
  // edx: number of capture registers
  // Store the capture count.
  __ SmiTag(edx);  // Number of capture registers to smi.
  __ mov(FieldOperand(ebx, RegExpMatchInfo::kNumberOfCapturesOffset), edx);
  __ SmiUntag(edx);  // Number of capture registers back from smi.
  // Store last subject and last input.
  __ mov(eax, Operand(esp, kSubjectOffset));
  __ mov(ecx, eax);
  __ mov(FieldOperand(ebx, RegExpMatchInfo::kLastSubjectOffset), eax);
  __ RecordWriteField(ebx, RegExpMatchInfo::kLastSubjectOffset, eax, edi,
                      kDontSaveFPRegs);
  __ mov(eax, ecx);
  __ mov(FieldOperand(ebx, RegExpMatchInfo::kLastInputOffset), eax);
  __ RecordWriteField(ebx, RegExpMatchInfo::kLastInputOffset, eax, edi,
                      kDontSaveFPRegs);

  // Get the static offsets vector filled by the native regexp code.
  ExternalReference address_of_static_offsets_vector =
      ExternalReference::address_of_static_offsets_vector(isolate());
  __ mov(ecx, Immediate(address_of_static_offsets_vector));

  // ebx: last_match_info backing store (FixedArray)
  // ecx: offsets vector
  // edx: number of capture registers
  Label next_capture, done;
  // Capture register counter starts from number of capture registers and
  // counts down until wrapping after zero.
  __ bind(&next_capture);
  __ sub(edx, Immediate(1));
  __ j(negative, &done, Label::kNear);
  // Read the value from the static offsets vector buffer.
  __ mov(edi, Operand(ecx, edx, times_int_size, 0));
  __ SmiTag(edi);
  // Store the smi value in the last match info.
  __ mov(FieldOperand(ebx, edx, times_pointer_size,
                      RegExpMatchInfo::kFirstCaptureOffset),
         edi);
  __ jmp(&next_capture);
  __ bind(&done);

  // Return last match info.
  __ mov(eax, ebx);
  __ ret(4 * kPointerSize);

  // Do the runtime call to execute the regexp.
  __ bind(&runtime);
  __ TailCallRuntime(Runtime::kRegExpExec);

  // Deferred code for string handling.
  // (6) Long external string?  If not, go to (10).
700 __ bind(¬_seq_nor_cons);
701 // Compare flags are still set from (3).
702 __ j(greater, ¬_long_external, Label::kNear); // Go to (10).
703
704 // (7) External string. Short external strings have been ruled out.
705 __ bind(&external_string);
706 // Reload instance type.
707 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
708 __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
709 if (FLAG_debug_code) {
710 // Assert that we do not have a cons or slice (indirect strings) here.
711 // Sequential strings have already been ruled out.
712 __ test_b(ebx, Immediate(kIsIndirectStringMask));
713 __ Assert(zero, kExternalStringExpectedButNotFound);
714 }
715 __ mov(eax, FieldOperand(eax, ExternalString::kResourceDataOffset));
716 // Move the pointer so that offset-wise, it looks like a sequential string.
717 STATIC_ASSERT(SeqTwoByteString::kHeaderSize == SeqOneByteString::kHeaderSize);
718 __ sub(eax, Immediate(SeqTwoByteString::kHeaderSize - kHeapObjectTag));
719 STATIC_ASSERT(kTwoByteStringTag == 0);
720 // (8) Is the external string one byte? If yes, go to (5).
721 __ test_b(ebx, Immediate(kStringEncodingMask));
722 __ j(not_zero, &seq_one_byte_string); // Go to (5).
723
724 // eax: sequential subject string (or look-alike, external string)
725 // edx: original subject string
726 // ecx: RegExp data (FixedArray)
727 // (9) Two byte sequential. Load regexp code for two byte. Go to (E).
728 __ bind(&seq_two_byte_string);
729 // Load previous index and check range before edx is overwritten. We have
730 // to use edx instead of eax here because it might have been only made to
731 // look like a sequential string when it actually is an external string.
732 __ mov(ebx, Operand(esp, kPreviousIndexOffset));
733 __ JumpIfNotSmi(ebx, &runtime);
734 __ cmp(ebx, FieldOperand(edx, String::kLengthOffset));
735 __ j(above_equal, &runtime);
736 __ mov(edx, FieldOperand(ecx, JSRegExp::kDataUC16CodeOffset));
737 __ Move(ecx, Immediate(0)); // Type is two byte.
738 __ jmp(&check_code); // Go to (E).
739
740 // (10) Not a string or a short external string? If yes, bail out to runtime.
741 __ bind(¬_long_external);
742 // Catch non-string subject or short external string.
743 STATIC_ASSERT(kNotStringTag != 0 && kShortExternalStringTag !=0);
744 __ test(ebx, Immediate(kIsNotStringMask | kShortExternalStringTag));
745 __ j(not_zero, &runtime);
746
747 // (11) Sliced string. Replace subject with parent. Go to (1).
748 // Load offset into edi and replace subject string with parent.
749 __ mov(edi, FieldOperand(eax, SlicedString::kOffsetOffset));
750 __ mov(eax, FieldOperand(eax, SlicedString::kParentOffset));
751 __ jmp(&check_underlying); // Go to (1).
752 #endif // V8_INTERPRETED_REGEXP
753 }
754
755
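// Returns the value to report when a comparison is undefined (e.g. an operand
// is undefined or NaN): LESS for greater/greater_equal and GREATER for
// less/less_equal, which makes the overall comparison evaluate to false.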
static int NegativeComparisonResult(Condition cc) {
  DCHECK(cc != equal);
  DCHECK((cc == less) || (cc == less_equal)
      || (cc == greater) || (cc == greater_equal));
  return (cc == greater || cc == greater_equal) ? LESS : GREATER;
}


static void CheckInputType(MacroAssembler* masm, Register input,
                           CompareICState::State expected, Label* fail) {
  Label ok;
  if (expected == CompareICState::SMI) {
    __ JumpIfNotSmi(input, fail);
  } else if (expected == CompareICState::NUMBER) {
    __ JumpIfSmi(input, &ok);
    __ cmp(FieldOperand(input, HeapObject::kMapOffset),
           Immediate(masm->isolate()->factory()->heap_number_map()));
    __ j(not_equal, fail);
  }
  // We could be strict about internalized/non-internalized here, but as long
  // as hydrogen doesn't care, the stub doesn't have to care either.
  __ bind(&ok);
}


static void BranchIfNotInternalizedString(MacroAssembler* masm,
                                          Label* label,
                                          Register object,
                                          Register scratch) {
  __ JumpIfSmi(object, label);
  __ mov(scratch, FieldOperand(object, HeapObject::kMapOffset));
  __ movzx_b(scratch, FieldOperand(scratch, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  __ test(scratch, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  __ j(not_zero, label);
}


void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
  Label runtime_call, check_unequal_objects;
  Condition cc = GetCondition();

  Label miss;
  CheckInputType(masm, edx, left(), &miss);
  CheckInputType(masm, eax, right(), &miss);

  // Compare two smis.
  Label non_smi, smi_done;
  __ mov(ecx, edx);
  __ or_(ecx, eax);
  __ JumpIfNotSmi(ecx, &non_smi, Label::kNear);
  __ sub(edx, eax);  // Return on the result of the subtraction.
  __ j(no_overflow, &smi_done, Label::kNear);
  __ not_(edx);  // Correct sign in case of overflow. edx is never 0 here.
  __ bind(&smi_done);
  __ mov(eax, edx);
  __ ret(0);
  __ bind(&non_smi);

  // NOTICE! This code is only reached after a smi-fast-case check, so
  // it is certain that at least one operand isn't a smi.

  // Identical objects can be compared fast, but there are some tricky cases
  // for NaN and undefined.
  Label generic_heap_number_comparison;
  {
    Label not_identical;
    __ cmp(eax, edx);
    __ j(not_equal, &not_identical);

    if (cc != equal) {
      // Check for undefined.  undefined OP undefined is false even though
      // undefined == undefined.
      __ cmp(edx, isolate()->factory()->undefined_value());
      Label check_for_nan;
      __ j(not_equal, &check_for_nan, Label::kNear);
      __ Move(eax, Immediate(Smi::FromInt(NegativeComparisonResult(cc))));
      __ ret(0);
      __ bind(&check_for_nan);
    }

    // Test for NaN. Compare heap numbers in a general way,
    // to handle NaNs correctly.
    __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->heap_number_map()));
    __ j(equal, &generic_heap_number_comparison, Label::kNear);
    if (cc != equal) {
      __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
      __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
      // Call runtime on identical JSObjects.  Otherwise return equal.
      __ cmpb(ecx, Immediate(FIRST_JS_RECEIVER_TYPE));
      __ j(above_equal, &runtime_call, Label::kFar);
      // Call runtime on identical symbols since we need to throw a TypeError.
      __ cmpb(ecx, Immediate(SYMBOL_TYPE));
      __ j(equal, &runtime_call, Label::kFar);
      // Call runtime on identical SIMD values since we must throw a TypeError.
      __ cmpb(ecx, Immediate(SIMD128_VALUE_TYPE));
      __ j(equal, &runtime_call, Label::kFar);
    }
    __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
    __ ret(0);


    __ bind(&not_identical);
  }

  // Strict equality can quickly decide whether objects are equal.
  // Non-strict object equality is slower, so it is handled later in the stub.
  if (cc == equal && strict()) {
    Label slow;  // Fallthrough label.
    Label not_smis;
    // If we're doing a strict equality comparison, we don't have to do
    // type conversion, so we generate code to do fast comparison for objects
    // and oddballs. Non-smi numbers and strings still go through the usual
    // slow-case code.
    // If either is a Smi (we know that not both are), then they can only
    // be equal if the other is a HeapNumber. If so, use the slow case.
    STATIC_ASSERT(kSmiTag == 0);
    DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
    __ mov(ecx, Immediate(kSmiTagMask));
    __ and_(ecx, eax);
    __ test(ecx, edx);
    __ j(not_zero, &not_smis, Label::kNear);
    // One operand is a smi.

    // Check whether the non-smi is a heap number.
    STATIC_ASSERT(kSmiTagMask == 1);
    // ecx still holds eax & kSmiTagMask, which is either zero or one.
    __ sub(ecx, Immediate(0x01));
    __ mov(ebx, edx);
    __ xor_(ebx, eax);
    __ and_(ebx, ecx);  // ebx holds either 0 or eax ^ edx.
    __ xor_(ebx, eax);
    // If eax was smi, ebx is now edx, else eax.

    // Check if the non-smi operand is a heap number.
    __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
           Immediate(isolate()->factory()->heap_number_map()));
    // If heap number, handle it in the slow case.
    __ j(equal, &slow, Label::kNear);
    // Return non-equal (ebx is not zero).
    __ mov(eax, ebx);
    __ ret(0);

    __ bind(&not_smis);
    // If either operand is a JSObject or an oddball value, then they are not
    // equal since their pointers are different.
    // There is no test for undetectability in strict equality.

    // Get the type of the first operand.
    // If the first object is a JS object, we have done pointer comparison.
    Label first_non_object;
    STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
    __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
    __ j(below, &first_non_object, Label::kNear);

    // Return non-zero (eax is not zero)
    Label return_not_equal;
    STATIC_ASSERT(kHeapObjectTag != 0);
    __ bind(&return_not_equal);
    __ ret(0);

    __ bind(&first_non_object);
    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
    __ j(above_equal, &return_not_equal);

    // Check for oddballs: true, false, null, undefined.
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(equal, &return_not_equal);

    // Fall through to the general case.
    __ bind(&slow);
  }

  // Generate the number comparison code.
  Label non_number_comparison;
  Label unordered;
  __ bind(&generic_heap_number_comparison);
  FloatingPointHelper::CheckFloatOperands(
      masm, &non_number_comparison, ebx);
  FloatingPointHelper::LoadFloatOperand(masm, eax);
  FloatingPointHelper::LoadFloatOperand(masm, edx);
  __ FCmp();

  // Don't base result on EFLAGS when a NaN is involved.
  __ j(parity_even, &unordered, Label::kNear);

  Label below_label, above_label;
  // Return a result of -1, 0, or 1, based on EFLAGS.
  __ j(below, &below_label, Label::kNear);
  __ j(above, &above_label, Label::kNear);

  __ Move(eax, Immediate(0));
  __ ret(0);

  __ bind(&below_label);
  __ mov(eax, Immediate(Smi::FromInt(-1)));
  __ ret(0);

  __ bind(&above_label);
  __ mov(eax, Immediate(Smi::FromInt(1)));
  __ ret(0);

  // If one of the numbers was NaN, then the result is always false.
  // The cc is never not-equal.
  __ bind(&unordered);
  DCHECK(cc != not_equal);
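  // Returning 1 for less/less_equal (and -1 otherwise) guarantees that the
  // comparison evaluates to false when one operand is NaN.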
  if (cc == less || cc == less_equal) {
    __ mov(eax, Immediate(Smi::FromInt(1)));
  } else {
    __ mov(eax, Immediate(Smi::FromInt(-1)));
  }
  __ ret(0);

  // The number comparison code did not provide a valid result.
  __ bind(&non_number_comparison);

  // Fast negative check for internalized-to-internalized equality.
  Label check_for_strings;
  if (cc == equal) {
    BranchIfNotInternalizedString(masm, &check_for_strings, eax, ecx);
    BranchIfNotInternalizedString(masm, &check_for_strings, edx, ecx);

    // We've already checked for object identity, so if both operands
    // are internalized they aren't equal. Register eax already holds a
    // non-zero value, which indicates not equal, so just return.
    __ ret(0);
  }

  __ bind(&check_for_strings);

  __ JumpIfNotBothSequentialOneByteStrings(edx, eax, ecx, ebx,
                                           &check_unequal_objects);

  // Inline comparison of one-byte strings.
  if (cc == equal) {
    StringHelper::GenerateFlatOneByteStringEquals(masm, edx, eax, ecx, ebx);
  } else {
    StringHelper::GenerateCompareFlatOneByteStrings(masm, edx, eax, ecx, ebx,
                                                    edi);
  }
#ifdef DEBUG
  __ Abort(kUnexpectedFallThroughFromStringComparison);
#endif

  __ bind(&check_unequal_objects);
  if (cc == equal && !strict()) {
    // Non-strict equality.  Objects are unequal if
    // they are both JSObjects and not undetectable,
    // and their pointers are different.
    Label return_equal, return_unequal, undetectable;
    // At most one is a smi, so we can test for smi by adding the two.
    // A smi plus a heap object has the low bit set, a heap object plus
    // a heap object has the low bit clear.
    STATIC_ASSERT(kSmiTag == 0);
    STATIC_ASSERT(kSmiTagMask == 1);
    __ lea(ecx, Operand(eax, edx, times_1, 0));
    __ test(ecx, Immediate(kSmiTagMask));
    __ j(not_zero, &runtime_call);

    __ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
    __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));

    __ test_b(FieldOperand(ebx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &undetectable, Label::kNear);
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    __ j(not_zero, &return_unequal, Label::kNear);

    __ CmpInstanceType(ebx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);
    __ CmpInstanceType(ecx, FIRST_JS_RECEIVER_TYPE);
    __ j(below, &runtime_call, Label::kNear);

    __ bind(&return_unequal);
    // Return non-equal by returning the non-zero object pointer in eax.
    __ ret(0);  // eax, edx were pushed

    __ bind(&undetectable);
    __ test_b(FieldOperand(ecx, Map::kBitFieldOffset),
              Immediate(1 << Map::kIsUndetectable));
    __ j(zero, &return_unequal, Label::kNear);

    // If both sides are JSReceivers, then the result is false according to
    // the HTML specification, which says that only comparisons with null or
    // undefined are affected by special casing for document.all.
    __ CmpInstanceType(ebx, ODDBALL_TYPE);
    __ j(zero, &return_equal, Label::kNear);
    __ CmpInstanceType(ecx, ODDBALL_TYPE);
    __ j(not_zero, &return_unequal, Label::kNear);

    __ bind(&return_equal);
    __ Move(eax, Immediate(EQUAL));
    __ ret(0);  // eax, edx were pushed
  }
  __ bind(&runtime_call);

  if (cc == equal) {
    {
      FrameScope scope(masm, StackFrame::INTERNAL);
      __ Push(edx);
      __ Push(eax);
      __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
    }
    // Turn true into 0 and false into some non-zero value.
    STATIC_ASSERT(EQUAL == 0);
    __ sub(eax, Immediate(isolate()->factory()->true_value()));
    __ Ret();
  } else {
    // Push arguments below the return address.
    __ pop(ecx);
    __ push(edx);
    __ push(eax);
    __ push(Immediate(Smi::FromInt(NegativeComparisonResult(cc))));

    // Restore return address on the stack.
    __ push(ecx);
    // Call the native; it returns -1 (less), 0 (equal), or 1 (greater)
    // tagged as a small integer.
    __ TailCallRuntime(Runtime::kCompare);
  }

  __ bind(&miss);
  GenerateMiss(masm);
}


static void CallStubInRecordCallTarget(MacroAssembler* masm, CodeStub* stub) {
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call

  {
    FrameScope scope(masm, StackFrame::INTERNAL);

    // Number-of-arguments register must be smi-tagged to call out.
    __ SmiTag(eax);
    __ push(eax);
    __ push(edi);
    __ push(edx);
    __ push(ebx);
    __ push(esi);

    __ CallStub(stub);

    __ pop(esi);
    __ pop(ebx);
    __ pop(edx);
    __ pop(edi);
    __ pop(eax);
    __ SmiUntag(eax);
  }
}


static void GenerateRecordCallTarget(MacroAssembler* masm) {
  // Cache the called function in a feedback vector slot.  Cache states
  // are uninitialized, monomorphic (indicated by a JSFunction), and
  // megamorphic.
  // eax : number of arguments to the construct function
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi)
  // edi : the function to call
  Isolate* isolate = masm->isolate();
  Label initialize, done, miss, megamorphic, not_array_function;

  // Load the cache state into ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));
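  // Note: edx holds the slot index as a smi (index * 2), so scaling by half a
  // pointer size yields a correctly scaled FixedArray offset here and in the
  // stores below.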

  // A monomorphic cache hit or an already megamorphic state: invoke the
  // function without changing the state.
  // We don't know if ecx is a WeakCell or a Symbol, but it's harmless to read
  // at this position in a symbol (see static asserts in
  // type-feedback-vector.h).
  Label check_allocation_site;
  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(ecx, Heap::kmegamorphic_symbolRootIndex);
  __ j(equal, &done, Label::kFar);
  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                 Heap::kWeakCellMapRootIndex);
  __ j(not_equal, &check_allocation_site);

  // If the weak cell is cleared, we have a new chance to become monomorphic.
  __ JumpIfSmi(FieldOperand(ecx, WeakCell::kValueOffset), &initialize);
  __ jmp(&megamorphic);

  __ bind(&check_allocation_site);
  // If we came here, we need to see if we are the array function.
  // If we didn't have a matching function, and we didn't find the megamorphic
  // sentinel, then we have in the slot either some other function or an
  // AllocationSite.
  __ CompareRoot(FieldOperand(ecx, 0), Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &miss);

  // Make sure the function is the Array() function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &megamorphic);
  __ jmp(&done, Label::kFar);

  __ bind(&miss);

  // A monomorphic miss (i.e., here the cache is not uninitialized) goes
  // megamorphic.
  __ CompareRoot(ecx, Heap::kuninitialized_symbolRootIndex);
  __ j(equal, &initialize);
  // MegamorphicSentinel is an immortal immovable object (undefined) so no
  // write-barrier is needed.
  __ bind(&megamorphic);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ jmp(&done, Label::kFar);

  // An uninitialized cache is patched with the function or sentinel to
  // indicate the ElementsKind if function is the Array constructor.
  __ bind(&initialize);
  // Make sure the function is the Array() function.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, &not_array_function);

  // The target function is the Array constructor.
  // Create an AllocationSite if we don't already have it, store it in the
  // slot.
  CreateAllocationSiteStub create_stub(isolate);
  CallStubInRecordCallTarget(masm, &create_stub);
  __ jmp(&done);

  __ bind(&not_array_function);
  CreateWeakCellStub weak_cell_stub(isolate);
  CallStubInRecordCallTarget(masm, &weak_cell_stub);

  __ bind(&done);
  // Increment the call count for all function calls.
  __ add(FieldOperand(ebx, edx, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(1)));
}


void CallConstructStub::Generate(MacroAssembler* masm) {
  // eax : number of arguments
  // ebx : feedback vector
  // edx : slot in feedback vector (Smi, for RecordCallTarget)
  // edi : constructor function

  Label non_function;
  // Check that function is not a smi.
  __ JumpIfSmi(edi, &non_function);
  // Check that function is a JSFunction.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &non_function);

  GenerateRecordCallTarget(masm);

  Label feedback_register_initialized;
  // Put the AllocationSite from the feedback vector into ebx, or undefined.
  __ mov(ebx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));
  Handle<Map> allocation_site_map =
      isolate()->factory()->allocation_site_map();
  __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
  __ j(equal, &feedback_register_initialized);
  __ mov(ebx, isolate()->factory()->undefined_value());
  __ bind(&feedback_register_initialized);

  __ AssertUndefinedOrAllocationSite(ebx);

  // Pass new target to construct stub.
  __ mov(edx, edi);

  // Tail call to the function-specific construct stub (still in the caller
  // context at this point).
  __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  __ mov(ecx, FieldOperand(ecx, SharedFunctionInfo::kConstructStubOffset));
  __ lea(ecx, FieldOperand(ecx, Code::kHeaderSize));
  __ jmp(ecx);

  __ bind(&non_function);
  __ mov(edx, edi);
  __ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}

static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
                               Register slot) {
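  // The call count lives in the slot immediately after the feedback element,
  // stored as a smi.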
  __ add(FieldOperand(feedback_vector, slot, times_half_pointer_size,
                      FixedArray::kHeaderSize + kPointerSize),
         Immediate(Smi::FromInt(1)));
}

void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
  // eax - number of arguments
  // edi - function
  // edx - slot id
  // ebx - vector
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(not_equal, miss);

  // Reload ecx.
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // Increment the call count for monomorphic function calls.
  IncrementCallCount(masm, ebx, edx);

  __ mov(ebx, ecx);
  __ mov(edx, edi);
  ArrayConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);

  // Unreachable.
}


void CallICStub::Generate(MacroAssembler* masm) {
  // eax - number of arguments
  // edi - function
  // edx - slot id
  // ebx - vector
  Isolate* isolate = masm->isolate();
  Label extra_checks_or_miss, call, call_function, call_count_incremented;

  // The checks. First, does edi match the recorded monomorphic target?
  __ mov(ecx, FieldOperand(ebx, edx, times_half_pointer_size,
                           FixedArray::kHeaderSize));

  // We don't know that we have a weak cell. We might have a private symbol
  // or an AllocationSite, but the memory is safe to examine.
  // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
  // FixedArray.
  // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
  // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
  // computed, meaning that it can't appear to be a pointer. If the low bit is
  // 0, then hash is computed, but the 0 bit prevents the field from appearing
  // to be a pointer.
  STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
  STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
                    WeakCell::kValueOffset &&
                WeakCell::kValueOffset == Symbol::kHashFieldSlot);

  __ cmp(edi, FieldOperand(ecx, WeakCell::kValueOffset));
  __ j(not_equal, &extra_checks_or_miss);

  // The compare above could have been a SMI/SMI comparison. Guard against this
  // convincing us that we have a monomorphic JSFunction.
  __ JumpIfSmi(edi, &extra_checks_or_miss);

  __ bind(&call_function);

  // Increment the call count for monomorphic function calls.
  IncrementCallCount(masm, ebx, edx);

  __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
                                                    tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&extra_checks_or_miss);
  Label uninitialized, miss, not_allocation_site;

  __ cmp(ecx, Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));
  __ j(equal, &call);

  // Check if we have an allocation site.
  __ CompareRoot(FieldOperand(ecx, HeapObject::kMapOffset),
                 Heap::kAllocationSiteMapRootIndex);
  __ j(not_equal, &not_allocation_site);

  // We have an allocation site.
  HandleArrayCase(masm, &miss);

  __ bind(&not_allocation_site);

  // The following cases attempt to handle MISS cases without going to the
  // runtime.
  if (FLAG_trace_ic) {
    __ jmp(&miss);
  }

  __ cmp(ecx, Immediate(TypeFeedbackVector::UninitializedSentinel(isolate)));
  __ j(equal, &uninitialized);

  // We are going megamorphic. If the feedback is a JSFunction, it is fine
  // to handle it here. More complex cases are dealt with in the runtime.
  __ AssertNotSmi(ecx);
  __ CmpObjectType(ecx, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &miss);
  __ mov(
      FieldOperand(ebx, edx, times_half_pointer_size, FixedArray::kHeaderSize),
      Immediate(TypeFeedbackVector::MegamorphicSentinel(isolate)));

  __ bind(&call);

  // Increment the call count for megamorphic function calls.
  IncrementCallCount(masm, ebx, edx);

  __ bind(&call_count_incremented);

  __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
          RelocInfo::CODE_TARGET);

  __ bind(&uninitialized);

  // We are going monomorphic, provided we actually have a JSFunction.
  __ JumpIfSmi(edi, &miss);

  // Goto miss case if we do not have a function.
  __ CmpObjectType(edi, JS_FUNCTION_TYPE, ecx);
  __ j(not_equal, &miss);

  // Make sure the function is not the Array() function, which requires special
  // behavior on MISS.
  __ LoadGlobalFunction(Context::ARRAY_FUNCTION_INDEX, ecx);
  __ cmp(edi, ecx);
  __ j(equal, &miss);

  // Make sure the function belongs to the same native context.
  __ mov(ecx, FieldOperand(edi, JSFunction::kContextOffset));
  __ mov(ecx, ContextOperand(ecx, Context::NATIVE_CONTEXT_INDEX));
  __ cmp(ecx, NativeContextOperand());
  __ j(not_equal, &miss);

  // Store the function. Use a stub since we need a frame for allocation.
  // eax - number of arguments
  // ebx - vector
  // edx - slot
  // edi - function
  {
    FrameScope scope(masm, StackFrame::INTERNAL);
    CreateWeakCellStub create_stub(isolate);
    __ SmiTag(eax);
    __ push(eax);
    __ push(ebx);
    __ push(edx);
    __ push(edi);
    __ push(esi);
    __ CallStub(&create_stub);
    __ pop(esi);
    __ pop(edi);
    __ pop(edx);
    __ pop(ebx);
    __ pop(eax);
    __ SmiUntag(eax);
  }

  __ jmp(&call_function);

  // We are here because tracing is on or we encountered a MISS case we can't
  // handle here.
  __ bind(&miss);
  GenerateMiss(masm);

  __ jmp(&call_count_incremented);

  // Unreachable
  __ int3();
}


GenerateMiss(MacroAssembler * masm)1423 void CallICStub::GenerateMiss(MacroAssembler* masm) {
1424 FrameScope scope(masm, StackFrame::INTERNAL);
1425
1426 // Preserve the number of arguments.
1427 __ SmiTag(eax);
1428 __ push(eax);
1429
1430 // Push the function and feedback info.
1431 __ push(edi);
1432 __ push(ebx);
1433 __ push(edx);
1434
1435 // Call the entry.
1436 __ CallRuntime(Runtime::kCallIC_Miss);
1437
1438 // Move result to edi and exit the internal frame.
1439 __ mov(edi, eax);
1440
1441 // Restore number of arguments.
1442 __ pop(eax);
1443 __ SmiUntag(eax);
1444 }
1445
1446
NeedsImmovableCode()1447 bool CEntryStub::NeedsImmovableCode() {
1448 return false;
1449 }
1450
1451
GenerateStubsAheadOfTime(Isolate * isolate)1452 void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
1453 CEntryStub::GenerateAheadOfTime(isolate);
1454 StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(isolate);
1455 StubFailureTrampolineStub::GenerateAheadOfTime(isolate);
1456 // It is important that the store buffer overflow stubs are generated first.
1457 CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
1458 CreateAllocationSiteStub::GenerateAheadOfTime(isolate);
1459 CreateWeakCellStub::GenerateAheadOfTime(isolate);
1460 BinaryOpICStub::GenerateAheadOfTime(isolate);
1461 BinaryOpICWithAllocationSiteStub::GenerateAheadOfTime(isolate);
1462 StoreFastElementStub::GenerateAheadOfTime(isolate);
1463 }
1464
1465
GenerateFPStubs(Isolate * isolate)1466 void CodeStub::GenerateFPStubs(Isolate* isolate) {
1467 CEntryStub save_doubles(isolate, 1, kSaveFPRegs);
1468 // Stubs might already be in the snapshot, detect that and don't regenerate,
1469 // which would lead to code stub initialization state being messed up.
1470 Code* save_doubles_code;
1471 if (!save_doubles.FindCodeInCache(&save_doubles_code)) {
1472 save_doubles_code = *(save_doubles.GetCode());
1473 }
1474 isolate->set_fp_stubs_generated(true);
1475 }
1476
1477
GenerateAheadOfTime(Isolate * isolate)1478 void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
1479 CEntryStub stub(isolate, 1, kDontSaveFPRegs);
1480 stub.GetCode();
1481 }
1482
1483
1484 void CEntryStub::Generate(MacroAssembler* masm) {
1485 // eax: number of arguments including receiver
1486 // ebx: pointer to C function (C callee-saved)
1487 // ebp: frame pointer (restored after C call)
1488 // esp: stack pointer (restored after C call)
1489 // esi: current context (C callee-saved)
1490 // edi: JS function of the caller (C callee-saved)
1491 //
1492 // If argv_in_register():
1493 // ecx: pointer to the first argument
1494
1495 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1496
1497 // Reserve space on the stack for the three arguments passed to the call. If
1498 // the result size is greater than what can be returned in registers, also
1499 // reserve space for the hidden argument pointing at the result location,
1500 // and for the result itself.
1501 int arg_stack_space = result_size() < 3 ? 3 : 4 + result_size();
1502
1503 // Enter the exit frame that transitions from JavaScript to C++.
1504 if (argv_in_register()) {
1505 DCHECK(!save_doubles());
1506 DCHECK(!is_builtin_exit());
1507 __ EnterApiExitFrame(arg_stack_space);
1508
1509 // Move argc and argv into the correct registers.
1510 __ mov(esi, ecx);
1511 __ mov(edi, eax);
1512 } else {
1513 __ EnterExitFrame(
1514 arg_stack_space, save_doubles(),
1515 is_builtin_exit() ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
1516 }
1517
1518 // ebx: pointer to C function (C callee-saved)
1519 // ebp: frame pointer (restored after C call)
1520 // esp: stack pointer (restored after C call)
1521 // edi: number of arguments including receiver (C callee-saved)
1522 // esi: pointer to the first argument (C callee-saved)
1523
1524 // Result returned in eax, or eax+edx if result size is 2.
1525
1526 // Check stack alignment.
1527 if (FLAG_debug_code) {
1528 __ CheckStackAlignment();
1529 }
1530 // Call C function.
1531 if (result_size() <= 2) {
1532 __ mov(Operand(esp, 0 * kPointerSize), edi); // argc.
1533 __ mov(Operand(esp, 1 * kPointerSize), esi); // argv.
1534 __ mov(Operand(esp, 2 * kPointerSize),
1535 Immediate(ExternalReference::isolate_address(isolate())));
1536 } else {
1537 DCHECK_EQ(3, result_size());
1538 // Pass a pointer to the result location as the first argument.
1539 __ lea(eax, Operand(esp, 4 * kPointerSize));
1540 __ mov(Operand(esp, 0 * kPointerSize), eax);
1541 __ mov(Operand(esp, 1 * kPointerSize), edi); // argc.
1542 __ mov(Operand(esp, 2 * kPointerSize), esi); // argv.
1543 __ mov(Operand(esp, 3 * kPointerSize),
1544 Immediate(ExternalReference::isolate_address(isolate())));
1545 }
1546 __ call(ebx);
1547
1548 if (result_size() > 2) {
1549 DCHECK_EQ(3, result_size());
1550 #ifndef _WIN32
1551 // Restore the "hidden" struct-return argument slot, which the callee popped.
1552 __ sub(esp, Immediate(kPointerSize));
1553 #endif
1554 // Read the result values stored on the stack, above the arguments.
1555 __ mov(kReturnRegister0, Operand(esp, 4 * kPointerSize));
1556 __ mov(kReturnRegister1, Operand(esp, 5 * kPointerSize));
1557 __ mov(kReturnRegister2, Operand(esp, 6 * kPointerSize));
1558 }
1559 // Result is in eax, edx:eax or edi:edx:eax - do not destroy these registers!
1560
1561 // Check result for exception sentinel.
1562 Label exception_returned;
1563 __ cmp(eax, isolate()->factory()->exception());
1564 __ j(equal, &exception_returned);
1565
1566 // Check that there is no pending exception, otherwise we
1567 // should have returned the exception sentinel.
1568 if (FLAG_debug_code) {
1569 __ push(edx);
1570 __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
1571 Label okay;
1572 ExternalReference pending_exception_address(
1573 Isolate::kPendingExceptionAddress, isolate());
1574 __ cmp(edx, Operand::StaticVariable(pending_exception_address));
1575 // Cannot use Check here, as it attempts to generate a call into the runtime.
1576 __ j(equal, &okay, Label::kNear);
1577 __ int3();
1578 __ bind(&okay);
1579 __ pop(edx);
1580 }
1581
1582 // Exit the JavaScript to C++ exit frame.
1583 __ LeaveExitFrame(save_doubles(), !argv_in_register());
1584 __ ret(0);
1585
1586 // Handling of exception.
1587 __ bind(&exception_returned);
1588
1589 ExternalReference pending_handler_context_address(
1590 Isolate::kPendingHandlerContextAddress, isolate());
1591 ExternalReference pending_handler_code_address(
1592 Isolate::kPendingHandlerCodeAddress, isolate());
1593 ExternalReference pending_handler_offset_address(
1594 Isolate::kPendingHandlerOffsetAddress, isolate());
1595 ExternalReference pending_handler_fp_address(
1596 Isolate::kPendingHandlerFPAddress, isolate());
1597 ExternalReference pending_handler_sp_address(
1598 Isolate::kPendingHandlerSPAddress, isolate());
1599
1600 // Ask the runtime for help to determine the handler. This will set eax to
1601 // contain the current pending exception; don't clobber it.
1602 ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
1603 isolate());
1604 {
1605 FrameScope scope(masm, StackFrame::MANUAL);
1606 __ PrepareCallCFunction(3, eax);
1607 __ mov(Operand(esp, 0 * kPointerSize), Immediate(0)); // argc.
1608 __ mov(Operand(esp, 1 * kPointerSize), Immediate(0)); // argv.
1609 __ mov(Operand(esp, 2 * kPointerSize),
1610 Immediate(ExternalReference::isolate_address(isolate())));
1611 __ CallCFunction(find_handler, 3);
1612 }
1613
1614 // Retrieve the handler context, SP and FP.
1615 __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
1616 __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
1617 __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));
1618
1619 // If the handler is a JS frame, restore the context to the frame. Note that
1620 // the handler context will be zero (esi == 0) for non-JS frames.
1621 Label skip;
1622 __ test(esi, esi);
1623 __ j(zero, &skip, Label::kNear);
1624 __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
1625 __ bind(&skip);
1626
1627 // Compute the handler entry address and jump to it.
1628 __ mov(edi, Operand::StaticVariable(pending_handler_code_address));
1629 __ mov(edx, Operand::StaticVariable(pending_handler_offset_address));
1630 // Check whether the exception handler code is turbofanned before jumping to it.
1631 Label not_turbo;
1632 __ push(eax);
1633 __ mov(eax, Operand(edi, Code::kKindSpecificFlags1Offset - kHeapObjectTag));
1634 __ and_(eax, Immediate(1 << Code::kIsTurbofannedBit));
1635 __ j(zero, &not_turbo);
1636 __ fninit();
1637 __ fld1();
1638 __ bind(&not_turbo);
1639 __ pop(eax);
1640 __ lea(edi, FieldOperand(edi, edx, times_1, Code::kHeaderSize));
1641 __ jmp(edi);
1642 }
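// --- Editorial sketch of the result_size() == 3 convention above (the struct
// name and fields are hypothetical). A three-word aggregate cannot be
// returned in registers on ia32; the compiler passes a hidden first argument
// pointing at caller-allocated result space, which is why the stub reserves
// 4 + result_size() slots and reads the result back from
// esp[4..6] * kPointerSize after the call.
struct ExampleTriple {
  void* x;
  void* y;
  void* z;
};
// The compiler lowers this signature to roughly:
//   example_runtime_fn(ExampleTriple* hidden_result,
//                      int argc, void** argv, void* isolate)
// and, under the System V ia32 ABI, the callee pops the hidden pointer,
// which the stub compensates for on non-Windows targets.
ExampleTriple example_runtime_fn(int argc, void** argv, void* isolate) {
  (void)argc;
  ExampleTriple result = {argv, argv, isolate};  // placeholder values
  return result;  // written through the hidden pointer, not registers
}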
1643
1644
1645 void JSEntryStub::Generate(MacroAssembler* masm) {
1646 Label invoke, handler_entry, exit;
1647 Label not_outermost_js, not_outermost_js_2;
1648
1649 ProfileEntryHookStub::MaybeCallEntryHook(masm);
1650
1651 // Set up frame.
1652 __ push(ebp);
1653 __ mov(ebp, esp);
1654
1655 // Push marker in two places.
1656 int marker = type();
1657 __ push(Immediate(Smi::FromInt(marker))); // marker
1658 ExternalReference context_address(Isolate::kContextAddress, isolate());
1659 __ push(Operand::StaticVariable(context_address)); // context
1660 // Save callee-saved registers (C calling conventions).
1661 __ push(edi);
1662 __ push(esi);
1663 __ push(ebx);
1664
1665 // Save copies of the top frame descriptor on the stack.
1666 ExternalReference c_entry_fp(Isolate::kCEntryFPAddress, isolate());
1667 __ push(Operand::StaticVariable(c_entry_fp));
1668
1669 // If this is the outermost JS call, set js_entry_sp value.
1670 ExternalReference js_entry_sp(Isolate::kJSEntrySPAddress, isolate());
1671 __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
1672 __ j(not_equal, &not_outermost_js, Label::kNear);
1673 __ mov(Operand::StaticVariable(js_entry_sp), ebp);
1674 __ push(Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1675 __ jmp(&invoke, Label::kNear);
1676 __ bind(&not_outermost_js);
1677 __ push(Immediate(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
1678
1679 // Jump to a faked try block that does the invoke, with a faked catch
1680 // block that sets the pending exception.
1681 __ jmp(&invoke);
1682 __ bind(&handler_entry);
1683 handler_offset_ = handler_entry.pos();
1684 // Caught exception: Store result (exception) in the pending exception
1685 // field in the JSEnv and return a failure sentinel.
1686 ExternalReference pending_exception(Isolate::kPendingExceptionAddress,
1687 isolate());
1688 __ mov(Operand::StaticVariable(pending_exception), eax);
1689 __ mov(eax, Immediate(isolate()->factory()->exception()));
1690 __ jmp(&exit);
1691
1692 // Invoke: Link this frame into the handler chain.
1693 __ bind(&invoke);
1694 __ PushStackHandler();
1695
1696 // Fake a receiver (NULL).
1697 __ push(Immediate(0)); // receiver
1698
1699 // Invoke the function by calling through the JS entry trampoline builtin and
1700 // pop the faked function when we return. Notice that we cannot store a
1701 // reference to the trampoline code directly in this stub, because the
1702 // builtin stubs may not have been generated yet.
1703 if (type() == StackFrame::ENTRY_CONSTRUCT) {
1704 ExternalReference construct_entry(Builtins::kJSConstructEntryTrampoline,
1705 isolate());
1706 __ mov(edx, Immediate(construct_entry));
1707 } else {
1708 ExternalReference entry(Builtins::kJSEntryTrampoline, isolate());
1709 __ mov(edx, Immediate(entry));
1710 }
1711 __ mov(edx, Operand(edx, 0)); // deref address
1712 __ lea(edx, FieldOperand(edx, Code::kHeaderSize));
1713 __ call(edx);
1714
1715 // Unlink this frame from the handler chain.
1716 __ PopStackHandler();
1717
1718 __ bind(&exit);
1719 // Check if the current stack frame is marked as the outermost JS frame.
1720 __ pop(ebx);
1721 __ cmp(ebx, Immediate(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
1722 __ j(not_equal, &not_outermost_js_2);
1723 __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
1724 __ bind(&not_outermost_js_2);
1725
1726 // Restore the top frame descriptor from the stack.
1727 __ pop(Operand::StaticVariable(ExternalReference(
1728 Isolate::kCEntryFPAddress, isolate())));
1729
1730 // Restore callee-saved registers (C calling conventions).
1731 __ pop(ebx);
1732 __ pop(esi);
1733 __ pop(edi);
1734 __ add(esp, Immediate(2 * kPointerSize)); // remove markers
1735
1736 // Restore frame pointer and return.
1737 __ pop(ebp);
1738 __ ret(0);
1739 }
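// --- Editorial sketch of the js_entry_sp bookkeeping above (Example* names
// and marker values are illustrative, not the real enum values): the
// outermost JS entry records its frame pointer and pushes the OUTERMOST
// marker; nested entries push the INNER marker, and only the outermost exit
// clears the recorded value again.
static void* example_js_entry_sp = nullptr;  // Isolate::kJSEntrySPAddress
static int ExampleEnterJS(void* frame_pointer) {
  if (example_js_entry_sp == nullptr) {
    example_js_entry_sp = frame_pointer;
    return 1;  // stands in for OUTERMOST_JSENTRY_FRAME
  }
  return 0;  // stands in for INNER_JSENTRY_FRAME
}
static void ExampleLeaveJS(int marker) {
  if (marker == 1) example_js_entry_sp = nullptr;  // outermost exit only
}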
1740
1741
1742 // -------------------------------------------------------------------------
1743 // StringCharCodeAtGenerator
1744
1745 void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
1746 // If the receiver is a smi trigger the non-string case.
1747 if (check_mode_ == RECEIVER_IS_UNKNOWN) {
1748 __ JumpIfSmi(object_, receiver_not_string_);
1749
1750 // Fetch the instance type of the receiver into result register.
1751 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
1752 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
1753 // If the receiver is not a string trigger the non-string case.
1754 __ test(result_, Immediate(kIsNotStringMask));
1755 __ j(not_zero, receiver_not_string_);
1756 }
1757
1758 // If the index is non-smi trigger the non-smi case.
1759 __ JumpIfNotSmi(index_, &index_not_smi_);
1760 __ bind(&got_smi_index_);
1761
1762 // Check for index out of range.
1763 __ cmp(index_, FieldOperand(object_, String::kLengthOffset));
1764 __ j(above_equal, index_out_of_range_);
1765
1766 __ SmiUntag(index_);
1767
1768 Factory* factory = masm->isolate()->factory();
1769 StringCharLoadGenerator::Generate(
1770 masm, factory, object_, index_, result_, &call_runtime_);
1771
1772 __ SmiTag(result_);
1773 __ bind(&exit_);
1774 }
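// --- Editorial sketch of the bounds check above (ExampleSmiIndexInRange is
// hypothetical): index_ and the string length are both smis, and the check
// uses the unsigned above_equal condition. Tagging preserves the order of
// non-negative values, while a negative index turns into a huge unsigned
// number, so one unsigned compare rejects both index >= length and index < 0.
static bool ExampleSmiIndexInRange(int32_t tagged_index,
                                   int32_t tagged_length) {
  return static_cast<uint32_t>(tagged_index) <
         static_cast<uint32_t>(tagged_length);
}
// ExampleSmiIndexInRange(/* smi -1 */ -2, /* smi 5 */ 10) is false:
// 0xFFFFFFFE is not below 10 as an unsigned value.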
1775
1776
1777 void StringCharCodeAtGenerator::GenerateSlow(
1778 MacroAssembler* masm, EmbedMode embed_mode,
1779 const RuntimeCallHelper& call_helper) {
1780 __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
1781
1782 // Index is not a smi.
1783 __ bind(&index_not_smi_);
1784 // If index is a heap number, try converting it to an integer.
1785 __ CheckMap(index_,
1786 masm->isolate()->factory()->heap_number_map(),
1787 index_not_number_,
1788 DONT_DO_SMI_CHECK);
1789 call_helper.BeforeCall(masm);
1790 if (embed_mode == PART_OF_IC_HANDLER) {
1791 __ push(LoadWithVectorDescriptor::VectorRegister());
1792 __ push(LoadDescriptor::SlotRegister());
1793 }
1794 __ push(object_);
1795 __ push(index_); // Consumed by runtime conversion function.
1796 __ CallRuntime(Runtime::kNumberToSmi);
1797 if (!index_.is(eax)) {
1798 // Save the conversion result before the pop instructions below
1799 // have a chance to overwrite it.
1800 __ mov(index_, eax);
1801 }
1802 __ pop(object_);
1803 if (embed_mode == PART_OF_IC_HANDLER) {
1804 __ pop(LoadDescriptor::SlotRegister());
1805 __ pop(LoadWithVectorDescriptor::VectorRegister());
1806 }
1807 // Reload the instance type.
1808 __ mov(result_, FieldOperand(object_, HeapObject::kMapOffset));
1809 __ movzx_b(result_, FieldOperand(result_, Map::kInstanceTypeOffset));
1810 call_helper.AfterCall(masm);
1811 // If index is still not a smi, it must be out of range.
1812 STATIC_ASSERT(kSmiTag == 0);
1813 __ JumpIfNotSmi(index_, index_out_of_range_);
1814 // Otherwise, return to the fast path.
1815 __ jmp(&got_smi_index_);
1816
1817 // Call runtime. We get here when the receiver is a string and the
1818 // index is a number, but the code of getting the actual character
1819 // is too complex (e.g., when the string needs to be flattened).
1820 __ bind(&call_runtime_);
1821 call_helper.BeforeCall(masm);
1822 __ push(object_);
1823 __ SmiTag(index_);
1824 __ push(index_);
1825 __ CallRuntime(Runtime::kStringCharCodeAtRT);
1826 if (!result_.is(eax)) {
1827 __ mov(result_, eax);
1828 }
1829 call_helper.AfterCall(masm);
1830 __ jmp(&exit_);
1831
1832 __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
1833 }
1834
1835
1836 // -------------------------------------------------------------------------
1837 // StringCharFromCodeGenerator
1838
1839 void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
1840 // Fast case of Heap::LookupSingleCharacterStringFromCode.
1841 STATIC_ASSERT(kSmiTag == 0);
1842 STATIC_ASSERT(kSmiShiftSize == 0);
1843 DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
1844 __ test(code_, Immediate(kSmiTagMask |
1845 ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
1846 __ j(not_zero, &slow_case_);
1847
1848 Factory* factory = masm->isolate()->factory();
1849 __ Move(result_, Immediate(factory->single_character_string_cache()));
1850 STATIC_ASSERT(kSmiTag == 0);
1851 STATIC_ASSERT(kSmiTagSize == 1);
1852 STATIC_ASSERT(kSmiShiftSize == 0);
1853 // At this point code register contains smi tagged one byte char code.
1854 __ mov(result_, FieldOperand(result_,
1855 code_, times_half_pointer_size,
1856 FixedArray::kHeaderSize));
1857 __ cmp(result_, factory->undefined_value());
1858 __ j(equal, &slow_case_);
1859 __ bind(&exit_);
1860 }
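// --- Editorial sketch of the combined test above (the helper is
// hypothetical; constants as in this port: kSmiTagMask == 1,
// kSmiTagSize == 1, String::kMaxOneByteCharCodeU == 0xFF). A single AND
// checks both "is a smi" (low bit clear) and "char code <= 0xFF" (no bits
// set above bit 8 of the tagged value).
static bool ExampleIsOneByteCharCodeSmi(uint32_t tagged_value) {
  const uint32_t kMask = 1u | (~0xFFu << 1);  // kSmiTagMask | (~max << 1)
  return (tagged_value & kMask) == 0;
}
// ExampleIsOneByteCharCodeSmi(65u << 1) == true;        // smi 'A'
// ExampleIsOneByteCharCodeSmi(256u << 1) == false;      // code too large
// ExampleIsOneByteCharCodeSmi((65u << 1) | 1) == false; // not a smi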
1861
1862
1863 void StringCharFromCodeGenerator::GenerateSlow(
1864 MacroAssembler* masm,
1865 const RuntimeCallHelper& call_helper) {
1866 __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
1867
1868 __ bind(&slow_case_);
1869 call_helper.BeforeCall(masm);
1870 __ push(code_);
1871 __ CallRuntime(Runtime::kStringCharFromCode);
1872 if (!result_.is(eax)) {
1873 __ mov(result_, eax);
1874 }
1875 call_helper.AfterCall(masm);
1876 __ jmp(&exit_);
1877
1878 __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
1879 }
1880
1881
1882 void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
1883 Register dest,
1884 Register src,
1885 Register count,
1886 Register scratch,
1887 String::Encoding encoding) {
1888 DCHECK(!scratch.is(dest));
1889 DCHECK(!scratch.is(src));
1890 DCHECK(!scratch.is(count));
1891
1892 // Nothing to do for zero characters.
1893 Label done;
1894 __ test(count, count);
1895 __ j(zero, &done);
1896
1897 // Make count the number of bytes to copy.
1898 if (encoding == String::TWO_BYTE_ENCODING) {
1899 __ shl(count, 1);
1900 }
1901
1902 Label loop;
1903 __ bind(&loop);
1904 __ mov_b(scratch, Operand(src, 0));
1905 __ mov_b(Operand(dest, 0), scratch);
1906 __ inc(src);
1907 __ inc(dest);
1908 __ dec(count);
1909 __ j(not_zero, &loop);
1910
1911 __ bind(&done);
1912 }
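// --- Editorial C analogue of GenerateCopyCharacters above (the helper is
// hypothetical): the stub copies byte by byte regardless of encoding and
// simply doubles the count for two-byte strings, so one loop serves both
// encodings.
static void ExampleCopyCharacters(uint8_t* dest, const uint8_t* src,
                                  size_t count, bool two_byte) {
  if (two_byte) count *= 2;  // make count a number of bytes
  while (count-- > 0) *dest++ = *src++;
}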
1913
1914
1915 void StringHelper::GenerateFlatOneByteStringEquals(MacroAssembler* masm,
1916 Register left,
1917 Register right,
1918 Register scratch1,
1919 Register scratch2) {
1920 Register length = scratch1;
1921
1922 // Compare lengths.
1923 Label strings_not_equal, check_zero_length;
1924 __ mov(length, FieldOperand(left, String::kLengthOffset));
1925 __ cmp(length, FieldOperand(right, String::kLengthOffset));
1926 __ j(equal, &check_zero_length, Label::kNear);
1927 __ bind(&strings_not_equal);
1928 __ Move(eax, Immediate(Smi::FromInt(NOT_EQUAL)));
1929 __ ret(0);
1930
1931 // Check if the length is zero.
1932 Label compare_chars;
1933 __ bind(&check_zero_length);
1934 STATIC_ASSERT(kSmiTag == 0);
1935 __ test(length, length);
1936 __ j(not_zero, &compare_chars, Label::kNear);
1937 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
1938 __ ret(0);
1939
1940 // Compare characters.
1941 __ bind(&compare_chars);
1942 GenerateOneByteCharsCompareLoop(masm, left, right, length, scratch2,
1943 &strings_not_equal, Label::kNear);
1944
1945 // Characters are equal.
1946 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
1947 __ ret(0);
1948 }
1949
1950
1951 void StringHelper::GenerateCompareFlatOneByteStrings(
1952 MacroAssembler* masm, Register left, Register right, Register scratch1,
1953 Register scratch2, Register scratch3) {
1954 Counters* counters = masm->isolate()->counters();
1955 __ IncrementCounter(counters->string_compare_native(), 1);
1956
1957 // Find minimum length.
1958 Label left_shorter;
1959 __ mov(scratch1, FieldOperand(left, String::kLengthOffset));
1960 __ mov(scratch3, scratch1);
1961 __ sub(scratch3, FieldOperand(right, String::kLengthOffset));
1962
1963 Register length_delta = scratch3;
1964
1965 __ j(less_equal, &left_shorter, Label::kNear);
1966 // Right string is shorter. Change scratch1 to be length of right string.
1967 __ sub(scratch1, length_delta);
1968 __ bind(&left_shorter);
1969
1970 Register min_length = scratch1;
1971
1972 // If either length is zero, just compare lengths.
1973 Label compare_lengths;
1974 __ test(min_length, min_length);
1975 __ j(zero, &compare_lengths, Label::kNear);
1976
1977 // Compare characters.
1978 Label result_not_equal;
1979 GenerateOneByteCharsCompareLoop(masm, left, right, min_length, scratch2,
1980 &result_not_equal, Label::kNear);
1981
1982 // Compare lengths - strings up to min-length are equal.
1983 __ bind(&compare_lengths);
1984 __ test(length_delta, length_delta);
1985 Label length_not_equal;
1986 __ j(not_zero, &length_not_equal, Label::kNear);
1987
1988 // Result is EQUAL.
1989 STATIC_ASSERT(EQUAL == 0);
1990 STATIC_ASSERT(kSmiTag == 0);
1991 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
1992 __ ret(0);
1993
1994 Label result_greater;
1995 Label result_less;
1996 __ bind(&length_not_equal);
1997 __ j(greater, &result_greater, Label::kNear);
1998 __ jmp(&result_less, Label::kNear);
1999 __ bind(&result_not_equal);
2000 __ j(above, &result_greater, Label::kNear);
2001 __ bind(&result_less);
2002
2003 // Result is LESS.
2004 __ Move(eax, Immediate(Smi::FromInt(LESS)));
2005 __ ret(0);
2006
2007 // Result is GREATER.
2008 __ bind(&result_greater);
2009 __ Move(eax, Immediate(Smi::FromInt(GREATER)));
2010 __ ret(0);
2011 }
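// --- Editorial C analogue of the comparison above (hypothetical helper):
// compare up to the shorter length; if all of those characters match, the
// length difference decides the result, so a proper prefix sorts before its
// extension. Return values mirror LESS / EQUAL / GREATER.
static int ExampleCompareFlatOneByte(const uint8_t* left, size_t left_len,
                                     const uint8_t* right, size_t right_len) {
  size_t min_length = left_len < right_len ? left_len : right_len;
  for (size_t i = 0; i < min_length; i++) {
    if (left[i] != right[i]) return left[i] < right[i] ? -1 : 1;
  }
  if (left_len == right_len) return 0;
  return left_len < right_len ? -1 : 1;
}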
2012
2013
2014 void StringHelper::GenerateOneByteCharsCompareLoop(
2015 MacroAssembler* masm, Register left, Register right, Register length,
2016 Register scratch, Label* chars_not_equal,
2017 Label::Distance chars_not_equal_near) {
2018 // Change the index to run from -length to -1 by adding length to the string
2019 // start. This means the loop ends when the index reaches zero, so no
2020 // additional compare is needed.
2021 __ SmiUntag(length);
2022 __ lea(left,
2023 FieldOperand(left, length, times_1, SeqOneByteString::kHeaderSize));
2024 __ lea(right,
2025 FieldOperand(right, length, times_1, SeqOneByteString::kHeaderSize));
2026 __ neg(length);
2027 Register index = length; // index = -length;
2028
2029 // Compare loop.
2030 Label loop;
2031 __ bind(&loop);
2032 __ mov_b(scratch, Operand(left, index, times_1, 0));
2033 __ cmpb(scratch, Operand(right, index, times_1, 0));
2034 __ j(not_equal, chars_not_equal, chars_not_equal_near);
2035 __ inc(index);
2036 __ j(not_zero, &loop);
2037 }
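// --- Editorial sketch of the indexing trick above (hypothetical helper):
// both string cursors are advanced one past the end, and the index runs from
// -length up to 0, so the loop's increment doubles as its termination test
// and no extra compare is needed.
static bool ExampleCharsEqual(const uint8_t* left, const uint8_t* right,
                              ptrdiff_t length) {
  left += length;  // one past the last character
  right += length;
  for (ptrdiff_t index = -length; index != 0; index++) {
    if (left[index] != right[index]) return false;
  }
  return true;
}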
2038
2039
2040 void BinaryOpICWithAllocationSiteStub::Generate(MacroAssembler* masm) {
2041 // ----------- S t a t e -------------
2042 // -- edx : left
2043 // -- eax : right
2044 // -- esp[0] : return address
2045 // -----------------------------------
2046
2047 // Load ecx with the allocation site. We stick an undefined dummy value here
2048 // and replace it with the real allocation site later when we instantiate this
2049 // stub in BinaryOpICWithAllocationSiteStub::GetCodeCopyFromTemplate().
2050 __ mov(ecx, isolate()->factory()->undefined_value());
2051
2052 // Make sure that we actually patched the allocation site.
2053 if (FLAG_debug_code) {
2054 __ test(ecx, Immediate(kSmiTagMask));
2055 __ Assert(not_equal, kExpectedAllocationSite);
2056 __ cmp(FieldOperand(ecx, HeapObject::kMapOffset),
2057 isolate()->factory()->allocation_site_map());
2058 __ Assert(equal, kExpectedAllocationSite);
2059 }
2060
2061 // Tail call into the stub that handles binary operations with allocation
2062 // sites.
2063 BinaryOpWithAllocationSiteStub stub(isolate(), state());
2064 __ TailCallStub(&stub);
2065 }
2066
2067
2068 void CompareICStub::GenerateBooleans(MacroAssembler* masm) {
2069 DCHECK_EQ(CompareICState::BOOLEAN, state());
2070 Label miss;
2071 Label::Distance const miss_distance =
2072 masm->emit_debug_code() ? Label::kFar : Label::kNear;
2073
2074 __ JumpIfSmi(edx, &miss, miss_distance);
2075 __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
2076 __ JumpIfSmi(eax, &miss, miss_distance);
2077 __ mov(ebx, FieldOperand(eax, HeapObject::kMapOffset));
2078 __ JumpIfNotRoot(ecx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
2079 __ JumpIfNotRoot(ebx, Heap::kBooleanMapRootIndex, &miss, miss_distance);
2080 if (!Token::IsEqualityOp(op())) {
2081 __ mov(eax, FieldOperand(eax, Oddball::kToNumberOffset));
2082 __ AssertSmi(eax);
2083 __ mov(edx, FieldOperand(edx, Oddball::kToNumberOffset));
2084 __ AssertSmi(edx);
2085 __ xchg(eax, edx);
2086 }
2087 __ sub(eax, edx);
2088 __ Ret();
2089
2090 __ bind(&miss);
2091 GenerateMiss(masm);
2092 }
2093
2094
2095 void CompareICStub::GenerateSmis(MacroAssembler* masm) {
2096 DCHECK(state() == CompareICState::SMI);
2097 Label miss;
2098 __ mov(ecx, edx);
2099 __ or_(ecx, eax);
2100 __ JumpIfNotSmi(ecx, &miss, Label::kNear);
2101
2102 if (GetCondition() == equal) {
2103 // For equality we do not care about the sign of the result.
2104 __ sub(eax, edx);
2105 } else {
2106 Label done;
2107 __ sub(edx, eax);
2108 __ j(no_overflow, &done, Label::kNear);
2109 // Correct sign of result in case of overflow.
2110 __ not_(edx);
2111 __ bind(&done);
2112 __ mov(eax, edx);
2113 }
2114 __ ret(0);
2115
2116 __ bind(&miss);
2117 GenerateMiss(masm);
2118 }
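// --- Editorial sketch of the overflow fix above (hypothetical helper):
// subtracting two tagged smis can overflow 32 bits, flipping the sign of the
// result. When that happens, NOT-ing the wrapped difference (~d == -d - 1)
// restores the correct sign, and because tagged smis are even the fixed-up
// value can never collapse to zero.
static int32_t ExampleSmiCompare(int32_t left_tagged, int32_t right_tagged) {
  int64_t wide = static_cast<int64_t>(left_tagged) - right_tagged;
  int32_t diff = static_cast<int32_t>(wide);  // may have wrapped
  if (wide != diff) diff = ~diff;             // overflow: fix the sign
  return diff;  // negative: LESS, zero: EQUAL, positive: GREATER
}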
2119
2120
2121 void CompareICStub::GenerateNumbers(MacroAssembler* masm) {
2122 DCHECK(state() == CompareICState::NUMBER);
2123
2124 Label generic_stub, check_left;
2125 Label unordered, maybe_undefined1, maybe_undefined2;
2126 Label miss;
2127
2128 if (left() == CompareICState::SMI) {
2129 __ JumpIfNotSmi(edx, &miss);
2130 }
2131 if (right() == CompareICState::SMI) {
2132 __ JumpIfNotSmi(eax, &miss);
2133 }
2134
2135 // This port does not inline the double comparison; after the type checks
2136 // below it always falls back to the general compare stub (also the NaN path).
2137 __ JumpIfSmi(eax, &check_left, Label::kNear);
2138 __ cmp(FieldOperand(eax, HeapObject::kMapOffset),
2139 isolate()->factory()->heap_number_map());
2140 __ j(not_equal, &maybe_undefined1, Label::kNear);
2141
2142 __ bind(&check_left);
2143 __ JumpIfSmi(edx, &generic_stub, Label::kNear);
2144 __ cmp(FieldOperand(edx, HeapObject::kMapOffset),
2145 isolate()->factory()->heap_number_map());
2146 __ j(not_equal, &maybe_undefined2, Label::kNear);
2147
2148 __ bind(&unordered);
2149 __ bind(&generic_stub);
2150 CompareICStub stub(isolate(), op(), CompareICState::GENERIC,
2151 CompareICState::GENERIC, CompareICState::GENERIC);
2152 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
2153
2154 __ bind(&maybe_undefined1);
2155 if (Token::IsOrderedRelationalCompareOp(op())) {
2156 __ cmp(eax, Immediate(isolate()->factory()->undefined_value()));
2157 __ j(not_equal, &miss);
2158 __ JumpIfSmi(edx, &unordered);
2159 __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
2160 __ j(not_equal, &maybe_undefined2, Label::kNear);
2161 __ jmp(&unordered);
2162 }
2163
2164 __ bind(&maybe_undefined2);
2165 if (Token::IsOrderedRelationalCompareOp(op())) {
2166 __ cmp(edx, Immediate(isolate()->factory()->undefined_value()));
2167 __ j(equal, &unordered);
2168 }
2169
2170 __ bind(&miss);
2171 GenerateMiss(masm);
2172 }
2173
2174
2175 void CompareICStub::GenerateInternalizedStrings(MacroAssembler* masm) {
2176 DCHECK(state() == CompareICState::INTERNALIZED_STRING);
2177 DCHECK(GetCondition() == equal);
2178
2179 // Registers containing left and right operands respectively.
2180 Register left = edx;
2181 Register right = eax;
2182 Register tmp1 = ecx;
2183 Register tmp2 = ebx;
2184
2185 // Check that both operands are heap objects.
2186 Label miss;
2187 __ mov(tmp1, left);
2188 STATIC_ASSERT(kSmiTag == 0);
2189 __ and_(tmp1, right);
2190 __ JumpIfSmi(tmp1, &miss, Label::kNear);
2191
2192 // Check that both operands are internalized strings.
2193 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2194 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2195 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2196 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2197 STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
2198 __ or_(tmp1, tmp2);
2199 __ test(tmp1, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
2200 __ j(not_zero, &miss, Label::kNear);
2201
2202 // Internalized strings are compared by identity.
2203 Label done;
2204 __ cmp(left, right);
2205 // If the operands are not equal, eax already holds a non-zero result:
2206 // the right operand, which is a non-zero heap pointer.
2207 DCHECK(right.is(eax));
2208 __ j(not_equal, &done, Label::kNear);
2209 STATIC_ASSERT(EQUAL == 0);
2210 STATIC_ASSERT(kSmiTag == 0);
2211 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2212 __ bind(&done);
2213 __ ret(0);
2214
2215 __ bind(&miss);
2216 GenerateMiss(masm);
2217 }
2218
2219
2220 void CompareICStub::GenerateUniqueNames(MacroAssembler* masm) {
2221 DCHECK(state() == CompareICState::UNIQUE_NAME);
2222 DCHECK(GetCondition() == equal);
2223
2224 // Registers containing left and right operands respectively.
2225 Register left = edx;
2226 Register right = eax;
2227 Register tmp1 = ecx;
2228 Register tmp2 = ebx;
2229
2230 // Check that both operands are heap objects.
2231 Label miss;
2232 __ mov(tmp1, left);
2233 STATIC_ASSERT(kSmiTag == 0);
2234 __ and_(tmp1, right);
2235 __ JumpIfSmi(tmp1, &miss, Label::kNear);
2236
2237 // Check that both operands are unique names. This leaves the instance
2238 // types loaded in tmp1 and tmp2.
2239 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2240 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2241 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2242 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2243
2244 __ JumpIfNotUniqueNameInstanceType(tmp1, &miss, Label::kNear);
2245 __ JumpIfNotUniqueNameInstanceType(tmp2, &miss, Label::kNear);
2246
2247 // Unique names are compared by identity.
2248 Label done;
2249 __ cmp(left, right);
2250 // If the operands are not equal, eax already holds a non-zero result:
2251 // the right operand, which is a non-zero heap pointer.
2252 DCHECK(right.is(eax));
2253 __ j(not_equal, &done, Label::kNear);
2254 STATIC_ASSERT(EQUAL == 0);
2255 STATIC_ASSERT(kSmiTag == 0);
2256 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2257 __ bind(&done);
2258 __ ret(0);
2259
2260 __ bind(&miss);
2261 GenerateMiss(masm);
2262 }
2263
2264
2265 void CompareICStub::GenerateStrings(MacroAssembler* masm) {
2266 DCHECK(state() == CompareICState::STRING);
2267 Label miss;
2268
2269 bool equality = Token::IsEqualityOp(op());
2270
2271 // Registers containing left and right operands respectively.
2272 Register left = edx;
2273 Register right = eax;
2274 Register tmp1 = ecx;
2275 Register tmp2 = ebx;
2276 Register tmp3 = edi;
2277
2278 // Check that both operands are heap objects.
2279 __ mov(tmp1, left);
2280 STATIC_ASSERT(kSmiTag == 0);
2281 __ and_(tmp1, right);
2282 __ JumpIfSmi(tmp1, &miss);
2283
2284 // Check that both operands are strings. This leaves the instance
2285 // types loaded in tmp1 and tmp2.
2286 __ mov(tmp1, FieldOperand(left, HeapObject::kMapOffset));
2287 __ mov(tmp2, FieldOperand(right, HeapObject::kMapOffset));
2288 __ movzx_b(tmp1, FieldOperand(tmp1, Map::kInstanceTypeOffset));
2289 __ movzx_b(tmp2, FieldOperand(tmp2, Map::kInstanceTypeOffset));
2290 __ mov(tmp3, tmp1);
2291 STATIC_ASSERT(kNotStringTag != 0);
2292 __ or_(tmp3, tmp2);
2293 __ test(tmp3, Immediate(kIsNotStringMask));
2294 __ j(not_zero, &miss);
2295
2296 // Fast check for identical strings.
2297 Label not_same;
2298 __ cmp(left, right);
2299 __ j(not_equal, &not_same, Label::kNear);
2300 STATIC_ASSERT(EQUAL == 0);
2301 STATIC_ASSERT(kSmiTag == 0);
2302 __ Move(eax, Immediate(Smi::FromInt(EQUAL)));
2303 __ ret(0);
2304
2305 // Handle not identical strings.
2306 __ bind(&not_same);
2307
2308 // Check that both strings are internalized. If they are, we're done
2309 // because we already know they are not identical. But in the case of
2310 // non-equality compare, we still need to determine the order. We
2311 // also know they are both strings.
2312 if (equality) {
2313 Label do_compare;
2314 STATIC_ASSERT(kInternalizedTag == 0);
2315 __ or_(tmp1, tmp2);
2316 __ test(tmp1, Immediate(kIsNotInternalizedMask));
2317 __ j(not_zero, &do_compare, Label::kNear);
2318 // Returning eax as-is yields a non-zero (NOT_EQUAL) result: eax holds the
2319 // right operand, which is guaranteed to be a non-zero heap pointer.
2320 DCHECK(right.is(eax));
2321 __ ret(0);
2322 __ bind(&do_compare);
2323 }
2324
2325 // Check that both strings are sequential one-byte.
2326 Label runtime;
2327 __ JumpIfNotBothSequentialOneByteStrings(left, right, tmp1, tmp2, &runtime);
2328
2329 // Compare flat one byte strings. Returns when done.
2330 if (equality) {
2331 StringHelper::GenerateFlatOneByteStringEquals(masm, left, right, tmp1,
2332 tmp2);
2333 } else {
2334 StringHelper::GenerateCompareFlatOneByteStrings(masm, left, right, tmp1,
2335 tmp2, tmp3);
2336 }
2337
2338 // Handle more complex cases in runtime.
2339 __ bind(&runtime);
2340 if (equality) {
2341 {
2342 FrameScope scope(masm, StackFrame::INTERNAL);
2343 __ Push(left);
2344 __ Push(right);
2345 __ CallRuntime(Runtime::kStringEqual);
2346 }
2347 __ sub(eax, Immediate(masm->isolate()->factory()->true_value()));
2348 __ Ret();
2349 } else {
2350 __ pop(tmp1); // Return address.
2351 __ push(left);
2352 __ push(right);
2353 __ push(tmp1);
2354 __ TailCallRuntime(Runtime::kStringCompare);
2355 }
2356
2357 __ bind(&miss);
2358 GenerateMiss(masm);
2359 }
2360
2361
2362 void CompareICStub::GenerateReceivers(MacroAssembler* masm) {
2363 DCHECK_EQ(CompareICState::RECEIVER, state());
2364 Label miss;
2365 __ mov(ecx, edx);
2366 __ and_(ecx, eax);
2367 __ JumpIfSmi(ecx, &miss, Label::kNear);
2368
2369 STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
2370 __ CmpObjectType(eax, FIRST_JS_RECEIVER_TYPE, ecx);
2371 __ j(below, &miss, Label::kNear);
2372 __ CmpObjectType(edx, FIRST_JS_RECEIVER_TYPE, ecx);
2373 __ j(below, &miss, Label::kNear);
2374
2375 DCHECK_EQ(equal, GetCondition());
2376 __ sub(eax, edx);
2377 __ ret(0);
2378
2379 __ bind(&miss);
2380 GenerateMiss(masm);
2381 }
2382
2383
2384 void CompareICStub::GenerateKnownReceivers(MacroAssembler* masm) {
2385 Label miss;
2386 Handle<WeakCell> cell = Map::WeakCellForMap(known_map_);
2387 __ mov(ecx, edx);
2388 __ and_(ecx, eax);
2389 __ JumpIfSmi(ecx, &miss, Label::kNear);
2390
2391 __ GetWeakValue(edi, cell);
2392 __ cmp(edi, FieldOperand(eax, HeapObject::kMapOffset));
2393 __ j(not_equal, &miss, Label::kNear);
2394 __ cmp(edi, FieldOperand(edx, HeapObject::kMapOffset));
2395 __ j(not_equal, &miss, Label::kNear);
2396
2397 if (Token::IsEqualityOp(op())) {
2398 __ sub(eax, edx);
2399 __ ret(0);
2400 } else {
2401 __ PopReturnAddressTo(ecx);
2402 __ Push(edx);
2403 __ Push(eax);
2404 __ Push(Immediate(Smi::FromInt(NegativeComparisonResult(GetCondition()))));
2405 __ PushReturnAddressFrom(ecx);
2406 __ TailCallRuntime(Runtime::kCompare);
2407 }
2408
2409 __ bind(&miss);
2410 GenerateMiss(masm);
2411 }
2412
2413
2414 void CompareICStub::GenerateMiss(MacroAssembler* masm) {
2415 {
2416 // Call the runtime system in a fresh internal frame.
2417 FrameScope scope(masm, StackFrame::INTERNAL);
2418 __ push(edx); // Preserve edx and eax.
2419 __ push(eax);
2420 __ push(edx); // And also use them as the arguments.
2421 __ push(eax);
2422 __ push(Immediate(Smi::FromInt(op())));
2423 __ CallRuntime(Runtime::kCompareIC_Miss);
2424 // Compute the entry point of the rewritten stub.
2425 __ lea(edi, FieldOperand(eax, Code::kHeaderSize));
2426 __ pop(eax);
2427 __ pop(edx);
2428 }
2429
2430 // Do a tail call to the rewritten stub.
2431 __ jmp(edi);
2432 }
2433
2434
2435 // Helper function used to check that the dictionary doesn't contain
2436 // the property. This function may return false negatives, so miss_label
2437 // must always call a backup property check that is complete.
2438 // This function is safe to call if the receiver has fast properties.
2439 // Name must be a unique name and receiver must be a heap object.
2440 void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
2441 Label* miss,
2442 Label* done,
2443 Register properties,
2444 Handle<Name> name,
2445 Register r0) {
2446 DCHECK(name->IsUniqueName());
2447
2448 // If the names of the slots in the range from 1 to kProbes - 1 for the hash
2449 // value are not equal to the given name, and the kProbes-th slot is not used
2450 // (its name is the undefined value), the hash table is guaranteed not to
2451 // contain the property. This holds even if some slots represent deleted
2452 // properties (their names are the hole value).
2453 for (int i = 0; i < kInlinedProbes; i++) {
2454 // Compute the masked index: (hash + i + i * i) & mask.
2455 Register index = r0;
2456 // Capacity is smi 2^n.
2457 __ mov(index, FieldOperand(properties, kCapacityOffset));
2458 __ dec(index);
2459 __ and_(index,
2460 Immediate(Smi::FromInt(name->Hash() +
2461 NameDictionary::GetProbeOffset(i))));
2462
2463 // Scale the index by multiplying by the entry size.
2464 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
2465 __ lea(index, Operand(index, index, times_2, 0)); // index *= 3.
2466 Register entity_name = r0;
2467 // Having undefined at this place means the name is not contained.
2468 STATIC_ASSERT(kSmiTagSize == 1);
2469 __ mov(entity_name, Operand(properties, index, times_half_pointer_size,
2470 kElementsStartOffset - kHeapObjectTag));
2471 __ cmp(entity_name, masm->isolate()->factory()->undefined_value());
2472 __ j(equal, done);
2473
2474 // Stop if found the property.
2475 __ cmp(entity_name, Handle<Name>(name));
2476 __ j(equal, miss);
2477
2478 Label good;
2479 // Check for the hole and skip.
2480 __ cmp(entity_name, masm->isolate()->factory()->the_hole_value());
2481 __ j(equal, &good, Label::kNear);
2482
2483 // Check if the entry name is not a unique name.
2484 __ mov(entity_name, FieldOperand(entity_name, HeapObject::kMapOffset));
2485 __ JumpIfNotUniqueNameInstanceType(
2486 FieldOperand(entity_name, Map::kInstanceTypeOffset), miss);
2487 __ bind(&good);
2488 }
2489
2490 NameDictionaryLookupStub stub(masm->isolate(), properties, r0, r0,
2491 NEGATIVE_LOOKUP);
2492 __ push(Immediate(Handle<Object>(name)));
2493 __ push(Immediate(name->Hash()));
2494 __ CallStub(&stub);
2495 __ test(r0, r0);
2496 __ j(not_zero, miss);
2497 __ jmp(done);
2498 }
2499
2500
2501 // Probe the name dictionary in the |elements| register. Jump to the
2502 // |done| label if a property with the given name is found leaving the
2503 // index into the dictionary in |r0|. Jump to the |miss| label
2504 // otherwise.
2505 void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
2506 Label* miss,
2507 Label* done,
2508 Register elements,
2509 Register name,
2510 Register r0,
2511 Register r1) {
2512 DCHECK(!elements.is(r0));
2513 DCHECK(!elements.is(r1));
2514 DCHECK(!name.is(r0));
2515 DCHECK(!name.is(r1));
2516
2517 __ AssertName(name);
2518
2519 __ mov(r1, FieldOperand(elements, kCapacityOffset));
2520 __ shr(r1, kSmiTagSize); // convert smi to int
2521 __ dec(r1);
2522
2523 // Generate an unrolled loop that performs a few probes before
2524 // giving up. Measurements done on Gmail indicate that 2 probes
2525 // cover ~93% of loads from dictionaries.
2526 for (int i = 0; i < kInlinedProbes; i++) {
2527 // Compute the masked index: (hash + i + i * i) & mask.
2528 __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
2529 __ shr(r0, Name::kHashShift);
2530 if (i > 0) {
2531 __ add(r0, Immediate(NameDictionary::GetProbeOffset(i)));
2532 }
2533 __ and_(r0, r1);
2534
2535 // Scale the index by multiplying by the entry size.
2536 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
2537 __ lea(r0, Operand(r0, r0, times_2, 0)); // r0 = r0 * 3
2538
2539 // Check if the key is identical to the name.
2540 __ cmp(name, Operand(elements,
2541 r0,
2542 times_4,
2543 kElementsStartOffset - kHeapObjectTag));
2544 __ j(equal, done);
2545 }
2546
2547 NameDictionaryLookupStub stub(masm->isolate(), elements, r1, r0,
2548 POSITIVE_LOOKUP);
2549 __ push(name);
2550 __ mov(r0, FieldOperand(name, Name::kHashFieldOffset));
2551 __ shr(r0, Name::kHashShift);
2552 __ push(r0);
2553 __ CallStub(&stub);
2554
2555 __ test(r1, r1);
2556 __ j(zero, miss);
2557 __ jmp(done);
2558 }
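// --- Editorial sketch of the probing scheme used by both lookups above
// (hypothetical helper; assumes NameDictionary::GetProbeOffset(i) is the
// triangular offset (i + i * i) / 2, which visits every slot of a
// power-of-two table). Entries are triples (kEntrySize == 3), which is why
// the stubs scale the index by 3 via lea.
static int ExampleFindEntry(const uint32_t* keys, uint32_t capacity,
                            uint32_t hash, uint32_t key) {
  uint32_t mask = capacity - 1;  // capacity is a power of two
  for (uint32_t i = 0; i < capacity; i++) {
    uint32_t index = (hash + (i + i * i) / 2) & mask;
    if (keys[index] == key) return static_cast<int>(index);
    if (keys[index] == 0) return -1;  // 0 stands in for "undefined": absent
  }
  return -1;
}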
2559
2560
2561 void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
2562 // This stub overrides SometimesSetsUpAFrame() to return false. That means
2563 // we cannot call anything that could cause a GC from this stub.
2564 // Stack frame on entry:
2565 // esp[0 * kPointerSize]: return address.
2566 // esp[1 * kPointerSize]: key's hash.
2567 // esp[2 * kPointerSize]: key.
2568 // Registers:
2569 // dictionary_: NameDictionary to probe.
2570 // result_: used as scratch.
2571 // index_: will hold the index of the entry if the lookup is successful;
2572 // it might alias result_.
2573 // Returns:
2574 // result_ is zero if lookup failed, non zero otherwise.
2575
2576 Label in_dictionary, maybe_in_dictionary, not_in_dictionary;
2577
2578 Register scratch = result();
2579
2580 __ mov(scratch, FieldOperand(dictionary(), kCapacityOffset));
2581 __ dec(scratch);
2582 __ SmiUntag(scratch);
2583 __ push(scratch);
2584
2585 // If the names of the slots in the range from 1 to kProbes - 1 for the hash
2586 // value are not equal to the given name, and the kProbes-th slot is not used
2587 // (its name is the undefined value), the hash table is guaranteed not to
2588 // contain the property. This holds even if some slots represent deleted
2589 // properties (their names are the hole value).
2590 for (int i = kInlinedProbes; i < kTotalProbes; i++) {
2591 // Compute the masked index: (hash + i + i * i) & mask.
2592 __ mov(scratch, Operand(esp, 2 * kPointerSize));
2593 if (i > 0) {
2594 __ add(scratch, Immediate(NameDictionary::GetProbeOffset(i)));
2595 }
2596 __ and_(scratch, Operand(esp, 0));
2597
2598 // Scale the index by multiplying by the entry size.
2599 STATIC_ASSERT(NameDictionary::kEntrySize == 3);
2600 __ lea(index(), Operand(scratch, scratch, times_2, 0)); // index *= 3.
2601
2602 // Having undefined at this place means the name is not contained.
2603 STATIC_ASSERT(kSmiTagSize == 1);
2604 __ mov(scratch, Operand(dictionary(), index(), times_pointer_size,
2605 kElementsStartOffset - kHeapObjectTag));
2606 __ cmp(scratch, isolate()->factory()->undefined_value());
2607 __ j(equal, &not_in_dictionary);
2608
2609 // Stop if found the property.
2610 __ cmp(scratch, Operand(esp, 3 * kPointerSize));
2611 __ j(equal, &in_dictionary);
2612
2613 if (i != kTotalProbes - 1 && mode() == NEGATIVE_LOOKUP) {
2614 // If we hit a key that is not a unique name during negative
2615 // lookup we have to bailout as this key might be equal to the
2616 // key we are looking for.
2617
2618 // Check if the entry name is not a unique name.
2619 __ mov(scratch, FieldOperand(scratch, HeapObject::kMapOffset));
2620 __ JumpIfNotUniqueNameInstanceType(
2621 FieldOperand(scratch, Map::kInstanceTypeOffset),
2622 &maybe_in_dictionary);
2623 }
2624 }
2625
2626 __ bind(&maybe_in_dictionary);
2627 // If we are doing negative lookup then probing failure should be
2628 // treated as a lookup success. For positive lookup probing failure
2629 // should be treated as lookup failure.
2630 if (mode() == POSITIVE_LOOKUP) {
2631 __ mov(result(), Immediate(0));
2632 __ Drop(1);
2633 __ ret(2 * kPointerSize);
2634 }
2635
2636 __ bind(&in_dictionary);
2637 __ mov(result(), Immediate(1));
2638 __ Drop(1);
2639 __ ret(2 * kPointerSize);
2640
2641 __ bind(&not_in_dictionary);
2642 __ mov(result(), Immediate(0));
2643 __ Drop(1);
2644 __ ret(2 * kPointerSize);
2645 }
2646
2647
2648 void StoreBufferOverflowStub::GenerateFixedRegStubsAheadOfTime(
2649 Isolate* isolate) {
2650 StoreBufferOverflowStub stub(isolate, kDontSaveFPRegs);
2651 stub.GetCode();
2652 StoreBufferOverflowStub stub2(isolate, kSaveFPRegs);
2653 stub2.GetCode();
2654 }
2655
2656
2657 // Takes the input in 3 registers: address_ value_ and object_. A pointer to
2658 // the value has just been written into the object, now this stub makes sure
2659 // we keep the GC informed. The word in the object where the value has been
2660 // written is in the address register.
2661 void RecordWriteStub::Generate(MacroAssembler* masm) {
2662 Label skip_to_incremental_noncompacting;
2663 Label skip_to_incremental_compacting;
2664
2665 // The first two instructions are generated with labels so as to get the
2666 // offsets fixed up correctly by the bind(Label*) calls. We patch them back
2667 // and forth between compare instructions (which act as nops in this
2668 // position) and the real branches when we start and stop incremental heap marking.
2669 __ jmp(&skip_to_incremental_noncompacting, Label::kNear);
2670 __ jmp(&skip_to_incremental_compacting, Label::kFar);
2671
2672 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
2673 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2674 MacroAssembler::kReturnAtEnd);
2675 } else {
2676 __ ret(0);
2677 }
2678
2679 __ bind(&skip_to_incremental_noncompacting);
2680 GenerateIncremental(masm, INCREMENTAL);
2681
2682 __ bind(&skip_to_incremental_compacting);
2683 GenerateIncremental(masm, INCREMENTAL_COMPACTION);
2684
2685 // Initial mode of the stub is expected to be STORE_BUFFER_ONLY.
2686 // Will be checked in IncrementalMarking::ActivateGeneratedStub.
2687 masm->set_byte_at(0, kTwoByteNopInstruction);
2688 masm->set_byte_at(2, kFiveByteNopInstruction);
2689 }
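// --- Editorial sketch of the patching protocol above (hypothetical helper;
// the byte values are an assumption matching the constants declared for this
// stub: 0x3c/0xeb for the 2-byte slot and 0x3d/0xe9 for the 5-byte slot).
// Each of the two leading instruction slots holds either a jmp (incremental
// marking active) or a same-sized cmp-with-immediate that acts as a nop
// (marking off), so the mode can be read back from the stub's first bytes.
enum ExampleMode {
  EXAMPLE_STORE_BUFFER_ONLY,
  EXAMPLE_INCREMENTAL,
  EXAMPLE_INCREMENTAL_COMPACTION
};
static ExampleMode ExampleGetMode(const uint8_t* stub_start) {
  if (stub_start[0] == 0xeb) return EXAMPLE_INCREMENTAL;  // short jmp
  if (stub_start[2] == 0xe9) return EXAMPLE_INCREMENTAL_COMPACTION;  // jmp
  return EXAMPLE_STORE_BUFFER_ONLY;  // both slots patched to nops
}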
2690
2691
2692 void RecordWriteStub::GenerateIncremental(MacroAssembler* masm, Mode mode) {
2693 regs_.Save(masm);
2694
2695 if (remembered_set_action() == EMIT_REMEMBERED_SET) {
2696 Label dont_need_remembered_set;
2697
2698 __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
2699 __ JumpIfNotInNewSpace(regs_.scratch0(), // Value.
2700 regs_.scratch0(),
2701 &dont_need_remembered_set);
2702
2703 __ JumpIfInNewSpace(regs_.object(), regs_.scratch0(),
2704 &dont_need_remembered_set);
2705
2706 // First notify the incremental marker if necessary, then update the
2707 // remembered set.
2708 CheckNeedsToInformIncrementalMarker(
2709 masm,
2710 kUpdateRememberedSetOnNoNeedToInformIncrementalMarker,
2711 mode);
2712 InformIncrementalMarker(masm);
2713 regs_.Restore(masm);
2714 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2715 MacroAssembler::kReturnAtEnd);
2716
2717 __ bind(&dont_need_remembered_set);
2718 }
2719
2720 CheckNeedsToInformIncrementalMarker(
2721 masm,
2722 kReturnOnNoNeedToInformIncrementalMarker,
2723 mode);
2724 InformIncrementalMarker(masm);
2725 regs_.Restore(masm);
2726 __ ret(0);
2727 }
2728
2729
2730 void RecordWriteStub::InformIncrementalMarker(MacroAssembler* masm) {
2731 regs_.SaveCallerSaveRegisters(masm, save_fp_regs_mode());
2732 int argument_count = 3;
2733 __ PrepareCallCFunction(argument_count, regs_.scratch0());
2734 __ mov(Operand(esp, 0 * kPointerSize), regs_.object());
2735 __ mov(Operand(esp, 1 * kPointerSize), regs_.address()); // Slot.
2736 __ mov(Operand(esp, 2 * kPointerSize),
2737 Immediate(ExternalReference::isolate_address(isolate())));
2738
2739 AllowExternalCallThatCantCauseGC scope(masm);
2740 __ CallCFunction(
2741 ExternalReference::incremental_marking_record_write_function(isolate()),
2742 argument_count);
2743
2744 regs_.RestoreCallerSaveRegisters(masm, save_fp_regs_mode());
2745 }
2746
2747
2748 void RecordWriteStub::CheckNeedsToInformIncrementalMarker(
2749 MacroAssembler* masm,
2750 OnNoNeedToInformIncrementalMarker on_no_need,
2751 Mode mode) {
2752 Label object_is_black, need_incremental, need_incremental_pop_object;
2753
2754 // Let's look at the color of the object: If it is not black we don't have
2755 // to inform the incremental marker.
2756 __ JumpIfBlack(regs_.object(),
2757 regs_.scratch0(),
2758 regs_.scratch1(),
2759 &object_is_black,
2760 Label::kNear);
2761
2762 regs_.Restore(masm);
2763 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
2764 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2765 MacroAssembler::kReturnAtEnd);
2766 } else {
2767 __ ret(0);
2768 }
2769
2770 __ bind(&object_is_black);
2771
2772 // Get the value from the slot.
2773 __ mov(regs_.scratch0(), Operand(regs_.address(), 0));
2774
2775 if (mode == INCREMENTAL_COMPACTION) {
2776 Label ensure_not_white;
2777
2778 __ CheckPageFlag(regs_.scratch0(), // Contains value.
2779 regs_.scratch1(), // Scratch.
2780 MemoryChunk::kEvacuationCandidateMask,
2781 zero,
2782 &ensure_not_white,
2783 Label::kNear);
2784
2785 __ CheckPageFlag(regs_.object(),
2786 regs_.scratch1(), // Scratch.
2787 MemoryChunk::kSkipEvacuationSlotsRecordingMask,
2788 not_zero,
2789 &ensure_not_white,
2790 Label::kNear);
2791
2792 __ jmp(&need_incremental);
2793
2794 __ bind(&ensure_not_white);
2795 }
2796
2797 // We need an extra register for this, so we push the object register
2798 // temporarily.
2799 __ push(regs_.object());
2800 __ JumpIfWhite(regs_.scratch0(), // The value.
2801 regs_.scratch1(), // Scratch.
2802 regs_.object(), // Scratch.
2803 &need_incremental_pop_object, Label::kNear);
2804 __ pop(regs_.object());
2805
2806 regs_.Restore(masm);
2807 if (on_no_need == kUpdateRememberedSetOnNoNeedToInformIncrementalMarker) {
2808 __ RememberedSetHelper(object(), address(), value(), save_fp_regs_mode(),
2809 MacroAssembler::kReturnAtEnd);
2810 } else {
2811 __ ret(0);
2812 }
2813
2814 __ bind(&need_incremental_pop_object);
2815 __ pop(regs_.object());
2816
2817 __ bind(&need_incremental);
2818
2819 // Fall through when we need to inform the incremental marker.
2820 }
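// --- Editorial sketch of the core decision above, in marking-color terms
// (hypothetical predicate): only a write into a black (already scanned)
// object needs the marker's attention, and then only if the value written is
// still white (unmarked). The INCREMENTAL_COMPACTION variant additionally
// performs the evacuation-candidate page checks seen above before this test.
static bool ExampleNeedsToInformMarker(bool object_is_black,
                                       bool value_is_white) {
  if (!object_is_black) return false;  // object will still be (re)scanned
  return value_is_white;  // a black -> white edge must be reported
}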
2821
2822
2823 void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
2824 CEntryStub ces(isolate(), 1, kSaveFPRegs);
2825 __ call(ces.GetCode(), RelocInfo::CODE_TARGET);
2826 int parameter_count_offset =
2827 StubFailureTrampolineFrameConstants::kArgumentsLengthOffset;
2828 __ mov(ebx, MemOperand(ebp, parameter_count_offset));
2829 masm->LeaveFrame(StackFrame::STUB_FAILURE_TRAMPOLINE);
2830 __ pop(ecx);
2831 int additional_offset =
2832 function_mode() == JS_FUNCTION_STUB_MODE ? kPointerSize : 0;
2833 __ lea(esp, MemOperand(esp, ebx, times_pointer_size, additional_offset));
2834 __ jmp(ecx); // Return to IC Miss stub, continuation still on stack.
2835 }
2836
2837 void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
2838 __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister());
2839 KeyedStoreICStub stub(isolate(), state());
2840 stub.GenerateForTrampoline(masm);
2841 }
2842
2843 // The value is on the stack already.
2844 static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register receiver,
2845 Register key, Register vector,
2846 Register slot, Register feedback,
2847 bool is_polymorphic, Label* miss) {
2848 // feedback initially contains the feedback array
2849 Label next, next_loop, prepare_next;
2850 Label load_smi_map, compare_map;
2851 Label start_polymorphic;
2852 Label pop_and_miss;
2853
2854 __ push(receiver);
2855 // Value, vector and slot are passed on the stack, so no need to save/restore
2856 // them.
2857
2858 Register receiver_map = receiver;
2859 Register cached_map = vector;
2860
2861 // Receiver might not be a heap object.
2862 __ JumpIfSmi(receiver, &load_smi_map);
2863 __ mov(receiver_map, FieldOperand(receiver, 0));
2864 __ bind(&compare_map);
2865 __ mov(cached_map, FieldOperand(feedback, FixedArray::OffsetOfElementAt(0)));
2866
2867 // A named keyed store might have a two-element array; all other cases can
2868 // count on an array with at least two {map, handler} pairs, so they can go
2869 // right into polymorphic array handling.
2870 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
2871 __ j(not_equal, &start_polymorphic);
2872
2873 // found, now call handler.
2874 Register handler = feedback;
2875 DCHECK(handler.is(StoreWithVectorDescriptor::ValueRegister()));
2876 __ mov(handler, FieldOperand(feedback, FixedArray::OffsetOfElementAt(1)));
2877 __ pop(receiver);
2878 __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
2879 __ jmp(handler);
2880
2881 // Polymorphic, we have to loop from 2 to N
2882 __ bind(&start_polymorphic);
2883 __ push(key);
2884 Register counter = key;
2885 __ mov(counter, Immediate(Smi::FromInt(2)));
2886
2887 if (!is_polymorphic) {
2888 // If is_polymorphic is false, we may only have a two-element array.
2889 // Check against the length now in that case.
2890 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
2891 __ j(greater_equal, &pop_and_miss);
2892 }
2893
2894 __ bind(&next_loop);
2895 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
2896 FixedArray::kHeaderSize));
2897 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
2898 __ j(not_equal, &prepare_next);
2899 __ mov(handler, FieldOperand(feedback, counter, times_half_pointer_size,
2900 FixedArray::kHeaderSize + kPointerSize));
2901 __ lea(handler, FieldOperand(handler, Code::kHeaderSize));
2902 __ pop(key);
2903 __ pop(receiver);
2904 __ jmp(handler);
2905
2906 __ bind(&prepare_next);
2907 __ add(counter, Immediate(Smi::FromInt(2)));
2908 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
2909 __ j(less, &next_loop);
2910
2911 // We exhausted our array of map handler pairs.
2912 __ bind(&pop_and_miss);
2913 __ pop(key);
2914 __ pop(receiver);
2915 __ jmp(miss);
2916
2917 __ bind(&load_smi_map);
2918 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
2919 __ jmp(&compare_map);
2920 }
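// --- Editorial C analogue of the feedback walk above (hypothetical types):
// the feedback array holds {map, handler} pairs, so the loop starts right
// after the pair that was checked monomorphically and steps one pair at a
// time (the stub's counter steps by 2 array elements) until a map matches or
// the array is exhausted, which means a miss.
struct ExamplePair {
  const void* map;
  const void* handler;
};
static const void* ExampleFindHandler(const ExamplePair* pairs, int length,
                                      const void* receiver_map) {
  for (int i = 1; i < length; i++) {  // pairs[0] was already checked
    if (pairs[i].map == receiver_map) return pairs[i].handler;
  }
  return nullptr;  // no match: the caller jumps to the miss label
}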
2921
2922
2923 static void HandleMonomorphicStoreCase(MacroAssembler* masm, Register receiver,
2924 Register key, Register vector,
2925 Register slot, Register weak_cell,
2926 Label* miss) {
2927 // The store ic value is on the stack.
2928 DCHECK(weak_cell.is(StoreWithVectorDescriptor::ValueRegister()));
2929
2930 // feedback initially contains the feedback array
2931 Label compare_smi_map;
2932
2933 // Move the weak map into the weak_cell register.
2934 Register ic_map = weak_cell;
2935 __ mov(ic_map, FieldOperand(weak_cell, WeakCell::kValueOffset));
2936
2937 // Receiver might not be a heap object.
2938 __ JumpIfSmi(receiver, &compare_smi_map);
2939 __ cmp(ic_map, FieldOperand(receiver, 0));
2940 __ j(not_equal, miss);
2941 __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
2942 FixedArray::kHeaderSize + kPointerSize));
2943 __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
2944 // jump to the handler.
2945 __ jmp(weak_cell);
2946
2947 // In microbenchmarks, it made sense to unroll this code so that the call to
2948 // the handler is duplicated for a HeapObject receiver and a Smi receiver.
2949 __ bind(&compare_smi_map);
2950 __ CompareRoot(ic_map, Heap::kHeapNumberMapRootIndex);
2951 __ j(not_equal, miss);
2952 __ mov(weak_cell, FieldOperand(vector, slot, times_half_pointer_size,
2953 FixedArray::kHeaderSize + kPointerSize));
2954 __ lea(weak_cell, FieldOperand(weak_cell, Code::kHeaderSize));
2955 // jump to the handler.
2956 __ jmp(weak_cell);
2957 }
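// --- Editorial sketch of the map check above (hypothetical helper): a smi
// receiver has no map word, so it is treated as if it had the heap number
// map; one cached map then covers both boxed and unboxed numbers.
static bool ExampleMonomorphicMatch(const void* receiver_map_or_null,
                                    const void* heap_number_map,
                                    const void* cached_map) {
  const void* map = receiver_map_or_null != nullptr
                        ? receiver_map_or_null  // heap object: its map
                        : heap_number_map;      // smi: pretend heap number
  return map == cached_map;  // a mismatch goes to the miss label
}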
2958
2959 void KeyedStoreICStub::Generate(MacroAssembler* masm) {
2960 GenerateImpl(masm, false);
2961 }
2962
2963 void KeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
2964 GenerateImpl(masm, true);
2965 }
2966
2967
2968 static void HandlePolymorphicKeyedStoreCase(MacroAssembler* masm,
2969 Register receiver, Register key,
2970 Register vector, Register slot,
2971 Register feedback, Label* miss) {
2972 // feedback initially contains the feedback array
2973 Label next, next_loop, prepare_next;
2974 Label load_smi_map, compare_map;
2975 Label transition_call;
2976 Label pop_and_miss;
2977
2978 __ push(receiver);
2979 // Value, vector and slot are passed on the stack, so no need to save/restore
2980 // them.
2981
2982 Register receiver_map = receiver;
2983 Register cached_map = vector;
2984
2985 // Receiver might not be a heap object.
2986 __ JumpIfSmi(receiver, &load_smi_map);
2987 __ mov(receiver_map, FieldOperand(receiver, 0));
2988 __ bind(&compare_map);
2989
2990 // Polymorphic, we have to loop from 0 to N - 1
2991 __ push(key);
2992 // Current stack layout:
2993 // - esp[0] -- key
2994 // - esp[4] -- receiver
2995 // - esp[8] -- return address
2996 // - esp[12] -- vector
2997 // - esp[16] -- slot
2998 // - esp[20] -- value
2999 //
3000 // Required stack layout for handler call (see StoreWithVectorDescriptor):
3001 // - esp[0] -- return address
3002 // - esp[4] -- vector
3003 // - esp[8] -- slot
3004 // - esp[12] -- value
3005 // - receiver, key, handler in registers.
3006 Register counter = key;
3007 __ mov(counter, Immediate(Smi::kZero));
3008 __ bind(&next_loop);
3009 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
3010 FixedArray::kHeaderSize));
3011 __ cmp(receiver_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3012 __ j(not_equal, &prepare_next);
3013 __ mov(cached_map, FieldOperand(feedback, counter, times_half_pointer_size,
3014 FixedArray::kHeaderSize + kPointerSize));
3015 __ CompareRoot(cached_map, Heap::kUndefinedValueRootIndex);
3016 __ j(not_equal, &transition_call);
3017 __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
3018 FixedArray::kHeaderSize + 2 * kPointerSize));
3019 __ pop(key);
3020 __ pop(receiver);
3021 __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
3022 __ jmp(feedback);
3023
3024 __ bind(&transition_call);
3025 // Current stack layout:
3026 // - esp[0] -- key
3027 // - esp[4] -- receiver
3028 // - esp[8] -- return address
3029 // - esp[12] -- vector
3030 // - esp[16] -- slot
3031 // - esp[20] -- value
3032 //
3033 // Required stack layout for handler call (see StoreTransitionDescriptor):
3034 // - esp[0] -- return address
3035 // - esp[4] -- vector
3036 // - esp[8] -- slot
3037 // - esp[12] -- value
3038 // - receiver, key, map, handler in registers.
3039 __ mov(feedback, FieldOperand(feedback, counter, times_half_pointer_size,
3040 FixedArray::kHeaderSize + 2 * kPointerSize));
3041 __ lea(feedback, FieldOperand(feedback, Code::kHeaderSize));
3042
3043 __ mov(cached_map, FieldOperand(cached_map, WeakCell::kValueOffset));
3044 // The weak cell may have been cleared.
3045 __ JumpIfSmi(cached_map, &pop_and_miss);
3046 DCHECK(!cached_map.is(StoreTransitionDescriptor::MapRegister()));
3047 __ mov(StoreTransitionDescriptor::MapRegister(), cached_map);
3048
3049 // Call store transition handler using StoreTransitionDescriptor calling
3050 // convention.
3051 __ pop(key);
3052 __ pop(receiver);
3053 // Ensure that the transition handler we are going to call has the same
3054 // number of stack arguments which means that we don't have to adapt them
3055 // before the call.
3056 STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
3057 STATIC_ASSERT(StoreTransitionDescriptor::kStackArgumentsCount == 3);
3058 STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount -
3059 StoreWithVectorDescriptor::kValue ==
3060 StoreTransitionDescriptor::kParameterCount -
3061 StoreTransitionDescriptor::kValue);
3062 STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount -
3063 StoreWithVectorDescriptor::kSlot ==
3064 StoreTransitionDescriptor::kParameterCount -
3065 StoreTransitionDescriptor::kSlot);
3066 STATIC_ASSERT(StoreWithVectorDescriptor::kParameterCount -
3067 StoreWithVectorDescriptor::kVector ==
3068 StoreTransitionDescriptor::kParameterCount -
3069 StoreTransitionDescriptor::kVector);
3070 __ jmp(feedback);
3071
3072 __ bind(&prepare_next);
3073 __ add(counter, Immediate(Smi::FromInt(3)));
3074 __ cmp(counter, FieldOperand(feedback, FixedArray::kLengthOffset));
3075 __ j(less, &next_loop);
3076
3077 // We exhausted our array of map handler pairs.
3078 __ bind(&pop_and_miss);
3079 __ pop(key);
3080 __ pop(receiver);
3081 __ jmp(miss);
3082
3083 __ bind(&load_smi_map);
3084 __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex);
3085 __ jmp(&compare_map);
3086 }
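
// The polymorphic keyed path above walks an array of (map, transition map,
// handler) triples, which is why the loop counter advances by a Smi-tagged 3:
//
//   feedback[i]     : WeakCell holding a receiver map
//   feedback[i + 1] : WeakCell holding a transition map, or undefined
//   feedback[i + 2] : handler Code object
//
// An undefined entry at i + 1 means "no transition" and the handler is tail
// called directly; otherwise the transition map is passed in
// StoreTransitionDescriptor::MapRegister() first.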
3087
3088 void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
3089 Register receiver = StoreWithVectorDescriptor::ReceiverRegister(); // edx
3090 Register key = StoreWithVectorDescriptor::NameRegister(); // ecx
3091 Register value = StoreWithVectorDescriptor::ValueRegister(); // eax
3092 Register vector = StoreWithVectorDescriptor::VectorRegister(); // ebx
3093 Register slot = StoreWithVectorDescriptor::SlotRegister(); // edi
3094 Label miss;
3095
3096 if (StoreWithVectorDescriptor::kPassLastArgsOnStack) {
3097 // Current stack layout:
3098 // - esp[8] -- value
3099 // - esp[4] -- slot
3100 // - esp[0] -- return address
3101 STATIC_ASSERT(StoreDescriptor::kStackArgumentsCount == 2);
3102 STATIC_ASSERT(StoreWithVectorDescriptor::kStackArgumentsCount == 3);
3103 if (in_frame) {
3104 __ RecordComment("[ StoreDescriptor -> StoreWithVectorDescriptor");
3105 // If the vector is not on the stack, then insert the vector beneath
3106 // return address in order to prepare for calling handler with
3107 // StoreWithVector calling convention.
3108 __ push(Operand(esp, 0));
3109 __ mov(Operand(esp, 4), StoreWithVectorDescriptor::VectorRegister());
3110 __ RecordComment("]");
3111 } else {
3112 __ mov(vector, Operand(esp, 1 * kPointerSize));
3113 }
3114 __ mov(slot, Operand(esp, 2 * kPointerSize));
3115 }
3116
3117 Register scratch = value;
3118 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
3119 FixedArray::kHeaderSize));
3120
3121 // Is it a weak cell?
3122 Label try_array;
3123 Label not_array, smi_key, key_okay;
3124 __ CompareRoot(FieldOperand(scratch, 0), Heap::kWeakCellMapRootIndex);
3125 __ j(not_equal, &try_array);
3126 HandleMonomorphicStoreCase(masm, receiver, key, vector, slot, scratch, &miss);
3127
3128 // Is it a fixed array?
3129 __ bind(&try_array);
3130 __ CompareRoot(FieldOperand(scratch, 0), Heap::kFixedArrayMapRootIndex);
3131 __ j(not_equal, &not_array);
3132 HandlePolymorphicKeyedStoreCase(masm, receiver, key, vector, slot, scratch,
3133 &miss);
3134
3135 __ bind(&not_array);
3136 Label try_poly_name;
3137 __ CompareRoot(scratch, Heap::kmegamorphic_symbolRootIndex);
3138 __ j(not_equal, &try_poly_name);
3139
3140 Handle<Code> megamorphic_stub =
3141 KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
3142 __ jmp(megamorphic_stub, RelocInfo::CODE_TARGET);
3143
3144 __ bind(&try_poly_name);
3145 // We might have a name in feedback, and a fixed array in the next slot.
3146 __ cmp(key, scratch);
3147 __ j(not_equal, &miss);
3148 // If the name comparison succeeded, we know we have a fixed array with
3149 // at least one map/handler pair.
3150 __ mov(scratch, FieldOperand(vector, slot, times_half_pointer_size,
3151 FixedArray::kHeaderSize + kPointerSize));
3152 HandlePolymorphicStoreCase(masm, receiver, key, vector, slot, scratch, false,
3153 &miss);
3154
3155 __ bind(&miss);
3156 KeyedStoreIC::GenerateMiss(masm);
3157 }
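
// Dispatch summary for GenerateImpl above, keyed on what the feedback slot
// holds:
//   WeakCell             -> monomorphic store (one map/handler pair).
//   FixedArray           -> polymorphic keyed store (map/transition/handler
//                           triples).
//   megamorphic symbol   -> generic megamorphic stub.
//   a Name equal to key  -> property-name-keyed polymorphic store; the
//                           map/handler array lives in the following slot.
// Anything else falls through to the miss handler.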
3158
3159 void CallICTrampolineStub::Generate(MacroAssembler* masm) {
3160 __ EmitLoadTypeFeedbackVector(ebx);
3161 CallICStub stub(isolate(), state());
3162 __ jmp(stub.GetCode(), RelocInfo::CODE_TARGET);
3163 }
3164
3165 void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
3166 if (masm->isolate()->function_entry_hook() != NULL) {
3167 ProfileEntryHookStub stub(masm->isolate());
3168 masm->CallStub(&stub);
3169 }
3170 }
3171
3172 void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
3173 // Save volatile registers.
3174 const int kNumSavedRegisters = 3;
3175 __ push(eax);
3176 __ push(ecx);
3177 __ push(edx);
3178
3179 // Calculate and push the original stack pointer.
3180 __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
3181 __ push(eax);
3182
3183 // Retrieve our return address and use it to calculate the calling
3184 // function's address.
3185 __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
3186 __ sub(eax, Immediate(Assembler::kCallInstructionLength));
3187 __ push(eax);
3188
3189 // Call the entry hook.
3190 DCHECK(isolate()->function_entry_hook() != NULL);
3191 __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
3192 RelocInfo::RUNTIME_ENTRY);
3193 __ add(esp, Immediate(2 * kPointerSize));
3194
3195 // Restore volatile registers.
3196 __ pop(edx);
3197 __ pop(ecx);
3198 __ pop(eax);
3199
3200 __ ret(0);
3201 }
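
// The two arguments pushed above match the embedder-supplied entry hook
// (FunctionEntryHook in include/v8.h), whose shape is roughly:
//
//   typedef void (*FunctionEntryHook)(uintptr_t function,
//                                     uintptr_t return_addr_location);
//
// The first argument is the instrumented function's address (derived from the
// stub's return address) and the second points back into the caller's stack,
// where the relevant return address can be found.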
3202
3203 template <class T>
3204 static void CreateArrayDispatch(MacroAssembler* masm,
3205 AllocationSiteOverrideMode mode) {
3206 if (mode == DISABLE_ALLOCATION_SITES) {
3207 T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
3208 __ TailCallStub(&stub);
3209 } else if (mode == DONT_OVERRIDE) {
3210 int last_index =
3211 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
3212 for (int i = 0; i <= last_index; ++i) {
3213 Label next;
3214 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
3215 __ cmp(edx, kind);
3216 __ j(not_equal, &next);
3217 T stub(masm->isolate(), kind);
3218 __ TailCallStub(&stub);
3219 __ bind(&next);
3220 }
3221
3222 // If we reached this point there is a problem.
3223 __ Abort(kUnexpectedElementsKindInArrayConstructor);
3224 } else {
3225 UNREACHABLE();
3226 }
3227 }
3228
3229 static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
3230 AllocationSiteOverrideMode mode) {
3231 // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
3232 // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
3233 // eax - number of arguments
3234 // edi - constructor?
3235 // esp[0] - return address
3236 // esp[4] - last argument
3237 Label normal_sequence;
3238 if (mode == DONT_OVERRIDE) {
3239 STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
3240 STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
3241 STATIC_ASSERT(FAST_ELEMENTS == 2);
3242 STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
3243 STATIC_ASSERT(FAST_DOUBLE_ELEMENTS == 4);
3244 STATIC_ASSERT(FAST_HOLEY_DOUBLE_ELEMENTS == 5);
3245
3246 // is the low bit set? If so, we are holey and that is good.
3247 __ test_b(edx, Immediate(1));
3248 __ j(not_zero, &normal_sequence);
3249 }
3250
3251 // look at the first argument
3252 __ mov(ecx, Operand(esp, kPointerSize));
3253 __ test(ecx, ecx);
3254 __ j(zero, &normal_sequence);
3255
3256 if (mode == DISABLE_ALLOCATION_SITES) {
3257 ElementsKind initial = GetInitialFastElementsKind();
3258 ElementsKind holey_initial = GetHoleyElementsKind(initial);
3259
3260 ArraySingleArgumentConstructorStub stub_holey(
3261 masm->isolate(), holey_initial, DISABLE_ALLOCATION_SITES);
3262 __ TailCallStub(&stub_holey);
3263
3264 __ bind(&normal_sequence);
3265 ArraySingleArgumentConstructorStub stub(masm->isolate(), initial,
3266 DISABLE_ALLOCATION_SITES);
3267 __ TailCallStub(&stub);
3268 } else if (mode == DONT_OVERRIDE) {
3269 // We are going to create a holey array, but our kind is non-holey.
3270 // Fix kind and retry.
3271 __ inc(edx);
3272
3273 if (FLAG_debug_code) {
3274 Handle<Map> allocation_site_map =
3275 masm->isolate()->factory()->allocation_site_map();
3276 __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
3277 __ Assert(equal, kExpectedAllocationSite);
3278 }
3279
3280 // Save the resulting elements kind in type info. We can't just store edx
3281 // in the AllocationSite::transition_info field because the elements kind
3282 // is restricted to a portion of the field; the upper bits must be left alone.
3283 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
3284 __ add(FieldOperand(ebx, AllocationSite::kTransitionInfoOffset),
3285 Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));
3286
3287 __ bind(&normal_sequence);
3288 int last_index =
3289 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
3290 for (int i = 0; i <= last_index; ++i) {
3291 Label next;
3292 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
3293 __ cmp(edx, kind);
3294 __ j(not_equal, &next);
3295 ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
3296 __ TailCallStub(&stub);
3297 __ bind(&next);
3298 }
3299
3300 // If we reached this point there is a problem.
3301 __ Abort(kUnexpectedElementsKindInArrayConstructor);
3302 } else {
3303 UNREACHABLE();
3304 }
3305 }
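
// Note on the packed-to-holey transition above: the STATIC_ASSERTs guarantee
// that fast elements kinds alternate packed/holey, so incrementing edx (whose
// low bit is known to be clear at that point) selects the holey variant, and
// adding the Smi-tagged kFastElementsKindPackedToHoley records the same
// transition in the AllocationSite's transition info.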
3306
3307 template <class T>
3308 static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
3309 int to_index =
3310 GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
3311 for (int i = 0; i <= to_index; ++i) {
3312 ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
3313 T stub(isolate, kind);
3314 stub.GetCode();
3315 if (AllocationSite::GetMode(kind) != DONT_TRACK_ALLOCATION_SITE) {
3316 T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
3317 stub1.GetCode();
3318 }
3319 }
3320 }
3321
3322 void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
3323 ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
3324 isolate);
3325 ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
3326 isolate);
3327 ArrayNArgumentsConstructorStub stub(isolate);
3328 stub.GetCode();
3329
3330 ElementsKind kinds[2] = {FAST_ELEMENTS, FAST_HOLEY_ELEMENTS};
3331 for (int i = 0; i < 2; i++) {
3332 // For internal arrays we only need a few things
3333 InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
3334 stubh1.GetCode();
3335 InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
3336 stubh2.GetCode();
3337 }
3338 }
3339
3340 void ArrayConstructorStub::GenerateDispatchToArrayStub(
3341 MacroAssembler* masm, AllocationSiteOverrideMode mode) {
3342 Label not_zero_case, not_one_case;
3343 __ test(eax, eax);
3344 __ j(not_zero, &not_zero_case);
3345 CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);
3346
3347 __ bind(&not_zero_case);
3348 __ cmp(eax, 1);
3349 __ j(greater, &not_one_case);
3350 CreateArrayDispatchOneArgument(masm, mode);
3351
3352 __ bind(&not_one_case);
3353 ArrayNArgumentsConstructorStub stub(masm->isolate());
3354 __ TailCallStub(&stub);
3355 }
3356
3357 void ArrayConstructorStub::Generate(MacroAssembler* masm) {
3358 // ----------- S t a t e -------------
3359 // -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
3360 // -- ebx : AllocationSite or undefined
3361 // -- edi : constructor
3362 // -- edx : Original constructor
3363 // -- esp[0] : return address
3364 // -- esp[4] : last argument
3365 // -----------------------------------
3366 if (FLAG_debug_code) {
3367 // The array construct code is only set for the global and natives
3368 // builtin Array functions which always have maps.
3369
3370 // Initial map for the builtin Array function should be a map.
3371 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
3372 // This test catches both a NULL pointer and a Smi.
3373 __ test(ecx, Immediate(kSmiTagMask));
3374 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
3375 __ CmpObjectType(ecx, MAP_TYPE, ecx);
3376 __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
3377
3378 // We should either have undefined in ebx or a valid AllocationSite
3379 __ AssertUndefinedOrAllocationSite(ebx);
3380 }
3381
3382 Label subclassing;
3383
3384 // Enter the context of the Array function.
3385 __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
3386
3387 __ cmp(edx, edi);
3388 __ j(not_equal, &subclassing);
3389
3390 Label no_info;
3391 // If the feedback vector is the undefined value call an array constructor
3392 // that doesn't use AllocationSites.
3393 __ cmp(ebx, isolate()->factory()->undefined_value());
3394 __ j(equal, &no_info);
3395
3396 // Only look at the elements kind bits of the transition info.
3397 __ mov(edx, FieldOperand(ebx, AllocationSite::kTransitionInfoOffset));
3398 __ SmiUntag(edx);
3399 STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
3400 __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
3401 GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);
3402
3403 __ bind(&no_info);
3404 GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);
3405
3406 // Subclassing.
3407 __ bind(&subclassing);
3408 __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
3409 __ add(eax, Immediate(3));
3410 __ PopReturnAddressTo(ecx);
3411 __ Push(edx);
3412 __ Push(ebx);
3413 __ PushReturnAddressFrom(ecx);
3414 __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
3415 }
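
// For illustration, the dispatch above corresponds to the following calls
// (hypothetical examples, assuming no subclassing and default site feedback):
//
//   new Array()         // argc == 0 -> ArrayNoArgumentConstructorStub
//   new Array(10)       // argc == 1 -> CreateArrayDispatchOneArgument;
//                       //              a nonzero length yields a holey array
//   new Array(1, 2, 3)  // argc >  1 -> ArrayNArgumentsConstructorStub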
3416
3417 void InternalArrayConstructorStub::GenerateCase(MacroAssembler* masm,
3418 ElementsKind kind) {
3419 Label not_zero_case, not_one_case;
3420 Label normal_sequence;
3421
3422 __ test(eax, eax);
3423 __ j(not_zero, &not_zero_case);
3424 InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
3425 __ TailCallStub(&stub0);
3426
3427 __ bind(&not_zero_case);
3428 __ cmp(eax, 1);
3429 __ j(greater, &not_one_case);
3430
3431 if (IsFastPackedElementsKind(kind)) {
3432 // We might need to create a holey array
3433 // look at the first argument
3434 __ mov(ecx, Operand(esp, kPointerSize));
3435 __ test(ecx, ecx);
3436 __ j(zero, &normal_sequence);
3437
3438 InternalArraySingleArgumentConstructorStub stub1_holey(
3439 isolate(), GetHoleyElementsKind(kind));
3440 __ TailCallStub(&stub1_holey);
3441 }
3442
3443 __ bind(&normal_sequence);
3444 InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
3445 __ TailCallStub(&stub1);
3446
3447 __ bind(&not_one_case);
3448 ArrayNArgumentsConstructorStub stubN(isolate());
3449 __ TailCallStub(&stubN);
3450 }
3451
3452 void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
3453 // ----------- S t a t e -------------
3454 // -- eax : argc
3455 // -- edi : constructor
3456 // -- esp[0] : return address
3457 // -- esp[4] : last argument
3458 // -----------------------------------
3459
3460 if (FLAG_debug_code) {
3461 // The array construct code is only set for the global and natives
3462 // builtin Array functions which always have maps.
3463
3464 // Initial map for the builtin Array function should be a map.
3465 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
3466 // This test catches both a NULL pointer and a Smi.
3467 __ test(ecx, Immediate(kSmiTagMask));
3468 __ Assert(not_zero, kUnexpectedInitialMapForArrayFunction);
3469 __ CmpObjectType(ecx, MAP_TYPE, ecx);
3470 __ Assert(equal, kUnexpectedInitialMapForArrayFunction);
3471 }
3472
3473 // Figure out the right elements kind
3474 __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
3475
3476 // Load the map's "bit field 2" into ecx. We only need the first byte,
3477 // but the following masking takes care of that anyway.
3478 __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
3479 // Retrieve elements_kind from bit field 2.
3480 __ DecodeField<Map::ElementsKindBits>(ecx);
3481
3482 if (FLAG_debug_code) {
3483 Label done;
3484 __ cmp(ecx, Immediate(FAST_ELEMENTS));
3485 __ j(equal, &done);
3486 __ cmp(ecx, Immediate(FAST_HOLEY_ELEMENTS));
3487 __ Assert(equal, kInvalidElementsKindForInternalArrayOrInternalPackedArray);
3488 __ bind(&done);
3489 }
3490
3491 Label fast_elements_case;
3492 __ cmp(ecx, Immediate(FAST_ELEMENTS));
3493 __ j(equal, &fast_elements_case);
3494 GenerateCase(masm, FAST_HOLEY_ELEMENTS);
3495
3496 __ bind(&fast_elements_case);
3497 GenerateCase(masm, FAST_ELEMENTS);
3498 }
3499
3500 void FastNewObjectStub::Generate(MacroAssembler* masm) {
3501 // ----------- S t a t e -------------
3502 // -- edi : target
3503 // -- edx : new target
3504 // -- esi : context
3505 // -- esp[0] : return address
3506 // -----------------------------------
3507 __ AssertFunction(edi);
3508 __ AssertReceiver(edx);
3509
3510 // Verify that the new target is a JSFunction.
3511 Label new_object;
3512 __ CmpObjectType(edx, JS_FUNCTION_TYPE, ebx);
3513 __ j(not_equal, &new_object);
3514
3515 // Load the initial map and verify that it's in fact a map.
3516 __ mov(ecx, FieldOperand(edx, JSFunction::kPrototypeOrInitialMapOffset));
3517 __ JumpIfSmi(ecx, &new_object);
3518 __ CmpObjectType(ecx, MAP_TYPE, ebx);
3519 __ j(not_equal, &new_object);
3520
3521 // Fall back to runtime if the target differs from the new target's
3522 // initial map constructor.
3523 __ cmp(edi, FieldOperand(ecx, Map::kConstructorOrBackPointerOffset));
3524 __ j(not_equal, &new_object);
3525
3526 // Allocate the JSObject on the heap.
3527 Label allocate, done_allocate;
3528 __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
3529 __ lea(ebx, Operand(ebx, times_pointer_size, 0));
3530 __ Allocate(ebx, eax, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
3531 __ bind(&done_allocate);
3532
3533 // Initialize the JSObject fields.
3534 __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
3535 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
3536 masm->isolate()->factory()->empty_fixed_array());
3537 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
3538 masm->isolate()->factory()->empty_fixed_array());
3539 STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
3540 __ lea(ebx, FieldOperand(eax, JSObject::kHeaderSize));
3541
3542 // ----------- S t a t e -------------
3543 // -- eax : result (tagged)
3544 // -- ebx : result fields (untagged)
3545 // -- edi : result end (untagged)
3546 // -- ecx : initial map
3547 // -- esi : context
3548 // -- esp[0] : return address
3549 // -----------------------------------
3550
3551 // Perform in-object slack tracking if requested.
3552 Label slack_tracking;
3553 STATIC_ASSERT(Map::kNoSlackTracking == 0);
3554 __ test(FieldOperand(ecx, Map::kBitField3Offset),
3555 Immediate(Map::ConstructionCounter::kMask));
3556 __ j(not_zero, &slack_tracking, Label::kNear);
3557 {
3558 // Initialize all in-object fields with undefined.
3559 __ LoadRoot(edx, Heap::kUndefinedValueRootIndex);
3560 __ InitializeFieldsWithFiller(ebx, edi, edx);
3561 __ Ret();
3562 }
3563 __ bind(&slack_tracking);
3564 {
3565 // Decrease generous allocation count.
3566 STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
3567 __ sub(FieldOperand(ecx, Map::kBitField3Offset),
3568 Immediate(1 << Map::ConstructionCounter::kShift));
3569
3570 // Initialize the in-object fields with undefined.
3571 __ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset));
3572 __ neg(edx);
3573 __ lea(edx, Operand(edi, edx, times_pointer_size, 0));
3574 __ LoadRoot(edi, Heap::kUndefinedValueRootIndex);
3575 __ InitializeFieldsWithFiller(ebx, edx, edi);
3576
3577 // Initialize the remaining (reserved) fields with one pointer filler map.
3578 __ movzx_b(edx, FieldOperand(ecx, Map::kUnusedPropertyFieldsOffset));
3579 __ lea(edx, Operand(ebx, edx, times_pointer_size, 0));
3580 __ LoadRoot(edi, Heap::kOnePointerFillerMapRootIndex);
3581 __ InitializeFieldsWithFiller(ebx, edx, edi);
3582
3583 // Check if we can finalize the instance size.
3584 Label finalize;
3585 STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
3586 __ test(FieldOperand(ecx, Map::kBitField3Offset),
3587 Immediate(Map::ConstructionCounter::kMask));
3588 __ j(zero, &finalize, Label::kNear);
3589 __ Ret();
3590
3591 // Finalize the instance size.
3592 __ bind(&finalize);
3593 {
3594 FrameScope scope(masm, StackFrame::INTERNAL);
3595 __ Push(eax);
3596 __ Push(ecx);
3597 __ CallRuntime(Runtime::kFinalizeInstanceSize);
3598 __ Pop(eax);
3599 }
3600 __ Ret();
3601 }
3602
3603 // Fall back to %AllocateInNewSpace.
3604 __ bind(&allocate);
3605 {
3606 FrameScope scope(masm, StackFrame::INTERNAL);
3607 __ SmiTag(ebx);
3608 __ Push(ecx);
3609 __ Push(ebx);
3610 __ CallRuntime(Runtime::kAllocateInNewSpace);
3611 __ Pop(ecx);
3612 }
3613 __ movzx_b(ebx, FieldOperand(ecx, Map::kInstanceSizeOffset));
3614 __ lea(edi, Operand(eax, ebx, times_pointer_size, 0));
3615 STATIC_ASSERT(kHeapObjectTag == 1);
3616 __ dec(edi);
3617 __ jmp(&done_allocate);
3618
3619 // Fall back to %NewObject.
3620 __ bind(&new_object);
3621 __ PopReturnAddressTo(ecx);
3622 __ Push(edi);
3623 __ Push(edx);
3624 __ PushReturnAddressFrom(ecx);
3625 __ TailCallRuntime(Runtime::kNewObject);
3626 }
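
// In-object slack tracking, as used above: while the map's construction
// counter is nonzero, the unused in-object slots are filled with the
// one-pointer filler map rather than undefined, and the counter is
// decremented on every allocation. Once it reaches the end value,
// Runtime::kFinalizeInstanceSize shrinks the instance size so that future
// allocations no longer reserve the slack.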
3627
3628 void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
3629 // ----------- S t a t e -------------
3630 // -- edi : function
3631 // -- esi : context
3632 // -- ebp : frame pointer
3633 // -- esp[0] : return address
3634 // -----------------------------------
3635 __ AssertFunction(edi);
3636
3637 // Make edx point to the JavaScript frame.
3638 __ mov(edx, ebp);
3639 if (skip_stub_frame()) {
3640 // For Ignition we need to skip the handler/stub frame to reach the
3641 // JavaScript frame for the function.
3642 __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
3643 }
3644 if (FLAG_debug_code) {
3645 Label ok;
3646 __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
3647 __ j(equal, &ok);
3648 __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
3649 __ bind(&ok);
3650 }
3651
3652 // Check if we have rest parameters (only possible if we have an
3653 // arguments adaptor frame below the function frame).
3654 Label no_rest_parameters;
3655 __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
3656 __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
3657 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3658 __ j(not_equal, &no_rest_parameters, Label::kNear);
3659
3660 // Check if the arguments adaptor frame contains more arguments than
3661 // specified by the function's internal formal parameter count.
3662 Label rest_parameters;
3663 __ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
3664 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3665 __ sub(eax,
3666 FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
3667 __ j(greater, &rest_parameters);
3668
3669 // Return an empty rest parameter array.
3670 __ bind(&no_rest_parameters);
3671 {
3672 // ----------- S t a t e -------------
3673 // -- esi : context
3674 // -- esp[0] : return address
3675 // -----------------------------------
3676
3677 // Allocate an empty rest parameter array.
3678 Label allocate, done_allocate;
3679 __ Allocate(JSArray::kSize, eax, edx, ecx, &allocate, NO_ALLOCATION_FLAGS);
3680 __ bind(&done_allocate);
3681
3682 // Setup the rest parameter array in eax.
3683 __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
3684 __ mov(FieldOperand(eax, JSArray::kMapOffset), ecx);
3685 __ mov(ecx, isolate()->factory()->empty_fixed_array());
3686 __ mov(FieldOperand(eax, JSArray::kPropertiesOffset), ecx);
3687 __ mov(FieldOperand(eax, JSArray::kElementsOffset), ecx);
3688 __ mov(FieldOperand(eax, JSArray::kLengthOffset), Immediate(Smi::kZero));
3689 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
3690 __ Ret();
3691
3692 // Fall back to %AllocateInNewSpace.
3693 __ bind(&allocate);
3694 {
3695 FrameScope scope(masm, StackFrame::INTERNAL);
3696 __ Push(Smi::FromInt(JSArray::kSize));
3697 __ CallRuntime(Runtime::kAllocateInNewSpace);
3698 }
3699 __ jmp(&done_allocate);
3700 }
3701
3702 __ bind(&rest_parameters);
3703 {
3704 // Compute the pointer to the first rest parameter (skipping the receiver).
3705 __ lea(ebx,
3706 Operand(ebx, eax, times_half_pointer_size,
3707 StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
3708
3709 // ----------- S t a t e -------------
3710 // -- esi : context
3711 // -- eax : number of rest parameters (tagged)
3712 // -- ebx : pointer to first rest parameters
3713 // -- esp[0] : return address
3714 // -----------------------------------
3715
3716 // Allocate space for the rest parameter array plus the backing store.
3717 Label allocate, done_allocate;
3718 __ lea(ecx, Operand(eax, times_half_pointer_size,
3719 JSArray::kSize + FixedArray::kHeaderSize));
3720 __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
3721 __ bind(&done_allocate);
3722
3723 // Setup the elements array in edx.
3724 __ mov(FieldOperand(edx, FixedArray::kMapOffset),
3725 isolate()->factory()->fixed_array_map());
3726 __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
3727 {
3728 Label loop, done_loop;
3729 __ Move(ecx, Smi::kZero);
3730 __ bind(&loop);
3731 __ cmp(ecx, eax);
3732 __ j(equal, &done_loop, Label::kNear);
3733 __ mov(edi, Operand(ebx, 0 * kPointerSize));
3734 __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
3735 FixedArray::kHeaderSize),
3736 edi);
3737 __ sub(ebx, Immediate(1 * kPointerSize));
3738 __ add(ecx, Immediate(Smi::FromInt(1)));
3739 __ jmp(&loop);
3740 __ bind(&done_loop);
3741 }
3742
3743 // Setup the rest parameter array in edi.
3744 __ lea(edi,
3745 Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
3746 __ LoadGlobalFunction(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, ecx);
3747 __ mov(FieldOperand(edi, JSArray::kMapOffset), ecx);
3748 __ mov(FieldOperand(edi, JSArray::kPropertiesOffset),
3749 isolate()->factory()->empty_fixed_array());
3750 __ mov(FieldOperand(edi, JSArray::kElementsOffset), edx);
3751 __ mov(FieldOperand(edi, JSArray::kLengthOffset), eax);
3752 STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
3753 __ mov(eax, edi);
3754 __ Ret();
3755
3756 // Fall back to %AllocateInNewSpace (if not too big).
3757 Label too_big_for_new_space;
3758 __ bind(&allocate);
3759 __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize));
3760 __ j(greater, &too_big_for_new_space);
3761 {
3762 FrameScope scope(masm, StackFrame::INTERNAL);
3763 __ SmiTag(ecx);
3764 __ Push(eax);
3765 __ Push(ebx);
3766 __ Push(ecx);
3767 __ CallRuntime(Runtime::kAllocateInNewSpace);
3768 __ mov(edx, eax);
3769 __ Pop(ebx);
3770 __ Pop(eax);
3771 }
3772 __ jmp(&done_allocate);
3773
3774 // Fall back to %NewRestParameter.
3775 __ bind(&too_big_for_new_space);
3776 __ PopReturnAddressTo(ecx);
3777 // We reload the function from the caller frame due to register pressure
3778 // within this stub. This is the slow path, hence reloading is preferable.
3779 if (skip_stub_frame()) {
3780 // For Ignition we need to skip the handler/stub frame to reach the
3781 // JavaScript frame for the function.
3782 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
3783 __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
3784 } else {
3785 __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
3786 }
3787 __ PushReturnAddressFrom(ecx);
3788 __ TailCallRuntime(Runtime::kNewRestParameter);
3789 }
3790 }
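
// The rest parameter count computed above is the adaptor frame's argument
// count minus the function's formal parameter count. A hypothetical example:
//
//   function f(a, ...rest) { return rest; }
//   f(1, 2, 3);  // 3 actuals - 1 formal => rest == [2, 3]
//
// Without an adaptor frame (or without excess arguments) the stub returns an
// empty array.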
3791
3792 void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
3793 // ----------- S t a t e -------------
3794 // -- edi : function
3795 // -- esi : context
3796 // -- ebp : frame pointer
3797 // -- esp[0] : return address
3798 // -----------------------------------
3799 __ AssertFunction(edi);
3800
3801 // Make ecx point to the JavaScript frame.
3802 __ mov(ecx, ebp);
3803 if (skip_stub_frame()) {
3804 // For Ignition we need to skip the handler/stub frame to reach the
3805 // JavaScript frame for the function.
3806 __ mov(ecx, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
3807 }
3808 if (FLAG_debug_code) {
3809 Label ok;
3810 __ cmp(edi, Operand(ecx, StandardFrameConstants::kFunctionOffset));
3811 __ j(equal, &ok);
3812 __ Abort(kInvalidFrameForFastNewSloppyArgumentsStub);
3813 __ bind(&ok);
3814 }
3815
3816 // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
3817 __ mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
3818 __ mov(ebx,
3819 FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
3820 __ lea(edx, Operand(ecx, ebx, times_half_pointer_size,
3821 StandardFrameConstants::kCallerSPOffset));
3822
3823 // ebx : number of parameters (tagged)
3824 // edx : parameters pointer
3825 // edi : function
3826 // ecx : JavaScript frame pointer.
3827 // esp[0] : return address
3828
3829 // Check if the calling frame is an arguments adaptor frame.
3830 Label adaptor_frame, try_allocate, runtime;
3831 __ mov(eax, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
3832 __ mov(eax, Operand(eax, CommonFrameConstants::kContextOrFrameTypeOffset));
3833 __ cmp(eax, Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
3834 __ j(equal, &adaptor_frame, Label::kNear);
3835
3836 // No adaptor, parameter count = argument count.
3837 __ mov(ecx, ebx);
3838 __ push(ebx);
3839 __ jmp(&try_allocate, Label::kNear);
3840
3841 // We have an adaptor frame. Patch the parameters pointer.
3842 __ bind(&adaptor_frame);
3843 __ push(ebx);
3844 __ mov(edx, Operand(ecx, StandardFrameConstants::kCallerFPOffset));
3845 __ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
3846 __ lea(edx,
3847 Operand(edx, ecx, times_2, StandardFrameConstants::kCallerSPOffset));
3848
3849 // ebx = parameter count (tagged)
3850 // ecx = argument count (smi-tagged)
3851 // Compute the mapped parameter count = min(ebx, ecx) in ebx.
3852 __ cmp(ebx, ecx);
3853 __ j(less_equal, &try_allocate, Label::kNear);
3854 __ mov(ebx, ecx);
3855
3856 // Save mapped parameter count and function.
3857 __ bind(&try_allocate);
3858 __ push(edi);
3859 __ push(ebx);
3860
3861 // Compute the sizes of backing store, parameter map, and arguments object.
3862 // 1. Parameter map, has 2 extra words containing context and backing store.
3863 const int kParameterMapHeaderSize =
3864 FixedArray::kHeaderSize + 2 * kPointerSize;
3865 Label no_parameter_map;
3866 __ test(ebx, ebx);
3867 __ j(zero, &no_parameter_map, Label::kNear);
3868 __ lea(ebx, Operand(ebx, times_2, kParameterMapHeaderSize));
3869 __ bind(&no_parameter_map);
3870
3871 // 2. Backing store.
3872 __ lea(ebx, Operand(ebx, ecx, times_2, FixedArray::kHeaderSize));
3873
3874 // 3. Arguments object.
3875 __ add(ebx, Immediate(JSSloppyArgumentsObject::kSize));
3876
3877 // Do the allocation of all three objects in one go.
3878 __ Allocate(ebx, eax, edi, no_reg, &runtime, NO_ALLOCATION_FLAGS);
3879
3880 // eax = address of new object(s) (tagged)
3881 // ecx = argument count (smi-tagged)
3882 // esp[0] = mapped parameter count (tagged)
3883 // esp[4] = function
3884 // esp[8] = parameter count (tagged)
3885 // Get the arguments map from the current native context into edi.
3886 Label has_mapped_parameters, instantiate;
3887 __ mov(edi, NativeContextOperand());
3888 __ mov(ebx, Operand(esp, 0 * kPointerSize));
3889 __ test(ebx, ebx);
3890 __ j(not_zero, &has_mapped_parameters, Label::kNear);
3891 __ mov(
3892 edi,
3893 Operand(edi, Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX)));
3894 __ jmp(&instantiate, Label::kNear);
3895
3896 __ bind(&has_mapped_parameters);
3897 __ mov(edi, Operand(edi, Context::SlotOffset(
3898 Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX)));
3899 __ bind(&instantiate);
3900
3901 // eax = address of new object (tagged)
3902 // ebx = mapped parameter count (tagged)
3903 // ecx = argument count (smi-tagged)
3904 // edi = address of arguments map (tagged)
3905 // esp[0] = mapped parameter count (tagged)
3906 // esp[4] = function
3907 // esp[8] = parameter count (tagged)
3908 // Copy the JS object part.
3909 __ mov(FieldOperand(eax, JSObject::kMapOffset), edi);
3910 __ mov(FieldOperand(eax, JSObject::kPropertiesOffset),
3911 masm->isolate()->factory()->empty_fixed_array());
3912 __ mov(FieldOperand(eax, JSObject::kElementsOffset),
3913 masm->isolate()->factory()->empty_fixed_array());
3914
3915 // Set up the callee in-object property.
3916 STATIC_ASSERT(JSSloppyArgumentsObject::kCalleeIndex == 1);
3917 __ mov(edi, Operand(esp, 1 * kPointerSize));
3918 __ AssertNotSmi(edi);
3919 __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kCalleeOffset), edi);
3920
3921 // Use the length (smi tagged) and set that as an in-object property too.
3922 __ AssertSmi(ecx);
3923 __ mov(FieldOperand(eax, JSSloppyArgumentsObject::kLengthOffset), ecx);
3924
3925 // Set up the elements pointer in the allocated arguments object.
3926 // If we allocated a parameter map, edi will point there, otherwise to the
3927 // backing store.
3928 __ lea(edi, Operand(eax, JSSloppyArgumentsObject::kSize));
3929 __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
3930
3931 // eax = address of new object (tagged)
3932 // ebx = mapped parameter count (tagged)
3933 // ecx = argument count (tagged)
3934 // edx = address of receiver argument
3935 // edi = address of parameter map or backing store (tagged)
3936 // esp[0] = mapped parameter count (tagged)
3937 // esp[4] = function
3938 // esp[8] = parameter count (tagged)
3939 // Free two registers.
3940 __ push(edx);
3941 __ push(eax);
3942
3943 // Initialize parameter map. If there are no mapped arguments, we're done.
3944 Label skip_parameter_map;
3945 __ test(ebx, ebx);
3946 __ j(zero, &skip_parameter_map);
3947
3948 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
3949 Immediate(isolate()->factory()->sloppy_arguments_elements_map()));
3950 __ lea(eax, Operand(ebx, reinterpret_cast<intptr_t>(Smi::FromInt(2))));
3951 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), eax);
3952 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 0 * kPointerSize), esi);
3953 __ lea(eax, Operand(edi, ebx, times_2, kParameterMapHeaderSize));
3954 __ mov(FieldOperand(edi, FixedArray::kHeaderSize + 1 * kPointerSize), eax);
3955
3956 // Copy the parameter slots and the holes in the arguments.
3957 // We need to fill in mapped_parameter_count slots. They index the context,
3958 // where parameters are stored in reverse order, at
3959 // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
3960 // The mapped parameter thus need to get indices
3961 // MIN_CONTEXT_SLOTS+parameter_count-1 ..
3962 // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
3963 // We loop from right to left.
3964 Label parameters_loop, parameters_test;
3965 __ push(ecx);
3966 __ mov(eax, Operand(esp, 3 * kPointerSize));
3967 __ mov(ebx, Immediate(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
3968 __ add(ebx, Operand(esp, 5 * kPointerSize));
3969 __ sub(ebx, eax);
3970 __ mov(ecx, isolate()->factory()->the_hole_value());
3971 __ mov(edx, edi);
3972 __ lea(edi, Operand(edi, eax, times_2, kParameterMapHeaderSize));
3973 // eax = loop variable (tagged)
3974 // ebx = mapping index (tagged)
3975 // ecx = the hole value
3976 // edx = address of parameter map (tagged)
3977 // edi = address of backing store (tagged)
3978 // esp[0] = argument count (tagged)
3979 // esp[4] = address of new object (tagged)
3980 // esp[8] = address of receiver argument
3981 // esp[12] = mapped parameter count (tagged)
3982 // esp[16] = function
3983 // esp[20] = parameter count (tagged)
3984 __ jmp(&parameters_test, Label::kNear);
3985
3986 __ bind(&parameters_loop);
3987 __ sub(eax, Immediate(Smi::FromInt(1)));
3988 __ mov(FieldOperand(edx, eax, times_2, kParameterMapHeaderSize), ebx);
3989 __ mov(FieldOperand(edi, eax, times_2, FixedArray::kHeaderSize), ecx);
3990 __ add(ebx, Immediate(Smi::FromInt(1)));
3991 __ bind(&parameters_test);
3992 __ test(eax, eax);
3993 __ j(not_zero, &parameters_loop, Label::kNear);
3994 __ pop(ecx);
3995
3996 __ bind(&skip_parameter_map);
3997
3998 // ecx = argument count (tagged)
3999 // edi = address of backing store (tagged)
4000 // esp[0] = address of new object (tagged)
4001 // esp[4] = address of receiver argument
4002 // esp[8] = mapped parameter count (tagged)
4003 // esp[12] = function
4004 // esp[16] = parameter count (tagged)
4005 // Copy arguments header and remaining slots (if there are any).
4006 __ mov(FieldOperand(edi, FixedArray::kMapOffset),
4007 Immediate(isolate()->factory()->fixed_array_map()));
4008 __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
4009
4010 Label arguments_loop, arguments_test;
4011 __ mov(ebx, Operand(esp, 2 * kPointerSize));
4012 __ mov(edx, Operand(esp, 1 * kPointerSize));
4013 __ sub(edx, ebx); // Is there a smarter way to do negative scaling?
4014 __ sub(edx, ebx);
4015 __ jmp(&arguments_test, Label::kNear);
4016
4017 __ bind(&arguments_loop);
4018 __ sub(edx, Immediate(kPointerSize));
4019 __ mov(eax, Operand(edx, 0));
4020 __ mov(FieldOperand(edi, ebx, times_2, FixedArray::kHeaderSize), eax);
4021 __ add(ebx, Immediate(Smi::FromInt(1)));
4022
4023 __ bind(&arguments_test);
4024 __ cmp(ebx, ecx);
4025 __ j(less, &arguments_loop, Label::kNear);
4026
4027 // Restore.
4028 __ pop(eax); // Address of arguments object.
4029 __ Drop(4);
4030
4031 // Return.
4032 __ ret(0);
4033
4034 // Do the runtime call to allocate the arguments object.
4035 __ bind(&runtime);
4036 __ pop(eax); // Remove saved mapped parameter count.
4037 __ pop(edi); // Pop saved function.
4038 __ pop(eax); // Remove saved parameter count.
4039 __ pop(eax); // Pop return address.
4040 __ push(edi); // Push function.
4041 __ push(edx); // Push parameters pointer.
4042 __ push(ecx); // Push parameter count.
4043 __ push(eax); // Push return address.
4044 __ TailCallRuntime(Runtime::kNewSloppyArguments);
4045 }
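
// The parameter map built above is what makes sloppy-mode arguments alias
// their formal parameters: mapped entries hold context slot indices, while
// holes fall through to the backing store. A hypothetical example:
//
//   function g(x) { arguments[0] = 42; return x; }
//   g(1);  // returns 42; arguments[0] and x share a context slot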
4046
4047 void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
4048 // ----------- S t a t e -------------
4049 // -- edi : function
4050 // -- esi : context
4051 // -- ebp : frame pointer
4052 // -- esp[0] : return address
4053 // -----------------------------------
4054 __ AssertFunction(edi);
4055
4056 // Make edx point to the JavaScript frame.
4057 __ mov(edx, ebp);
4058 if (skip_stub_frame()) {
4059 // For Ignition we need to skip the handler/stub frame to reach the
4060 // JavaScript frame for the function.
4061 __ mov(edx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
4062 }
4063 if (FLAG_debug_code) {
4064 Label ok;
4065 __ cmp(edi, Operand(edx, StandardFrameConstants::kFunctionOffset));
4066 __ j(equal, &ok);
4067 __ Abort(kInvalidFrameForFastNewStrictArgumentsStub);
4068 __ bind(&ok);
4069 }
4070
4071 // Check if we have an arguments adaptor frame below the function frame.
4072 Label arguments_adaptor, arguments_done;
4073 __ mov(ebx, Operand(edx, StandardFrameConstants::kCallerFPOffset));
4074 __ cmp(Operand(ebx, CommonFrameConstants::kContextOrFrameTypeOffset),
4075 Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
4076 __ j(equal, &arguments_adaptor, Label::kNear);
4077 {
4078 __ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
4079 __ mov(eax,
4080 FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset));
4081 __ lea(ebx,
4082 Operand(edx, eax, times_half_pointer_size,
4083 StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
4084 }
4085 __ jmp(&arguments_done, Label::kNear);
4086 __ bind(&arguments_adaptor);
4087 {
4088 __ mov(eax, Operand(ebx, ArgumentsAdaptorFrameConstants::kLengthOffset));
4089 __ lea(ebx,
4090 Operand(ebx, eax, times_half_pointer_size,
4091 StandardFrameConstants::kCallerSPOffset - 1 * kPointerSize));
4092 }
4093 __ bind(&arguments_done);
4094
4095 // ----------- S t a t e -------------
4096 // -- eax : number of arguments (tagged)
4097 // -- ebx : pointer to the first argument
4098 // -- esi : context
4099 // -- esp[0] : return address
4100 // -----------------------------------
4101
4102 // Allocate space for the strict arguments object plus the backing store.
4103 Label allocate, done_allocate;
4104 __ lea(ecx,
4105 Operand(eax, times_half_pointer_size,
4106 JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
4107 __ Allocate(ecx, edx, edi, no_reg, &allocate, NO_ALLOCATION_FLAGS);
4108 __ bind(&done_allocate);
4109
4110 // Setup the elements array in edx.
4111 __ mov(FieldOperand(edx, FixedArray::kMapOffset),
4112 isolate()->factory()->fixed_array_map());
4113 __ mov(FieldOperand(edx, FixedArray::kLengthOffset), eax);
4114 {
4115 Label loop, done_loop;
4116 __ Move(ecx, Smi::kZero);
4117 __ bind(&loop);
4118 __ cmp(ecx, eax);
4119 __ j(equal, &done_loop, Label::kNear);
4120 __ mov(edi, Operand(ebx, 0 * kPointerSize));
4121 __ mov(FieldOperand(edx, ecx, times_half_pointer_size,
4122 FixedArray::kHeaderSize),
4123 edi);
4124 __ sub(ebx, Immediate(1 * kPointerSize));
4125 __ add(ecx, Immediate(Smi::FromInt(1)));
4126 __ jmp(&loop);
4127 __ bind(&done_loop);
4128 }
4129
4130 // Setup the strict arguments object in edi.
4131 __ lea(edi,
4132 Operand(edx, eax, times_half_pointer_size, FixedArray::kHeaderSize));
4133 __ LoadGlobalFunction(Context::STRICT_ARGUMENTS_MAP_INDEX, ecx);
4134 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kMapOffset), ecx);
4135 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kPropertiesOffset),
4136 isolate()->factory()->empty_fixed_array());
4137 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kElementsOffset), edx);
4138 __ mov(FieldOperand(edi, JSStrictArgumentsObject::kLengthOffset), eax);
4139 STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
4140 __ mov(eax, edi);
4141 __ Ret();
4142
4143 // Fall back to %AllocateInNewSpace (if not too big).
4144 Label too_big_for_new_space;
4145 __ bind(&allocate);
4146 __ cmp(ecx, Immediate(kMaxRegularHeapObjectSize));
4147 __ j(greater, &too_big_for_new_space);
4148 {
4149 FrameScope scope(masm, StackFrame::INTERNAL);
4150 __ SmiTag(ecx);
4151 __ Push(eax);
4152 __ Push(ebx);
4153 __ Push(ecx);
4154 __ CallRuntime(Runtime::kAllocateInNewSpace);
4155 __ mov(edx, eax);
4156 __ Pop(ebx);
4157 __ Pop(eax);
4158 }
4159 __ jmp(&done_allocate);
4160
4161 // Fall back to %NewStrictArguments.
4162 __ bind(&too_big_for_new_space);
4163 __ PopReturnAddressTo(ecx);
4164 // We reload the function from the caller frame due to register pressure
4165 // within this stub. This is the slow path, hence reloading is preferable.
4166 if (skip_stub_frame()) {
4167 // For Ignition we need to skip the handler/stub frame to reach the
4168 // JavaScript frame for the function.
4169 __ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
4170 __ Push(Operand(edx, StandardFrameConstants::kFunctionOffset));
4171 } else {
4172 __ Push(Operand(ebp, StandardFrameConstants::kFunctionOffset));
4173 }
4174 __ PushReturnAddressFrom(ecx);
4175 __ TailCallRuntime(Runtime::kNewStrictArguments);
4176 }
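
// Unlike the sloppy case, a strict arguments object is a plain snapshot of
// the actual arguments: no parameter map, no aliasing, and no callee
// property (JSStrictArgumentsObject is just map, properties, elements and
// length, as the STATIC_ASSERT above confirms).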
4177
4178 // Generates an Operand for saving parameters after PrepareCallApiFunction.
4179 static Operand ApiParameterOperand(int index) {
4180 return Operand(esp, index * kPointerSize);
4181 }
4182
4183
4184 // Prepares stack to put arguments (aligns and so on). Reserves
4185 // space for return value if needed (assumes the return value is a handle).
4186 // Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
4187 // etc. Saves context (esi). If space was reserved for return value then
4188 // stores the pointer to the reserved slot into esi.
4189 static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
4190 __ EnterApiExitFrame(argc);
4191 if (__ emit_debug_code()) {
4192 __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
4193 }
4194 }
4195
4196
4197 // Calls an API function. Allocates HandleScope, extracts returned value
4198 // from handle and propagates exceptions. Clobbers ebx, edi and
4199 // caller-save registers. Restores context. On return removes
4200 // stack_space * kPointerSize (GCed).
4201 static void CallApiFunctionAndReturn(MacroAssembler* masm,
4202 Register function_address,
4203 ExternalReference thunk_ref,
4204 Operand thunk_last_arg, int stack_space,
4205 Operand* stack_space_operand,
4206 Operand return_value_operand,
4207 Operand* context_restore_operand) {
4208 Isolate* isolate = masm->isolate();
4209
4210 ExternalReference next_address =
4211 ExternalReference::handle_scope_next_address(isolate);
4212 ExternalReference limit_address =
4213 ExternalReference::handle_scope_limit_address(isolate);
4214 ExternalReference level_address =
4215 ExternalReference::handle_scope_level_address(isolate);
4216
4217 DCHECK(edx.is(function_address));
4218 // Allocate HandleScope in callee-save registers.
4219 __ mov(ebx, Operand::StaticVariable(next_address));
4220 __ mov(edi, Operand::StaticVariable(limit_address));
4221 __ add(Operand::StaticVariable(level_address), Immediate(1));
4222
4223 if (FLAG_log_timer_events) {
4224 FrameScope frame(masm, StackFrame::MANUAL);
4225 __ PushSafepointRegisters();
4226 __ PrepareCallCFunction(1, eax);
4227 __ mov(Operand(esp, 0),
4228 Immediate(ExternalReference::isolate_address(isolate)));
4229 __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
4230 1);
4231 __ PopSafepointRegisters();
4232 }
4233
4234
4235 Label profiler_disabled;
4236 Label end_profiler_check;
4237 __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
4238 __ cmpb(Operand(eax, 0), Immediate(0));
4239 __ j(zero, &profiler_disabled);
4240
4241 // Additional parameter is the address of the actual getter function.
4242 // The additional parameter is the address of the actual callback function.
4243 // Call the api function.
4244 __ mov(eax, Immediate(thunk_ref));
4245 __ call(eax);
4246 __ jmp(&end_profiler_check);
4247
4248 __ bind(&profiler_disabled);
4249 // Call the api function.
4250 __ call(function_address);
4251 __ bind(&end_profiler_check);
4252
4253 if (FLAG_log_timer_events) {
4254 FrameScope frame(masm, StackFrame::MANUAL);
4255 __ PushSafepointRegisters();
4256 __ PrepareCallCFunction(1, eax);
4257 __ mov(Operand(esp, 0),
4258 Immediate(ExternalReference::isolate_address(isolate)));
4259 __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
4260 1);
4261 __ PopSafepointRegisters();
4262 }
4263
4264 Label prologue;
4265 // Load the value from ReturnValue
4266 __ mov(eax, return_value_operand);
4267
4268 Label promote_scheduled_exception;
4269 Label delete_allocated_handles;
4270 Label leave_exit_frame;
4271
4272 __ bind(&prologue);
4273 // No more valid handles (the result handle was the last one). Restore
4274 // previous handle scope.
4275 __ mov(Operand::StaticVariable(next_address), ebx);
4276 __ sub(Operand::StaticVariable(level_address), Immediate(1));
4277 __ Assert(above_equal, kInvalidHandleScopeLevel);
4278 __ cmp(edi, Operand::StaticVariable(limit_address));
4279 __ j(not_equal, &delete_allocated_handles);
4280
4281 // Leave the API exit frame.
4282 __ bind(&leave_exit_frame);
4283 bool restore_context = context_restore_operand != NULL;
4284 if (restore_context) {
4285 __ mov(esi, *context_restore_operand);
4286 }
4287 if (stack_space_operand != nullptr) {
4288 __ mov(ebx, *stack_space_operand);
4289 }
4290 __ LeaveApiExitFrame(!restore_context);
4291
4292 // Check if the function scheduled an exception.
4293 ExternalReference scheduled_exception_address =
4294 ExternalReference::scheduled_exception_address(isolate);
4295 __ cmp(Operand::StaticVariable(scheduled_exception_address),
4296 Immediate(isolate->factory()->the_hole_value()));
4297 __ j(not_equal, &promote_scheduled_exception);
4298
4299 #if DEBUG
4300 // Check if the function returned a valid JavaScript value.
4301 Label ok;
4302 Register return_value = eax;
4303 Register map = ecx;
4304
4305 __ JumpIfSmi(return_value, &ok, Label::kNear);
4306 __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));
4307
4308 __ CmpInstanceType(map, LAST_NAME_TYPE);
4309 __ j(below_equal, &ok, Label::kNear);
4310
4311 __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
4312 __ j(above_equal, &ok, Label::kNear);
4313
4314 __ cmp(map, isolate->factory()->heap_number_map());
4315 __ j(equal, &ok, Label::kNear);
4316
4317 __ cmp(return_value, isolate->factory()->undefined_value());
4318 __ j(equal, &ok, Label::kNear);
4319
4320 __ cmp(return_value, isolate->factory()->true_value());
4321 __ j(equal, &ok, Label::kNear);
4322
4323 __ cmp(return_value, isolate->factory()->false_value());
4324 __ j(equal, &ok, Label::kNear);
4325
4326 __ cmp(return_value, isolate->factory()->null_value());
4327 __ j(equal, &ok, Label::kNear);
4328
4329 __ Abort(kAPICallReturnedInvalidObject);
4330
4331 __ bind(&ok);
4332 #endif
4333
4334 if (stack_space_operand != nullptr) {
4335 DCHECK_EQ(0, stack_space);
4336 __ pop(ecx);
4337 __ add(esp, ebx);
4338 __ jmp(ecx);
4339 } else {
4340 __ ret(stack_space * kPointerSize);
4341 }
4342
4343 // Re-throw by promoting a scheduled exception.
4344 __ bind(&promote_scheduled_exception);
4345 __ TailCallRuntime(Runtime::kPromoteScheduledException);
4346
4347 // HandleScope limit has changed. Delete allocated extensions.
4348 ExternalReference delete_extensions =
4349 ExternalReference::delete_handle_scope_extensions(isolate);
4350 __ bind(&delete_allocated_handles);
4351 __ mov(Operand::StaticVariable(limit_address), edi);
4352 __ mov(edi, eax);
4353 __ mov(Operand(esp, 0),
4354 Immediate(ExternalReference::isolate_address(isolate)));
4355 __ mov(eax, Immediate(delete_extensions));
4356 __ call(eax);
4357 __ mov(eax, edi);
4358 __ jmp(&leave_exit_frame);
4359 }
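
// HandleScope bookkeeping in the helper above: next and limit are kept in
// callee-save registers (ebx/edi) and the level is incremented before the
// call; afterwards next is restored and the level decremented. If the
// callback grew the topmost handle scope, limit has changed and the slow
// path calls delete_handle_scope_extensions to free the extra handle blocks.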
4360
4361 void CallApiCallbackStub::Generate(MacroAssembler* masm) {
4362 // ----------- S t a t e -------------
4363 // -- edi : callee
4364 // -- ebx : call_data
4365 // -- ecx : holder
4366 // -- edx : api_function_address
4367 // -- esi : context
4368 // --
4369 // -- esp[0] : return address
4370 // -- esp[4] : last argument
4371 // -- ...
4372 // -- esp[argc * 4] : first argument
4373 // -- esp[(argc + 1) * 4] : receiver
4374 // -----------------------------------
4375
4376 Register callee = edi;
4377 Register call_data = ebx;
4378 Register holder = ecx;
4379 Register api_function_address = edx;
4380 Register context = esi;
4381 Register return_address = eax;
4382
4383 typedef FunctionCallbackArguments FCA;
4384
4385 STATIC_ASSERT(FCA::kContextSaveIndex == 6);
4386 STATIC_ASSERT(FCA::kCalleeIndex == 5);
4387 STATIC_ASSERT(FCA::kDataIndex == 4);
4388 STATIC_ASSERT(FCA::kReturnValueOffset == 3);
4389 STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
4390 STATIC_ASSERT(FCA::kIsolateIndex == 1);
4391 STATIC_ASSERT(FCA::kHolderIndex == 0);
4392 STATIC_ASSERT(FCA::kNewTargetIndex == 7);
4393 STATIC_ASSERT(FCA::kArgsLength == 8);
4394
4395 __ pop(return_address);
4396
4397 // new target
4398 __ PushRoot(Heap::kUndefinedValueRootIndex);
4399
4400 // context save.
4401 __ push(context);
4402
4403 // callee
4404 __ push(callee);
4405
4406 // call data
4407 __ push(call_data);
4408
4409 Register scratch = call_data;
4410 if (!call_data_undefined()) {
4411 // return value
4412 __ push(Immediate(masm->isolate()->factory()->undefined_value()));
4413 // return value default
4414 __ push(Immediate(masm->isolate()->factory()->undefined_value()));
4415 } else {
4416 // return value
4417 __ push(scratch);
4418 // return value default
4419 __ push(scratch);
4420 }
4421 // isolate
4422 __ push(Immediate(reinterpret_cast<int>(masm->isolate())));
4423 // holder
4424 __ push(holder);
4425
4426 __ mov(scratch, esp);
4427
4428 // push return address
4429 __ push(return_address);
4430
4431 if (!is_lazy()) {
4432 // load context from callee
4433 __ mov(context, FieldOperand(callee, JSFunction::kContextOffset));
4434 }
4435
4436 // The API function gets a reference to the v8::Arguments. If the CPU
4437 // profiler is enabled, a wrapper function is called instead, and the
4438 // address of the callback has to be passed as an additional parameter,
4439 // so always allocate space for it.
4440 const int kApiArgc = 1 + 1;
4441
4442 // Allocate the v8::Arguments structure in the arguments' space since
4443 // it's not controlled by GC.
4444 const int kApiStackSpace = 3;
4445
4446 PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);
4447
4448 // FunctionCallbackInfo::implicit_args_.
4449 __ mov(ApiParameterOperand(2), scratch);
4450 __ add(scratch, Immediate((argc() + FCA::kArgsLength - 1) * kPointerSize));
4451 // FunctionCallbackInfo::values_.
4452 __ mov(ApiParameterOperand(3), scratch);
4453 // FunctionCallbackInfo::length_.
4454 __ Move(ApiParameterOperand(4), Immediate(argc()));
4455
4456 // v8::InvocationCallback's argument.
4457 __ lea(scratch, ApiParameterOperand(2));
4458 __ mov(ApiParameterOperand(0), scratch);
4459
4460 ExternalReference thunk_ref =
4461 ExternalReference::invoke_function_callback(masm->isolate());
4462
4463 Operand context_restore_operand(ebp,
4464 (2 + FCA::kContextSaveIndex) * kPointerSize);
4465 // Store callbacks return the first JS argument (the value being stored).
4466 int return_value_offset = 0;
4467 if (is_store()) {
4468 return_value_offset = 2 + FCA::kArgsLength;
4469 } else {
4470 return_value_offset = 2 + FCA::kReturnValueOffset;
4471 }
4472 Operand return_value_operand(ebp, return_value_offset * kPointerSize);
4473 int stack_space = 0;
4474 Operand length_operand = ApiParameterOperand(4);
4475 Operand* stack_space_operand = &length_operand;
4476 stack_space = argc() + FCA::kArgsLength + 1;
4477 stack_space_operand = nullptr;
4478 CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
4479 ApiParameterOperand(1), stack_space,
4480 stack_space_operand, return_value_operand,
4481 &context_restore_operand);
4482 }
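
// The implicit_args block assembled above is consumed by the C++ callback
// through v8::FunctionCallbackInfo; the callback itself has roughly this
// shape (see include/v8.h):
//
//   typedef void (*FunctionCallback)(const FunctionCallbackInfo<Value>& info);
//
// Accessors such as info.Data(), info.GetIsolate() and info.GetReturnValue()
// read the slots (data, isolate, return value, ...) that this stub just
// materialized on the stack.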
4483
4484
4485 void CallApiGetterStub::Generate(MacroAssembler* masm) {
4486 // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
4487 // name below the exit frame to make GC aware of them.
4488 STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
4489 STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
4490 STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
4491 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
4492 STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
4493 STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
4494 STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
4495 STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);
4496
4497 Register receiver = ApiGetterDescriptor::ReceiverRegister();
4498 Register holder = ApiGetterDescriptor::HolderRegister();
4499 Register callback = ApiGetterDescriptor::CallbackRegister();
4500 Register scratch = ebx;
4501 DCHECK(!AreAliased(receiver, holder, callback, scratch));
4502
4503 __ pop(scratch); // Pop return address to extend the frame.
4504 __ push(receiver);
4505 __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
4506 __ PushRoot(Heap::kUndefinedValueRootIndex); // ReturnValue
4507 // ReturnValue default value
4508 __ PushRoot(Heap::kUndefinedValueRootIndex);
4509 __ push(Immediate(ExternalReference::isolate_address(isolate())));
4510 __ push(holder);
4511 __ push(Immediate(Smi::kZero)); // should_throw_on_error -> false
4512 __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
4513 __ push(scratch); // Restore return address.
4514
4515 // v8::PropertyCallbackInfo::args_ array and name handle.
4516 const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;
4517
4518 // Allocate v8::PropertyCallbackInfo object, arguments for callback and
4519 // space for optional callback address parameter (in case CPU profiler is
4520 // active) in non-GCed stack space.
4521 const int kApiArgc = 3 + 1;
4522
4523 // Load address of v8::PropertyAccessorInfo::args_ array.
4524 __ lea(scratch, Operand(esp, 2 * kPointerSize));
4525
4526 PrepareCallApiFunction(masm, kApiArgc);
4527 // Create v8::PropertyCallbackInfo object on the stack and initialize
4528 // its args_ field.
4529 Operand info_object = ApiParameterOperand(3);
4530 __ mov(info_object, scratch);
4531
4532 // Name as handle.
4533 __ sub(scratch, Immediate(kPointerSize));
4534 __ mov(ApiParameterOperand(0), scratch);
4535 // Arguments pointer.
4536 __ lea(scratch, info_object);
4537 __ mov(ApiParameterOperand(1), scratch);
4538 // Reserve space for optional callback address parameter.
4539 Operand thunk_last_arg = ApiParameterOperand(2);
4540
4541 ExternalReference thunk_ref =
4542 ExternalReference::invoke_accessor_getter_callback(isolate());
4543
4544 __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
4545 Register function_address = edx;
4546 __ mov(function_address,
4547 FieldOperand(scratch, Foreign::kForeignAddressOffset));
4548 // +3 is to skip prolog, return address and name handle.
4549 Operand return_value_operand(
4550 ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
4551 CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
4552 kStackUnwindSpace, nullptr, return_value_operand,
4553 NULL);
4554 }
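
// The getter invoked through edx above follows the AccessorNameGetterCallback
// shape from include/v8.h, roughly:
//
//   typedef void (*AccessorNameGetterCallback)(
//       Local<Name> property, const PropertyCallbackInfo<Value>& info);
//
// matching the two parameters prepared above: ApiParameterOperand(0) holds
// the name handle and ApiParameterOperand(1) the PropertyCallbackInfo.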
4555
4556 #undef __
4557
4558 } // namespace internal
4559 } // namespace v8
4560
4561 #endif // V8_TARGET_ARCH_X87
4562