1 // Copyright 2012 the V8 project authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 #if V8_TARGET_ARCH_IA32
6 
7 #include "src/base/bits.h"
8 #include "src/base/division-by-constant.h"
9 #include "src/bootstrapper.h"
10 #include "src/codegen.h"
11 #include "src/debug/debug.h"
12 #include "src/ia32/frames-ia32.h"
13 #include "src/ia32/macro-assembler-ia32.h"
14 #include "src/runtime/runtime.h"
15 
16 namespace v8 {
17 namespace internal {
18 
19 // -------------------------------------------------------------------------
20 // MacroAssembler implementation.
21 
MacroAssembler::MacroAssembler(Isolate* arg_isolate, void* buffer, int size,
                               CodeObjectRequired create_code_object)
    : Assembler(arg_isolate, buffer, size),
      generating_stub_(false),
      has_frame_(false) {
  // Only materialize a code-object handle when requested; it starts out
  // referring to the undefined value.
  if (create_code_object == CodeObjectRequired::kYes) {
    code_object_ =
        Handle<Object>::New(isolate()->heap()->undefined_value(), isolate());
  }
}
32 
33 
// Load a value into |dst| with the width and signedness implied by the
// representation |r|. Doubles cannot be loaded into a general register.
void MacroAssembler::Load(Register dst, const Operand& src, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8()) {
    movsx_b(dst, src);  // Sign-extending 8-bit load.
  } else if (r.IsUInteger8()) {
    movzx_b(dst, src);  // Zero-extending 8-bit load.
  } else if (r.IsInteger16()) {
    movsx_w(dst, src);  // Sign-extending 16-bit load.
  } else if (r.IsUInteger16()) {
    movzx_w(dst, src);  // Zero-extending 16-bit load.
  } else {
    mov(dst, src);      // Full word load.
  }
}
48 
49 
// Store |src| to |dst| with the width implied by the representation |r|.
// For full-width stores, debug builds verify the tagging expected by |r|.
void MacroAssembler::Store(Register src, const Operand& dst, Representation r) {
  DCHECK(!r.IsDouble());
  if (r.IsInteger8() || r.IsUInteger8()) {
    mov_b(dst, src);  // 8-bit store.
  } else if (r.IsInteger16() || r.IsUInteger16()) {
    mov_w(dst, src);  // 16-bit store.
  } else {
    if (r.IsHeapObject()) {
      AssertNotSmi(src);
    } else if (r.IsSmi()) {
      AssertSmi(src);
    }
    mov(dst, src);    // Full word store.
  }
}
65 
66 
LoadRoot(Register destination,Heap::RootListIndex index)67 void MacroAssembler::LoadRoot(Register destination, Heap::RootListIndex index) {
68   if (isolate()->heap()->RootCanBeTreatedAsConstant(index)) {
69     mov(destination, isolate()->heap()->root_handle(index));
70     return;
71   }
72   ExternalReference roots_array_start =
73       ExternalReference::roots_array_start(isolate());
74   mov(destination, Immediate(index));
75   mov(destination, Operand::StaticArray(destination,
76                                         times_pointer_size,
77                                         roots_array_start));
78 }
79 
80 
// Store |source| into the roots-array slot for |index|. Only roots that may
// legally be written after heap initialization can be stored.
void MacroAssembler::StoreRoot(Register source,
                               Register scratch,
                               Heap::RootListIndex index) {
  DCHECK(Heap::RootCanBeWrittenAfterInitialization(index));
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));  // scratch = slot index into roots array.
  mov(Operand::StaticArray(scratch, times_pointer_size, roots_array_start),
      source);
}
91 
92 
// Compare |with| against the root at |index|, loading it from the roots
// array via |scratch|. Works for any root, including mutable ones.
void MacroAssembler::CompareRoot(Register with,
                                 Register scratch,
                                 Heap::RootListIndex index) {
  ExternalReference roots_array_start =
      ExternalReference::roots_array_start(isolate());
  mov(scratch, Immediate(index));  // scratch = slot index into roots array.
  cmp(with, Operand::StaticArray(scratch,
                                 times_pointer_size,
                                 roots_array_start));
}
103 
104 
CompareRoot(Register with,Heap::RootListIndex index)105 void MacroAssembler::CompareRoot(Register with, Heap::RootListIndex index) {
106   DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
107   cmp(with, isolate()->heap()->root_handle(index));
108 }
109 
110 
// Compare the memory operand |with| against a constant root embedded as a
// handle. Only valid for roots that can be treated as constants.
void MacroAssembler::CompareRoot(const Operand& with,
                                 Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  cmp(with, isolate()->heap()->root_handle(index));
}
116 
117 
// Push a constant root onto the stack as an embedded handle.
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
  DCHECK(isolate()->heap()->RootCanBeTreatedAsConstant(index));
  Push(isolate()->heap()->root_handle(index));
}
122 
123 
// Jump to |condition_met| depending on whether |object| lives in new space.
// With cc == equal the jump is taken when the object IS in new space; with
// cc == not_equal when it is NOT. Clobbers |scratch|.
void MacroAssembler::InNewSpace(
    Register object,
    Register scratch,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == equal || cc == not_equal);
  // Compute the start of the page containing |object| by masking off the
  // low (page-offset) bits.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Check that we can use a test_b: both space flags must fit in the low
  // byte of the page-flags word.
  DCHECK(MemoryChunk::IN_FROM_SPACE < 8);
  DCHECK(MemoryChunk::IN_TO_SPACE < 8);
  int mask = (1 << MemoryChunk::IN_FROM_SPACE)
           | (1 << MemoryChunk::IN_TO_SPACE);
  // If non-zero, the page belongs to new-space.
  test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
         static_cast<uint8_t>(mask));
  j(cc, condition_met, condition_met_distance);
}
147 
148 
// Record |addr| (a slot that now holds a new-space pointer) in the store
// buffer, and call the overflow stub if the buffer is full. Depending on
// |and_then|, either returns from the generated function afterwards
// (kReturnAtEnd) or falls through (kFallThroughAtEnd). Clobbers |scratch|.
void MacroAssembler::RememberedSetHelper(
    Register object,  // Only used for debug checks.
    Register addr,
    Register scratch,
    SaveFPRegsMode save_fp,
    MacroAssembler::RememberedSetFinalAction and_then) {
  Label done;
  if (emit_debug_code()) {
    // The helper only makes sense for objects in new space.
    Label ok;
    JumpIfNotInNewSpace(object, scratch, &ok, Label::kNear);
    int3();
    bind(&ok);
  }
  // Load store buffer top.
  ExternalReference store_buffer =
      ExternalReference::store_buffer_top(isolate());
  mov(scratch, Operand::StaticVariable(store_buffer));
  // Store pointer to buffer.
  mov(Operand(scratch, 0), addr);
  // Increment buffer top.
  add(scratch, Immediate(kPointerSize));
  // Write back new top of buffer.
  mov(Operand::StaticVariable(store_buffer), scratch);
  // Call stub on end of buffer.
  // Check for end of buffer.
  test(scratch, Immediate(StoreBuffer::kStoreBufferOverflowBit));
  if (and_then == kReturnAtEnd) {
    Label buffer_overflowed;
    j(not_equal, &buffer_overflowed, Label::kNear);
    ret(0);  // Buffer not full: return immediately.
    bind(&buffer_overflowed);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    j(equal, &done, Label::kNear);  // Buffer not full: skip the stub call.
  }
  StoreBufferOverflowStub store_buffer_overflow(isolate(), save_fp);
  CallStub(&store_buffer_overflow);
  if (and_then == kReturnAtEnd) {
    ret(0);
  } else {
    DCHECK(and_then == kFallThroughAtEnd);
    bind(&done);
  }
}
193 
194 
// Clamp the double in |input_reg| to the uint8 range [0, 255] and leave the
// result in |result_reg|. Values convert with cvtsd2si rounding; negative
// inputs and NaN clamp to 0, too-large positives clamp to 255.
// Clobbers |scratch_reg|.
void MacroAssembler::ClampDoubleToUint8(XMMRegister input_reg,
                                        XMMRegister scratch_reg,
                                        Register result_reg) {
  Label done;
  Label conv_failure;
  xorps(scratch_reg, scratch_reg);  // scratch = +0.0 for the comparison below.
  cvtsd2si(result_reg, input_reg);  // Produces 0x80000000 on failure.
  test(result_reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);     // Already within [0, 255].
  // cmp against 1 overflows exactly when result is INT_MIN, i.e. the
  // conversion failed (NaN or out of int32 range).
  cmp(result_reg, Immediate(0x1));
  j(overflow, &conv_failure, Label::kNear);
  // Convertible but out of range: 0 for negatives, 255 for positives.
  mov(result_reg, Immediate(0));
  setcc(sign, result_reg);           // 1 if negative, 0 if positive.
  sub(result_reg, Immediate(1));     // 0 if negative, -1 if positive.
  and_(result_reg, Immediate(255));  // 0 if negative, 255 if positive.
  jmp(&done, Label::kNear);
  bind(&conv_failure);
  // Conversion failure: NaN and negatives yield 0, large positives 255.
  Move(result_reg, Immediate(0));
  ucomisd(input_reg, scratch_reg);
  j(below, &done, Label::kNear);
  Move(result_reg, Immediate(255));
  bind(&done);
}
218 
219 
// Clamp the int32 in |reg| to the uint8 range [0, 255] in place:
// negatives become 0, values above 255 become 255.
void MacroAssembler::ClampUint8(Register reg) {
  Label done;
  test(reg, Immediate(0xFFFFFF00));
  j(zero, &done, Label::kNear);  // Already within [0, 255].
  setcc(negative, reg);  // 1 if negative, 0 if positive.
  dec_b(reg);  // 0 if negative, 255 if positive.
  bind(&done);
}
228 
229 
// Truncate the double stored at [input_reg + offset] to an int32 in
// |result_reg| by calling the out-of-line DoubleToIStub.
void MacroAssembler::SlowTruncateToI(Register result_reg,
                                     Register input_reg,
                                     int offset) {
  DoubleToIStub stub(isolate(), input_reg, result_reg, offset, true);
  call(stub.GetCode(), RelocInfo::CODE_TARGET);
}
236 
237 
// Truncate the double in |input_reg| to an int32 in |result_reg|. Uses the
// fast cvttsd2si path and falls back to the slow stub when the conversion
// overflows (cvttsd2si yields 0x80000000 on failure).
void MacroAssembler::TruncateDoubleToI(Register result_reg,
                                       XMMRegister input_reg) {
  Label done;
  cvttsd2si(result_reg, Operand(input_reg));
  // cmp against 1 overflows exactly when result is INT_MIN, the failure
  // sentinel of cvttsd2si.
  cmp(result_reg, 0x1);
  j(no_overflow, &done, Label::kNear);

  // Slow path: spill the double to the stack and call the stub.
  sub(esp, Immediate(kDoubleSize));
  movsd(MemOperand(esp, 0), input_reg);
  SlowTruncateToI(result_reg, esp, 0);
  add(esp, Immediate(kDoubleSize));
  bind(&done);
}
251 
252 
// Convert the double in |input_reg| to an int32 in |result_reg|, jumping to
// |lost_precision| if the value is not exactly representable, to |is_nan|
// for NaN, and (with FAIL_ON_MINUS_ZERO) to |minus_zero| for -0.0.
// Clobbers |scratch|.
void MacroAssembler::DoubleToI(Register result_reg, XMMRegister input_reg,
                               XMMRegister scratch,
                               MinusZeroMode minus_zero_mode,
                               Label* lost_precision, Label* is_nan,
                               Label* minus_zero, Label::Distance dst) {
  DCHECK(!input_reg.is(scratch));
  cvttsd2si(result_reg, Operand(input_reg));
  // Round-trip the result back to double and compare: a mismatch means the
  // conversion lost precision; an unordered result means NaN.
  Cvtsi2sd(scratch, Operand(result_reg));
  ucomisd(scratch, input_reg);
  j(not_equal, lost_precision, dst);
  j(parity_even, is_nan, dst);
  if (minus_zero_mode == FAIL_ON_MINUS_ZERO) {
    Label done;
    // The integer converted back is equal to the original. We
    // only have to test if we got -0 as an input.
    test(result_reg, Operand(result_reg));
    j(not_zero, &done, Label::kNear);
    movmskpd(result_reg, input_reg);
    // Bit 0 contains the sign of the double in input_reg.
    // If input was positive, we are ok and return 0, otherwise
    // jump to minus_zero.
    and_(result_reg, 1);
    j(not_zero, minus_zero, dst);
    bind(&done);
  }
}
279 
280 
// Truncate the value of the HeapNumber in |input_reg| to an int32 in
// |result_reg|. Uses x87 fisttp when SSE3 is available, otherwise SSE2
// cvttsd2si; falls back to the slow stub when the fast paths overflow.
void MacroAssembler::TruncateHeapNumberToI(Register result_reg,
                                           Register input_reg) {
  Label done, slow_case;

  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(this, SSE3);
    Label convert;
    // Use more powerful conversion when sse3 is available.
    // Load x87 register with heap number.
    fld_d(FieldOperand(input_reg, HeapNumber::kValueOffset));
    // Get exponent alone and check for too-big exponent.
    mov(result_reg, FieldOperand(input_reg, HeapNumber::kExponentOffset));
    and_(result_reg, HeapNumber::kExponentMask);
    // Exponents >= 63 do not fit in the 64-bit fisttp result.
    const uint32_t kTooBigExponent =
        (HeapNumber::kExponentBias + 63) << HeapNumber::kExponentShift;
    cmp(Operand(result_reg), Immediate(kTooBigExponent));
    j(greater_equal, &slow_case, Label::kNear);

    // Reserve space for 64 bit answer.
    sub(Operand(esp), Immediate(kDoubleSize));
    // Do conversion, which cannot fail because we checked the exponent.
    fisttp_d(Operand(esp, 0));
    mov(result_reg, Operand(esp, 0));  // Low word of answer is the result.
    add(Operand(esp), Immediate(kDoubleSize));
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from fpu stack
      sub(Operand(esp), Immediate(kDoubleSize));
      fstp_d(Operand(esp, 0));
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      fstp(0);  // Discard the x87 copy; read the heap number directly.
      SlowTruncateToI(result_reg, input_reg);
    }
  } else {
    movsd(xmm0, FieldOperand(input_reg, HeapNumber::kValueOffset));
    cvttsd2si(result_reg, Operand(xmm0));
    // cmp against 1 overflows exactly when result is INT_MIN, the failure
    // sentinel of cvttsd2si.
    cmp(result_reg, 0x1);
    j(no_overflow, &done, Label::kNear);
    // Check if the input was 0x80000000 (kMinInt), which converts to the
    // sentinel without actually overflowing.
    // If no, then we got an overflow and we deoptimize.
    ExternalReference min_int = ExternalReference::address_of_min_int();
    ucomisd(xmm0, Operand::StaticVariable(min_int));
    j(not_equal, &slow_case, Label::kNear);
    j(parity_even, &slow_case, Label::kNear);  // NaN.
    jmp(&done, Label::kNear);

    // Slow case.
    bind(&slow_case);
    if (input_reg.is(result_reg)) {
      // Input is clobbered. Restore number from double scratch.
      sub(esp, Immediate(kDoubleSize));
      movsd(MemOperand(esp, 0), xmm0);
      SlowTruncateToI(result_reg, esp, 0);
      add(esp, Immediate(kDoubleSize));
    } else {
      SlowTruncateToI(result_reg, input_reg);
    }
  }
  bind(&done);
}
346 
347 
// Load the uint32 at |src| into |dst| as a double. Cvtsi2sd interprets the
// value as signed, so when the sign bit is set we correct the result by
// adding 2^32 (the uint32 bias).
void MacroAssembler::LoadUint32(XMMRegister dst, const Operand& src) {
  Label done;
  cmp(src, Immediate(0));
  ExternalReference uint32_bias = ExternalReference::address_of_uint32_bias();
  Cvtsi2sd(dst, src);
  j(not_sign, &done, Label::kNear);  // Non-negative: signed conversion is ok.
  addsd(dst, Operand::StaticVariable(uint32_bias));
  bind(&done);
}
357 
358 
// Emit the write barrier for a store of |value| into the FixedArray
// |object| at smi index |index|. Clobbers |index| (reused as the slot
// address) and, in debug code, zaps |value| and |index| afterwards.
void MacroAssembler::RecordWriteArray(
    Register object,
    Register value,
    Register index,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    DCHECK_EQ(0, kSmiTag);
    test(value, Immediate(kSmiTagMask));
    j(zero, &done);
  }

  // Array access: calculate the destination address in the same manner as
  // KeyedStoreIC::GenerateGeneric.  Multiply a smi by 2 to get an offset
  // into an array of words.
  Register dst = index;
  lea(dst, Operand(object, index, times_half_pointer_size,
                   FixedArray::kHeaderSize - kHeapObjectTag));

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(index, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
397 
398 
// Emit the write barrier for a store of |value| into the field of |object|
// at |offset| (a tagged-object-relative byte offset). Clobbers |dst| (used
// for the slot address) and, in debug code, zaps |value| and |dst| after.
void MacroAssembler::RecordWriteField(
    Register object,
    int offset,
    Register value,
    Register dst,
    SaveFPRegsMode save_fp,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis.
  Label done;

  // Skip barrier if writing a smi.
  if (smi_check == INLINE_SMI_CHECK) {
    JumpIfSmi(value, &done, Label::kNear);
  }

  // Although the object register is tagged, the offset is relative to the
  // start of the object, so the offset must be a multiple of kPointerSize.
  DCHECK(IsAligned(offset, kPointerSize));

  lea(dst, FieldOperand(object, offset));
  if (emit_debug_code()) {
    // Verify the computed slot address is pointer-aligned.
    Label ok;
    test_b(dst, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  RecordWrite(object, dst, value, save_fp, remembered_set_action,
              OMIT_SMI_CHECK, pointers_to_here_check_for_value);

  bind(&done);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(dst, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
442 
443 
// Emit the write barrier for storing the map |map| into |object|'s map
// slot. Maps are never in new space, so only the incremental-marking
// (pointers-to-here) check is needed. Clobbers |scratch1| and |scratch2|.
void MacroAssembler::RecordWriteForMap(
    Register object,
    Handle<Map> map,
    Register scratch1,
    Register scratch2,
    SaveFPRegsMode save_fp) {
  Label done;

  Register address = scratch1;
  Register value = scratch2;
  if (emit_debug_code()) {
    // Verify the map slot address is pointer-aligned.
    Label ok;
    lea(address, FieldOperand(object, HeapObject::kMapOffset));
    test_b(address, (1 << kPointerSizeLog2) - 1);
    j(zero, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  // Without incremental marking there is nothing to record.
  if (!FLAG_incremental_marking) {
    return;
  }

  // Compute the address.
  lea(address, FieldOperand(object, HeapObject::kMapOffset));

  // A single check of the map's pages interesting flag suffices, since it is
  // only set during incremental collection, and then it's also guaranteed that
  // the from object's page's interesting flag is also set.  This optimization
  // relies on the fact that maps can never be in new space.
  DCHECK(!isolate()->heap()->InNewSpace(*map));
  CheckPageFlagForMap(map,
                      MemoryChunk::kPointersToHereAreInterestingMask,
                      zero,
                      &done,
                      Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
                       save_fp);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered input registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
504 
505 
// Emit the full write barrier for a store of |value| into the slot at
// |address| inside |object|. Skips the barrier for smis and for
// uninteresting pages; otherwise calls the RecordWriteStub. In debug code
// the |address| and |value| registers are zapped afterwards.
void MacroAssembler::RecordWrite(
    Register object,
    Register address,
    Register value,
    SaveFPRegsMode fp_mode,
    RememberedSetAction remembered_set_action,
    SmiCheck smi_check,
    PointersToHereCheck pointers_to_here_check_for_value) {
  DCHECK(!object.is(value));
  DCHECK(!object.is(address));
  DCHECK(!value.is(address));
  AssertNotSmi(object);

  if (remembered_set_action == OMIT_REMEMBERED_SET &&
      !FLAG_incremental_marking) {
    return;
  }

  if (emit_debug_code()) {
    // Verify that the slot really contains |value|.
    Label ok;
    cmp(value, Operand(address, 0));
    j(equal, &ok, Label::kNear);
    int3();
    bind(&ok);
  }

  // First, check if a write barrier is even needed. The tests below
  // catch stores of Smis and stores into young gen.
  Label done;

  if (smi_check == INLINE_SMI_CHECK) {
    // Skip barrier if writing a smi.
    JumpIfSmi(value, &done, Label::kNear);
  }

  if (pointers_to_here_check_for_value != kPointersToHereAreAlwaysInteresting) {
    CheckPageFlag(value,
                  value,  // Used as scratch.
                  MemoryChunk::kPointersToHereAreInterestingMask,
                  zero,
                  &done,
                  Label::kNear);
  }
  CheckPageFlag(object,
                value,  // Used as scratch.
                MemoryChunk::kPointersFromHereAreInterestingMask,
                zero,
                &done,
                Label::kNear);

  RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
                       fp_mode);
  CallStub(&stub);

  bind(&done);

  // Count number of write barriers in generated code.
  isolate()->counters()->write_barriers_static()->Increment();
  IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);

  // Clobber clobbered registers when running with the debug-code flag
  // turned on to provoke errors.
  if (emit_debug_code()) {
    mov(address, Immediate(bit_cast<int32_t>(kZapValue)));
    mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}
573 
574 
// Invoke the runtime's debugger-statement handler: zero arguments in eax,
// the runtime function's external reference in ebx, called via CEntryStub.
void MacroAssembler::DebugBreak() {
  Move(eax, Immediate(0));
  mov(ebx, Immediate(ExternalReference(Runtime::kHandleDebuggerStatement,
                                       isolate())));
  CEntryStub ces(isolate(), 1);
  call(ces.GetCode(), RelocInfo::DEBUGGER_STATEMENT);
}
582 
583 
// Convert the int32 at |src| to a double in |dst|. The xorps first clears
// |dst| to avoid a false dependence on its previous contents, since
// cvtsi2sd only writes the low part of the register.
void MacroAssembler::Cvtsi2sd(XMMRegister dst, const Operand& src) {
  xorps(dst, dst);
  cvtsi2sd(dst, src);
}
588 
589 
IsUnsafeImmediate(const Immediate & x)590 bool MacroAssembler::IsUnsafeImmediate(const Immediate& x) {
591   static const int kMaxImmediateBits = 17;
592   if (!RelocInfo::IsNone(x.rmode_)) return false;
593   return !is_intn(x.x_, kMaxImmediateBits);
594 }
595 
596 
SafeMove(Register dst,const Immediate & x)597 void MacroAssembler::SafeMove(Register dst, const Immediate& x) {
598   if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
599     Move(dst, Immediate(x.x_ ^ jit_cookie()));
600     xor_(dst, jit_cookie());
601   } else {
602     Move(dst, x);
603   }
604 }
605 
606 
SafePush(const Immediate & x)607 void MacroAssembler::SafePush(const Immediate& x) {
608   if (IsUnsafeImmediate(x) && jit_cookie() != 0) {
609     push(Immediate(x.x_ ^ jit_cookie()));
610     xor_(Operand(esp, 0), Immediate(jit_cookie()));
611   } else {
612     push(x);
613   }
614 }
615 
616 
// Compare the instance type of |heap_object| against |type|. Loads the
// object's map into |map| as a side effect.
void MacroAssembler::CmpObjectType(Register heap_object,
                                   InstanceType type,
                                   Register map) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  CmpInstanceType(map, type);
}
623 
624 
// Compare the instance-type byte stored in |map| against |type|.
void MacroAssembler::CmpInstanceType(Register map, InstanceType type) {
  cmpb(FieldOperand(map, Map::kInstanceTypeOffset),
       static_cast<int8_t>(type));
}
629 
630 
// Jump to |fail| unless |map|'s elements kind is one of the fast kinds
// (smi, holey smi, object, or holey object elements).
void MacroAssembler::CheckFastElements(Register map,
                                       Label* fail,
                                       Label::Distance distance) {
  // The comparison below relies on the fast elements kinds being the
  // lowest-numbered, contiguous values.
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}
642 
643 
// Jump to |fail| unless |map|'s elements kind is FAST_ELEMENTS or
// FAST_HOLEY_ELEMENTS (i.e. fast object elements, excluding smi kinds).
void MacroAssembler::CheckFastObjectElements(Register map,
                                             Label* fail,
                                             Label::Distance distance) {
  // The range checks below rely on the fast elements kinds being the
  // lowest-numbered, contiguous values.
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(FAST_ELEMENTS == 2);
  STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
  // Fail if at or below the smi kinds ...
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(below_equal, fail, distance);
  // ... or above the fast object kinds.
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleyElementValue);
  j(above, fail, distance);
}
658 
659 
// Jump to |fail| unless |map|'s elements kind is FAST_SMI_ELEMENTS or
// FAST_HOLEY_SMI_ELEMENTS.
void MacroAssembler::CheckFastSmiElements(Register map,
                                          Label* fail,
                                          Label::Distance distance) {
  // The comparison below relies on the smi kinds being the two
  // lowest-numbered elements kinds.
  STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
  STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
  cmpb(FieldOperand(map, Map::kBitField2Offset),
       Map::kMaximumBitField2FastHoleySmiElementValue);
  j(above, fail, distance);
}
669 
670 
// Store |maybe_number| (a smi or HeapNumber) as a raw double into the
// FixedDoubleArray |elements| at smi index |key|. Jumps to |fail| if the
// value is neither. Clobbers |scratch1| and |scratch2|.
void MacroAssembler::StoreNumberToDoubleElements(
    Register maybe_number,
    Register elements,
    Register key,
    Register scratch1,
    XMMRegister scratch2,
    Label* fail,
    int elements_offset) {
  Label smi_value, done;
  JumpIfSmi(maybe_number, &smi_value, Label::kNear);

  // Non-smi: must be a HeapNumber.
  CheckMap(maybe_number,
           isolate()->factory()->heap_number_map(),
           fail,
           DONT_DO_SMI_CHECK);

  // Double value, turn potential sNaN into qNaN.
  Move(scratch2, 1.0);
  mulsd(scratch2, FieldOperand(maybe_number, HeapNumber::kValueOffset));
  jmp(&done, Label::kNear);

  bind(&smi_value);
  // Value is a smi. Convert to a double and store.
  // Preserve original value.
  mov(scratch1, maybe_number);
  SmiUntag(scratch1);
  Cvtsi2sd(scratch2, scratch1);
  bind(&done);
  // |key| is a smi (value * 2), so times_4 scaling yields a byte offset of
  // value * 8, one double per element.
  movsd(FieldOperand(elements, key, times_4,
                     FixedDoubleArray::kHeaderSize - elements_offset),
        scratch2);
}
703 
704 
// Compare |obj|'s map against the handle |map|, setting flags for a
// following conditional jump.
void MacroAssembler::CompareMap(Register obj, Handle<Map> map) {
  cmp(FieldOperand(obj, HeapObject::kMapOffset), map);
}
708 
709 
// Jump to |fail| unless |obj|'s map equals |map|. With DO_SMI_CHECK, smis
// also jump to |fail| (a smi has no map).
void MacroAssembler::CheckMap(Register obj,
                              Handle<Map> map,
                              Label* fail,
                              SmiCheckType smi_check_type) {
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, fail);
  }

  CompareMap(obj, map);
  j(not_equal, fail);
}
721 
722 
// Jump to |success| if |obj|'s map matches the map held by the weak cell
// |cell|; otherwise fall through. With DO_SMI_CHECK, smis fall through as
// well. Clobbers |scratch1| and |scratch2|.
void MacroAssembler::DispatchWeakMap(Register obj, Register scratch1,
                                     Register scratch2, Handle<WeakCell> cell,
                                     Handle<Code> success,
                                     SmiCheckType smi_check_type) {
  Label fail;
  if (smi_check_type == DO_SMI_CHECK) {
    JumpIfSmi(obj, &fail);
  }
  mov(scratch1, FieldOperand(obj, HeapObject::kMapOffset));
  CmpWeakValue(scratch1, cell, scratch2);
  j(equal, success);

  bind(&fail);
}
737 
738 
// Test whether |heap_object| is a string; returns the condition (zero)
// that holds when it is. Loads the object's map into |map| and its
// instance type into |instance_type| as side effects.
Condition MacroAssembler::IsObjectStringType(Register heap_object,
                                             Register map,
                                             Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  STATIC_ASSERT(kNotStringTag != 0);
  test(instance_type, Immediate(kIsNotStringMask));
  return zero;  // Zero flag set <=> the not-string bit is clear.
}
748 
749 
// Test whether |heap_object| is a Name (string or symbol); returns the
// condition (below_equal) that holds when it is. Loads the object's map
// into |map| and its instance type into |instance_type| as side effects.
Condition MacroAssembler::IsObjectNameType(Register heap_object,
                                           Register map,
                                           Register instance_type) {
  mov(map, FieldOperand(heap_object, HeapObject::kMapOffset));
  movzx_b(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
  cmpb(instance_type, static_cast<uint8_t>(LAST_NAME_TYPE));
  return below_equal;
}
758 
759 
// Compare the two values on top of the x87 stack and set EFLAGS.
// fucomip compares ST(0) with ST(1) and pops once; fstp(0) pops the
// remaining operand, leaving the x87 stack empty.
void MacroAssembler::FCmp() {
  fucomip();
  fstp(0);
}
764 
765 
// Debug-code check that |object| is a number (a smi or a HeapNumber).
// Emits nothing unless debug code is enabled.
void MacroAssembler::AssertNumber(Register object) {
  if (emit_debug_code()) {
    Label ok;
    JumpIfSmi(object, &ok);  // Smis are numbers.
    cmp(FieldOperand(object, HeapObject::kMapOffset),
        isolate()->factory()->heap_number_map());
    Check(equal, kOperandNotANumber);
    bind(&ok);
  }
}
776 
777 
// Debug-code check that |object| is a smi. Emits nothing unless debug
// code is enabled.
void MacroAssembler::AssertSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(equal, kOperandIsNotASmi);
  }
}
784 
785 
// Debug-code check that |object| is a string. Preserves |object| by
// pushing it around the map load. Emits nothing unless debug code is on.
void MacroAssembler::AssertString(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAString);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, FIRST_NONSTRING_TYPE);
    pop(object);
    Check(below, kOperandIsNotAString);
  }
}
797 
798 
// Debug-code check that |object| is a Name (string or symbol). Preserves
// |object| by pushing it around the map load. Emits nothing unless debug
// code is enabled.
void MacroAssembler::AssertName(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAName);
    push(object);
    mov(object, FieldOperand(object, HeapObject::kMapOffset));
    CmpInstanceType(object, LAST_NAME_TYPE);
    pop(object);
    Check(below_equal, kOperandIsNotAName);
  }
}
810 
811 
// Debug-code check that |object| is a JSFunction. Preserves |object| by
// pushing it around the type check. Emits nothing unless debug code is on.
void MacroAssembler::AssertFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotAFunction);
    Push(object);
    CmpObjectType(object, JS_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotAFunction);
  }
}
822 
823 
// Debug-code check that |object| is a JSBoundFunction. Preserves |object|
// by pushing it around the type check. Emits nothing unless debug code is
// enabled.
void MacroAssembler::AssertBoundFunction(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmiAndNotABoundFunction);
    Push(object);
    CmpObjectType(object, JS_BOUND_FUNCTION_TYPE, object);
    Pop(object);
    Check(equal, kOperandIsNotABoundFunction);
  }
}
834 
835 
// Abort (in debug code only) unless |object| is the undefined value or an
// AllocationSite object.
void MacroAssembler::AssertUndefinedOrAllocationSite(Register object) {
  if (emit_debug_code()) {
    Label done_checking;
    AssertNotSmi(object);
    cmp(object, isolate()->factory()->undefined_value());
    j(equal, &done_checking);
    // Field offset 0 is the map slot (HeapObject::kMapOffset); compare it
    // against the allocation-site map.
    cmp(FieldOperand(object, 0),
        Immediate(isolate()->factory()->allocation_site_map()));
    Assert(equal, kExpectedUndefinedOrCell);
    bind(&done_checking);
  }
}
848 
849 
// Abort (in debug code only) if |object| is a smi.
void MacroAssembler::AssertNotSmi(Register object) {
  if (emit_debug_code()) {
    test(object, Immediate(kSmiTagMask));
    Check(not_equal, kOperandIsASmi);
  }
}
856 
857 
// Build a standard stub frame: saved frame pointer, context, and a
// StackFrame::STUB marker so the stack walker can identify the frame.
void MacroAssembler::StubPrologue() {
  push(ebp);  // Caller's frame pointer.
  mov(ebp, esp);
  push(esi);  // Callee's context.
  push(Immediate(Smi::FromInt(StackFrame::STUB)));
}
864 
865 
Prologue(bool code_pre_aging)866 void MacroAssembler::Prologue(bool code_pre_aging) {
867   PredictableCodeSizeScope predictible_code_size_scope(this,
868       kNoCodeAgeSequenceLength);
869   if (code_pre_aging) {
870       // Pre-age the code.
871     call(isolate()->builtins()->MarkCodeAsExecutedOnce(),
872         RelocInfo::CODE_AGE_SEQUENCE);
873     Nop(kNoCodeAgeSequenceLength - Assembler::kCallInstructionLength);
874   } else {
875     push(ebp);  // Caller's frame pointer.
876     mov(ebp, esp);
877     push(esi);  // Callee's context.
878     push(edi);  // Callee's JS function.
879   }
880 }
881 
882 
// Load the current function's type feedback vector into |vector| by chasing
// frame function -> SharedFunctionInfo -> feedback vector.
void MacroAssembler::EmitLoadTypeFeedbackVector(Register vector) {
  mov(vector, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
  mov(vector, FieldOperand(vector, JSFunction::kSharedFunctionInfoOffset));
  mov(vector, FieldOperand(vector, SharedFunctionInfo::kFeedbackVectorOffset));
}
888 
889 
// Variant taking a constant-pool flag; ia32 has no out-of-line constant
// pool, so this overload must never be reached.
void MacroAssembler::EnterFrame(StackFrame::Type type,
                                bool load_constant_pool_pointer_reg) {
  // Out-of-line constant pool not implemented on ia32.
  UNREACHABLE();
}
895 
896 
// Build a typed internal frame: saved ebp, context, frame-type marker, and
// the code object of the generated code.
void MacroAssembler::EnterFrame(StackFrame::Type type) {
  push(ebp);
  mov(ebp, esp);
  push(esi);
  push(Immediate(Smi::FromInt(type)));
  push(Immediate(CodeObject()));
  if (emit_debug_code()) {
    // The code-object slot must have been patched to a real code object;
    // it is initially the undefined value (see CodeObjectRequired).
    cmp(Operand(esp, 0), Immediate(isolate()->factory()->undefined_value()));
    Check(not_equal, kCodeObjectNotProperlyPatched);
  }
}
908 
909 
// Tear down a frame created by EnterFrame. In debug code, verify that the
// frame marker on the stack matches the expected |type| first.
void MacroAssembler::LeaveFrame(StackFrame::Type type) {
  if (emit_debug_code()) {
    cmp(Operand(ebp, StandardFrameConstants::kMarkerOffset),
        Immediate(Smi::FromInt(type)));
    Check(equal, kStackFrameTypesMustMatch);
  }
  // leave = mov esp, ebp; pop ebp — discards everything pushed above ebp.
  leave();
}
918 
919 
// First half of exit-frame construction: build the fixed frame slots and
// publish fp/context/C-function into the isolate's "top" external slots.
// Expects the C function address in ebx.
void MacroAssembler::EnterExitFramePrologue() {
  // Set up the frame structure on the stack.
  DCHECK(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerPCOffset == +1 * kPointerSize);
  DCHECK(ExitFrameConstants::kCallerFPOffset ==  0 * kPointerSize);
  push(ebp);
  mov(ebp, esp);

  // Reserve room for entry stack pointer and push the code object.
  DCHECK(ExitFrameConstants::kSPOffset  == -1 * kPointerSize);
  push(Immediate(0));  // Saved entry sp, patched before call.
  push(Immediate(CodeObject()));  // Accessed from ExitFrame::code_slot.

  // Save the frame pointer and the context in top.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress, isolate());
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  ExternalReference c_function_address(Isolate::kCFunctionAddress, isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), ebp);
  mov(Operand::StaticVariable(context_address), esi);
  mov(Operand::StaticVariable(c_function_address), ebx);
}
941 
942 
// Second half of exit-frame construction: reserve |argc| argument slots
// (optionally also space for all XMM registers, which are saved below the
// two fixed frame slots), align esp for the OS, and patch the saved entry
// sp slot reserved by EnterExitFramePrologue.
void MacroAssembler::EnterExitFrameEpilogue(int argc, bool save_doubles) {
  // Optionally save all XMM registers.
  if (save_doubles) {
    int space = XMMRegister::kMaxNumRegisters * kDoubleSize +
                argc * kPointerSize;
    sub(esp, Immediate(space));
    // -2 * kPointerSize skips the saved-entry-sp and code-object slots.
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(Operand(ebp, offset - ((i + 1) * kDoubleSize)), reg);
    }
  } else {
    sub(esp, Immediate(argc * kPointerSize));
  }

  // Get the required frame alignment for the OS.
  const int kFrameAlignment = base::OS::ActivationFrameAlignment();
  if (kFrameAlignment > 0) {
    DCHECK(base::bits::IsPowerOfTwo32(kFrameAlignment));
    // Round esp down to the alignment boundary.
    and_(esp, -kFrameAlignment);
  }

  // Patch the saved entry sp.
  mov(Operand(ebp, ExitFrameConstants::kSPOffset), esp);
}
968 
969 
// Enter an exit frame for a runtime call. Expects the argument count in
// eax; leaves argc in edi and a pointer to the last argument in esi (both
// callee-saved across the C call).
void MacroAssembler::EnterExitFrame(bool save_doubles) {
  EnterExitFramePrologue();

  // Set up argc and argv in callee-saved registers.
  int offset = StandardFrameConstants::kCallerSPOffset - kPointerSize;
  mov(edi, eax);
  // esi = ebp + eax * 4 + offset, i.e. the address of the last argument.
  lea(esi, Operand(ebp, eax, times_4, offset));

  // Reserve space for argc, argv and isolate.
  EnterExitFrameEpilogue(3, save_doubles);
}
981 
982 
// Enter an exit frame for an API callback with |argc| argument slots;
// never saves XMM registers.
void MacroAssembler::EnterApiExitFrame(int argc) {
  EnterExitFramePrologue();
  EnterExitFrameEpilogue(argc, false);
}
987 
988 
// Tear down an exit frame built by EnterExitFrame. |save_doubles| must
// match the value passed on entry so the XMM registers are restored from
// the slots they were saved to. When |pop_arguments| is set, also removes
// the JS arguments and receiver from the caller's stack (esi is expected
// to still point at the last argument, as set up by EnterExitFrame).
void MacroAssembler::LeaveExitFrame(bool save_doubles, bool pop_arguments) {
  // Optionally restore all XMM registers.
  if (save_doubles) {
    // Must mirror the save loop in EnterExitFrameEpilogue.
    const int offset = -2 * kPointerSize;
    for (int i = 0; i < XMMRegister::kMaxNumRegisters; i++) {
      XMMRegister reg = XMMRegister::from_code(i);
      movsd(reg, Operand(ebp, offset - ((i + 1) * kDoubleSize)));
    }
  }

  if (pop_arguments) {
    // Get the return address from the stack and restore the frame pointer.
    mov(ecx, Operand(ebp, 1 * kPointerSize));
    mov(ebp, Operand(ebp, 0 * kPointerSize));

    // Pop the arguments and the receiver from the caller stack.
    lea(esp, Operand(esi, 1 * kPointerSize));

    // Push the return address to get ready to return.
    push(ecx);
  } else {
    // Otherwise just leave the exit frame.
    leave();
  }

  LeaveExitFrameEpilogue(true);
}
1016 
1017 
// Restore the isolate's "top" external slots after leaving an exit frame:
// optionally reload esi from the saved context, then clear the saved
// context (debug builds only) and the saved C entry frame pointer.
void MacroAssembler::LeaveExitFrameEpilogue(bool restore_context) {
  // Restore current context from top and clear it in debug mode.
  ExternalReference context_address(Isolate::kContextAddress, isolate());
  if (restore_context) {
    mov(esi, Operand::StaticVariable(context_address));
  }
#ifdef DEBUG
  // Poison the slot so stale reads are caught in debug builds.
  mov(Operand::StaticVariable(context_address), Immediate(0));
#endif

  // Clear the top frame.
  ExternalReference c_entry_fp_address(Isolate::kCEntryFPAddress,
                                       isolate());
  mov(Operand::StaticVariable(c_entry_fp_address), Immediate(0));
}
1033 
1034 
// Tear down a frame built by EnterApiExitFrame: unwind esp/ebp, then
// restore/clear the isolate's top slots.
void MacroAssembler::LeaveApiExitFrame(bool restore_context) {
  mov(esp, ebp);
  pop(ebp);

  LeaveExitFrameEpilogue(restore_context);
}
1041 
1042 
// Push a new stack handler onto the handler chain: link the previous
// handler (read from the isolate's handler slot) on the stack, then make
// the isolate's handler slot point at it.
void MacroAssembler::PushStackHandler() {
  // Adjust this code if not the case.
  STATIC_ASSERT(StackHandlerConstants::kSize == 1 * kPointerSize);
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);

  // Link the current handler as the next handler.
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  push(Operand::StaticVariable(handler_address));

  // Set this new handler as the current one.
  mov(Operand::StaticVariable(handler_address), esp);
}
1055 
1056 
// Pop the top stack handler: unlink it by restoring the saved "next"
// pointer into the isolate's handler slot and drop the rest of the
// handler's stack footprint.
void MacroAssembler::PopStackHandler() {
  STATIC_ASSERT(StackHandlerConstants::kNextOffset == 0);
  ExternalReference handler_address(Isolate::kHandlerAddress, isolate());
  pop(Operand::StaticVariable(handler_address));
  add(esp, Immediate(StackHandlerConstants::kSize - kPointerSize));
}
1063 
1064 
// Verify that access to the global proxy in |holder_reg| is allowed from
// the current lexical context. Falls through when the contexts are the
// same or their security tokens match; jumps to |miss| otherwise.
// |scratch1| and |scratch2| are clobbered; |holder_reg| is preserved.
void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
                                            Register scratch1,
                                            Register scratch2,
                                            Label* miss) {
  Label same_contexts;

  DCHECK(!holder_reg.is(scratch1));
  DCHECK(!holder_reg.is(scratch2));
  DCHECK(!scratch1.is(scratch2));

  // Load current lexical context from the stack frame.
  mov(scratch1, Operand(ebp, StandardFrameConstants::kContextOffset));

  // When generating debug code, make sure the lexical context is set.
  if (emit_debug_code()) {
    cmp(scratch1, Immediate(0));
    Check(not_equal, kWeShouldNotHaveAnEmptyLexicalContext);
  }
  // Load the native context of the current context.
  mov(scratch1, ContextOperand(scratch1, Context::NATIVE_CONTEXT_INDEX));

  // Check the context is a native context.
  if (emit_debug_code()) {
    // Read the first word and compare to native_context_map.
    cmp(FieldOperand(scratch1, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Check if both contexts are the same.
  cmp(scratch1, FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));
  j(equal, &same_contexts);

  // Compare security tokens, save holder_reg on the stack so we can use it
  // as a temporary register.
  //
  // Check that the security token in the calling global object is
  // compatible with the security token in the receiving global
  // object.
  mov(scratch2,
      FieldOperand(holder_reg, JSGlobalProxy::kNativeContextOffset));

  // Check the context is a native context.
  if (emit_debug_code()) {
    cmp(scratch2, isolate()->factory()->null_value());
    Check(not_equal, kJSGlobalProxyContextShouldNotBeNull);

    // Read the first word and compare to native_context_map(),
    cmp(FieldOperand(scratch2, HeapObject::kMapOffset),
        isolate()->factory()->native_context_map());
    Check(equal, kJSGlobalObjectNativeContextShouldBeANativeContext);
  }

  // Compare the security token slots of the two native contexts; a
  // mismatch means access is denied.
  int token_offset = Context::kHeaderSize +
                     Context::SECURITY_TOKEN_INDEX * kPointerSize;
  mov(scratch1, FieldOperand(scratch1, token_offset));
  cmp(scratch1, FieldOperand(scratch2, token_offset));
  j(not_equal, miss);

  bind(&same_contexts);
}
1126 
1127 
1128 // Compute the hash code from the untagged key.  This must be kept in sync with
1129 // ComputeIntegerHash in utils.h and KeyedLoadGenericStub in
1130 // code-stub-hydrogen.cc
1131 //
1132 // Note: r0 will contain hash code
// See the comment above: computes the hash of the untagged key in |r0|,
// leaving the result (masked to 30 bits) in |r0|. |scratch| is clobbered.
void MacroAssembler::GetNumberHash(Register r0, Register scratch) {
  // Xor original key with a seed.
  if (serializer_enabled()) {
    // Under the serializer the seed cannot be baked in as an immediate, so
    // load it from the roots array at runtime.
    ExternalReference roots_array_start =
        ExternalReference::roots_array_start(isolate());
    mov(scratch, Immediate(Heap::kHashSeedRootIndex));
    mov(scratch,
        Operand::StaticArray(scratch, times_pointer_size, roots_array_start));
    SmiUntag(scratch);
    xor_(r0, scratch);
  } else {
    int32_t seed = isolate()->heap()->HashSeed();
    xor_(r0, Immediate(seed));
  }

  // hash = ~hash + (hash << 15);
  mov(scratch, r0);
  not_(r0);
  shl(scratch, 15);
  add(r0, scratch);
  // hash = hash ^ (hash >> 12);
  mov(scratch, r0);
  shr(scratch, 12);
  xor_(r0, scratch);
  // hash = hash + (hash << 2);
  lea(r0, Operand(r0, r0, times_4, 0));
  // hash = hash ^ (hash >> 4);
  mov(scratch, r0);
  shr(scratch, 4);
  xor_(r0, scratch);
  // hash = hash * 2057;
  imul(r0, r0, 2057);
  // hash = hash ^ (hash >> 16);
  mov(scratch, r0);
  shr(scratch, 16);
  xor_(r0, scratch);
  // Mask down to a positive 30-bit value so the result fits in a smi.
  and_(r0, 0x3fffffff);
}
1171 
1172 
1173 
// Probe a SeededNumberDictionary for |key| and load the found value into
// |result|; jumps to |miss| if the key is absent or the property is not a
// plain data field.
void MacroAssembler::LoadFromNumberDictionary(Label* miss,
                                              Register elements,
                                              Register key,
                                              Register r0,
                                              Register r1,
                                              Register r2,
                                              Register result) {
  // Register use:
  //
  // elements - holds the slow-case elements of the receiver and is unchanged.
  //
  // key      - holds the smi key on entry and is unchanged.
  //
  // Scratch registers:
  //
  // r0 - holds the untagged key on entry and holds the hash once computed.
  //
  // r1 - used to hold the capacity mask of the dictionary
  //
  // r2 - used for the index into the dictionary.
  //
  // result - holds the result on exit if the load succeeds and we fall through.

  Label done;

  GetNumberHash(r0, r1);

  // Compute capacity mask.
  mov(r1, FieldOperand(elements, SeededNumberDictionary::kCapacityOffset));
  shr(r1, kSmiTagSize);  // convert smi to int
  dec(r1);               // capacity is a power of two, so capacity-1 is a mask

  // Generate an unrolled loop that performs a few probes before giving up.
  for (int i = 0; i < kNumberDictionaryProbes; i++) {
    // Use r2 for index calculations and keep the hash intact in r0.
    mov(r2, r0);
    // Compute the masked index: (hash + i + i * i) & mask.
    if (i > 0) {
      add(r2, Immediate(SeededNumberDictionary::GetProbeOffset(i)));
    }
    and_(r2, r1);

    // Scale the index by multiplying by the entry size.
    DCHECK(SeededNumberDictionary::kEntrySize == 3);
    lea(r2, Operand(r2, r2, times_2, 0));  // r2 = r2 * 3

    // Check if the key matches.
    cmp(key, FieldOperand(elements,
                          r2,
                          times_pointer_size,
                          SeededNumberDictionary::kElementsStartOffset));
    if (i != (kNumberDictionaryProbes - 1)) {
      j(equal, &done);
    } else {
      // Last probe: if still no match, the lookup fails.
      j(not_equal, miss);
    }
  }

  bind(&done);
  // Check that the value is a field property.
  const int kDetailsOffset =
      SeededNumberDictionary::kElementsStartOffset + 2 * kPointerSize;
  DCHECK_EQ(DATA, 0);
  // A zero type field (DATA) means a plain data property.
  test(FieldOperand(elements, r2, times_pointer_size, kDetailsOffset),
       Immediate(PropertyDetails::TypeField::kMask << kSmiTagSize));
  j(not_zero, miss);

  // Get the value at the masked, scaled index.
  const int kValueOffset =
      SeededNumberDictionary::kElementsStartOffset + kPointerSize;
  mov(result, FieldOperand(elements, r2, times_pointer_size, kValueOffset));
}
1246 
1247 
// Load the current allocation top into |result|. With RESULT_CONTAINS_TOP
// the caller guarantees |result| already holds top (verified in debug
// builds) and |scratch| must be no_reg. Otherwise |scratch|, if provided,
// is left holding the address of the allocation-top variable so
// UpdateAllocationTopHelper can reuse it.
void MacroAssembler::LoadAllocationTopHelper(Register result,
                                             Register scratch,
                                             AllocationFlags flags) {
  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Just return if allocation top is already known.
  if ((flags & RESULT_CONTAINS_TOP) != 0) {
    // No use of scratch if allocation top is provided.
    DCHECK(scratch.is(no_reg));
#ifdef DEBUG
    // Assert that result actually contains top on entry.
    cmp(result, Operand::StaticVariable(allocation_top));
    Check(equal, kUnexpectedAllocationTop);
#endif
    return;
  }

  // Move address of new object to result. Use scratch register if available.
  if (scratch.is(no_reg)) {
    mov(result, Operand::StaticVariable(allocation_top));
  } else {
    mov(scratch, Immediate(allocation_top));
    mov(result, Operand(scratch, 0));
  }
}
1274 
1275 
// Store |result_end| as the new allocation top. If |scratch| is provided
// it must already hold the address of the allocation-top variable (as set
// up by LoadAllocationTopHelper). Debug builds verify object alignment.
void MacroAssembler::UpdateAllocationTopHelper(Register result_end,
                                               Register scratch,
                                               AllocationFlags flags) {
  if (emit_debug_code()) {
    test(result_end, Immediate(kObjectAlignmentMask));
    Check(zero, kUnalignedAllocationInNewSpace);
  }

  ExternalReference allocation_top =
      AllocationUtils::GetAllocationTopReference(isolate(), flags);

  // Update new top. Use scratch if available.
  if (scratch.is(no_reg)) {
    mov(Operand::StaticVariable(allocation_top), result_end);
  } else {
    mov(Operand(scratch, 0), result_end);
  }
}
1294 
1295 
// Allocate |object_size| bytes (a compile-time constant) by bumping the
// allocation top. On success falls through with the new object in |result|
// (tagged if TAG_OBJECT is set); jumps to |gc_required| on failure.
// |result_end| and |scratch| may be no_reg; |scratch| is clobbered.
void MacroAssembler::Allocate(int object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  DCHECK(object_size <= Page::kMaxRegularHeapObjectSize);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      if (result_end.is_valid()) {
        mov(result_end, Immediate(0x7191));
      }
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // In old space the filler store below is not safe past the limit.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  Register top_reg = result_end.is_valid() ? result_end : result;
  if (!top_reg.is(result)) {
    mov(top_reg, result);
  }
  add(top_reg, Immediate(object_size));
  j(carry, gc_required);  // Address wrapped around.
  cmp(top_reg, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Update allocation top.
  UpdateAllocationTopHelper(top_reg, scratch, flags);

  // Tag result if requested.
  bool tag_result = (flags & TAG_OBJECT) != 0;
  if (top_reg.is(result)) {
    // result currently holds the new top; step back to the object start.
    if (tag_result) {
      sub(result, Immediate(object_size - kHeapObjectTag));
    } else {
      sub(result, Immediate(object_size));
    }
  } else if (tag_result) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }
}
1369 
1370 
// Allocate an object of size header_size + element_count << element_size
// by bumping the allocation top. |element_count| may be a smi or an int32
// as indicated by |element_count_type|. On success falls through with the
// new object in |result| (tagged if TAG_OBJECT is set) and the allocation
// end in |result_end|; jumps to |gc_required| on failure.
void MacroAssembler::Allocate(int header_size,
                              ScaleFactor element_size,
                              Register element_count,
                              RegisterValueType element_count_type,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & SIZE_IN_WORDS) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // Register element_count is not modified by the function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // In old space the filler store below is not safe past the limit.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  // We assume that element_count*element_size + header_size does not
  // overflow.
  if (element_count_type == REGISTER_VALUE_IS_SMI) {
    // A smi is the value shifted left by kSmiTagSize, so reduce the scale
    // by one power of two to compensate.
    STATIC_ASSERT(static_cast<ScaleFactor>(times_2 - 1) == times_1);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_4 - 1) == times_2);
    STATIC_ASSERT(static_cast<ScaleFactor>(times_8 - 1) == times_4);
    DCHECK(element_size >= times_2);
    DCHECK(kSmiTagSize == 1);
    element_size = static_cast<ScaleFactor>(element_size - 1);
  } else {
    DCHECK(element_count_type == REGISTER_VALUE_IS_INT32);
  }
  lea(result_end, Operand(element_count, element_size, header_size));
  add(result_end, result);
  j(carry, gc_required);  // Address wrapped around.
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  if ((flags & TAG_OBJECT) != 0) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1446 
1447 
// Allocate |object_size| bytes (a runtime value in a register) by bumping
// the allocation top. On success falls through with the new object in
// |result| (tagged if TAG_OBJECT is set) and the allocation end in
// |result_end|; jumps to |gc_required| on failure.
void MacroAssembler::Allocate(Register object_size,
                              Register result,
                              Register result_end,
                              Register scratch,
                              Label* gc_required,
                              AllocationFlags flags) {
  DCHECK((flags & (RESULT_CONTAINS_TOP | SIZE_IN_WORDS)) == 0);
  if (!FLAG_inline_new) {
    if (emit_debug_code()) {
      // Trash the registers to simulate an allocation failure.
      mov(result, Immediate(0x7091));
      mov(result_end, Immediate(0x7191));
      if (scratch.is_valid()) {
        mov(scratch, Immediate(0x7291));
      }
      // object_size is left unchanged by this function.
    }
    jmp(gc_required);
    return;
  }
  DCHECK(!result.is(result_end));

  // Load address of new object into result.
  LoadAllocationTopHelper(result, scratch, flags);

  ExternalReference allocation_limit =
      AllocationUtils::GetAllocationLimitReference(isolate(), flags);

  // Align the next allocation. Storing the filler map without checking top is
  // safe in new-space because the limit of the heap is aligned there.
  if ((flags & DOUBLE_ALIGNMENT) != 0) {
    DCHECK(kPointerAlignment * 2 == kDoubleAlignment);
    Label aligned;
    test(result, Immediate(kDoubleAlignmentMask));
    j(zero, &aligned, Label::kNear);
    if ((flags & PRETENURE) != 0) {
      // In old space the filler store below is not safe past the limit.
      cmp(result, Operand::StaticVariable(allocation_limit));
      j(above_equal, gc_required);
    }
    mov(Operand(result, 0),
        Immediate(isolate()->factory()->one_pointer_filler_map()));
    add(result, Immediate(kDoubleSize / 2));
    bind(&aligned);
  }

  // Calculate new top and bail out if space is exhausted.
  if (!object_size.is(result_end)) {
    mov(result_end, object_size);
  }
  add(result_end, result);
  j(carry, gc_required);  // Address wrapped around.
  cmp(result_end, Operand::StaticVariable(allocation_limit));
  j(above, gc_required);

  // Tag result if requested.
  if ((flags & TAG_OBJECT) != 0) {
    DCHECK(kHeapObjectTag == 1);
    inc(result);
  }

  // Update allocation top.
  UpdateAllocationTopHelper(result_end, scratch, flags);
}
1511 
1512 
// Allocate a HeapNumber (or MutableHeapNumber, depending on |mode|) in new
// space and set its map; the value field is left uninitialized. Jumps to
// |gc_required| if allocation fails.
void MacroAssembler::AllocateHeapNumber(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required,
                                        MutableMode mode) {
  // Allocate heap number in new space.
  Allocate(HeapNumber::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  Handle<Map> map = mode == MUTABLE
      ? isolate()->factory()->mutable_heap_number_map()
      : isolate()->factory()->heap_number_map();

  // Set the map.
  mov(FieldOperand(result, HeapObject::kMapOffset), Immediate(map));
}
1529 
1530 
// Allocate a SeqTwoByteString of |length| characters (int32 in a register)
// and initialize its map, length and hash field; the character data is
// left uninitialized. Jumps to |gc_required| if allocation fails.
void MacroAssembler::AllocateTwoByteString(Register result,
                                           Register length,
                                           Register scratch1,
                                           Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  DCHECK(kShortSize == 2);
  // scratch1 = length * 2 + kObjectAlignmentMask.
  lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate two byte string in new space.
  Allocate(SeqTwoByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1565 
1566 
// Allocate a SeqOneByteString of |length| characters (int32 in a register)
// and initialize its map, length and hash field; the character data is
// left uninitialized. Jumps to |gc_required| if allocation fails.
void MacroAssembler::AllocateOneByteString(Register result, Register length,
                                           Register scratch1, Register scratch2,
                                           Register scratch3,
                                           Label* gc_required) {
  // Calculate the number of bytes needed for the characters in the string while
  // observing object alignment.
  DCHECK((SeqOneByteString::kHeaderSize & kObjectAlignmentMask) == 0);
  mov(scratch1, length);
  DCHECK(kCharSize == 1);
  // Round the byte count up to the object alignment.
  add(scratch1, Immediate(kObjectAlignmentMask));
  and_(scratch1, Immediate(~kObjectAlignmentMask));

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::kHeaderSize,
           times_1,
           scratch1,
           REGISTER_VALUE_IS_INT32,
           result,
           scratch2,
           scratch3,
           gc_required,
           TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(scratch1, length);
  SmiTag(scratch1);
  mov(FieldOperand(result, String::kLengthOffset), scratch1);
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1599 
1600 
// Allocate a SeqOneByteString of a compile-time-constant |length| and
// initialize its map, length and hash field; the character data is left
// uninitialized. Jumps to |gc_required| if allocation fails.
void MacroAssembler::AllocateOneByteString(Register result, int length,
                                           Register scratch1, Register scratch2,
                                           Label* gc_required) {
  DCHECK(length > 0);

  // Allocate one-byte string in new space.
  Allocate(SeqOneByteString::SizeFor(length), result, scratch1, scratch2,
           gc_required, TAG_OBJECT);

  // Set the map, length and hash field.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->one_byte_string_map()));
  mov(FieldOperand(result, String::kLengthOffset),
      Immediate(Smi::FromInt(length)));
  mov(FieldOperand(result, String::kHashFieldOffset),
      Immediate(String::kEmptyHashField));
}
1618 
1619 
// Allocates a two-byte ConsString in new space and installs its map.  The
// first/second/length/hash fields are left uninitialized.  Jumps to
// |gc_required| on allocation failure.  Clobbers scratch1 and scratch2.
void MacroAssembler::AllocateTwoByteConsString(Register result,
                                        Register scratch1,
                                        Register scratch2,
                                        Label* gc_required) {
  // Allocate cons string in new space.
  Allocate(ConsString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->cons_string_map()));
}
1632 
1633 
AllocateOneByteConsString(Register result,Register scratch1,Register scratch2,Label * gc_required)1634 void MacroAssembler::AllocateOneByteConsString(Register result,
1635                                                Register scratch1,
1636                                                Register scratch2,
1637                                                Label* gc_required) {
1638   Allocate(ConsString::kSize,
1639            result,
1640            scratch1,
1641            scratch2,
1642            gc_required,
1643            TAG_OBJECT);
1644 
1645   // Set the map. The other fields are left uninitialized.
1646   mov(FieldOperand(result, HeapObject::kMapOffset),
1647       Immediate(isolate()->factory()->cons_one_byte_string_map()));
1648 }
1649 
1650 
// Allocates a two-byte SlicedString in new space and installs its map.  The
// parent/offset/length/hash fields are left uninitialized.  Jumps to
// |gc_required| on allocation failure.  Clobbers scratch1 and scratch2.
void MacroAssembler::AllocateTwoByteSlicedString(Register result,
                                          Register scratch1,
                                          Register scratch2,
                                          Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_string_map()));
}
1663 
1664 
// Allocates a one-byte SlicedString in new space and installs its map.  The
// parent/offset/length/hash fields are left uninitialized.  Jumps to
// |gc_required| on allocation failure.  Clobbers scratch1 and scratch2.
void MacroAssembler::AllocateOneByteSlicedString(Register result,
                                                 Register scratch1,
                                                 Register scratch2,
                                                 Label* gc_required) {
  // Allocate sliced string in new space.
  Allocate(SlicedString::kSize, result, scratch1, scratch2, gc_required,
           TAG_OBJECT);

  // Set the map. The other fields are left uninitialized.
  mov(FieldOperand(result, HeapObject::kMapOffset),
      Immediate(isolate()->factory()->sliced_one_byte_string_map()));
}
1677 
1678 
// Allocates a JSValue wrapper in new space and fully initializes it: the map
// comes from |constructor|'s initial map, properties and elements are set to
// the empty fixed array, and the value slot is set to |value|.  Jumps to
// |gc_required| on allocation failure.  Clobbers |scratch|.
void MacroAssembler::AllocateJSValue(Register result, Register constructor,
                                     Register value, Register scratch,
                                     Label* gc_required) {
  // |result| must be distinct from the inputs, which are all read after the
  // allocation has clobbered |result|.
  DCHECK(!result.is(constructor));
  DCHECK(!result.is(scratch));
  DCHECK(!result.is(value));

  // Allocate JSValue in new space.
  Allocate(JSValue::kSize, result, scratch, no_reg, gc_required, TAG_OBJECT);

  // Initialize the JSValue.
  LoadGlobalFunctionInitialMap(constructor, scratch);
  mov(FieldOperand(result, HeapObject::kMapOffset), scratch);
  LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex);
  mov(FieldOperand(result, JSObject::kPropertiesOffset), scratch);
  mov(FieldOperand(result, JSObject::kElementsOffset), scratch);
  mov(FieldOperand(result, JSValue::kValueOffset), value);
  // The four stores above cover every field of the object; if JSValue ever
  // grows, this assert forces the initialization to be revisited.
  STATIC_ASSERT(JSValue::kSize == 4 * kPointerSize);
}
1698 
1699 
1700 // Copy memory, byte-by-byte, from source to destination.  Not optimized for
1701 // long or aligned copies.  The contents of scratch and length are destroyed.
1702 // Source and destination are incremented by length.
1703 // Many variants of movsb, loop unrolling, word moves, and indexed operands
1704 // have been tried here already, and this is fastest.
1705 // A simpler loop is faster on small copies, but 30% slower on large ones.
1706 // The cld() instruction must have been emitted, to set the direction flag(),
1707 // before calling this function.
// Copy memory, byte-by-byte, from source to destination.  Not optimized for
// long or aligned copies.  The contents of scratch and length are destroyed.
// Source and destination are incremented by length.
// Many variants of movsb, loop unrolling, word moves, and indexed operands
// have been tried here already, and this is fastest.
// A simpler loop is faster on small copies, but 30% slower on large ones.
// The cld() instruction must have been emitted, to set the direction flag(),
// before calling this function.
void MacroAssembler::CopyBytes(Register source,
                               Register destination,
                               Register length,
                               Register scratch) {
  Label short_loop, len4, len8, len12, done, short_string;
  // rep_movs below implicitly uses esi/edi/ecx, so the register assignment
  // is fixed.
  DCHECK(source.is(esi));
  DCHECK(destination.is(edi));
  DCHECK(length.is(ecx));
  // Fewer than 4 bytes: fall through to the simple byte loop.
  cmp(length, Immediate(4));
  j(below, &short_string, Label::kNear);

  // Because source is 4-byte aligned in our uses of this function,
  // we keep source aligned for the rep_movs call by copying the odd bytes
  // at the end of the ranges.
  mov(scratch, Operand(source, length, times_1, -4));
  mov(Operand(destination, length, times_1, -4), scratch);

  // Dispatch short lengths (<= 16 bytes) to unrolled dword copies; the tail
  // (up to 3 odd bytes) was already handled above.
  cmp(length, Immediate(8));
  j(below_equal, &len4, Label::kNear);
  cmp(length, Immediate(12));
  j(below_equal, &len8, Label::kNear);
  cmp(length, Immediate(16));
  j(below_equal, &len12, Label::kNear);

  // Long copy: move length/4 dwords with rep movs, then bump destination by
  // the leftover byte count (those bytes were copied by the tail store).
  mov(scratch, ecx);
  shr(ecx, 2);
  rep_movs();
  and_(scratch, Immediate(0x3));
  add(destination, scratch);
  jmp(&done, Label::kNear);

  // Unrolled copies; each label falls through to the next smaller one.
  bind(&len12);
  mov(scratch, Operand(source, 8));
  mov(Operand(destination, 8), scratch);
  bind(&len8);
  mov(scratch, Operand(source, 4));
  mov(Operand(destination, 4), scratch);
  bind(&len4);
  mov(scratch, Operand(source, 0));
  mov(Operand(destination, 0), scratch);
  add(destination, length);
  jmp(&done, Label::kNear);

  // 0-3 bytes: plain byte-at-a-time loop (guard against zero length).
  bind(&short_string);
  test(length, length);
  j(zero, &done, Label::kNear);

  bind(&short_loop);
  mov_b(scratch, Operand(source, 0));
  mov_b(Operand(destination, 0), scratch);
  inc(source);
  inc(destination);
  dec(length);
  j(not_zero, &short_loop);

  bind(&done);
}
1765 
1766 
// Stores |filler| into every pointer-sized slot in
// [current_address, end_address).  |current_address| is advanced to
// |end_address|; handles the empty range (jump straight to the condition).
void MacroAssembler::InitializeFieldsWithFiller(Register current_address,
                                                Register end_address,
                                                Register filler) {
  Label loop, entry;
  // Test the condition first so a zero-length range stores nothing.
  jmp(&entry);
  bind(&loop);
  mov(Operand(current_address, 0), filler);
  add(current_address, Immediate(kPointerSize));
  bind(&entry);
  cmp(current_address, end_address);
  j(below, &loop);
}
1779 
1780 
// Tests bit |bit_index| of the smi-tagged field at |field_offset| in
// |object| and sets the zero flag accordingly.  Uses a single byte-sized
// test at the byte containing the bit.
void MacroAssembler::BooleanBitTest(Register object,
                                    int field_offset,
                                    int bit_index) {
  // Skip past the smi tag bits of the stored value.
  bit_index += kSmiTagSize + kSmiShiftSize;
  // The mask trick below requires the byte width to be a power of two.
  DCHECK(base::bits::IsPowerOfTwo32(kBitsPerByte));
  int byte_index = bit_index / kBitsPerByte;
  int byte_bit_index = bit_index & (kBitsPerByte - 1);
  test_b(FieldOperand(object, field_offset + byte_index),
         static_cast<byte>(1 << byte_bit_index));
}
1791 
1792 
1793 
// Jumps to |then_label| if the multiplication result in |result| is zero
// while |op| is negative -- i.e. the mathematical result was -0, which
// cannot be represented as a smi.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op,
                                      Label* then_label) {
  Label ok;
  // Non-zero result: cannot be -0.
  test(result, result);
  j(not_zero, &ok);
  // Result is zero; it is -0 iff the operand was negative.
  test(op, op);
  j(sign, then_label);
  bind(&ok);
}
1804 
1805 
// Two-operand variant: jumps to |then_label| if |result| is zero and either
// |op1| or |op2| is negative (the or of the sign bits), i.e. the product
// would be -0.  Clobbers |scratch|.
void MacroAssembler::NegativeZeroTest(Register result,
                                      Register op1,
                                      Register op2,
                                      Register scratch,
                                      Label* then_label) {
  Label ok;
  // Non-zero result: cannot be -0.
  test(result, result);
  j(not_zero, &ok);
  // or-ing the operands makes the sign flag the OR of both sign bits.
  mov(scratch, op1);
  or_(scratch, op2);
  j(sign, then_label);
  bind(&ok);
}
1819 
1820 
// Loads the constructor function of |map| into |result|, following the
// back-pointer chain: the field holds either the constructor or a
// back pointer to another map; walk until a non-map is found.  Clobbers
// |temp| (used by CmpObjectType).
void MacroAssembler::GetMapConstructor(Register result, Register map,
                                       Register temp) {
  Label done, loop;
  mov(result, FieldOperand(map, Map::kConstructorOrBackPointerOffset));
  bind(&loop);
  // Smis and non-map heap objects terminate the walk.
  JumpIfSmi(result, &done, Label::kNear);
  CmpObjectType(result, MAP_TYPE, temp);
  j(not_equal, &done, Label::kNear);
  // Still a map: follow its back pointer.
  mov(result, FieldOperand(result, Map::kConstructorOrBackPointerOffset));
  jmp(&loop);
  bind(&done);
}
1833 
1834 
// Loads |function|'s prototype into |result|.  Jumps to |miss| when the
// prototype-or-initial-map slot holds the hole (no prototype materialized
// yet).  If the slot holds an initial map, the prototype is read from it.
// Clobbers |scratch| (used by CmpObjectType).
void MacroAssembler::TryGetFunctionPrototype(Register function, Register result,
                                             Register scratch, Label* miss) {
  // Get the prototype or initial map from the function.
  mov(result,
      FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));

  // If the prototype or initial map is the hole, don't return it and
  // simply miss the cache instead. This will allow us to allocate a
  // prototype object on-demand in the runtime system.
  cmp(result, Immediate(isolate()->factory()->the_hole_value()));
  j(equal, miss);

  // If the function does not have an initial map, we're done.
  Label done;
  CmpObjectType(result, MAP_TYPE, scratch);
  j(not_equal, &done, Label::kNear);

  // Get the prototype from the initial map.
  mov(result, FieldOperand(result, Map::kPrototypeOffset));

  // All done.
  bind(&done);
}
1858 
1859 
// Emits a call to |stub|'s code object, recording |ast_id| in the reloc info
// for type feedback.
void MacroAssembler::CallStub(CodeStub* stub, TypeFeedbackId ast_id) {
  DCHECK(AllowThisStubCall(stub));  // Calls are not allowed in some stubs.
  call(stub->GetCode(), RelocInfo::CODE_TARGET, ast_id);
}
1864 
1865 
// Tail-calls |stub|: jumps to its code object instead of calling, so the
// stub returns directly to our caller.
void MacroAssembler::TailCallStub(CodeStub* stub) {
  jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
1869 
1870 
// Returns from a stub, popping argc - 1 argument slots in addition to the
// return address (the receiver slot is accounted for separately).
void MacroAssembler::StubReturn(int argc) {
  DCHECK(argc >= 1 && generating_stub());
  ret((argc - 1) * kPointerSize);
}
1875 
1876 
AllowThisStubCall(CodeStub * stub)1877 bool MacroAssembler::AllowThisStubCall(CodeStub* stub) {
1878   return has_frame_ || !stub->SometimesSetsUpAFrame();
1879 }
1880 
1881 
// Extracts the cached array index from the string hash field in |hash| and
// leaves it as a smi in |index|.  |hash| and |index| may be the same
// register.
void MacroAssembler::IndexFromHash(Register hash, Register index) {
  // The assert checks that the constants for the maximum number of digits
  // for an array index cached in the hash field and the number of bits
  // reserved for it does not conflict.
  DCHECK(TenToThe(String::kMaxCachedArrayIndexLength) <
         (1 << String::kArrayIndexValueBits));
  if (!index.is(hash)) {
    mov(index, hash);
  }
  // Shift the index bits into place and smi-tag in one operation.
  DecodeFieldToSmi<String::ArrayIndexValueBits>(index);
}
1893 
1894 
// Calls runtime function |f| with |num_arguments| stack arguments via the
// CEntry stub.  Loads the argument count into eax and the function's entry
// into ebx (the CEntry calling convention); clobbers both.
void MacroAssembler::CallRuntime(const Runtime::Function* f,
                                 int num_arguments,
                                 SaveFPRegsMode save_doubles) {
  // If the expected number of arguments of the runtime function is
  // constant, we check that the actual number of arguments match the
  // expectation.
  CHECK(f->nargs < 0 || f->nargs == num_arguments);

  // TODO(1236192): Most runtime routines don't need the number of
  // arguments passed in because it is constant. At some point we
  // should remove this need and make the runtime routine entry code
  // smarter.
  Move(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ExternalReference(f, isolate())));
  CEntryStub ces(isolate(), 1, save_doubles);
  CallStub(&ces);
}
1912 
1913 
// Calls the C function at |ref| with |num_arguments| stack arguments via the
// CEntry stub.  Follows the CEntry convention: argument count in eax, target
// in ebx; clobbers both.
void MacroAssembler::CallExternalReference(ExternalReference ref,
                                           int num_arguments) {
  mov(eax, Immediate(num_arguments));
  mov(ebx, Immediate(ref));

  CEntryStub stub(isolate(), 1);
  CallStub(&stub);
}
1922 
1923 
// Tail-calls runtime function |fid| via the C entry stub; the runtime call
// returns directly to our caller.
void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid) {
  // ----------- S t a t e -------------
  //  -- esp[0]                 : return address
  //  -- esp[4]                 : argument num_arguments - 1
  //  ...
  //  -- esp[4 * num_arguments] : argument 0 (receiver)
  //  (kPointerSize == 4 on ia32; the original comment showed 8-byte slots,
  //  which matches x64, not this port.)
  //
  //  For runtime functions with variable arguments:
  //  -- eax                    : number of arguments
  // -----------------------------------

  const Runtime::Function* function = Runtime::FunctionForId(fid);
  DCHECK_EQ(1, function->result_size);
  if (function->nargs >= 0) {
    // TODO(1236192): Most runtime routines don't need the number of
    // arguments passed in because it is constant. At some point we
    // should remove this need and make the runtime routine entry code
    // smarter.
    mov(eax, Immediate(function->nargs));
  }
  JumpToExternalReference(ExternalReference(fid, isolate()));
}
1946 
1947 
// Loads |ext| into ebx (the CEntry target register) and tail-jumps to the
// C entry runtime stub.
void MacroAssembler::JumpToExternalReference(const ExternalReference& ext) {
  // Set the entry point and jump to the C entry runtime stub.
  mov(ebx, Immediate(ext));
  CEntryStub ces(isolate(), 1);
  jmp(ces.GetCode(), RelocInfo::CODE_TARGET);
}
1954 
1955 
// Emits the argument-count check that precedes a function invocation.  If
// |expected| and |actual| provably match (or the callee opts out of argument
// adaptation), control falls through with nothing extra emitted.  Otherwise
// the ArgumentsAdaptorTrampoline is called (or tail-jumped to, for
// JUMP_FUNCTION).  On exit: eax holds the actual count, ebx the expected
// count when a dynamic comparison was needed.  *definitely_mismatches is set
// when the counts are statically known to differ, in which case the adaptor
// never returns here and the caller can skip emitting the real call.
void MacroAssembler::InvokePrologue(const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    Label* done,
                                    bool* definitely_mismatches,
                                    InvokeFlag flag,
                                    Label::Distance done_near,
                                    const CallWrapper& call_wrapper) {
  bool definitely_matches = false;
  *definitely_mismatches = false;
  Label invoke;
  if (expected.is_immediate()) {
    DCHECK(actual.is_immediate());
    // The callee expects the actual count in eax.
    mov(eax, actual.immediate());
    if (expected.immediate() == actual.immediate()) {
      definitely_matches = true;
    } else {
      const int sentinel = SharedFunctionInfo::kDontAdaptArgumentsSentinel;
      if (expected.immediate() == sentinel) {
        // Don't worry about adapting arguments for builtins that
        // don't want that done. Skip adaption code by making it look
        // like we have a match between expected and actual number of
        // arguments.
        definitely_matches = true;
      } else {
        *definitely_mismatches = true;
        // The adaptor reads the expected count from ebx.
        mov(ebx, expected.immediate());
      }
    }
  } else {
    if (actual.is_immediate()) {
      // Expected is in register, actual is immediate. This is the
      // case when we invoke function values without going through the
      // IC mechanism.
      mov(eax, actual.immediate());
      cmp(expected.reg(), actual.immediate());
      j(equal, &invoke);
      DCHECK(expected.reg().is(ebx));
    } else if (!expected.reg().is(actual.reg())) {
      // Both expected and actual are in (different) registers. This
      // is the case when we invoke functions using call and apply.
      cmp(expected.reg(), actual.reg());
      j(equal, &invoke);
      DCHECK(actual.reg().is(eax));
      DCHECK(expected.reg().is(ebx));
    } else {
      // Same register for both: trivially equal; just ensure eax holds it.
      Move(eax, actual.reg());
    }
  }

  if (!definitely_matches) {
    Handle<Code> adaptor =
        isolate()->builtins()->ArgumentsAdaptorTrampoline();
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(adaptor, RelocInfo::CODE_TARGET));
      call(adaptor, RelocInfo::CODE_TARGET);
      call_wrapper.AfterCall();
      if (!*definitely_mismatches) {
        // The adaptor invoked the function itself; skip the caller's own
        // call sequence.
        jmp(done, done_near);
      }
    } else {
      // Tail call: the adaptor takes over entirely.
      jmp(adaptor, RelocInfo::CODE_TARGET);
    }
    bind(&invoke);
  }
}
2021 
2022 
// If the debugger's step-in flag is set, calls
// Runtime::kDebugPrepareStepInIfStepping for |fun| so the debugger can flood
// it with breakpoints.  All live invocation registers (expected/actual
// counts, new.target, the function) are smi-tagged as needed and saved
// around the runtime call, so this is transparent to the caller.
void MacroAssembler::FloodFunctionIfStepping(Register fun, Register new_target,
                                             const ParameterCount& expected,
                                             const ParameterCount& actual) {
  Label skip_flooding;
  ExternalReference step_in_enabled =
      ExternalReference::debug_step_in_enabled_address(isolate());
  // Fast path: stepping disabled.
  cmpb(Operand::StaticVariable(step_in_enabled), 0);
  j(equal, &skip_flooding);
  {
    // Enter an internal frame unless the caller already set one up.
    FrameScope frame(this,
                     has_frame() ? StackFrame::NONE : StackFrame::INTERNAL);
    // Counts are raw ints in registers; smi-tag them before pushing so the
    // GC can walk the frame.
    if (expected.is_reg()) {
      SmiTag(expected.reg());
      Push(expected.reg());
    }
    if (actual.is_reg()) {
      SmiTag(actual.reg());
      Push(actual.reg());
    }
    if (new_target.is_valid()) {
      Push(new_target);
    }
    // Push twice: one copy is the runtime call's argument (consumed), the
    // other survives to be popped back into |fun|.
    Push(fun);
    Push(fun);
    CallRuntime(Runtime::kDebugPrepareStepInIfStepping, 1);
    Pop(fun);
    if (new_target.is_valid()) {
      Pop(new_target);
    }
    // Restore in reverse order, undoing the smi tagging.
    if (actual.is_reg()) {
      Pop(actual.reg());
      SmiUntag(actual.reg());
    }
    if (expected.is_reg()) {
      Pop(expected.reg());
      SmiUntag(expected.reg());
    }
  }
  bind(&skip_flooding);
}
2063 
2064 
// Invokes the code of |function| (which must be in edi) with the given
// argument counts, calling or tail-jumping per |flag|.  Handles debugger
// step-in flooding, clears new.target (edx) when none is supplied, and emits
// the argument-adaptation prologue before calling through the function's
// code entry field.
void MacroAssembler::InvokeFunctionCode(Register function, Register new_target,
                                        const ParameterCount& expected,
                                        const ParameterCount& actual,
                                        InvokeFlag flag,
                                        const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());
  DCHECK(function.is(edi));
  DCHECK_IMPLIES(new_target.is_valid(), new_target.is(edx));

  if (call_wrapper.NeedsDebugStepCheck()) {
    FloodFunctionIfStepping(function, new_target, expected, actual);
  }

  // Clear the new.target register if not given.
  if (!new_target.is_valid()) {
    mov(edx, isolate()->factory()->undefined_value());
  }

  Label done;
  bool definitely_mismatches = false;
  InvokePrologue(expected, actual, &done, &definitely_mismatches, flag,
                 Label::kNear, call_wrapper);
  // On a definite mismatch the adaptor trampoline never returns here, so the
  // call sequence below would be dead code.
  if (!definitely_mismatches) {
    // We call indirectly through the code field in the function to
    // allow recompilation to take effect without changing any of the
    // call sites.
    Operand code = FieldOperand(function, JSFunction::kCodeEntryOffset);
    if (flag == CALL_FUNCTION) {
      call_wrapper.BeforeCall(CallSize(code));
      call(code);
      call_wrapper.AfterCall();
    } else {
      DCHECK(flag == JUMP_FUNCTION);
      jmp(code);
    }
    bind(&done);
  }
}
2104 
2105 
// Invokes JSFunction |fun| (must be edi), reading the expected argument
// count from its SharedFunctionInfo.  Loads the function's context into esi
// and the expected count into ebx before delegating to InvokeFunctionCode.
void MacroAssembler::InvokeFunction(Register fun,
                                    Register new_target,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(ebx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
  // The formal parameter count is stored as a smi; untag before use.
  mov(ebx, FieldOperand(ebx, SharedFunctionInfo::kFormalParameterCountOffset));
  SmiUntag(ebx);

  ParameterCount expected(ebx);
  InvokeFunctionCode(edi, new_target, expected, actual, flag, call_wrapper);
}
2123 
2124 
// Invokes JSFunction |fun| (must be edi) with a caller-supplied expected
// argument count.  Loads the function's context into esi; no new.target is
// passed.
void MacroAssembler::InvokeFunction(Register fun,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  // You can't call a function without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  DCHECK(fun.is(edi));
  mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  InvokeFunctionCode(edi, no_reg, expected, actual, flag, call_wrapper);
}
2138 
2139 
// Handle-based convenience overload: materializes |function| into edi and
// delegates to the register-based InvokeFunction.
void MacroAssembler::InvokeFunction(Handle<JSFunction> function,
                                    const ParameterCount& expected,
                                    const ParameterCount& actual,
                                    InvokeFlag flag,
                                    const CallWrapper& call_wrapper) {
  LoadHeapObject(edi, function);
  InvokeFunction(edi, expected, actual, flag, call_wrapper);
}
2148 
2149 
// Invokes the JavaScript builtin at |native_context_index|, loading it into
// edi.  Uses a zero expected/actual parameter count so no argument
// adaptation code is emitted.
void MacroAssembler::InvokeBuiltin(int native_context_index, InvokeFlag flag,
                                   const CallWrapper& call_wrapper) {
  // You can't call a builtin without a valid frame.
  DCHECK(flag == JUMP_FUNCTION || has_frame());

  // Fake a parameter count to avoid emitting code to do the check.
  ParameterCount expected(0);
  GetBuiltinFunction(edi, native_context_index);
  InvokeFunctionCode(edi, no_reg, expected, expected, flag, call_wrapper);
}
2160 
2161 
// Loads the JavaScript builtin function at |native_context_index| from the
// native context into |target|.
void MacroAssembler::GetBuiltinFunction(Register target,
                                        int native_context_index) {
  // Load the JavaScript builtin function from the builtins object.
  mov(target, NativeContextOperand());
  mov(target, ContextOperand(target, native_context_index));
}
2168 
2169 
// Loads into |dst| the context |context_chain_length| hops up the previous-
// context chain from the current context (esi); zero hops copies esi itself.
// In debug code, verifies the result is not a with-context.
void MacroAssembler::LoadContext(Register dst, int context_chain_length) {
  if (context_chain_length > 0) {
    // Move up the chain of contexts to the context containing the slot.
    mov(dst, Operand(esi, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    for (int i = 1; i < context_chain_length; i++) {
      mov(dst, Operand(dst, Context::SlotOffset(Context::PREVIOUS_INDEX)));
    }
  } else {
    // Slot is in the current function context.  Move it into the
    // destination register in case we store into it (the write barrier
    // cannot be allowed to destroy the context in esi).
    mov(dst, esi);
  }

  // We should not have found a with context by walking the context chain
  // (i.e., the static scope chain and runtime context chain do not agree).
  // A variable occurring in such a scope should have slot type LOOKUP and
  // not CONTEXT.
  if (emit_debug_code()) {
    cmp(FieldOperand(dst, HeapObject::kMapOffset),
        isolate()->factory()->with_context_map());
    Check(not_equal, kVariableResolvedToWithContext);
  }
}
2194 
2195 
// Loads the global proxy object from the native context into |dst|.
void MacroAssembler::LoadGlobalProxy(Register dst) {
  mov(dst, NativeContextOperand());
  mov(dst, ContextOperand(dst, Context::GLOBAL_PROXY_INDEX));
}
2200 
2201 
LoadTransitionedArrayMapConditional(ElementsKind expected_kind,ElementsKind transitioned_kind,Register map_in_out,Register scratch,Label * no_map_match)2202 void MacroAssembler::LoadTransitionedArrayMapConditional(
2203     ElementsKind expected_kind,
2204     ElementsKind transitioned_kind,
2205     Register map_in_out,
2206     Register scratch,
2207     Label* no_map_match) {
2208   DCHECK(IsFastElementsKind(expected_kind));
2209   DCHECK(IsFastElementsKind(transitioned_kind));
2210 
2211   // Check that the function's map is the same as the expected cached map.
2212   mov(scratch, NativeContextOperand());
2213   cmp(map_in_out,
2214       ContextOperand(scratch, Context::ArrayMapIndex(expected_kind)));
2215   j(not_equal, no_map_match);
2216 
2217   // Use the transitioned cached map.
2218   mov(map_in_out,
2219       ContextOperand(scratch, Context::ArrayMapIndex(transitioned_kind)));
2220 }
2221 
2222 
// Loads the global function at native-context slot |index| into |function|.
void MacroAssembler::LoadGlobalFunction(int index, Register function) {
  // Load the native context from the current context.
  mov(function, NativeContextOperand());
  // Load the function from the native context.
  mov(function, ContextOperand(function, index));
}
2229 
2230 
// Loads |function|'s initial map into |map|.  In debug code, aborts if the
// slot does not actually hold a map (global functions are expected to always
// have one).
void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
                                                  Register map) {
  // Load the initial map.  The global functions all have initial maps.
  mov(map, FieldOperand(function, JSFunction::kPrototypeOrInitialMapOffset));
  if (emit_debug_code()) {
    Label ok, fail;
    // A map's own map is the meta map; anything else is not a map.
    CheckMap(map, isolate()->factory()->meta_map(), &fail, DO_SMI_CHECK);
    jmp(&ok);
    bind(&fail);
    Abort(kGlobalFunctionsMustHaveInitialMap);
    bind(&ok);
  }
}
2244 
2245 
// Store the value in register src in the safepoint register stack
// slot for register dst.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Register src) {
  mov(SafepointRegisterSlot(dst), src);
}
2251 
2252 
// Immediate variant: stores |src| into the safepoint stack slot for |dst|.
void MacroAssembler::StoreToSafepointRegisterSlot(Register dst, Immediate src) {
  mov(SafepointRegisterSlot(dst), src);
}
2256 
2257 
// Loads into |dst| the value saved in the safepoint stack slot for |src|.
void MacroAssembler::LoadFromSafepointRegisterSlot(Register dst, Register src) {
  mov(dst, SafepointRegisterSlot(src));
}
2261 
2262 
// Returns the stack operand addressing the safepoint save slot for |reg|,
// relative to esp.
Operand MacroAssembler::SafepointRegisterSlot(Register reg) {
  return Operand(esp, SafepointRegisterStackIndex(reg.code()) * kPointerSize);
}
2266 
2267 
// Maps a register encoding to its index in the safepoint register save area
// (counted from the stack pointer).
int MacroAssembler::SafepointRegisterStackIndex(int reg_code) {
  // The registers are pushed starting with the lowest encoding,
  // which means that lowest encodings are furthest away from
  // the stack pointer.
  DCHECK(reg_code >= 0 && reg_code < kNumSafepointRegisters);
  return kNumSafepointRegisters - reg_code - 1;
}
2275 
2276 
LoadHeapObject(Register result,Handle<HeapObject> object)2277 void MacroAssembler::LoadHeapObject(Register result,
2278                                     Handle<HeapObject> object) {
2279   AllowDeferredHandleDereference embedding_raw_address;
2280   if (isolate()->heap()->InNewSpace(*object)) {
2281     Handle<Cell> cell = isolate()->factory()->NewCell(object);
2282     mov(result, Operand::ForCell(cell));
2283   } else {
2284     mov(result, object);
2285   }
2286 }
2287 
2288 
// Compares |reg| against |object|.  New-space objects are compared through
// a cell (they may move); others are embedded directly.
void MacroAssembler::CmpHeapObject(Register reg, Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    cmp(reg, Operand::ForCell(cell));
  } else {
    cmp(reg, object);
  }
}
2298 
2299 
// Pushes |object| on the stack.  New-space objects are pushed through a cell
// (they may move); others are embedded directly.
void MacroAssembler::PushHeapObject(Handle<HeapObject> object) {
  AllowDeferredHandleDereference using_raw_address;
  if (isolate()->heap()->InNewSpace(*object)) {
    Handle<Cell> cell = isolate()->factory()->NewCell(object);
    push(Operand::ForCell(cell));
  } else {
    Push(object);
  }
}
2309 
2310 
// Compares |value| against the object held by weak |cell|.  Clobbers
// |scratch| with the cell itself.
void MacroAssembler::CmpWeakValue(Register value, Handle<WeakCell> cell,
                                  Register scratch) {
  mov(scratch, cell);
  cmp(value, FieldOperand(scratch, WeakCell::kValueOffset));
}
2316 
2317 
// Loads into |value| the object held by weak |cell| (a smi if the cell has
// been cleared).
void MacroAssembler::GetWeakValue(Register value, Handle<WeakCell> cell) {
  mov(value, cell);
  mov(value, FieldOperand(value, WeakCell::kValueOffset));
}
2322 
2323 
// Loads the object held by weak |cell| into |value|, jumping to |miss| if
// the cell has been cleared (cleared cells hold a smi).
void MacroAssembler::LoadWeakValue(Register value, Handle<WeakCell> cell,
                                   Label* miss) {
  GetWeakValue(value, cell);
  JumpIfSmi(value, miss);
}
2329 
2330 
// Plain return: pops only the return address.
void MacroAssembler::Ret() {
  ret(0);
}
2334 
2335 
Ret(int bytes_dropped,Register scratch)2336 void MacroAssembler::Ret(int bytes_dropped, Register scratch) {
2337   if (is_uint16(bytes_dropped)) {
2338     ret(bytes_dropped);
2339   } else {
2340     pop(scratch);
2341     add(esp, Immediate(bytes_dropped));
2342     push(scratch);
2343     ret(0);
2344   }
2345 }
2346 
2347 
Drop(int stack_elements)2348 void MacroAssembler::Drop(int stack_elements) {
2349   if (stack_elements > 0) {
2350     add(esp, Immediate(stack_elements * kPointerSize));
2351   }
2352 }
2353 
2354 
Move(Register dst,Register src)2355 void MacroAssembler::Move(Register dst, Register src) {
2356   if (!dst.is(src)) {
2357     mov(dst, src);
2358   }
2359 }
2360 
2361 
Move(Register dst,const Immediate & x)2362 void MacroAssembler::Move(Register dst, const Immediate& x) {
2363   if (x.is_zero()) {
2364     xor_(dst, dst);  // Shorter than mov of 32-bit immediate 0.
2365   } else {
2366     mov(dst, x);
2367   }
2368 }
2369 
2370 
Move(const Operand & dst,const Immediate & x)2371 void MacroAssembler::Move(const Operand& dst, const Immediate& x) {
2372   mov(dst, x);
2373 }
2374 
2375 
// Materializes the 32-bit constant |src| in |dst| without a memory load.
// Zero uses pxor; a value whose set bits are one contiguous run is built
// from all-ones (pcmpeqd) plus shifts; anything else goes through eax
// (saved and restored around the transfer).
void MacroAssembler::Move(XMMRegister dst, uint32_t src) {
  if (src == 0) {
    pxor(dst, dst);
  } else {
    unsigned cnt = base::bits::CountPopulation32(src);
    unsigned nlz = base::bits::CountLeadingZeros32(src);
    unsigned ntz = base::bits::CountTrailingZeros32(src);
    // nlz + cnt + ntz == 32 iff the set bits form a single contiguous run.
    if (nlz + cnt + ntz == 32) {
      pcmpeqd(dst, dst);
      if (ntz == 0) {
        // Run ends at bit 0: shift the all-ones pattern right.
        psrld(dst, 32 - cnt);
      } else {
        // Shift left to clear the low zeros, then right to clear the high.
        pslld(dst, 32 - cnt);
        if (nlz != 0) psrld(dst, nlz);
      }
    } else {
      push(eax);
      mov(eax, Immediate(src));
      movd(dst, Operand(eax));
      pop(eax);
    }
  }
}
2399 
2400 
// Materializes the 64-bit constant |src| in |dst|. Tries, in order: pxor for
// zero; all-ones plus shifts when the set bits are one contiguous run; the
// 32-bit path plus a shift when the low word is zero; pinsrd via eax when
// SSE4.1 is available; otherwise a two-push stack spill loaded with movsd.
void MacroAssembler::Move(XMMRegister dst, uint64_t src) {
  if (src == 0) {
    pxor(dst, dst);
  } else {
    uint32_t lower = static_cast<uint32_t>(src);
    uint32_t upper = static_cast<uint32_t>(src >> 32);
    unsigned cnt = base::bits::CountPopulation64(src);
    unsigned nlz = base::bits::CountLeadingZeros64(src);
    unsigned ntz = base::bits::CountTrailingZeros64(src);
    // Single contiguous run of set bits: build from all-ones and shifts.
    if (nlz + cnt + ntz == 64) {
      pcmpeqd(dst, dst);
      if (ntz == 0) {
        psrlq(dst, 64 - cnt);
      } else {
        psllq(dst, 64 - cnt);
        if (nlz != 0) psrlq(dst, nlz);
      }
    } else if (lower == 0) {
      // Only the upper word is interesting; materialize it and shift up.
      Move(dst, upper);
      psllq(dst, 32);
    } else if (CpuFeatures::IsSupported(SSE4_1)) {
      CpuFeatureScope scope(this, SSE4_1);
      push(eax);
      Move(eax, Immediate(lower));
      movd(dst, Operand(eax));
      Move(eax, Immediate(upper));
      pinsrd(dst, Operand(eax), 1);
      pop(eax);
    } else {
      // Pre-SSE4.1 fallback: spill both halves to the stack, load as double.
      push(Immediate(upper));
      push(Immediate(lower));
      movsd(dst, Operand(esp, 0));
      add(esp, Immediate(kDoubleSize));
    }
  }
}
2437 
2438 
// Extracts 32-bit lane |imm8| (0 or 1) of |src| into |dst|. Uses the SSE4.1
// pextrd instruction when available; otherwise shuffles the lane into place.
// NOTE: the fallback path clobbers xmm0.
void MacroAssembler::Pextrd(Register dst, XMMRegister src, int8_t imm8) {
  if (imm8 == 0) {
    movd(dst, src);
    return;
  }
  DCHECK_EQ(1, imm8);
  if (CpuFeatures::IsSupported(SSE4_1)) {
    CpuFeatureScope sse_scope(this, SSE4_1);
    pextrd(dst, src, imm8);
    return;
  }
  pshufd(xmm0, src, 1);
  movd(dst, xmm0);
}
2453 
2454 
Pinsrd(XMMRegister dst,const Operand & src,int8_t imm8)2455 void MacroAssembler::Pinsrd(XMMRegister dst, const Operand& src, int8_t imm8) {
2456   DCHECK(imm8 == 0 || imm8 == 1);
2457   if (CpuFeatures::IsSupported(SSE4_1)) {
2458     CpuFeatureScope sse_scope(this, SSE4_1);
2459     pinsrd(dst, src, imm8);
2460     return;
2461   }
2462   movd(xmm0, src);
2463   if (imm8 == 1) {
2464     punpckldq(dst, xmm0);
2465   } else {
2466     DCHECK_EQ(0, imm8);
2467     psrlq(dst, 32);
2468     punpckldq(xmm0, dst);
2469     movaps(dst, xmm0);
2470   }
2471 }
2472 
2473 
Lzcnt(Register dst,const Operand & src)2474 void MacroAssembler::Lzcnt(Register dst, const Operand& src) {
2475   if (CpuFeatures::IsSupported(LZCNT)) {
2476     CpuFeatureScope scope(this, LZCNT);
2477     lzcnt(dst, src);
2478     return;
2479   }
2480   Label not_zero_src;
2481   bsr(dst, src);
2482   j(not_zero, &not_zero_src, Label::kNear);
2483   Move(dst, Immediate(63));  // 63^31 == 32
2484   bind(&not_zero_src);
2485   xor_(dst, Immediate(31));  // for x in [0..31], 31^x == 31-x.
2486 }
2487 
2488 
Tzcnt(Register dst,const Operand & src)2489 void MacroAssembler::Tzcnt(Register dst, const Operand& src) {
2490   if (CpuFeatures::IsSupported(BMI1)) {
2491     CpuFeatureScope scope(this, BMI1);
2492     tzcnt(dst, src);
2493     return;
2494   }
2495   Label not_zero_src;
2496   bsf(dst, src);
2497   j(not_zero, &not_zero_src, Label::kNear);
2498   Move(dst, Immediate(32));  // The result of tzcnt is 32 if src = 0.
2499   bind(&not_zero_src);
2500 }
2501 
2502 
Popcnt(Register dst,const Operand & src)2503 void MacroAssembler::Popcnt(Register dst, const Operand& src) {
2504   if (CpuFeatures::IsSupported(POPCNT)) {
2505     CpuFeatureScope scope(this, POPCNT);
2506     popcnt(dst, src);
2507     return;
2508   }
2509   UNREACHABLE();
2510 }
2511 
2512 
SetCounter(StatsCounter * counter,int value)2513 void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
2514   if (FLAG_native_code_counters && counter->Enabled()) {
2515     mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
2516   }
2517 }
2518 
2519 
IncrementCounter(StatsCounter * counter,int value)2520 void MacroAssembler::IncrementCounter(StatsCounter* counter, int value) {
2521   DCHECK(value > 0);
2522   if (FLAG_native_code_counters && counter->Enabled()) {
2523     Operand operand = Operand::StaticVariable(ExternalReference(counter));
2524     if (value == 1) {
2525       inc(operand);
2526     } else {
2527       add(operand, Immediate(value));
2528     }
2529   }
2530 }
2531 
2532 
DecrementCounter(StatsCounter * counter,int value)2533 void MacroAssembler::DecrementCounter(StatsCounter* counter, int value) {
2534   DCHECK(value > 0);
2535   if (FLAG_native_code_counters && counter->Enabled()) {
2536     Operand operand = Operand::StaticVariable(ExternalReference(counter));
2537     if (value == 1) {
2538       dec(operand);
2539     } else {
2540       sub(operand, Immediate(value));
2541     }
2542   }
2543 }
2544 
2545 
// Conditionally increments a stats counter: the update runs only when |cc|
// holds. The flags are saved/restored (pushfd/popfd) around the update so
// the inc/add does not disturb flags that following code may still test.
void MacroAssembler::IncrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    IncrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2559 
2560 
// Conditionally decrements a stats counter: the update runs only when |cc|
// holds. Flags are preserved across the update (pushfd/popfd), mirroring the
// conditional IncrementCounter above.
void MacroAssembler::DecrementCounter(Condition cc,
                                      StatsCounter* counter,
                                      int value) {
  DCHECK(value > 0);
  if (FLAG_native_code_counters && counter->Enabled()) {
    Label skip;
    j(NegateCondition(cc), &skip);
    pushfd();
    DecrementCounter(counter, value);
    popfd();
    bind(&skip);
  }
}
2574 
2575 
Assert(Condition cc,BailoutReason reason)2576 void MacroAssembler::Assert(Condition cc, BailoutReason reason) {
2577   if (emit_debug_code()) Check(cc, reason);
2578 }
2579 
2580 
// Debug-build-only check that |elements| is a fast-elements backing store:
// its map must be one of fixed_array_map, fixed_double_array_map, or
// fixed_cow_array_map. Aborts otherwise.
void MacroAssembler::AssertFastElements(Register elements) {
  if (emit_debug_code()) {
    Factory* factory = isolate()->factory();
    Label ok;
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_double_array_map()));
    j(equal, &ok);
    cmp(FieldOperand(elements, HeapObject::kMapOffset),
        Immediate(factory->fixed_cow_array_map()));
    j(equal, &ok);
    Abort(kJSObjectWithFastElementsMapHasSlowElements);
    bind(&ok);
  }
}
2598 
2599 
// Emits code that aborts with |reason| unless condition |cc| holds.
// Unlike Assert, this is emitted in all build modes.
void MacroAssembler::Check(Condition cc, BailoutReason reason) {
  Label L;
  j(cc, &L);
  Abort(reason);
  // will not return here
  bind(&L);
}
2607 
2608 
// Emits a runtime check that esp meets the OS activation-frame alignment,
// trapping with int3 if not. Emits nothing on platforms whose required
// alignment is a single pointer (always satisfied).
void MacroAssembler::CheckStackAlignment() {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  int frame_alignment_mask = frame_alignment - 1;
  if (frame_alignment > kPointerSize) {
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    Label alignment_as_expected;
    test(esp, Immediate(frame_alignment_mask));
    j(zero, &alignment_as_expected);
    // Abort if stack is not aligned.
    int3();
    bind(&alignment_as_expected);
  }
}
2622 
2623 
// Emits code that aborts execution with |reason| by calling Runtime::kAbort
// with the reason pushed as a Smi. In debug builds the reason text is
// recorded as a comment, and --trap-on-abort emits a breakpoint instead.
// Control never returns past the emitted code.
void MacroAssembler::Abort(BailoutReason reason) {
#ifdef DEBUG
  const char* msg = GetBailoutReason(reason);
  if (msg != NULL) {
    RecordComment("Abort message: ");
    RecordComment(msg);
  }

  if (FLAG_trap_on_abort) {
    int3();
    return;
  }
#endif

  push(Immediate(reinterpret_cast<intptr_t>(Smi::FromInt(reason))));
  // Disable stub call restrictions to always allow calls to abort.
  if (!has_frame_) {
    // We don't actually want to generate a pile of code for this, so just
    // claim there is a stack frame, without generating one.
    FrameScope scope(this, StackFrame::NONE);
    CallRuntime(Runtime::kAbort, 1);
  } else {
    CallRuntime(Runtime::kAbort, 1);
  }
  // will not return here
  int3();
}
2651 
2652 
// Loads the DescriptorArray of |map| into |descriptors|.
void MacroAssembler::LoadInstanceDescriptors(Register map,
                                             Register descriptors) {
  mov(descriptors, FieldOperand(map, Map::kDescriptorsOffset));
}
2657 
2658 
// Extracts the number-of-own-descriptors bit field from |map|'s bit field 3
// into |dst| (untagged integer).
void MacroAssembler::NumberOfOwnDescriptors(Register dst, Register map) {
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  DecodeField<Map::NumberOfOwnDescriptorsBits>(dst);
}
2663 
2664 
LoadAccessor(Register dst,Register holder,int accessor_index,AccessorComponent accessor)2665 void MacroAssembler::LoadAccessor(Register dst, Register holder,
2666                                   int accessor_index,
2667                                   AccessorComponent accessor) {
2668   mov(dst, FieldOperand(holder, HeapObject::kMapOffset));
2669   LoadInstanceDescriptors(dst, dst);
2670   mov(dst, FieldOperand(dst, DescriptorArray::GetValueOffset(accessor_index)));
2671   int offset = accessor == ACCESSOR_GETTER ? AccessorPair::kGetterOffset
2672                                            : AccessorPair::kSetterOffset;
2673   mov(dst, FieldOperand(dst, offset));
2674 }
2675 
2676 
// Materializes the double value 2^power in |dst| by constructing its IEEE
// bit pattern: the biased exponent is placed directly above the mantissa
// bits (mantissa is zero). |scratch| is clobbered.
void MacroAssembler::LoadPowerOf2(XMMRegister dst,
                                  Register scratch,
                                  int power) {
  DCHECK(is_uintn(power + HeapNumber::kExponentBias,
                  HeapNumber::kExponentBits));
  mov(scratch, Immediate(power + HeapNumber::kExponentBias));
  movd(dst, scratch);
  psllq(dst, HeapNumber::kMantissaBits);
}
2686 
2687 
// Jumps to |failure| unless |instance_type| describes a sequential one-byte
// string. Masks out everything but the string/representation/encoding bits
// before comparing. |scratch| is clobbered (may alias |instance_type|).
void MacroAssembler::JumpIfInstanceTypeIsNotSequentialOneByte(
    Register instance_type, Register scratch, Label* failure) {
  if (!scratch.is(instance_type)) {
    mov(scratch, instance_type);
  }
  and_(scratch,
       kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask);
  cmp(scratch, kStringTag | kSeqStringTag | kOneByteStringTag);
  j(not_equal, failure);
}
2698 
2699 
// Jumps to |failure| unless both |object1| and |object2| are sequential
// one-byte strings. Both scratch registers are clobbered.
void MacroAssembler::JumpIfNotBothSequentialOneByteStrings(Register object1,
                                                           Register object2,
                                                           Register scratch1,
                                                           Register scratch2,
                                                           Label* failure) {
  // Check that both objects are not smis. ANDing the two values keeps the
  // low (smi) tag bit clear only if both tags are clear.
  STATIC_ASSERT(kSmiTag == 0);
  mov(scratch1, object1);
  and_(scratch1, object2);
  JumpIfSmi(scratch1, failure);

  // Load instance type for both strings.
  mov(scratch1, FieldOperand(object1, HeapObject::kMapOffset));
  mov(scratch2, FieldOperand(object2, HeapObject::kMapOffset));
  movzx_b(scratch1, FieldOperand(scratch1, Map::kInstanceTypeOffset));
  movzx_b(scratch2, FieldOperand(scratch2, Map::kInstanceTypeOffset));

  // Check that both are flat one-byte strings.
  const int kFlatOneByteStringMask =
      kIsNotStringMask | kStringRepresentationMask | kStringEncodingMask;
  const int kFlatOneByteStringTag =
      kStringTag | kOneByteStringTag | kSeqStringTag;
  // Interleave bits from both instance types and compare them in one check.
  // The lea shifts scratch2 left by 3 (times_8) and adds scratch1, which is
  // lossless because the mask fits below bit 3 (asserted above).
  DCHECK_EQ(0, kFlatOneByteStringMask & (kFlatOneByteStringMask << 3));
  and_(scratch1, kFlatOneByteStringMask);
  and_(scratch2, kFlatOneByteStringMask);
  lea(scratch1, Operand(scratch1, scratch2, times_8, 0));
  cmp(scratch1, kFlatOneByteStringTag | (kFlatOneByteStringTag << 3));
  j(not_equal, failure);
}
2730 
2731 
// Jumps to |not_unique_name| unless the instance type in |operand| is a
// unique name: either an internalized string (both "not string" and "not
// internalized" bits clear) or a symbol.
void MacroAssembler::JumpIfNotUniqueNameInstanceType(Operand operand,
                                                     Label* not_unique_name,
                                                     Label::Distance distance) {
  STATIC_ASSERT(kInternalizedTag == 0 && kStringTag == 0);
  Label succeed;
  test(operand, Immediate(kIsNotStringMask | kIsNotInternalizedMask));
  j(zero, &succeed);
  cmpb(operand, static_cast<uint8_t>(SYMBOL_TYPE));
  j(not_equal, not_unique_name, distance);

  bind(&succeed);
}
2744 
2745 
// Emits debug checks that a seq-string character store is valid: |string| is
// a heap object with the expected sequential encoding (|encoding_mask|), and
// the untagged |index| is within bounds. |value| is saved/restored around
// its use as a scratch register; |index| is restored before returning.
void MacroAssembler::EmitSeqStringSetCharCheck(Register string,
                                               Register index,
                                               Register value,
                                               uint32_t encoding_mask) {
  Label is_object;
  JumpIfNotSmi(string, &is_object, Label::kNear);
  Abort(kNonObject);
  bind(&is_object);

  push(value);
  mov(value, FieldOperand(string, HeapObject::kMapOffset));
  movzx_b(value, FieldOperand(value, Map::kInstanceTypeOffset));

  and_(value, Immediate(kStringRepresentationMask | kStringEncodingMask));
  cmp(value, Immediate(encoding_mask));
  pop(value);
  Check(equal, kUnexpectedStringType);

  // The index is assumed to be untagged coming in, tag it to compare with the
  // string length without using a temp register, it is restored at the end of
  // this function.
  SmiTag(index);
  // SmiTag shifts left; an overflow means the index didn't fit in a smi.
  Check(no_overflow, kIndexIsTooLarge);

  cmp(index, FieldOperand(string, String::kLengthOffset));
  Check(less, kIndexIsTooLarge);

  cmp(index, Immediate(Smi::FromInt(0)));
  Check(greater_equal, kIndexIsNegative);

  // Restore the index
  SmiUntag(index);
}
2779 
2780 
// Reserves stack space for |num_arguments| words of C call arguments. When
// the OS requires frame alignment, the original esp is saved in the slot
// just above the arguments so CallCFunction can restore it; |scratch| is
// clobbered in that case.
void MacroAssembler::PrepareCallCFunction(int num_arguments, Register scratch) {
  int frame_alignment = base::OS::ActivationFrameAlignment();
  if (frame_alignment != 0) {
    // Make stack end at alignment and make room for num_arguments words
    // and the original value of esp.
    mov(scratch, esp);
    sub(esp, Immediate((num_arguments + 1) * kPointerSize));
    DCHECK(base::bits::IsPowerOfTwo32(frame_alignment));
    and_(esp, -frame_alignment);
    mov(Operand(esp, num_arguments * kPointerSize), scratch);
  } else {
    sub(esp, Immediate(num_arguments * kPointerSize));
  }
}
2795 
2796 
// Calls the C function at |function| with |num_arguments| stack arguments.
// The address is loaded into eax first; that is safe since eax will hold the
// return value anyway.
void MacroAssembler::CallCFunction(ExternalReference function,
                                   int num_arguments) {
  // Trashing eax is ok as it will be the return value.
  mov(eax, Immediate(function));
  CallCFunction(eax, num_arguments);
}
2803 
2804 
// Calls the C function whose address is in |function|, then undoes the stack
// setup done by PrepareCallCFunction: on aligned-frame platforms the saved
// esp is reloaded from the slot above the arguments; otherwise the argument
// slots are simply popped. Must be called with a frame (DCHECK'd).
void MacroAssembler::CallCFunction(Register function,
                                   int num_arguments) {
  DCHECK(has_frame());
  // Check stack alignment.
  if (emit_debug_code()) {
    CheckStackAlignment();
  }

  call(function);
  if (base::OS::ActivationFrameAlignment() != 0) {
    mov(esp, Operand(esp, num_arguments * kPointerSize));
  } else {
    add(esp, Immediate(num_arguments * kPointerSize));
  }
}
2820 
2821 
2822 #ifdef DEBUG
AreAliased(Register reg1,Register reg2,Register reg3,Register reg4,Register reg5,Register reg6,Register reg7,Register reg8)2823 bool AreAliased(Register reg1,
2824                 Register reg2,
2825                 Register reg3,
2826                 Register reg4,
2827                 Register reg5,
2828                 Register reg6,
2829                 Register reg7,
2830                 Register reg8) {
2831   int n_of_valid_regs = reg1.is_valid() + reg2.is_valid() +
2832       reg3.is_valid() + reg4.is_valid() + reg5.is_valid() + reg6.is_valid() +
2833       reg7.is_valid() + reg8.is_valid();
2834 
2835   RegList regs = 0;
2836   if (reg1.is_valid()) regs |= reg1.bit();
2837   if (reg2.is_valid()) regs |= reg2.bit();
2838   if (reg3.is_valid()) regs |= reg3.bit();
2839   if (reg4.is_valid()) regs |= reg4.bit();
2840   if (reg5.is_valid()) regs |= reg5.bit();
2841   if (reg6.is_valid()) regs |= reg6.bit();
2842   if (reg7.is_valid()) regs |= reg7.bit();
2843   if (reg8.is_valid()) regs |= reg8.bit();
2844   int n_of_non_aliasing_regs = NumRegs(regs);
2845 
2846   return n_of_valid_regs != n_of_non_aliasing_regs;
2847 }
2848 #endif
2849 
2850 
CodePatcher(Isolate * isolate,byte * address,int size)2851 CodePatcher::CodePatcher(Isolate* isolate, byte* address, int size)
2852     : address_(address),
2853       size_(size),
2854       masm_(isolate, address, size + Assembler::kGap, CodeObjectRequired::kNo) {
2855   // Create a new macro assembler pointing to the address of the code to patch.
2856   // The size is adjusted with kGap on order for the assembler to generate size
2857   // bytes of instructions without failing with buffer size constraints.
2858   DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
2859 }
2860 
2861 
// Finalizes a patch: flushes the instruction cache over the patched range
// and verifies that exactly |size_| bytes were emitted.
CodePatcher::~CodePatcher() {
  // Indicate that code has changed.
  Assembler::FlushICache(masm_.isolate(), address_, size_);

  // Check that the code was patched as expected.
  DCHECK(masm_.pc_ == address_ + size_);
  DCHECK(masm_.reloc_info_writer.pos() == address_ + size_ + Assembler::kGap);
}
2870 
2871 
// Tests page-header flag bits of the page containing |object| and jumps to
// |condition_met| if the test result matches |cc| (zero or not_zero).
// |scratch| is clobbered (ends up holding the page start address).
void MacroAssembler::CheckPageFlag(
    Register object,
    Register scratch,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  // Compute the page start by masking off the low (in-page) address bits.
  if (scratch.is(object)) {
    and_(scratch, Immediate(~Page::kPageAlignmentMask));
  } else {
    mov(scratch, Immediate(~Page::kPageAlignmentMask));
    and_(scratch, object);
  }
  // Use a byte test when the mask fits in the low byte (shorter encoding).
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand(scratch, MemoryChunk::kFlagsOffset),
           static_cast<uint8_t>(mask));
  } else {
    test(Operand(scratch, MemoryChunk::kFlagsOffset), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
2894 
2895 
// Like CheckPageFlag, but for a known Map whose page flags address can be
// baked in statically (maps are never compacted, so the address is stable;
// not valid when the serializer is active).
void MacroAssembler::CheckPageFlagForMap(
    Handle<Map> map,
    int mask,
    Condition cc,
    Label* condition_met,
    Label::Distance condition_met_distance) {
  DCHECK(cc == zero || cc == not_zero);
  Page* page = Page::FromAddress(map->address());
  DCHECK(!serializer_enabled());  // Serializer cannot match page_flags.
  ExternalReference reference(ExternalReference::page_flags(page));
  // The inlined static address check of the page's flags relies
  // on maps never being compacted.
  DCHECK(!isolate()->heap()->mark_compact_collector()->
         IsOnEvacuationCandidate(*map));
  // Byte test when the mask fits in the low byte (shorter encoding).
  if (mask < (1 << kBitsPerByte)) {
    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
  } else {
    test(Operand::StaticVariable(reference), Immediate(mask));
  }
  j(cc, condition_met, condition_met_distance);
}
2917 
2918 
// Jumps to |on_black| if |object| is marked black ("11" mark-bit pattern).
// Both scratch registers (and ecx, via HasColor/GetMarkBits) are clobbered.
void MacroAssembler::JumpIfBlack(Register object,
                                 Register scratch0,
                                 Register scratch1,
                                 Label* on_black,
                                 Label::Distance on_black_near) {
  HasColor(object, scratch0, scratch1, on_black, on_black_near, 1,
           1);  // kBlackBitPattern.
  DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
}
2928 
2929 
// Jumps to |has_color| if |object|'s two mark bits equal (first_bit,
// second_bit). The second mark bit may live in the next bitmap cell, hence
// the word-boundary path. Clobbers both scratch registers and ecx.
void MacroAssembler::HasColor(Register object,
                              Register bitmap_scratch,
                              Register mask_scratch,
                              Label* has_color,
                              Label::Distance has_color_distance,
                              int first_bit,
                              int second_bit) {
  DCHECK(!AreAliased(object, bitmap_scratch, mask_scratch, ecx));

  GetMarkBits(object, bitmap_scratch, mask_scratch);

  Label other_color, word_boundary;
  // Test the first mark bit.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(first_bit == 1 ? zero : not_zero, &other_color, Label::kNear);
  add(mask_scratch, mask_scratch);  // Shift left 1 by adding.
  j(zero, &word_boundary, Label::kNear);
  // Test the second mark bit within the same bitmap cell.
  test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  jmp(&other_color, Label::kNear);

  bind(&word_boundary);
  // The mask overflowed: the second bit is bit 0 of the next bitmap cell.
  test_b(Operand(bitmap_scratch, MemoryChunk::kHeaderSize + kPointerSize), 1);

  j(second_bit == 1 ? not_zero : zero, has_color, has_color_distance);
  bind(&other_color);
}
2956 
2957 
// Computes the marking-bitmap cell address (into |bitmap_reg|) and the
// single-bit mask (into |mask_reg|) for the object at |addr_reg|.
// Clobbers ecx (used for the variable shift count).
void MacroAssembler::GetMarkBits(Register addr_reg,
                                 Register bitmap_reg,
                                 Register mask_reg) {
  DCHECK(!AreAliased(addr_reg, mask_reg, bitmap_reg, ecx));
  // Page start = address with the in-page bits masked off.
  mov(bitmap_reg, Immediate(~Page::kPageAlignmentMask));
  and_(bitmap_reg, addr_reg);
  mov(ecx, addr_reg);
  // Index of the bitmap cell covering this address, scaled to a byte offset.
  int shift =
      Bitmap::kBitsPerCellLog2 + kPointerSizeLog2 - Bitmap::kBytesPerCellLog2;
  shr(ecx, shift);
  and_(ecx,
       (Page::kPageAlignmentMask >> shift) & ~(Bitmap::kBytesPerCell - 1));

  add(bitmap_reg, ecx);
  // Bit index within the cell: (address >> pointer_size_log2) mod cell bits.
  mov(ecx, addr_reg);
  shr(ecx, kPointerSizeLog2);
  and_(ecx, (1 << Bitmap::kBitsPerCellLog2) - 1);
  mov(mask_reg, Immediate(1));
  shl_cl(mask_reg);
}
2978 
2979 
JumpIfWhite(Register value,Register bitmap_scratch,Register mask_scratch,Label * value_is_white,Label::Distance distance)2980 void MacroAssembler::JumpIfWhite(Register value, Register bitmap_scratch,
2981                                  Register mask_scratch, Label* value_is_white,
2982                                  Label::Distance distance) {
2983   DCHECK(!AreAliased(value, bitmap_scratch, mask_scratch, ecx));
2984   GetMarkBits(value, bitmap_scratch, mask_scratch);
2985 
2986   // If the value is black or grey we don't need to do anything.
2987   DCHECK(strcmp(Marking::kWhiteBitPattern, "00") == 0);
2988   DCHECK(strcmp(Marking::kBlackBitPattern, "11") == 0);
2989   DCHECK(strcmp(Marking::kGreyBitPattern, "10") == 0);
2990   DCHECK(strcmp(Marking::kImpossibleBitPattern, "01") == 0);
2991 
2992   // Since both black and grey have a 1 in the first position and white does
2993   // not have a 1 there we only need to check one bit.
2994   test(mask_scratch, Operand(bitmap_scratch, MemoryChunk::kHeaderSize));
2995   j(zero, value_is_white, Label::kNear);
2996 }
2997 
2998 
// Extracts the enum-cache length from |map|'s bit field 3 into |dst| as a
// Smi. Relies on the field occupying the low bits (asserted), so a mask
// suffices with no shift.
void MacroAssembler::EnumLength(Register dst, Register map) {
  STATIC_ASSERT(Map::EnumLengthBits::kShift == 0);
  mov(dst, FieldOperand(map, Map::kBitField3Offset));
  and_(dst, Immediate(Map::EnumLengthBits::kMask));
  SmiTag(dst);
}
3005 
3006 
CheckEnumCache(Label * call_runtime)3007 void MacroAssembler::CheckEnumCache(Label* call_runtime) {
3008   Label next, start;
3009   mov(ecx, eax);
3010 
3011   // Check if the enum length field is properly initialized, indicating that
3012   // there is an enum cache.
3013   mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
3014 
3015   EnumLength(edx, ebx);
3016   cmp(edx, Immediate(Smi::FromInt(kInvalidEnumCacheSentinel)));
3017   j(equal, call_runtime);
3018 
3019   jmp(&start);
3020 
3021   bind(&next);
3022   mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
3023 
3024   // For all objects but the receiver, check that the cache is empty.
3025   EnumLength(edx, ebx);
3026   cmp(edx, Immediate(Smi::FromInt(0)));
3027   j(not_equal, call_runtime);
3028 
3029   bind(&start);
3030 
3031   // Check that there are no elements. Register rcx contains the current JS
3032   // object we've reached through the prototype chain.
3033   Label no_elements;
3034   mov(ecx, FieldOperand(ecx, JSObject::kElementsOffset));
3035   cmp(ecx, isolate()->factory()->empty_fixed_array());
3036   j(equal, &no_elements);
3037 
3038   // Second chance, the object may be using the empty slow element dictionary.
3039   cmp(ecx, isolate()->factory()->empty_slow_element_dictionary());
3040   j(not_equal, call_runtime);
3041 
3042   bind(&no_elements);
3043   mov(ecx, FieldOperand(ebx, Map::kPrototypeOffset));
3044   cmp(ecx, isolate()->factory()->null_value());
3045   j(not_equal, &next);
3046 }
3047 
3048 
// Checks whether an AllocationMemento immediately follows the JSArray in
// |receiver_reg|. Jumps to |no_memento_found| if the candidate address is
// outside the active new-space region; otherwise compares the word before
// the memento end against the allocation memento map, leaving the equal
// flag set for the caller to branch on. |scratch_reg| is clobbered.
// NOTE(review): the range check uses signed compares (less/greater) on raw
// addresses — presumably safe for this new-space layout, but confirm.
void MacroAssembler::TestJSArrayForAllocationMemento(
    Register receiver_reg,
    Register scratch_reg,
    Label* no_memento_found) {
  ExternalReference new_space_start =
      ExternalReference::new_space_start(isolate());
  ExternalReference new_space_allocation_top =
      ExternalReference::new_space_allocation_top_address(isolate());

  // Address just past where the memento would end.
  lea(scratch_reg, Operand(receiver_reg,
      JSArray::kSize + AllocationMemento::kSize - kHeapObjectTag));
  cmp(scratch_reg, Immediate(new_space_start));
  j(less, no_memento_found);
  cmp(scratch_reg, Operand::StaticVariable(new_space_allocation_top));
  j(greater, no_memento_found);
  cmp(MemOperand(scratch_reg, -AllocationMemento::kSize),
      Immediate(isolate()->factory()->allocation_memento_map()));
}
3067 
3068 
// Walks |object|'s prototype chain and jumps to |found| if any prototype is
// a special-type object (instance type below JS_OBJECT_TYPE) or has
// dictionary-mode elements. Both scratch registers are clobbered.
void MacroAssembler::JumpIfDictionaryInPrototypeChain(
    Register object,
    Register scratch0,
    Register scratch1,
    Label* found) {
  DCHECK(!scratch1.is(scratch0));
  Factory* factory = isolate()->factory();
  Register current = scratch0;
  Label loop_again, end;

  // scratch contained elements pointer.
  // Start from |object|'s immediate prototype; a null prototype ends the walk.
  mov(current, object);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(equal, &end);

  // Loop based on the map going up the prototype chain.
  bind(&loop_again);
  mov(current, FieldOperand(current, HeapObject::kMapOffset));
  STATIC_ASSERT(JS_PROXY_TYPE < JS_OBJECT_TYPE);
  STATIC_ASSERT(JS_VALUE_TYPE < JS_OBJECT_TYPE);
  CmpInstanceType(current, JS_OBJECT_TYPE);
  j(below, found);
  mov(scratch1, FieldOperand(current, Map::kBitField2Offset));
  DecodeField<Map::ElementsKindBits>(scratch1);
  cmp(scratch1, Immediate(DICTIONARY_ELEMENTS));
  j(equal, found);
  mov(current, FieldOperand(current, Map::kPrototypeOffset));
  cmp(current, Immediate(factory->null_value()));
  j(not_equal, &loop_again);

  bind(&end);
}
3103 
3104 
// Emits truncating signed division of |dividend| by the compile-time
// constant |divisor| using multiplication by a precomputed magic number
// (Granlund-Montgomery style). The quotient is left in edx; eax is
// clobbered. |dividend| must not be eax or edx.
void MacroAssembler::TruncatingDiv(Register dividend, int32_t divisor) {
  DCHECK(!dividend.is(eax));
  DCHECK(!dividend.is(edx));
  base::MagicNumbersForDivision<uint32_t> mag =
      base::SignedDivisionByConstant(static_cast<uint32_t>(divisor));
  mov(eax, Immediate(mag.multiplier));
  imul(dividend);  // edx:eax = dividend * multiplier; high half in edx.
  // Correction terms when the magic multiplier's sign bit is set.
  bool neg = (mag.multiplier & (static_cast<uint32_t>(1) << 31)) != 0;
  if (divisor > 0 && neg) add(edx, dividend);
  if (divisor < 0 && !neg && mag.multiplier > 0) sub(edx, dividend);
  if (mag.shift > 0) sar(edx, mag.shift);
  // Add the sign bit of the dividend to round toward zero.
  mov(eax, dividend);
  shr(eax, 31);
  add(edx, eax);
}
3120 
3121 
3122 }  // namespace internal
3123 }  // namespace v8
3124 
3125 #endif  // V8_TARGET_ARCH_IA32
3126